diff --git a/.circleci/config.yml b/.circleci/config.yml
index 4de7cc8ad994..af28cb31bbb7 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -1,78 +1,274 @@
-version: 2.0
+version: 2
jobs:
build:
docker:
- - image: ronykoz/content-build-node911:latest
+ - image: devdemisto/content-build-2and3:2.0.0.2832 # disable-secrets-detection
+ resource_class: medium+
environment:
- CONTENT_VERSION: "18.11.2"
- GIT_SHA1: "3ac083b891d2f146ffbad6c7ce0a3be9e4f94b92"
+ CONTENT_VERSION: "19.10.1"
+ SERVER_VERSION: "5.0.0"
+ GIT_SHA1: "8c2c76794ce844ded59c4c5e9484858c7c3c16d3" # guardrails-disable-line disable-secrets-detection
steps:
+ - run:
+ name: Look It's a New CircleCI Step
+ when: always
+ command: |
+ echo 'blah blah blah'
+ echo 'YAY go Content'
- checkout
+ - setup_remote_docker
- run:
name: Prepare Environment
+ when: always
command: |
echo 'export CIRCLE_ARTIFACTS="/home/circleci/project/artifacts"' >> $BASH_ENV
+ echo 'export PATH="/home/circleci/.pyenv/shims:/home/circleci/.local/bin:/home/circleci/.pyenv/bin:${PATH}"' >> $BASH_ENV # disable-secrets-detection
+ echo 'export PYTHONPATH="/home/circleci/project:${PYTHONPATH}"' >> $BASH_ENV
+ echo "=== sourcing $BASH_ENV ==="
source $BASH_ENV
sudo mkdir -p -m 777 $CIRCLE_ARTIFACTS
- - run:
- name: Install dependencies
- command: |
chmod +x ./Tests/scripts/*
+ chmod +x ./Tests/lastest_server_build_scripts/*
+ pyenv versions
+ python --version
+ python3 --version
+ echo "Parameters: NIGHTLY: $NIGHTLY, NON_AMI_RUN: $NON_AMI_RUN, SERVER_BRANCH_NAME: $SERVER_BRANCH_NAME"
- add_ssh_keys:
fingerprints:
- - "02:df:a5:6a:53:9a:f5:5d:bd:a6:fc:b2:db:9b:c9:47"
+ - "02:df:a5:6a:53:9a:f5:5d:bd:a6:fc:b2:db:9b:c9:47" # disable-secrets-detection
+ - "f5:25:6a:e5:ac:4b:84:fb:60:54:14:82:f1:e9:6c:f9" # disable-secrets-detection
+ - run:
+ name: Create ID Set
+ when: always
+ command: |
+ python ./Tests/scripts/update_id_set.py -r
+ - run:
+ name: Infrastructure testing
+ when: always
+ command: |
+ pytest ./Tests/scripts/hook_validations/tests/ -v
+ pytest ./Tests/scripts/infrastructure_tests/ -v
+ pytest ./Tests/scripts/test_configure_tests.py -v
- run:
name: Validate Files and Yaml
+ when: always
command: |
- python ./Tests/scripts/validate_files_structure.py -c true
+ # Run flake8 on everything except Integrations and Scripts (those are handled in linting)
+ ./Tests/scripts/pyflake.sh *.py
+ find . -maxdepth 1 -type d -not \( -path . -o -path ./Integrations -o -path ./Scripts -o -path ./Beta_Integrations \) | xargs ./Tests/scripts/pyflake.sh
+
+ [ -n "${BACKWARD_COMPATIBILITY}" ] && CHECK_BACKWARD=false || CHECK_BACKWARD=true
+ python ./Tests/scripts/validate_files.py -c true -b $CHECK_BACKWARD
- run:
name: Configure Test Filter
+ when: always
command: |
[ -n "${NIGHTLY}" ] && IS_NIGHTLY=true || IS_NIGHTLY=false
python ./Tests/scripts/configure_tests.py -n $IS_NIGHTLY
+ - run:
+ name: Spell Checks
+ command: |
+ python ./Tests/scripts/circleci_spell_checker.py $CIRCLE_BRANCH
- run:
name: Build Content Descriptor
- command: ./setContentDescriptor.sh $CIRCLE_BUILD_NUM $GIT_SHA1 $CONTENT_VERSION
+ when: always
+ command: |
+ if [ -n "${GITHUB_TOKEN}" ] ;
+ then
+ python3 release_notes.py $CONTENT_VERSION $GIT_SHA1 $CIRCLE_BUILD_NUM $SERVER_VERSION --github-token $GITHUB_TOKEN
+
+ else
+ python3 release_notes.py $CONTENT_VERSION $GIT_SHA1 $CIRCLE_BUILD_NUM $SERVER_VERSION
+ fi
- run:
name: Common Server Documentation
+ when: always
command: ./Documentation/commonServerDocs.sh
- run:
name: Create Content Artifacts
+ when: always
command: python content_creator.py $CIRCLE_ARTIFACTS
- store_artifacts:
path: artifacts
destination: artifacts
+ - run:
+ name: Run Unit Testing and Lint
+ when: always
+ command: SKIP_GIT_COMPARE_FILTER=${NIGHTLY} ./Tests/scripts/run_all_pkg_dev_tasks.sh
+ - run:
+ name: Download Artifacts
+ when: always
+ command: |
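+ # Branches named pull/<number> come from forked PRs, which do not receive the
+ # CI secrets, so the instance-related steps exit early on them.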
+ if [[ $CIRCLE_BRANCH =~ pull/[0-9]+ ]]; then
+ echo "Skipping instance tests for forked PRs"
+ exit 0
+ fi
+ if ./Tests/scripts/is_ami.sh ;
+ then
+ echo "Using AMI - Not downloading artifacts"
+
+ else
+ ./Tests/scripts/server_get_artifact.sh $SERVER_CI_TOKEN
+ cp demistoserver.sh ./Tests/scripts/awsinstancetool/ansibleinstall/demistoserver.sh
+ fi
- run:
name: Download Configuration
+ when: always
command: |
- ./Tests/scripts/download_demisto_conf.sh
+ if [[ $CIRCLE_BRANCH =~ pull/[0-9]+ ]]; then
+ echo "Skipping instance tests for forked PRs"
+ exit 0
+ fi
+ if ./Tests/scripts/is_ami.sh ;
+ then
+ ./Tests/scripts/download_demisto_conf.sh
+
+ else
+ ./Tests/lastest_server_build_scripts/download_demisto_conf.sh
+ fi
- run:
name: Create Instance
+ when: always
command: |
- ./Tests/scripts/create_instance.sh instance.json
+ if [[ $CIRCLE_BRANCH =~ pull/[0-9]+ ]]; then
+ echo "Skipping instance tests for forked PRs"
+ exit 0
+ fi
+ if ./Tests/scripts/is_ami.sh ;
+ then
+ if [ -n "${NIGHTLY}" ] ;
+ then
+ export IFRA_ENV_TYPE=Content-Master
+
+ else
+ export IFRA_ENV_TYPE=Content-Env
+ fi
+ python ./Tests/scripts/awsinstancetool/aws_instance_tool.py -envType $IFRA_ENV_TYPE -outfile ./env_results.json
+
+ else
+ python ./Tests/scripts/awsinstancetool/aws_instance_tool.py -envType CustomBuild -outfile ./env_results.json
+ fi
- run:
name: Setup Instance
+ when: always
+ command: |
+ if [[ $CIRCLE_BRANCH =~ pull/[0-9]+ ]]; then
+ echo "Skipping instance tests for forked PRs"
+ exit 0
+ fi
+ if ./Tests/scripts/is_ami.sh ;
+ then
+ python ./Tests/scripts/run_content_installation.py
+ python ./Tests/scripts/wait_until_server_ready.py -c $(cat secret_conf_path) -v $CONTENT_VERSION
+
+ else
+ ./Tests/lastest_server_build_scripts/run_installer_on_instance.sh
+ python ./Tests/scripts/wait_until_server_ready.py -c $(cat secret_conf_path) -v $CONTENT_VERSION --non-ami
+ fi
+ - run:
+ name: Run Tests - Latest GA
+ shell: /bin/bash
+ when: always
+ command: |
+ if [[ $CIRCLE_BRANCH =~ pull/[0-9]+ ]]; then
+ echo "Skipping instance tests for forked PRs"
+ exit 0
+ fi
+ if ./Tests/scripts/is_ami.sh ;
+ then
+ ./Tests/scripts/run_tests.sh "Demisto GA"
+
+ else
+ ./Tests/lastest_server_build_scripts/run_tests.sh
+ fi
+ - run:
+ name: Run Tests - One Before GA
+ shell: /bin/bash
+ when: always
command: |
- ./Tests/scripts/run_installer_on_instance.sh
- ./Tests/scripts/wait_until_server_ready.sh
+ if [[ $CIRCLE_BRANCH =~ pull/[0-9]+ ]]; then
+ echo "Skipping instance tests for forked PRs"
+ exit 0
+ fi
+ if ./Tests/scripts/is_ami.sh ;
+ then
+ ./Tests/scripts/run_tests.sh "Demisto one before GA"
+
+ else
+ echo "Not AMI run, can't run on this version"
+ fi
- run:
- name: Run Tests
+ name: Run Tests - Two Before GA
shell: /bin/bash
- command: ./Tests/scripts/run_tests.sh
+ when: always
+ command: |
+ if [[ $CIRCLE_BRANCH =~ pull/[0-9]+ ]]; then
+ echo "Skipping instance tests for forked PRs"
+ exit 0
+ fi
+ if ./Tests/scripts/is_ami.sh ;
+ then
+ ./Tests/scripts/run_tests.sh "Demisto two before GA"
+
+ else
+ echo "Not AMI run, can't run on this version"
+ fi
+ - run:
+ name: Run Tests - Server Master
+ shell: /bin/bash
+ when: always
+ command: |
+ if [[ $CIRCLE_BRANCH =~ pull/[0-9]+ ]]; then
+ echo "Skipping instance tests for forked PRs"
+ exit 0
+ fi
+ if ./Tests/scripts/is_ami.sh ;
+ then
+ ./Tests/scripts/run_tests.sh "Server Master"
+
+ else
+ echo "Not AMI run, can't run on this version"
+ fi
- run:
name: Slack Notifier
shell: /bin/bash
- command: ./Tests/scripts/slack_notifier.sh
+ command: |
+ if [[ $CIRCLE_BRANCH =~ pull/[0-9]+ ]]; then
+ echo "Skipping instance tests for forked PRs"
+ exit 0
+ fi
+ ./Tests/scripts/slack_notifier.sh ./env_results.json
+ when: always
+ - run:
+ name: Validate Docker Images
+ shell: /bin/bash
+ command: ./Tests/scripts/validate_docker_images.sh
when: always
- run:
name: Instance Test
- command: ./Tests/scripts/instance_test.sh
+ command: |
+ if [[ $CIRCLE_BRANCH =~ pull/[0-9]+ ]]; then
+ echo "Skipping instance tests for forked PRs"
+ exit 0
+ fi
+ ./Tests/scripts/instance_test.sh
when: always
- run:
name: Destroy Instances
- command: ./Tests/scripts/destroy_instances.sh $CIRCLE_ARTIFACTS
+ command: |
+ if [[ $CIRCLE_BRANCH =~ pull/[0-9]+ ]]; then
+ echo "Skipping instance tests for forked PRs"
+ exit 0
+ fi
+ python ./Tests/scripts/destroy_instances.py $CIRCLE_ARTIFACTS ./env_results.json
when: always
- store_artifacts:
path: artifacts
destination: artifacts
when: always
+
+workflows:
+ version: 2
+ commit:
+ jobs:
+ - build
diff --git a/.github/config.yml b/.github/config.yml
new file mode 100644
index 000000000000..1529de87d451
--- /dev/null
+++ b/.github/config.yml
@@ -0,0 +1,6 @@
+newPRWelcomeComment: >
+ Hi and welcome to the Demisto Content project!
+ Thank you and congrats on your first pull request, we will review it soon!
+ Until then you can check out our [documentation](https://github.com/demisto/content/tree/master/docs) for more details.
+ We would be thrilled to see you get involved in our [Slack DFIR community](https://go.demisto.com/join-our-slack-community) for discussions.
+ Hope you have a great time here :)
\ No newline at end of file
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index e33bc8b903d2..2e4241f659ac 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -1,3 +1,5 @@
+
+
## Status
Ready/In Progress/On Hold (reason for hold)
@@ -30,5 +32,16 @@ x.x.x
- [ ] Documentation (with link to it)
- [ ] Code Review
+## Dependencies
+List the dependencies of the entity you changed, as reported by the pre-commit hooks, in the checkboxes below, and tick each one after you have tested it.
+- [ ] Dependency 1
+- [ ] Dependency 2
+- [ ] Dependency 3
+
## Additional changes
Describe additional changes done, for example adding a function to common server.
+
+## Technical writer review
+Mention and link to the files that require a technical writer review.
+- [ ] [YAML file](link)
+- [ ] [CHANGELOG](link)
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index fe72f9f370d2..d6050235005c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,21 @@
.DS_Store
.idea
-_site
\ No newline at end of file
+.vscode
+_site
+TestData/EmailWithNonUnicodeAttachmentName.eml
+TestData/EmailWithNonUnicodeSubject.eml
+*.pyc
+.pytest_cache
+
+CommonServerPython.py
+!Scripts/CommonServerPython/CommonServerPython.py
+CommonServerUserPython.py
+demistomock.py
+Tests/filter_file.txt
+Tests/id_set.json
+.mypy_cache
+Scripts/*/*_unified.yml
+Integrations/*/*_unified.yml
+Beta_Integrations/*/*_unified.yml
+conftest.py
+!Tests/scripts/dev_envs/pytest/conftest.py
diff --git a/.guardrails/ignore b/.guardrails/ignore
new file mode 100644
index 000000000000..6ef4617bd08f
--- /dev/null
+++ b/.guardrails/ignore
@@ -0,0 +1 @@
+Integrations/Active_Directory_Query/key.pem
diff --git a/.hooks/pre-commit b/.hooks/pre-commit
index 013b9a5e750c..f166bb2318d4 100755
--- a/.hooks/pre-commit
+++ b/.hooks/pre-commit
@@ -1,15 +1,37 @@
#!/bin/bash
# validating that each modified file has a valid schema, release notes, proper prefix & suffix
-echo "Validating files"
-python Tests/scripts/validate_files_structure.py
+echo "Validating files..."
+if [[ -z "${WINDIR}" ]]
+ then
+ PYTHONPATH="`pwd`:${PYTHONPATH}" python Tests/scripts/validate_files.py -t true
+ else
+ python Tests/scripts/validate_files.py
+fi
+
+RES=$?
+
+echo ""
+if [[ -n "$CONTENT_PRECOMMIT_RUN_DEV_TASKS" ]]; then
+ echo "Running content dev tasks (flake8, mypy, pylint, pytst) as env variable CONTENT_PRECOMMIT_RUN_DEV_TASKS is set."
+ ./Tests/scripts/run_all_pkg_dev_tasks.sh
+ RES=$(($RES + $?))
+else
+ echo "Skipping running dev tasks (flake8, mypy, pylint, pytest). If you want to run this as part of the precommit hook"
+ echo 'set CONTENT_PRECOMMIT_RUN_DEV_TASKS=1. You can add the following line to ~/.zshrc:'
+ echo 'echo "export CONTENT_PRECOMMIT_RUN_DEV_TASKS=1" >> ~/.zshrc'
+ echo ""
+ echo 'Or if you want to manually run dev tasks: ./Tests/scripts/pkg_dev_test_tasks.py -d '
+ echo 'Example: ./Tests/scripts/pkg_dev_test_tasks.py -d Scripts/ParseEmailFiles'
+fi
-if [[ $? -ne 0 ]]
-then
+if [[ $RES -ne 0 ]]
+ then
echo "Please fix the aforementioned errors and then commit again"
exit 1
fi
+
# prevent push to master
if [ -z "$1" ]; then
protected_branch='master'
@@ -19,3 +41,5 @@ if [ -z "$1" ]; then
exit 1
fi
fi
+
+echo ""
diff --git a/Beta_Integrations/AWS-Athena/AWS-Athena.py b/Beta_Integrations/AWS-Athena/AWS-Athena.py
new file mode 100644
index 000000000000..d5e5f3055936
--- /dev/null
+++ b/Beta_Integrations/AWS-Athena/AWS-Athena.py
@@ -0,0 +1,247 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import boto3
+import json
+from datetime import datetime, date
+from botocore.config import Config
+from botocore.parsers import ResponseParserError
+import urllib3.util
+
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+"""PARAMETERS"""
+AWS_DEFAULT_REGION = demisto.params().get('defaultRegion')
+AWS_ROLE_ARN = demisto.params().get('roleArn')
+AWS_ROLE_SESSION_NAME = demisto.params().get('roleSessionName')
+AWS_ROLE_SESSION_DURATION = demisto.params().get('sessionDuration')
+AWS_ROLE_POLICY = None
+AWS_ACCESS_KEY_ID = demisto.params().get('access_key')
+AWS_SECRET_ACCESS_KEY = demisto.params().get('secret_key')
+VERIFY_CERTIFICATE = not demisto.params().get('insecure', True)
+proxies = handle_proxy(proxy_param_name='proxy', checkbox_default_value=False)
+config = Config(
+ connect_timeout=1,
+ retries=dict(
+ max_attempts=5
+ ),
+ proxies=proxies
+)
+
+
+"""HELPER FUNCTIONS"""
+
+
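+# boto3 responses contain datetime/date objects that the stdlib json encoder
+# cannot serialize; this encoder renders them as ISO-formatted strings instead.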
+class DatetimeEncoder(json.JSONEncoder):
+ # pylint: disable=method-hidden
+ def default(self, obj):
+ if isinstance(obj, datetime):
+ return obj.strftime('%Y-%m-%dT%H:%M:%S')
+ elif isinstance(obj, date):
+ return obj.strftime('%Y-%m-%d')
+ # Let the base class default method raise the TypeError
+ return json.JSONEncoder.default(self, obj)
+
+
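+# Build a boto3 client for the requested service. A role passed on the command
+# takes precedence over the role configured on the integration; when a role is
+# set, STS assume_role supplies temporary credentials (via the configured access
+# keys if present, otherwise the default credential chain). With no role at all,
+# the static access keys are used directly.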
+def aws_session(service='athena', region=None, roleArn=None, roleSessionName=None, roleSessionDuration=None,
+ rolePolicy=None):
+ kwargs = {}
+ if roleArn and roleSessionName is not None:
+ kwargs.update({
+ 'RoleArn': roleArn,
+ 'RoleSessionName': roleSessionName,
+ })
+ elif AWS_ROLE_ARN and AWS_ROLE_SESSION_NAME is not None:
+ kwargs.update({
+ 'RoleArn': AWS_ROLE_ARN,
+ 'RoleSessionName': AWS_ROLE_SESSION_NAME,
+ })
+
+ if roleSessionDuration is not None:
+ kwargs.update({'DurationSeconds': int(roleSessionDuration)})
+ elif AWS_ROLE_SESSION_DURATION is not None:
+ kwargs.update({'DurationSeconds': int(AWS_ROLE_SESSION_DURATION)})
+
+ if rolePolicy is not None:
+ kwargs.update({'Policy': rolePolicy})
+ elif AWS_ROLE_POLICY is not None:
+ kwargs.update({'Policy': AWS_ROLE_POLICY})
+ if kwargs and AWS_ACCESS_KEY_ID is None:
+ sts_client = boto3.client('sts', config=config, verify=VERIFY_CERTIFICATE)
+ sts_response = sts_client.assume_role(**kwargs)
+ if region is not None:
+ client = boto3.client(
+ service_name=service,
+ region_name=region,
+ aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+ aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+ aws_session_token=sts_response['Credentials']['SessionToken'],
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ else:
+ client = boto3.client(
+ service_name=service,
+ region_name=AWS_DEFAULT_REGION,
+ aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+ aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+ aws_session_token=sts_response['Credentials']['SessionToken'],
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ elif AWS_ACCESS_KEY_ID and AWS_ROLE_ARN:
+ sts_client = boto3.client(
+ service_name='sts',
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ kwargs.update({
+ 'RoleArn': AWS_ROLE_ARN,
+ 'RoleSessionName': AWS_ROLE_SESSION_NAME,
+ })
+ sts_response = sts_client.assume_role(**kwargs)
+ client = boto3.client(
+ service_name=service,
+ region_name=AWS_DEFAULT_REGION,
+ aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+ aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+ aws_session_token=sts_response['Credentials']['SessionToken'],
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ else:
+ if region is not None:
+ client = boto3.client(
+ service_name=service,
+ region_name=region,
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ else:
+ client = boto3.client(
+ service_name=service,
+ region_name=AWS_DEFAULT_REGION,
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+
+ return client
+
+
+def start_query_execution_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ data = []
+ kwargs = {'QueryString': args.get('QueryString')}
+ if args.get('ClientRequestToken') is not None:
+ kwargs.update({'ClientRequestToken': args.get('ClientRequestToken')})
+ if args.get('Database') is not None:
+ kwargs.update({'QueryExecutionContext': {'Database': args.get('Database')}})
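+ # Build ResultConfiguration incrementally so OutputLocation and the encryption
+ # settings do not overwrite one another.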
+ if args.get('OutputLocation') is not None:
+ kwargs.setdefault('ResultConfiguration', {})['OutputLocation'] = args.get('OutputLocation')
+ if args.get('EncryptionOption') is not None:
+ kwargs.setdefault('ResultConfiguration', {}).setdefault('EncryptionConfiguration', {})['EncryptionOption'] = args.get('EncryptionOption')
+ if args.get('KmsKey') is not None:
+ kwargs.setdefault('ResultConfiguration', {}).setdefault('EncryptionConfiguration', {})['KmsKey'] = args.get('KmsKey')
+ if args.get('WorkGroup') is not None:
+ kwargs.update({'WorkGroup': args.get('WorkGroup')})
+
+ response = client.start_query_execution(**kwargs)
+
+ data.append({
+ 'QueryString': args.get('QueryString'),
+ 'QueryExecutionId': response['QueryExecutionId']
+ })
+ ec = {'AWS.Athena.Query': data}
+ human_readable = tableToMarkdown('AWS Athena Query', data)
+ return_outputs(human_readable, ec)
+
+
+def stop_query_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ response = client.stop_query_execution(QueryExecutionId=args.get('QueryExecutionId'))
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Query {query} was Deleted ".format(query=args.get('QueryExecutionId')))
+
+
+def get_query_execution_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {'QueryExecutionId': args.get('QueryExecutionId')}
+ response = client.get_query_execution(**kwargs)
+ try:
+ raw = json.loads(json.dumps(response, cls=DatetimeEncoder))
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.Athena.Query(val.QueryExecutionId === obj.QueryExecutionId)': raw}
+ human_readable = tableToMarkdown('AWS Athena Query', raw)
+ return_outputs(human_readable, ec)
+
+
+def get_query_results_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {'QueryExecutionId': args.get('QueryExecutionId')}
+ response = client.get_query_results(**kwargs)
+ ec = {'AWS.Athena.Query(val.QueryExecutionId === obj.QueryExecutionId)': response}
+ human_readable = tableToMarkdown('AWS Athena Query', response)
+ return_outputs(human_readable, ec)
+
+
+"""COMMAND BLOCK"""
+try:
+ LOG('Command being called is {command}'.format(command=demisto.command()))
+ if demisto.command() == 'test-module':
+ client = aws_session()
+ response = client.list_named_queries()
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results('ok')
+
+ elif demisto.command() == 'aws-athena-start-query':
+ start_query_execution_command(demisto.args())
+
+ elif demisto.command() == 'aws-athena-stop-query':
+ stop_query_command(demisto.args())
+
+ elif demisto.command() == 'aws-athena-get-query-execution':
+ get_query_execution_command(demisto.args())
+
+ elif demisto.command() == 'aws-athena-get-query-results':
+ get_query_results_command(demisto.args())
+
+
+except ResponseParserError as e:
+ return_error('Could not connect to the AWS endpoint. Please check that the region is valid.\n {error}'.format(
+ error=type(e)))
+ LOG(e)
+
+except Exception as e:
+ return_error('Error has occurred in the AWS Athena Integration: {error}\n {message}'.format(
+ error=type(e), message=e))
diff --git a/Beta_Integrations/AWS-Athena/AWS-Athena.yml b/Beta_Integrations/AWS-Athena/AWS-Athena.yml
new file mode 100644
index 000000000000..3fd9729f9144
--- /dev/null
+++ b/Beta_Integrations/AWS-Athena/AWS-Athena.yml
@@ -0,0 +1,148 @@
+commonfields:
+ id: AWS - Athena - Beta
+ version: -1
+name: AWS - Athena - Beta
+display: AWS - Athena (Beta)
+category: IT Services
+description: Amazon Web Services Athena
+configuration:
+- display: Role Arn
+ name: roleArn
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Role Session Name
+ name: roleSessionName
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: AWS Default Region
+ name: defaultRegion
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Role Session Duration
+ name: sessionDuration
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Access Key
+ name: access_key
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Secret Key
+ name: secret_key
+ defaultvalue: ""
+ type: 4
+ required: false
+- display: Trust any cert (Not Secure)
+ name: insecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Use system proxy
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ commands:
+ - name: aws-athena-start-query
+ arguments:
+ - name: QueryString
+ required: true
+ description: The SQL query statements to be executed.
+ - name: ClientRequestToken
+ description: A unique case-sensitive string used to ensure the request to create
+ the query is idempotent (executes only once).
+ - name: Database
+ description: The name of the database.
+ - name: OutputLocation
+ description: The location in Amazon S3 where your query results are stored, such
+ as s3://path/to/query/bucket/.
+ - name: EncryptionOption
+ description: Indicates whether Amazon S3 server-side encryption with Amazon
+ S3-managed keys (SSE-S3), server-side encryption with KMS-managed keys (SSE-KMS),
+ or client-side encryption with KMS-managed keys (CSE-KMS) is used.
+ - name: KmsKey
+ description: For SSE-KMS and CSE-KMS, this is the KMS key ARN or ID.
+ - name: WorkGroup
+ description: The name of the workgroup in which the query is being started.
+ - name: roleArn
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ - name: roleSessionName
+ description: An identifier for the assumed role session.
+ - name: roleSessionDuration
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ - name: region
+ description: The AWS Region; if not specified, the default region is used.
+ description: Start Athena Query.
+ - name: aws-athena-stop-query
+ arguments:
+ - name: QueryExecutionId
+ required: true
+ description: The unique ID of the query execution to stop. This field is auto-populated
+ if not provided.
+ - name: region
+ description: The AWS Region; if not specified, the default region is used.
+ - name: roleArn
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ - name: roleSessionName
+ description: An identifier for the assumed role session.
+ - name: roleSessionDuration
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ description: Stops a query execution. Requires you to have access to the workgroup
+ in which the query ran.
+ - name: aws-athena-get-query-execution
+ arguments:
+ - name: region
+ description: The AWS Region; if not specified, the default region is used.
+ - name: roleArn
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ - name: roleSessionName
+ description: An identifier for the assumed role session.
+ - name: roleSessionDuration
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ - name: QueryExecutionId
+ required: true
+ description: The unique ID of the query execution.
+ description: Returns information about a single execution of a query if you have
+ access to the workgroup in which the query ran.
+ - name: aws-athena-get-query-results
+ arguments:
+ - name: QueryExecutionId
+ required: true
+ description: The unique ID of the query execution.
+ - name: region
+ description: The AWS Region; if not specified, the default region is used.
+ - name: roleArn
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ - name: roleSessionName
+ description: An identifier for the assumed role session.
+ - name: roleSessionDuration
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ description: Returns the results of a single query execution specified by QueryExecutionId
+ if you have access to the workgroup in which the query ran.
+ dockerimage: demisto/boto3py3:1.0.0.1030
+ runonce: false
+ subtype: python3
+beta: true
+tests:
+- Beta-Athena-Test
diff --git a/Beta_Integrations/AWS-Athena/AWS-Athena_CHANGELOG.md b/Beta_Integrations/AWS-Athena/AWS-Athena_CHANGELOG.md
new file mode 100644
index 000000000000..594507ae86ea
--- /dev/null
+++ b/Beta_Integrations/AWS-Athena/AWS-Athena_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+#### New Integration
+Amazon Web Services Athena
\ No newline at end of file
diff --git a/Beta_Integrations/AWS-Athena/AWS-Athena_description.md b/Beta_Integrations/AWS-Athena/AWS-Athena_description.md
new file mode 100644
index 000000000000..267a5aa9626c
--- /dev/null
+++ b/Beta_Integrations/AWS-Athena/AWS-Athena_description.md
@@ -0,0 +1,18 @@
+Before you can use the AWS Athena integration in Demisto, you need to perform several configuration steps in your AWS environment.
+
+### Prerequisites
+- Attach an instance profile with the required permissions to the Demisto server or engine that is running
+in your AWS environment.
+- The instance profile requires the sts:AssumeRole permission at minimum.
+- The instance profile requires permission to assume the roles needed by the AWS integrations.
+
+### Configure AWS Settings
+1. Create an IAM Role for the Instance Profile.
+2. Attach a Role to the Instance Profile.
+3. Configure the Necessary IAM Roles that the AWS Integration Can Assume.
+
+For detailed instructions, [see the AWS Integrations Configuration Guide](https://support.demisto.com/hc/en-us/articles/360005686854-AWS-Integrations-Configuration-Guide).
+
+Command descriptions, input descriptions, and output descriptions are taken from the Amazon Athena documentation. For more information, see the [Amazon Athena documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/athena.html).
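+
+For illustration only, here is a minimal boto3 sketch (with placeholder role ARN, session name, and region values) of the assume-role flow the integration performs:
+
+```python
+import boto3
+
+# Hypothetical role ARN and region - substitute values from your own account.
+sts = boto3.client('sts')
+creds = sts.assume_role(
+    RoleArn='arn:aws:iam::123456789012:role/DemistoAthenaRole',
+    RoleSessionName='demisto-athena',
+)['Credentials']
+athena = boto3.client(
+    'athena',
+    region_name='us-east-1',
+    aws_access_key_id=creds['AccessKeyId'],
+    aws_secret_access_key=creds['SecretAccessKey'],
+    aws_session_token=creds['SessionToken'],
+)
+print(athena.list_named_queries())  # the same call the integration's test button makes
+```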
+
+ Note: This is a beta Integration, which lets you implement and test pre-release software. Since the integration is beta, it might contain bugs. Updates to the integration during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the integration to help us identify issues, fix them, and continually improve.
\ No newline at end of file
diff --git a/Beta_Integrations/AWS-Athena/AWS-Athena_image.png b/Beta_Integrations/AWS-Athena/AWS-Athena_image.png
new file mode 100644
index 000000000000..3ec06e277d29
Binary files /dev/null and b/Beta_Integrations/AWS-Athena/AWS-Athena_image.png differ
diff --git a/Beta_Integrations/AWS-Athena/CHANGELOG.md b/Beta_Integrations/AWS-Athena/CHANGELOG.md
new file mode 100644
index 000000000000..2a400fe6ac2b
--- /dev/null
+++ b/Beta_Integrations/AWS-Athena/CHANGELOG.md
@@ -0,0 +1,3 @@
+## [Unreleased]
+* Packaged AWS Athena to follow standardization of integrations.
+* Bugfix for Proxy/Insecure issues.
\ No newline at end of file
diff --git a/Beta_Integrations/Blueliv/Blueliv.png b/Beta_Integrations/Blueliv/Blueliv.png
new file mode 100644
index 000000000000..8e69f04ffb3a
Binary files /dev/null and b/Beta_Integrations/Blueliv/Blueliv.png differ
diff --git a/Beta_Integrations/Blueliv/Blueliv.py b/Beta_Integrations/Blueliv/Blueliv.py
new file mode 100644
index 000000000000..26175601fc50
--- /dev/null
+++ b/Beta_Integrations/Blueliv/Blueliv.py
@@ -0,0 +1,100 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+
+from sdk.blueliv_api import BluelivAPI
+
+''' GLOBALS/PARAMS '''
+
+TOKEN = demisto.params().get('token')
+URL = demisto.params()['url']
+SERVER = URL[:-1] if URL.endswith('/') else URL
+
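+# Clear the proxy environment variables when the integration is configured not
+# to use the system proxy settings.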
+if not demisto.params().get('proxy', False):
+ os.environ.pop('HTTP_PROXY', None)
+ os.environ.pop('HTTPS_PROXY', None)
+ os.environ.pop('http_proxy', None)
+ os.environ.pop('https_proxy', None)
+
+''' HELPER FUNCTIONS '''
+
+
+def verify_response_code(response):
+
+ if response.status_code != 200:
+ raise ValueError(response.error_msg)
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+
+ response = api.crime_servers.last('all')
+ verify_response_code(response)
+ demisto.results('ok')
+
+
+def get_botips_feed_command():
+
+ response = api.bot_ips.recent('full')
+ verify_response_code(response)
+ human_readable = tableToMarkdown('Bot IP feed', response.items)
+ return_outputs(human_readable, {})
+
+
+def get_crimeservers_feed_command():
+
+ response = api.crime_servers.last('all')
+ verify_response_code(response)
+ human_readable = tableToMarkdown('Crimeservers feed', response.items)
+ return_outputs(human_readable, {})
+
+
+def get_malware_feed_command():
+
+ response = api.malwares.recent('all')
+ verify_response_code(response)
+ human_readable = tableToMarkdown('Malware feed', response.items)
+ return_outputs(human_readable, {})
+
+
+def get_attackingips_feed_command():
+
+ response = api.attacking_ips.recent('all')
+ verify_response_code(response)
+ human_readable = tableToMarkdown('Attacking IPs feed', response.items)
+ return_outputs(human_readable, {})
+
+
+def get_hacktivism_feed_command():
+
+ response = api.hacktivism_ops.last('all')
+ verify_response_code(response)
+ human_readable = tableToMarkdown('Hacktivism feed', response.items)
+ return_outputs(human_readable, {})
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+COMMANDS = {
+ 'test-module': test_module,
+ 'blueliv-get-botips-feed': get_botips_feed_command,
+ 'blueliv-get-crimeservers-feed': get_crimeservers_feed_command,
+ 'blueliv-get-malware-feed': get_malware_feed_command,
+ 'blueliv-get-attackingips-feed': get_attackingips_feed_command,
+ 'blueliv-get-hacktivism-feed': get_hacktivism_feed_command
+}
+
+try:
+ api = BluelivAPI(
+ base_url=SERVER,
+ token=TOKEN
+ )
+ LOG('Command being called is {}'.format(demisto.command()))
+ command_func = COMMANDS.get(demisto.command())
+ if command_func is not None:
+ command_func()
+except Exception as e:
+ return_error(str(e))
diff --git a/Beta_Integrations/Blueliv/Blueliv.yml b/Beta_Integrations/Blueliv/Blueliv.yml
new file mode 100644
index 000000000000..0193f76363b7
--- /dev/null
+++ b/Beta_Integrations/Blueliv/Blueliv.yml
@@ -0,0 +1,51 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: Blueliv_Beta
+ version: -1
+configuration:
+- defaultvalue: https://api.blueliv.com
+ display: Server URL (e.g., https://api.blueliv.com)
+ name: url
+ required: true
+ type: 0
+- display: API Token
+ name: token
+ required: true
+ type: 4
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+ required: false
+description: Blueliv reduces risk through actionable, dynamic and targeted threat intelligence, trusted by your organization.
+display: Blueliv (Beta)
+name: Blueliv_Beta
+script:
+ commands:
+ - description: Data set collection that gives the latest STIX Indicators about bot
+ IPs gathered by Blueliv.
+ execution: false
+ name: blueliv-get-botips-feed
+ - description: Data set collection that gives the latest STIX Indicators about known
+ malicious servers gathered by Blueliv.
+ execution: false
+ name: blueliv-get-crimeservers-feed
+ - description: Data set collection that gives the latest STIX Indicators about malware
+ hashes gathered and analyzed by Blueliv.
+ execution: false
+ name: blueliv-get-malware-feed
+ - description: Data set collection that gives the latest STIX Indicators about attacking
+ IPs gathered and analyzed by Blueliv.
+ execution: false
+ name: blueliv-get-attackingips-feed
+ - description: 'Data related to the number of hacktivism tweets recently created.
+ Blueliv provides two types of feeds: the first contains the most popular
+ hacktivism hashtags, and the second contains the countries from which most
+ hacktivism tweets originate.'
+ execution: false
+ name: blueliv-get-hacktivism-feed
+ dockerimage: demisto/blueliv:1.0.0.165
+ isfetch: false
+ runonce: false
+ script: ''
+ type: python
+beta: true
diff --git a/Beta_Integrations/Blueliv/Blueliv_description.md b/Beta_Integrations/Blueliv/Blueliv_description.md
new file mode 100644
index 000000000000..cf9944213322
--- /dev/null
+++ b/Beta_Integrations/Blueliv/Blueliv_description.md
@@ -0,0 +1 @@
+Note: This is a beta Integration, which lets you implement and test pre-release software. Since the integration is beta, it might contain bugs. Updates to the integration during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the integration to help us identify issues, fix them, and continually improve.
\ No newline at end of file
diff --git a/Beta_Integrations/MailListener_-_POP3/MailListener_-_POP3.py b/Beta_Integrations/MailListener_-_POP3/MailListener_-_POP3.py
new file mode 100644
index 000000000000..5e277f4883cb
--- /dev/null
+++ b/Beta_Integrations/MailListener_-_POP3/MailListener_-_POP3.py
@@ -0,0 +1,363 @@
+import demistomock as demisto
+
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+import poplib
+import base64
+import quopri
+from email.parser import Parser
+from htmlentitydefs import name2codepoint
+from HTMLParser import HTMLParser, HTMLParseError
+
+
+''' GLOBALS/PARAMS '''
+SERVER = demisto.params().get('server', '')
+EMAIL = demisto.params().get('email', '')
+PASSWORD = demisto.params().get('password', '')
+PORT = int(demisto.params().get('port', '995'))
+SSL = demisto.params().get('ssl')
+FETCH_TIME = demisto.params().get('fetch_time', '7 days')
+
+# pop3 server connection object.
+pop3_server_conn = None # type: ignore
+
+TIME_REGEX = re.compile(r'^([\w,\d: ]*) (([+-]{1})(\d{2}):?(\d{2}))?[\s\w\(\)]*$')
+DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
+
+
+def connect_pop3_server():
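+ # Open at most one module-level POP3/POP3_SSL connection and authenticate.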
+ global pop3_server_conn
+
+ if pop3_server_conn is None:
+ if SSL:
+ pop3_server_conn = poplib.POP3_SSL(SERVER, PORT) # type: ignore
+ else:
+ pop3_server_conn = poplib.POP3(SERVER, PORT) # type: ignore
+
+ pop3_server_conn.getwelcome() # type: ignore
+ pop3_server_conn.user(EMAIL) # type: ignore
+ pop3_server_conn.pass_(PASSWORD) # type: ignore
+
+
+def close_pop3_server_connection():
+ global pop3_server_conn
+ if pop3_server_conn is not None:
+ pop3_server_conn.quit()
+ pop3_server_conn = None
+
+
+def get_user_emails():
+ _, mails_list, _ = pop3_server_conn.list() # type: ignore
+
+ mails = []
+ index = ''
+
+ for mail in mails_list:
+ try:
+ index = mail.split(' ')[0]
+ (resp_message, lines, octets) = pop3_server_conn.retr(index) # type: ignore
+ msg_content = unicode(b'\r\n'.join(lines), errors='ignore').encode("utf-8")
+ msg = Parser().parsestr(msg_content)
+ msg['index'] = index
+ mails.append(msg)
+ except Exception as e:
+ demisto.error("Failed to get email with index " + index + 'from the server.')
+ raise e
+
+ return mails
+
+
+def get_attachment_name(headers):
+ name = headers.get('content-description', '')
+
+ if re.match(r'^.+\..{3,5}$', name):
+ return name
+
+ content_disposition = headers.get('content-disposition', '')
+
+ if content_disposition:
+ m = re.search('filename="(.*?)"', content_disposition)
+ if m:
+ name = m.group(1)
+
+ if re.match(r'^.+\..{3,5}$', name):
+ return name
+
+ extension = re.match(r'.*[\\/]([\d\w]{2,4}).*', headers.get('content-type', 'txt')).group(1) # type: ignore
+
+ return name + '.' + extension
+
+
+def parse_base64(text):
+ if re.match("^=?.*?=$", text):
+ res = re.search(r'=\?.*?\?[A-Z]\?(.*?)\?=', text, re.IGNORECASE)
+ if res:
+ res = res.group(1)
+ return base64.b64decode(res) # type: ignore
+ return text
+
+
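+# Minimal HTML-to-text converter: keeps text nodes, converts entity and
+# character references to unicode, and drops <script>/<style> content.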
+class TextExtractHtmlParser(HTMLParser):
+ def __init__(self):
+ HTMLParser.__init__(self)
+ self._texts = [] # type: list
+ self._ignore = False
+
+ def handle_starttag(self, tag, attrs):
+ if tag in ('p', 'br') and not self._ignore:
+ self._texts.append('\n')
+ elif tag in ('script', 'style'):
+ self._ignore = True
+
+ def handle_startendtag(self, tag, attrs):
+ if tag in ('br', 'tr') and not self._ignore:
+ self._texts.append('\n')
+
+ def handle_endtag(self, tag):
+ if tag in ('p', 'tr'):
+ self._texts.append('\n')
+ elif tag in ('script', 'style'):
+ self._ignore = False
+
+ def handle_data(self, data):
+ if data and not self._ignore:
+ stripped = data.strip()
+ if stripped:
+ self._texts.append(re.sub(r'\s+', ' ', stripped))
+
+ def handle_entityref(self, name):
+ if not self._ignore and name in name2codepoint:
+ self._texts.append(unichr(name2codepoint[name]))
+
+ def handle_charref(self, name):
+ if not self._ignore:
+ if name.startswith('x'):
+ c = unichr(int(name[1:], 16))
+ else:
+ c = unichr(int(name))
+ self._texts.append(c)
+
+ def get_text(self):
+ return "".join(self._texts)
+
+
+def html_to_text(html):
+ parser = TextExtractHtmlParser()
+ try:
+ parser.feed(html)
+ parser.close()
+ except HTMLParseError:
+ pass
+ return parser.get_text()
+
+
+def get_email_context(email_data):
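+ # Flatten the parsed message headers into the Demisto email context; HTML and
+ # multipart bodies are converted and split further below.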
+ context_headers = email_data._headers
+ context_headers = [{'Name': v[0], 'Value': v[1]}
+ for v in context_headers]
+ headers = dict([(h['Name'].lower(), h['Value']) for h in context_headers])
+
+ context = {
+ 'Mailbox': EMAIL,
+ 'ID': email_data.get('Message-ID', 'None'),
+ 'Labels': ', '.join(email_data.get('labelIds', '')),
+ 'Headers': context_headers,
+ 'Format': headers.get('content-type', '').split(';')[0],
+ 'Subject': parse_base64(headers.get('subject', '')),
+ 'Body': email_data._payload,
+ 'From': headers.get('from'),
+ 'To': headers.get('to'),
+ 'Cc': headers.get('cc', []),
+ 'Bcc': headers.get('bcc', []),
+ 'Date': headers.get('date', ''),
+ 'Html': None,
+ }
+
+ if 'text/html' in context['Format']:
+ context['Html'] = context['Body']
+ context['Body'] = html_to_text(context['Body'])
+
+ if 'multipart' in context['Format']:
+ context['Body'], context['Html'], context['Attachments'] = parse_mail_parts(email_data._payload)
+ context['Attachment Names'] = ', '.join(
+ [attachment['Name'] for attachment in context['Attachments']])
+
+ raw = dict(email_data)
+ raw['Body'] = context['Body']
+ context['RawData'] = json.dumps(raw)
+ return context, headers
+
+
+def parse_mail_parts(parts):
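+ # Walk the MIME parts recursively: descend into multipart containers, decode
+ # inline text parts (base64/quoted-printable) into body/html, and collect
+ # attachment parts separately.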
+ body = unicode("", "utf-8")
+ html = unicode("", "utf-8")
+
+ attachments = [] # type: ignore
+ for part in parts:
+ context_headers = part._headers
+ context_headers = [{'Name': v[0], 'Value': v[1]}
+ for v in context_headers]
+ headers = dict([(h['Name'].lower(), h['Value']) for h in context_headers])
+
+ content_type = headers.get('content-type', 'text/plain')
+
+ is_attachment = headers.get('content-disposition', '').startswith('attachment')\
+ or headers.get('x-attachment-id') or "image" in content_type
+
+ if 'multipart' in content_type or isinstance(part._payload, list):
+ part_body, part_html, part_attachments = parse_mail_parts(part._payload)
+ body += part_body
+ html += part_html
+ attachments.extend(part_attachments)
+ elif not is_attachment:
+ if headers.get('content-transfer-encoding') == 'base64':
+ text = base64.b64decode(part._payload).decode('utf-8')
+ elif headers.get('content-transfer-encoding') == 'quoted-printable':
+ decoded_string = quopri.decodestring(part._payload)
+ text = unicode(decoded_string, "utf-8")
+ else:
+ text = quopri.decodestring(part._payload)
+
+ if not isinstance(text, unicode):
+ text = text.decode('unicode-escape')
+
+ if 'text/html' in content_type:
+ html += text
+ else:
+ body += text
+
+ else:
+ attachments.append({
+ 'ID': headers.get('x-attachment-id', 'None'),
+ 'Name': get_attachment_name(headers),
+ 'Data': part._payload
+ })
+
+ return body, html, attachments
+
+
+def parse_time(t):
+ base_time, _, _, _, _ = TIME_REGEX.findall(t)[0]
+ return datetime.strptime(base_time, '%a, %d %b %Y %H:%M:%S').isoformat() + 'Z'
+
+
+def create_incident_labels(parsed_msg, headers):
+ labels = [
+ {'type': 'Email/ID', 'value': parsed_msg['ID']},
+ {'type': 'Email/subject', 'value': parsed_msg['Subject']},
+ {'type': 'Email/text', 'value': parsed_msg['Body']},
+ {'type': 'Email/from', 'value': parsed_msg['From']},
+ {'type': 'Email/html', 'value': parsed_msg['Html']},
+ ]
+ labels.extend([{'type': 'Email/to', 'value': to}
+ for to in headers.get('To', '').split(',')])
+ labels.extend([{'type': 'Email/cc', 'value': cc}
+ for cc in headers.get('Cc', '').split(',')])
+ labels.extend([{'type': 'Email/bcc', 'value': bcc}
+ for bcc in headers.get('Bcc', '').split(',')])
+ for key, val in headers.items():
+ labels.append({'type': 'Email/Header/' + key, 'value': val})
+
+ return labels
+
+
+@logger
+def mail_to_incident(msg):
+ parsed_msg, headers = get_email_context(msg)
+
+ file_names = []
+ for attachment in parsed_msg.get('Attachments', []):
+ file_data = base64.urlsafe_b64decode(attachment['Data'].encode('ascii'))
+
+ # save the attachment
+ file_result = fileResult(attachment['Name'], file_data)
+
+ # check for error
+ if file_result['Type'] == entryTypes['error']:
+ demisto.error(file_result['Contents'])
+ raise Exception(file_result['Contents'])
+
+ file_names.append({
+ 'path': file_result['FileID'],
+ 'name': attachment['Name'],
+ })
+
+ return {
+ 'name': parsed_msg['Subject'],
+ 'details': parsed_msg['Body'],
+ 'labels': create_incident_labels(parsed_msg, headers),
+ 'occurred': parse_time(parsed_msg['Date']),
+ 'attachment': file_names,
+ 'rawJSON': parsed_msg['RawData']
+ }
+
+
+def fetch_incidents():
+ last_run = demisto.getLastRun()
+ last_fetch = last_run.get('time')
+
+ # handle first time fetch
+ if last_fetch is None:
+ last_fetch, _ = parse_date_range(FETCH_TIME, date_format=DATE_FORMAT)
+
+ last_fetch = datetime.strptime(last_fetch, DATE_FORMAT)
+ current_fetch = last_fetch
+
+ incidents = []
+ messages = get_user_emails()
+
+ for msg in messages:
+ try:
+ incident = mail_to_incident(msg)
+ except Exception as e:
+ demisto.error("failed to create incident from email, index = {}, subject = {}, date = {}".format(
+ msg['index'], msg['subject'], msg['date']))
+ raise e
+
+ temp_date = datetime.strptime(
+ incident['occurred'], DATE_FORMAT)
+
+ # update last run
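+ # (advance one second past this message so it is not fetched again next run)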
+ if temp_date > last_fetch:
+ last_fetch = temp_date + timedelta(seconds=1)
+
+ # avoid duplication due to weak time query
+ if temp_date > current_fetch:
+ incidents.append(incident)
+
+ demisto.setLastRun({'time': last_fetch.isoformat().split('.')[0] + 'Z'})
+
+ return demisto.incidents(incidents)
+
+
+def test_module():
+ resp_message, _, _ = pop3_server_conn.list() # type: ignore
+ if "OK" in resp_message:
+ demisto.results('ok')
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+
+def main():
+ try:
+ handle_proxy()
+ connect_pop3_server()
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+ if demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+ sys.exit(0)
+ except Exception as e:
+ LOG(str(e))
+ LOG.print_log()
+ raise e
+ finally:
+ close_pop3_server_connection()
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Beta_Integrations/MailListener_-_POP3/MailListener_-_POP3.yml b/Beta_Integrations/MailListener_-_POP3/MailListener_-_POP3.yml
new file mode 100644
index 000000000000..c63098e9494b
--- /dev/null
+++ b/Beta_Integrations/MailListener_-_POP3/MailListener_-_POP3.yml
@@ -0,0 +1,55 @@
+category: Messaging
+commonfields:
+ id: MailListener - POP3 Beta
+ version: -1
+configuration:
+- display: Server URL (e.g. example.com)
+ name: server
+ required: true
+ type: 0
+- defaultvalue: '995'
+ display: Port
+ name: port
+ required: false
+ type: 0
+- display: Email
+ name: email
+ required: true
+ type: 0
+- display: Password
+ name: password
+ required: true
+ type: 4
+- defaultvalue: 'True'
+ display: Use SSL connection
+ name: ssl
+ required: false
+ type: 8
+- display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- defaultvalue: 3 days
+ display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days)
+ name: fetch_time
+ required: false
+ type: 0
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+description: Listens to a mailbox and enables incident triggering via email.
+display: MailListener - POP3 (Beta)
+name: MailListener - POP3 Beta
+script:
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+beta: true
+tests:
+- MailListener-POP3 - Test
diff --git a/Beta_Integrations/MailListener_-_POP3/MailListener_-_POP3_description.md b/Beta_Integrations/MailListener_-_POP3/MailListener_-_POP3_description.md
new file mode 100644
index 000000000000..cf9944213322
--- /dev/null
+++ b/Beta_Integrations/MailListener_-_POP3/MailListener_-_POP3_description.md
@@ -0,0 +1 @@
+Note: This is a beta Integration, which lets you implement and test pre-release software. Since the integration is beta, it might contain bugs. Updates to the integration during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the integration to help us identify issues, fix them, and continually improve.
\ No newline at end of file
diff --git a/Beta_Integrations/MailListener_-_POP3/MailListener_-_POP3_image.png b/Beta_Integrations/MailListener_-_POP3/MailListener_-_POP3_image.png
new file mode 100644
index 000000000000..9a25ad863629
Binary files /dev/null and b/Beta_Integrations/MailListener_-_POP3/MailListener_-_POP3_image.png differ
diff --git a/Beta_Integrations/PaloAlto_TrapsESM_Beta/CHANGELOG.md b/Beta_Integrations/PaloAlto_TrapsESM_Beta/CHANGELOG.md
new file mode 100644
index 000000000000..4117bdc5e161
--- /dev/null
+++ b/Beta_Integrations/PaloAlto_TrapsESM_Beta/CHANGELOG.md
@@ -0,0 +1,2 @@
+## [Unreleased]
+-
\ No newline at end of file
diff --git a/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta.png b/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta.png
new file mode 100644
index 000000000000..dc0c3e7ca76e
Binary files /dev/null and b/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta.png differ
diff --git a/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta.py b/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta.py
new file mode 100644
index 000000000000..801b94e1ef13
--- /dev/null
+++ b/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta.py
@@ -0,0 +1,261 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+
+import json
+import requests
+from bs4 import BeautifulSoup
+from base64 import b64decode, b64encode
+from Crypto.PublicKey import RSA
+from Crypto.Cipher import PKCS1_v1_5
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+USERNAME = demisto.params().get('username')
+PASSWORD = demisto.params().get('password')
+URL = demisto.params().get('url')
+
+USE_SSL = False
+
+
+def logout_traps():
+ headers = {
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
+ "Chrome/74.0.3729.131 Safari/537.36",
+ "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,"
+ "application/signed-exchange;v=b3",
+ "Accept-Language": "Accept-Language:en-US,en;q=0.9",
+ "Accept-Encoding": "gzip, deflate"
+ }
+ result = requests.request('GET', URL + '/EndpointSecurityManager/Account/Logout',
+ headers=headers, verify=USE_SSL)
+ c = result.content
+ demisto.results(c)
+
+
+def get_new_request_token(cookies):
+ headers = {
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
+ "Chrome/74.0.3729.131 Safari/537.36",
+ "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,"
+ "application/signed-exchange;v=b3",
+ "Accept-Language": "Accept-Language:en-US,en;q=0.9",
+ "Accept-Encoding": "gzip, deflate"
+ }
+ result = requests.request('GET', URL + '/EndpointSecurityManager/HashManagement/Hashes',
+ headers=headers, verify=USE_SSL, cookies=cookies)
+ # Extracting Token
+ c = result.content
+
+ soup = BeautifulSoup(c, 'html.parser')
+ request_verification_obj = soup.find('input', {"name": "__RequestVerificationToken"})
+ request_verification_token = request_verification_obj['value']
+ return request_verification_token
+
+
+def get_ct_cookie():
+ headers = {
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
+ "Chrome/74.0.3729.131 Safari/537.36",
+ "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,"
+ "application/signed-exchange;v=b3",
+ "Accept-Language": "Accept-Language:en-US,en;q=0.9",
+ "Accept-Encoding": "gzip, deflate"
+ }
+ result = requests.request('GET', URL + '/EndpointSecurityManager/Account/Login',
+ headers=headers, verify=USE_SSL)
+ # Extracting Token
+ c = result.content
+ soup = BeautifulSoup(c, 'html.parser')
+ request_verification_obj = soup.find('input', {"name": "__RequestVerificationToken"})
+ request_verification_token = request_verification_obj['value']
+
+ # Extracting Cookie
+ cookie_dough = result.cookies
+ ct_value = None
+ for cookie in cookie_dough:
+ baked_cookie = cookie.__dict__
+ if baked_cookie['name'] == 'ct':
+ ct_value = baked_cookie['value']
+ return request_verification_token, ct_value
+
+
+def get_rsa_csid_key(request_verification_token, ct_value):
+ headers = {
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
+ "Chrome/74.0.3729.131 Safari/537.36",
+ "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,"
+ "application/signed-exchange;v=b3",
+ "Accept-Language": "Accept-Language:en-US,en;q=0.9",
+ "Accept-Encoding": "gzip, deflate",
+ "token": request_verification_token
+ }
+ cookies = {'ct': ct_value}
+ result = requests.request('POST', URL + '/EndpointSecurityManager/Account/PublicKey',
+ headers=headers, cookies=cookies, verify=USE_SSL)
+ # Extracting Cookie
+ cookie_dough = result.cookies
+ csid = None
+ for cookie in cookie_dough:
+ baked_cookie = cookie.__dict__
+ if baked_cookie['name'] == 'csid':
+ csid = baked_cookie['value']
+ # Extracting RSA
+ key = result.json().get('key')
+ salt = result.json().get('salt')
+ return key, salt, csid
+
+
+def bytes_to_integer(data):
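+ # Interpret a byte sequence as a big-endian unsigned integer.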
+ output = 0
+ size = len(data)
+ for index in range(size):
+ output |= data[index] << (8 * (size - 1 - index))
+ return output
+
+
+def get_auth_cookie():
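+ # ESM login flow: scrape the CSRF token and 'ct' cookie from the login page,
+ # fetch the server's RSA public key and salt, then post the RSA-encrypted
+ # salted password to obtain the 'auth' session cookie.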
+ request_verification_token, ct_value = get_ct_cookie()
+ key, salt, csid = get_rsa_csid_key(request_verification_token, ct_value)
+ headers = {
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
+ "Chrome/74.0.3729.131 Safari/537.36",
+ "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,"
+ "application/signed-exchange;v=b3",
+ "Accept-Language": "Accept-Language:en-US,en;q=0.9",
+ "Accept-Encoding": "gzip, deflate",
+ "token": request_verification_token
+ }
+ cookies = {
+ 'ct': ct_value,
+ 'csid': csid
+ }
+ salted_password = (salt + PASSWORD).encode()
+
+ keyDER = b64decode(key)
+ keyPub = RSA.importKey(keyDER)
+
+ # Encrypt the salted password with the server's public RSA key
+ cipher_rsa = PKCS1_v1_5.new(keyPub)
+ encrypted_pass = cipher_rsa.encrypt(salted_password)
+ b64pass = b64encode(encrypted_pass)
+
+ payload = {
+ "Username": USERNAME,
+ "Password": b64pass
+ }
+ result = requests.request('POST', URL + '/EndpointSecurityManager/Account/Login',
+ headers=headers, cookies=cookies, data=payload, verify=USE_SSL)
+ # Extracting Cookie
+ cookie_dough = result.cookies
+ csid = None
+ for cookie in cookie_dough:
+ baked_cookie = cookie.__dict__
+ if baked_cookie['name'] == 'auth':
+ cookies['auth'] = baked_cookie['value']
+ return request_verification_token, cookies
+
+
+def traps_esm_hash_detail():
+ request_verification_token, cookies = get_auth_cookie()
+ file_hash = demisto.args().get('hash')
+ token = get_new_request_token(cookies)
+ headers = {
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
+ "Chrome/74.0.3729.131 Safari/537.36",
+ "Accept": "application/json, text/javascript, */*; q=0.01",
+ "Accept-Language": "Accept-Language:en-US,en;q=0.9",
+ "Accept-Encoding": "gzip, deflate",
+ "Referer": URL + "/EndpointSecurityManager/HashManagement/Hashes",
+ "X-Requested-With": "XMLHttpRequest",
+ "Origin": URL,
+ "Content-Type": "application/json; charset=UTF-8",
+ "Token": token
+ }
+ payload_raw = {
+ 'hash': file_hash
+ }
+
+ payload = json.dumps(payload_raw)
+
+ auth_cookie = {
+ 'ct': cookies['ct'],
+ 'csid': cookies['csid'],
+ 'auth': cookies['auth']
+ }
+
+ result = requests.request('POST', URL + '/EndpointSecurityManager/HashManagement/HashesDetail',
+ headers=headers, cookies=auth_cookie, data=payload, verify=USE_SSL)
+
+ hash_results = json.loads(result.content)
+
+ hr_table = {
+ 'Result': hash_results['LocalAnalysis'][0]['Result'],
+ 'Verdict History': hash_results['VerdictHistory'],
+ 'Quarantined': hash_results['Quarantined']
+ }
+
+ ec = {
+ 'TrapsESM': hash_results
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': 'The verdict of hash {} is {}'.format(file_hash, hash_results['LocalAnalysis'][0]['Result']),
+ 'HumanReadable': tableToMarkdown('Hash Verdict for {}'.format(file_hash), hr_table),
+ 'EntryContext': ec
+ })
+
+
+def traps_esm_override_hash_verdict():
+ request_verification_token, cookies = get_auth_cookie()
+ token = get_new_request_token(cookies)
+ file_hash = demisto.args().get('hash')
+ verdict = demisto.args().get('verdict')
+ headers = {
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
+ "Chrome/74.0.3729.131 Safari/537.36",
+ "Accept": "application/json, text/javascript, */*; q=0.01",
+ "Accept-Language": "Accept-Language:en-US,en;q=0.9",
+ "Accept-Encoding": "gzip, deflate",
+ "X-Requested-With": "XMLHttpRequest",
+ "Origin": URL,
+ "Content-Type": "application/json; charset=UTF-8",
+ "token": token
+ }
+ payload = {
+ 'request': [file_hash],
+ 'verdict': verdict
+ }
+
+ result = requests.request('POST', URL + '/EndpointSecurityManager/HashManagement/OverrideHashVerdict',
+ headers=headers, cookies=cookies, data=payload, verify=USE_SSL)
+
+ demisto.results(result.content)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('Command being called is %s' % (demisto.command()))
+
+try:
+ if demisto.command() == 'test-module':
+ auth_cookie = get_auth_cookie()
+ if auth_cookie:
+ demisto.results('ok')
+ elif demisto.command() == 'traps-esm-hash-detail':
+ traps_esm_hash_detail()
+ elif demisto.command() == 'traps-esm-override-hash-verdict':
+ traps_esm_override_hash_verdict()
+
+# Log exceptions
+except Exception as e:
+ LOG(str(e))
+ LOG.print_log()
+ raise
diff --git a/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta.yml b/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta.yml
new file mode 100644
index 000000000000..92acf2af9500
--- /dev/null
+++ b/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta.yml
@@ -0,0 +1,59 @@
+category: Endpoint
+commonfields:
+ id: Palo Alto Traps ESM (Beta)
+ version: -1
+configuration:
+- display: URL of Traps ESM Server
+ name: url
+ required: true
+ type: 0
+- display: Username of the account to use.
+ name: username
+ required: false
+ type: 0
+- display: Password of the account to use.
+ name: password
+ required: false
+ type: 4
+description: Palo Alto Traps ESM beta integration.
+display: Palo Alto Traps ESM (Beta)
+name: Palo Alto Traps ESM (Beta)
+beta: true
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Hash to retrieve details for.
+ isArray: false
+ name: hash
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieve details about a specific hash.
+ execution: false
+ name: traps-esm-hash-detail
+ - arguments:
+ - default: false
+ description: Hash to be changed.
+ isArray: false
+ name: hash
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Verdict of the hash.
+ isArray: false
+ name: verdict
+ predefined:
+ - Benign
+ - Malware
+ required: true
+ secret: false
+ deprecated: false
+ description: Override a verdict for a specific hash.
+ execution: false
+ name: traps-esm-override-hash-verdict
+ isfetch: false
+ runonce: false
+ script: ''
+ type: python
+ dockerimage: demisto/btfl-soup:1.0.0.925
diff --git a/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta_CHANGELOG.md b/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta_CHANGELOG.md
new file mode 100644
index 000000000000..1a49d67f7446
--- /dev/null
+++ b/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+#### New Integration
+Palo Alto Traps ESM beta integration.
\ No newline at end of file
diff --git a/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta_description.md b/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta_description.md
new file mode 100644
index 000000000000..cf9944213322
--- /dev/null
+++ b/Beta_Integrations/PaloAlto_TrapsESM_Beta/PaloAlto_TrapsESM_Beta_description.md
@@ -0,0 +1 @@
+Note: This is a beta Integration, which lets you implement and test pre-release software. Since the integration is beta, it might contain bugs. Updates to the integration during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the integration to help us identify issues, fix them, and continually improve.
\ No newline at end of file
diff --git a/Beta_Integrations/ProofpointThreatResponse/Pipfile b/Beta_Integrations/ProofpointThreatResponse/Pipfile
new file mode 100644
index 000000000000..66ad1243db8b
--- /dev/null
+++ b/Beta_Integrations/ProofpointThreatResponse/Pipfile
@@ -0,0 +1,22 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+
+[packages]
+certifi = "==2017.11.5"
+chardet = "==3.0.4"
+idna = "==2.6"
+olefile = "==0.44"
+requests = "==2.18.4"
+urllib3 = "==1.22"
+PyYAML = "==3.12"
+
+[requires]
+python_version = "2.7"
diff --git a/Beta_Integrations/ProofpointThreatResponse/Pipfile.lock b/Beta_Integrations/ProofpointThreatResponse/Pipfile.lock
new file mode 100644
index 000000000000..3a60bb88ac9e
--- /dev/null
+++ b/Beta_Integrations/ProofpointThreatResponse/Pipfile.lock
@@ -0,0 +1,308 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "1fd564b978cf016eca093f3dfd295ed6ecae2fed0d591fcda830d512fa1fe4b8"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:45d1272aad6cfd7a8a06cf5c73f2ceb6a190f6acc1fa707e7f82a4c053b28b18",
+ "sha256:bc37850f0cc42a1725a796ef7d92690651bf1af37d744cc63161dac62cabee17"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==3.8.1"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.3'",
+ "version": "==1.0.2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16",
+ "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.3.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:9ff1b1c5a354142de080b8a4e9803e5d0d59283c93aed808617c787d16768375",
+ "sha256:b7143592e374e50584564794fcb8aaf00a23025f9db866627f89a21491847a8d"
+ ],
+ "version": "==0.20"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:02b260c8deb80db09325b99edf62ae344ce9bc64d68b7a634410b8e9a568edbf",
+ "sha256:18f9c401083a4ba6e162355873f906315332ea7035803d0fd8166051e3d402e3",
+ "sha256:1f2c6209a8917c525c1e2b55a716135ca4658a3042b5122d4e3413a4030c26ce",
+ "sha256:2f06d97f0ca0f414f6b707c974aaf8829c2292c1c497642f63824119d770226f",
+ "sha256:616c94f8176808f4018b39f9638080ed86f96b55370b5a9463b2ee5c926f6c5f",
+ "sha256:63b91e30ef47ef68a30f0c3c278fbfe9822319c15f34b7538a829515b84ca2a0",
+ "sha256:77b454f03860b844f758c5d5c6e5f18d27de899a3db367f4af06bec2e6013a8e",
+ "sha256:83fe27ba321e4cfac466178606147d3c0aa18e8087507caec78ed5a966a64905",
+ "sha256:84742532d39f72df959d237912344d8a1764c2d03fe58beba96a87bfa11a76d8",
+ "sha256:874ebf3caaf55a020aeb08acead813baf5a305927a71ce88c9377970fe7ad3c2",
+ "sha256:9f5caf2c7436d44f3cec97c2fa7791f8a675170badbfa86e1992ca1b84c37009",
+ "sha256:a0c8758d01fcdfe7ae8e4b4017b13552efa7f1197dd7358dc9da0576f9d0328a",
+ "sha256:a4def978d9d28cda2d960c279318d46b327632686d82b4917516c36d4c274512",
+ "sha256:ad4f4be843dace866af5fc142509e9b9817ca0c59342fdb176ab6ad552c927f5",
+ "sha256:ae33dd198f772f714420c5ab698ff05ff900150486c648d29951e9c70694338e",
+ "sha256:b4a2b782b8a8c5522ad35c93e04d60e2ba7f7dcb9271ec8e8c3e08239be6c7b4",
+ "sha256:c462eb33f6abca3b34cdedbe84d761f31a60b814e173b98ede3c81bb48967c4f",
+ "sha256:fd135b8d35dfdcdb984828c84d695937e58cc5f49e1c854eb311c4d6aa03f4f1"
+ ],
+ "version": "==1.4.2"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
+ "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
+ ],
+ "version": "==19.1"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42",
+ "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300"
+ ],
+ "index": "pypi",
+ "version": "==1.9.5"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:8fc39199bdda3d9d025d3b1f4eb99a192c20828030ea7c9a0d2840721de7d347",
+ "sha256:d100a02770f665f5dcf7e3f08202db29857fee6d15f34c942be0a511f39814f0"
+ ],
+ "index": "pypi",
+ "version": "==4.6.5"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "version": "==2.22.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:510df890afe08d36eca5bb16b4aa6308a6f85e3159ad3013bac8b9de7bd5a010",
+ "sha256:88d3402dd8b3c69a9e4f9d3a73ad11b15920c6efd36bc27bf1f701cf4a8e4646"
+ ],
+ "index": "pypi",
+ "version": "==1.7.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
+ "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
+ ],
+ "version": "==0.6.0"
+ }
+ }
+}
diff --git a/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse.py b/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse.py
new file mode 100644
index 000000000000..39a7c794d05e
--- /dev/null
+++ b/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse.py
@@ -0,0 +1,227 @@
+import demistomock as demisto
+from CommonServerPython import *
+''' IMPORTS '''
+import os
+import requests
+import json
+requests.packages.urllib3.disable_warnings()
+
+# Drop proxy environment variables when the proxy parameter is not set
+if not demisto.getParam('proxy'):
+ os.environ.pop('HTTP_PROXY', None)
+ os.environ.pop('HTTPS_PROXY', None)
+ os.environ.pop('http_proxy', None)
+ os.environ.pop('https_proxy', None)
+
+''' GLOBAL VARS '''
+BASE_URL = demisto.params().get('url')
+if BASE_URL[-1] != '/':
+ BASE_URL += '/'
+API_KEY = demisto.params().get('apikey')
+VERIFY_CERTIFICATE = not demisto.params().get('insecure', False)
+
+''' COMMAND FUNCTIONS '''
+
+
+def get_list(list_id):
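+ ''' Retrieves the members of the given list from Threat Response '''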
+ fullurl = BASE_URL + 'api/lists/{}/members.json'.format(list_id)
+ res = requests.get(
+ fullurl,
+ headers={
+ 'Content-Type': 'application/json',
+ 'Authorization': API_KEY
+ },
+ verify=VERIFY_CERTIFICATE
+ )
+
+ if res.status_code < 200 or res.status_code >= 300:
+ return_error('Get list failed. URL: {}, StatusCode: {}, Response: {}'.format(fullurl, res.status_code, res.text))
+
+ return res.json()
+
+
+def get_list_command():
+ ''' Retrieves all indicators of the given list ID in Threat Response '''
+ list_id = demisto.args().get('list-id')
+ list_items = get_list(list_id)
+
+ demisto.results({'list': list_items})
+
+
+def add_to_list(list_id, indicator, comment, expiration):
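+ ''' Adds a single indicator to the given list, with an optional comment and expiration '''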
+ fullurl = BASE_URL + 'api/lists/{}/members.json'.format(list_id)
+
+ indicator = {
+ 'member': indicator
+ }
+ if comment:
+ indicator['description'] = comment
+
+ if expiration:
+ indicator['expiration'] = expiration
+
+ res = requests.post(
+ fullurl,
+ headers={
+ 'Authorization': API_KEY
+ },
+ verify=VERIFY_CERTIFICATE,
+ json=indicator
+ )
+
+ if res.status_code < 200 or res.status_code >= 300:
+ return_error('Add to list failed. URL: {}, Request Body: {}, StatusCode: {}, Response: {}'.format(
+ fullurl, json.dumps(indicator), res.status_code, res.content))
+
+ return res.json()
+
+
+def add_to_list_command():
+ ''' Adds given indicators to the given list ID in Threat Response '''
+ list_id = demisto.args().get('list-id')
+ indicators = argToList(demisto.args().get('indicator'))
+ comment = demisto.args().get('comment')
+ expiration = demisto.args().get('expiration')
+
+ message = ''
+ for indicator in indicators:
+ add_to_list(list_id, indicator, comment, expiration)
+ message += '{} added successfully to {}\n'.format(indicator, list_id)
+
+ demisto.results(message)
+
+
+def block_ip_command():
+ ''' Adds given IPs to the relevant blacklist in Threat Response '''
+ list_id = demisto.params().get('blacklist_ip')
+ ips = argToList(demisto.args().get('ip'))
+ expiration = demisto.args().get('expiration')
+
+ message = ''
+ for ip in ips:
+ add_to_list(list_id, ip, None, expiration)
+ message += '{} added successfully to block_ip list\n'.format(ip)
+
+ demisto.results(message)
+
+
+def block_domain_command():
+ ''' Adds given domains to the relevant blacklist in Threat Response '''
+ list_id = demisto.params().get('blacklist_domain')
+ domains = argToList(demisto.args().get('domain'))
+ expiration = demisto.args().get('expiration')
+
+ message = ''
+ for domain in domains:
+ add_to_list(list_id, domain, None, expiration)
+ message += '{} added successfully to block_domain list\n'.format(domain)
+
+ demisto.results(message)
+
+
+def block_url_command():
+ ''' Adds given URLs to the relevant blacklist in Threat Response '''
+ list_id = demisto.params().get('blacklist_url')
+ urls = argToList(demisto.args().get('url'))
+ expiration = demisto.args().get('expiration')
+
+ message = ''
+ for url in urls:
+ add_to_list(list_id, url, None, expiration)
+ message += '{} added successfully to block_url list\n'.format(url)
+
+ demisto.results(message)
+
+
+def block_hash_command():
+ ''' Adds given hashes to the relevant blacklist in Threat Response '''
+ list_id = demisto.params().get('blacklist_hash')
+ hashes = argToList(demisto.args().get('hash'))
+ expiration = demisto.args().get('expiration')
+
+ message = ''
+ for h in hashes:
+ add_to_list(list_id, h, None, expiration)
+ message += '{} added successfully to block_hash list\n'.format(h)
+
+ demisto.results(message)
+
+
+def search_indicators(list_id, indicator_filter):
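+ ''' Returns the list members whose host value contains the given filter string '''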
+ list_indicators = get_list(list_id)
+ found_items = []
+ for item in list_indicators:
+ item_indicator = demisto.get(item, 'host.host')
+ if indicator_filter in item_indicator:
+ found_items.append(item)
+
+ return found_items
+
+
+def search_indicator_command():
+ ''' Retrieves indicators of a list, using a filter '''
+ list_id = demisto.args().get('list-id')
+ indicator_filter = demisto.args().get('filter')
+ found = search_indicators(list_id, indicator_filter)
+
+ demisto.results({'indicators': found})
+
+
+def delete_indicator(list_id, indicator_filter):
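+ ''' Deletes the first list member that matches the given filter '''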
+ found = search_indicators(list_id, indicator_filter)
+ if len(found) == 0:
+ return_error('{} does not exist in {}'.format(indicator_filter, list_id))
+
+ indicator_id = found[0].get('id')
+ fullurl = BASE_URL + 'api/lists/{}/members/{}.json'.format(list_id, indicator_id)
+ res = requests.delete(
+ fullurl,
+ headers={
+ 'Authorization': API_KEY
+ },
+ verify=VERIFY_CERTIFICATE
+ )
+ if res.status_code < 200 or res.status_code >= 300:
+ return_error('Delete indicator failed. URL: {}, StatusCode: {}, Response: {}'.format(fullurl, res.status_code, res.text))
+
+
+def delete_indicator_command():
+ ''' Deletes an indicator from a list '''
+ list_id = demisto.args().get('list-id')
+ indicator = demisto.args().get('indicator')
+ delete_indicator(list_id, indicator)
+
+ demisto.results('{} deleted successfully from list {}'.format(indicator, list_id))
+
+
+def test():
+ get_list(demisto.params().get('blacklist_ip'))
+
+
+''' EXECUTION CODE '''
+LOG('command is %s' % (demisto.command(), ))
+if demisto.command() == 'test-module':
+ test()
+ demisto.results('ok')
+
+elif demisto.command() == 'proofpoint-tr-get-list':
+ get_list_command()
+
+elif demisto.command() == 'proofpoint-tr-add-to-list':
+ add_to_list_command()
+
+elif demisto.command() == 'proofpoint-tr-block-ip':
+ block_ip_command()
+
+elif demisto.command() == 'proofpoint-tr-block-domain':
+ block_domain_command()
+
+elif demisto.command() == 'proofpoint-tr-block-url':
+ block_url_command()
+
+elif demisto.command() == 'proofpoint-tr-block-hash':
+ block_hash_command()
+
+elif demisto.command() == 'proofpoint-tr-delete-indicator':
+ delete_indicator_command()
+
+elif demisto.command() == 'proofpoint-tr-search-indicator':
+ search_indicator_command()
diff --git a/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse.yml b/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse.yml
new file mode 100644
index 000000000000..675c43954b8e
--- /dev/null
+++ b/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse.yml
@@ -0,0 +1,132 @@
+commonfields:
+ id: Proofpoint Threat Response
+ version: -1
+name: Proofpoint Threat Response
+display: Proofpoint Threat Response (Beta)
+category: Email Gateway
+description: 'Threat management platform to orchestrate and automate incident response.'
+configuration:
+- display: Server URL (e.g. https://192.168.0.1)
+ name: url
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: API Key
+ name: apikey
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Trust any certificate (unsecure)
+ name: insecure
+ defaultvalue: "true"
+ type: 8
+ required: false
+- display: Use system proxy
+ name: proxy
+ defaultvalue: "false"
+ type: 8
+ required: false
+- display: ID of IPs blacklist
+ name: blacklist_ip
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: ID of Domains blacklist
+ name: blacklist_domain
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: ID of URLs blacklist
+ name: blacklist_url
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: ID of hashes blacklist
+ name: blacklist_hash
+ defaultvalue: ""
+ type: 0
+ required: false
+script:
+ script: ''
+ type: python
+ commands:
+ - name: proofpoint-tr-get-list
+ arguments:
+ - name: list-id
+ required: true
+ description: ID of the list
+ description: Get list items
+ - name: proofpoint-tr-add-to-list
+ arguments:
+ - name: list-id
+ required: true
+ - name: indicator
+ required: true
+ description: 'The indicator to add. Can be an IP, URL, domain, or hash. Accepts a comma-separated list, for example: "192.168.1.1,192.168.1.2"'
+ isArray: true
+ - name: comment
+ description: Comment regarding the member
+ - name: expiration
+ description: Expiration of the member
+ description: Add member to list
+ execution: true
+ - name: proofpoint-tr-block-ip
+ arguments:
+ - name: ip
+ required: true
+ description: List of IPs
+ isArray: true
+ - name: expiration
+ description: Expiration of the IP
+ description: Block IP
+ execution: true
+ - name: proofpoint-tr-block-domain
+ arguments:
+ - name: domain
+ required: true
+ description: List of domains
+ isArray: true
+ - name: expiration
+ description: Expiration of the Domain
+ description: Block Domain
+ - name: proofpoint-tr-search-indicator
+ arguments:
+ - name: list-id
+ required: true
+ description: ID of the list
+ - name: filter
+ required: true
+ description: Filter for the search. For example, "1.1" will return [1.1.1.1,
+ 22.22.1.1,1.1.22.22]
+ description: Return the indicators of a list that match the given filter
+ - name: proofpoint-tr-delete-indicator
+ arguments:
+ - name: list-id
+ required: true
+ description: ID of the list
+ - name: indicator
+ required: true
+ description: 'The indicator to delete. Can be an IP, URL, domain, or hash. For example: "demisto.com"'
+ description: Delete an indicator from a list
+ - name: proofpoint-tr-block-url
+ arguments:
+ - name: url
+ required: true
+ description: List of URLs
+ isArray: true
+ - name: expiration
+ description: Expiration of the URLs
+ description: Block URL
+ - name: proofpoint-tr-block-hash
+ arguments:
+ - name: hash
+ required: true
+ description: List of hashes
+ isArray: true
+ - name: expiration
+ description: Expiration of the hash
+ description: Block hash
+ runonce: false
+beta: true
+tests:
+- No test - beta_integration
diff --git a/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse_CHANGELOG.md b/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse_CHANGELOG.md
new file mode 100644
index 000000000000..b6c9eab381fe
--- /dev/null
+++ b/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+#### New Integration
+Threat management platform to orchestrate and automate incident response.
\ No newline at end of file
diff --git a/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse_description.md b/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse_description.md
new file mode 100644
index 000000000000..c4cc32a9be64
--- /dev/null
+++ b/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse_description.md
@@ -0,0 +1,3 @@
+To create an API key, navigate to: Settings -> API Key -> Add API Key.
+
+Note: This is a beta Integration, which lets you implement and test pre-release software. Since the integration is beta, it might contain bugs. Updates to the integration during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the integration to help us identify issues, fix them, and continually improve.
\ No newline at end of file
diff --git a/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse_image.png b/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse_image.png
new file mode 100644
index 000000000000..c6b38472e746
Binary files /dev/null and b/Beta_Integrations/ProofpointThreatResponse/ProofpointThreatResponse_image.png differ
diff --git a/Beta_Integrations/Telegram/Telegram.py b/Beta_Integrations/Telegram/Telegram.py
new file mode 100644
index 000000000000..7ff6bb43124f
--- /dev/null
+++ b/Beta_Integrations/Telegram/Telegram.py
@@ -0,0 +1,150 @@
+import demistomock as demisto
+from CommonServerPython import *
+
+''' IMPORTS '''
+
+import json
+import requests
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+TOKEN = demisto.params().get('token')
+BASE_URL = 'https://api.telegram.org/bot{}/'.format(TOKEN)
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, params=None, data=None):
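+ """Sends a request to the Telegram Bot API and returns the parsed JSON response"""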
+ result = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=False,
+ params=params,
+ data=data
+ )
+ if result.status_code != 200:
+ return_error('Error in API call to Telegram Integration [%d] - %s' % (result.status_code, result.reason))
+
+ return result.json()
+
+
+def get_updates():
+ return http_request('GET', 'getUpdates')
+
+
+def get_bot():
+ return http_request('GET', 'getMe')
+
+
+def item_to_incident(item):
+ incident = {
+ 'name': 'Example Incident: ' + item.get('name'),
+ 'occurred': item.get('createdDate'),
+ 'rawJSON': json.dumps(item)
+ }
+
+ return incident
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+ Performs a getMe request to verify that the bot token is valid
+ """
+ contents = get_bot()
+ if contents['ok']:
+ demisto.results("ok")
+ else:
+ error_code = contents['error_code']
+ description = contents['description']
+ demisto.results(f'{error_code} {description}')
+
+
+def telegram_send_message():
+ """
+ Sends a message to a Telegram user, identified by user ID or username
+ """
+ user_id = demisto.args().get('userID')
+ if user_id is None:
+ username = demisto.args().get('username')
+ if username is not None:
+ user_id = str(get_user_id(username))
+
+ if user_id is None:
+ return_error(f'username {username} does not exist, use the telegram-list-users command')
+ message = demisto.args().get('message')
+ contents = http_request('GET', "sendMessage?chat_id=" + user_id + "&&text=" + message)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Message sent', contents, 'result', removeNull=True),
+ 'EntryContext': contents
+ })
+
+
+def get_users():
+ users = {}
+
+ contents = get_updates()
+ for result in contents['result']:
+ user_data = result['message']
+ if 'username' in user_data['from']:
+ users[user_data['from']['username']] = user_data['from']['id']
+ # not all users have a username, so no choice but to save by their first_name (data can be overwritten)
+ else:
+ users[user_data['from']['first_name']] = user_data['from']['id']
+ return users
+
+
+def telegram_list_users():
+ users = get_users()
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': users,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Users', users, removeNull=True),
+
+ 'EntryContext': users
+ })
+
+
+def get_user_id(username):
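+ """Returns the chat ID for the given username, or None if the username is unknown"""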
+ users = get_users()
+ if username in users:
+ return users[username]
+ else:
+ return
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+
+def main():
+ LOG(f'command is {demisto.command()}')
+
+ try:
+ # Remove proxy if not set to true in params
+ handle_proxy()
+
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() == 'telegram-send-message':
+ telegram_send_message()
+ elif demisto.command() == 'telegram-list-users':
+ telegram_list_users()
+
+ except Exception as ex:
+ return_error(str(ex))
+
+
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Beta_Integrations/Telegram/Telegram.yml b/Beta_Integrations/Telegram/Telegram.yml
new file mode 100644
index 000000000000..7cc59e5566ea
--- /dev/null
+++ b/Beta_Integrations/Telegram/Telegram.yml
@@ -0,0 +1,47 @@
+category: Messaging
+commonfields:
+ id: Telegram_Beta
+ version: -1
+configuration:
+- display: API Token
+ name: token
+ required: true
+ type: 4
+description: Telegram bot integration for sending messages and listing users.
+display: Telegram (Beta)
+name: Telegram_Beta
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The recipient ID. Either userID or username must be provided.
+ isArray: false
+ name: userID
+ required: false
+ secret: false
+ - default: false
+ description: The recipient username, used to resolve the ID when userID is not provided.
+ isArray: false
+ name: username
+ required: false
+ secret: false
+ - default: false
+ description: The message to send
+ isArray: false
+ name: message
+ required: true
+ secret: false
+ deprecated: false
+ description: Sends a message
+ execution: false
+ name: telegram-send-message
+ - deprecated: false
+ description: List users
+ execution: false
+ name: telegram-list-users
+ dockerimage: demisto/python3:3.7.3.221
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+beta: true
diff --git a/Beta_Integrations/Telegram/Telegram_description.md b/Beta_Integrations/Telegram/Telegram_description.md
new file mode 100644
index 000000000000..e91ea3697156
--- /dev/null
+++ b/Beta_Integrations/Telegram/Telegram_description.md
@@ -0,0 +1 @@
+Note: This is a beta Integration, which lets you implement and test pre-release software. Since the integration is beta, it might contain bugs. Updates to the integration during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the integration to help us identify issues, fix them, and continually improve.
\ No newline at end of file
diff --git a/Beta_Integrations/Telegram/Telegram_image.png b/Beta_Integrations/Telegram/Telegram_image.png
new file mode 100644
index 000000000000..118f6dff92c9
Binary files /dev/null and b/Beta_Integrations/Telegram/Telegram_image.png differ
diff --git a/Beta_Integrations/integration-Exabeam.yml b/Beta_Integrations/integration-Exabeam.yml
deleted file mode 100644
index 378df19c8114..000000000000
--- a/Beta_Integrations/integration-Exabeam.yml
+++ /dev/null
@@ -1,883 +0,0 @@
-commonfields:
- id: Exabeam
- version: -1
-name: Exabeam
-display: Exabeam
-category: Analytics & SIEM
-image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHgAAAAVCAYAAACNDipWAAAPmklEQVR42u1Zd1SUVxZXV3fXZHfdxJy4KRqT3T1qbHQp0qVXB+m91wGpQ6/SHVQwUgQLIqiIgAKitBmaDB3p0hkEgiAl1mh8e98XvnEUIyb/7Dkm3znfOfDe/d677/7u/d1736z4rU//wv3P4rrqojtmp/6x4o/n/Xui2hkpAoXJKKGnPmjFH8/79VwYajJVLE1BciWpiMI4P5bQXffZ+3Q+T08/G2VFzXgjI8uQ6upb635X4NZN9e00qzw1qVaaiBTgzRxs9nifzpeTk7fS2MiyfNtWXiQlqfDg5s2yTb8bcEcf3PvAnXWOsb/0KNIoiUe+jXmXJx8t/AnPjT9aWNM1//2f34dzmppYl+7YLoBkZZRmmczqL383AKf3lUZql0UhvfI45FBzqn/4wT0ONYe0Fp8+3V8f9P4AzI8UFNRnruTkb/hdgFvKZmmaVhxCZowYZMKgP62a7JQj526MdRgr3kxAlIrUpznDbbK/dQ8/32AhYyOLMF0d4/N6uiapHu4+lgnxiZ9wyyQlpVrQvPwjQ0MiA1h1DZtfX4NRwRSGdcK8vPxj83KvEbqknz3/ka9vkKGZmQ2sbXTSyMA808jI4iTNy88xLCzyyzcBvGunIJKWUniop2eSCPqcBn3Sra0cAnx9gnh/Sf8jcQmbqI5uVAN9Myyf4eTk5uvnF7z9dTl7W+dPDrp4mlpa2IVrUfRTDQ3Ms/R0TZMdHVxtYI1/ccs2NbZsCAmOCIAzRaSnZ8rExcVvge9CdQ4YXgA9j4Qfit4df+zEB44OB7X09UzjDfXNsgwNLA5HRR0WeGfDDy/c/bfXLTrbihmGjMpD0KVBhh851zI9uN2EmThNKTuOVEsSUFjrdZdf3XL1DayCA58Q4BP7iWe3MALjwiuE8N8y0kqjJSXl4qRsZuYlje3f8r/4zzc7kLWVY25tTd0qcq79dsdGGWnl4a83b0OSEvLsgoLr/8bj4CiaO3cIot2wJqZenF937hBAPLv2IFER6ftJiWkarwBsal3Ks3sPEhQQx/L4O0Kf3buEkIiw1AswZszrZ4iNOWIoJaU4y8uzqD/eb9cekJd+BsD5cctqHzDUgzW59OEh5GFPrPdEeTlTipTNvpTDh8e/3cYHDqc4uVdUdg7OQuhDyEsqzCoqaA7y8ojgNfDexLiwsNSj7OxcpWWNP/90YTW9NanYlumLrCoCUEzLmeK7D6dW47mxB1MfBDVlVGqXxaL9pXQU2XYt57dELnioD1aaj1cE7RWTnVNXO1ClrKjRzssr8gIrLLdPlV1TU/fFyyg+6QmGIQzo5enHKfIgwvIAPAzC4+vXb3KYhH74mISqilajooI6A7z8hq0NtVBDTbtWeI/kMwyagrzaSG0t61PuCMbjMP+csl+vWklRIx++L5TYKzfFB4bExj1+PMmOlL94MUdUTETmB2xYcNIXykqazep4fSHJx1ger3Xh4mV9Ut7G2klBWXl/k4KcGgOi94aNjVOBhroOa4+Q5HPseKBb55kzGcT9wtWrRbvgPM+FBMURLzi8uPi+e4oKGmX7ZFX6sQMK8IsRdpOXUxtRUz1QpiCv3rkHZLGOutpGZcsaP7vvsp9LpRdyrvRB3rciRwcWRreQc4mdV+gG5YeQUUUMsq46Ptg8PUTk5EfPnq69fX9sx92HsyuXW//cuax/gIG7sRFkZZV/AMqRx+Nx9GOrXJw9UrFn4kP7+ATRyG/ujt1dZWxsmY+9WkhQ4klRUbFo4okUKkQ22gGvj3egz5K+PfLwEl2MDM3P4IjDr79fiNrrRRY41v3i4pLPyfFDYVGSUFk/wk4Hxmwix7GesC9hbBrNP4wcB/o3BIZ4gfM5pAQmOf5dQvLKgIDQJfpYWtjngh2AOfY+Cw4K48Fj1xYBxnbQVNepi4yIJVJK6skzW0G/OQwkhaLflpyc9hEej4w8vB6cqwvLi4nKjL7V+C33GpRpNa4/elZ7Ivdqb1Q6ytAk56rHmw5YM8NeWDLCkRUz9knNZKc8OXdlqN5Gt+K7R5UTvYLLAezvFyoIkUJ4uZWl/TnuuZTktG3ie/fNY4NCLqzgnmtsbNmkqqp1Z/u3fAg8elRCXG5u2xZeZGvrfJXFaoDKfvkH8qQzP58ogsjB1KZHjpuZklW08v2qqtovuL+haOrWY12lpRXHIa//Kzg4YhWA3Qhsgqvuu+BI/+SW19E2KsHyWD+g6i1v00dHx/gQOBvoI4HAwYWJCM4vJADG64NeHPvY2VL/Ji2pOIyjF5ipgXsdNVWtq8AkmIEm3pJ373xGbw4e9K11QR5VVHS57xKdnOuY6dniURs+bs8MQhaMQJTcfSWAnKucaOe1qjw+pVkSi9xZmc39c9+/9SpTSEBcER8IUxBERzdQz2mgwwygmnSgnUuS4nIP8CGUFDQ6Q0MjXmnDgMYU4fCPwChEvgZq760or9xITC7NkRtsbZxoQJ8XNTV0rsPahZDvevC+eH9+XlGdpQArzZaUVHz5GmBFEDUY/Dk/36BvDPXN/yYjpTQMjoJBnIU1zwOln1NRpmSIi+07DVE0KCRE7PET5Nw95DpQ+G00N7PxBzCysT5w7kLI1wNAu4TDxUTRQRZT9M8AY/qHAjSL/B7y7TrQfQSiHcG3zdZWVI5TAyMWAJvgPSd/0fCX+pIuBrHsUWAdFSW0RjE7pm+vxeMDc/1rjrWdKKZW0pA90xsdaTtV1DM3vAbPDS2Mr/VmpVQbVkQh3bIYFNCUdbXh3uBbe2NQhAAYv9hI8D/3SwCPDyEjrdgNFEbo8LKo6lwFOYwB3k0wAFS40Ut3IPKYqIS4/BiWw+AICv5sRJyrXu7NBTBXH1xa+irAUPUW4L0wPfr7B3+tqkL5u4T4vlHQk8yHhL6E7gDWogORYyKLUSkLznwPQMP6YDmINin8cuRjot8EsCUHYHCEdSA3AmtiXZrtbF04AENEvx3g2okC16gmOxTVREWHmz2nO6abt5FzBYN5AQcrXZBbtRcKqDs01jbd9Q05l3GnMNq0IhiZVYQjT1ZKT8fMyLKXBKLCUgpkBCspauZDIaEARqLAGAUAofDziVHgcNoQDXLmZraruL9NSzt7kAeiFwoZ4oVomWcyq0TIea7iq2A75GvIhz+5udJo0PZshaJms4a6Nh0M/1aAgRG+fLUCNigBOsf1wizk0c0Q7R9CNA8Rhc4+1WFgBgrUDJqwJqE7jONXC77RAsMTbAb7M6FewPr8CLnfGSJ5q4oK5WtIR6mgz7sBzL+XC2AVANj5NYDxOpJLAR6cbxX9rp368GibA4ptdkBNU0wjTk82VaMQVOf+o0+tG/Kqdkf1kw2cnFzGrt5vzwxAdpWhyJIR+uwGm6VAzhWNNlLa74+8sX/MyLiwGzz3OY4KADD/na8UL+eJQo6ZxwUYFBW3wIv7v93Gi6DqbWtrbefkQSdHt38C3ffiSAE67MnKvLiWi25dwEBLATblFFkzN26Uci5zzp7N+
BAitheoFsBUG4yjxxOAAcXWgv7Y0DPRUXEb36Y30OmnAOwYLoKUlfbXpqWeXU3OwXnCAOBfH8GyBMDLR/Ddhz2fZt7xvX2iwxYda7NC14aTk8i5O7NtmxLaAvuCWc7I7xYVZfWejSDnbo3XfR1QFz7pXOmHbBm+KLv/hg8nJ4+37jRjxD4IbDpX2TU78tfXD5yfV/AXAKgePBy3SM/Bo73cXL0/g6j4JPvSlc+dHF3/6+nhK3EqLZ1T7AwMDH4EvWQL9I84N49AC7URqkgViJSftm7hQZ6evmmkLNXJ7e9QhHVhAACcseSkVAIAWHOdoaFFGgD7RoCx40A0zoeGRIiYGFt9QXVy/y84IF0EaBSDb2BglsN1QRODnQuiCh3QMrwcHh6zBfr3jyFlbIBKejMUY0LQHexaLOzWAz0PY8CARboyzl34ZLFXXw+Okg3AvyPAy0bwUoAZ4ymnU7rMUXKnNcodiK4b+6Fn3c/9bu/K872x2ZFNDiis3gGldtIZ4w/ZBFhzT+//Jb71yA3XKndErfREJzpO59+ZHVzskyfXhjWlVeMbMMPycJTYdc3mjVF8LksPG5oX0y2vKAZ6Huh2Ahr7+3C4p4JwCE0NXVNS3pnqfgpXzxg0uPigkONBQeF03DphZ4mjJ1iT4wDMZQwKGAV7+wDQbDkURJNA69w5WJcL4DIwKORoCVyJPoGIe4x7YqwbBh6cbzYv7xoP5yavuPQrYIk+PIfPISos/QT0ngAgp+EsD2EMt1W5pDxQ9E2QJfRRlFe/c4CiXyEmKjuF5Zbm4KLdywIs+w4At89ctUvvNUPpvbYo847rbOu9Ir6XOTnP42ibPaK3OKCE2953B+e7OddvhUM5wbRqKqIBZUc0RgyyJus5FHWy+wLdGqpsK0Yo8mJ919s2PfCL9BUUeMgE6GsERwFZDPHsIoxMFET6emYEAFeu5FN3Lt4sQaTEcq8xOjr2oYW5bQWATPwSVFlZLYbH4SpvO1TPHfwAEO5J4VIFz8/s19RrAYOSAOtxHMLUpgycDYNLOgDxt5iI9BO4hChNPXl6SZ4vKizeCUxUAXKE3mThhwHAxobijGxxcJEluE9GeQDrA46K9cFOMwUR3AZ5mwRYZBFgHgAYLQJ84TWA2YsAtwDAq7kALlwEeIoYGP6hhu/KkNNsVr8VyuyzRdkD7hP5QyF5OQNB13MGQm+e7KIunOhwQEdbbRFr4rrpyz65Vjmk3vnHYJYrCrjlhiIbgkfimmPzopsOX49uPlLmVhP4mFoViByrQp7WT3XILZdX4f50PUQbhebpbxsTHWcL9GqupqKlC9WzyPHjyQRjQCQrgUEMoKWg3Kqr/2jpz335mwB8PSiqTA66ePBz2qTYI+vhOlQbLgrszExtTA/HHv0P9KyfQ3ulD4bQA3rnFFPubt5i0OrshyJMDb+Qu9WtLB0UPTx8t9FoAat+SX+4N1/jYH9QCm7XrCLCY22Boi0hlRgaGVrIx8TEvXLH7OriuQHSjz6M29lYOZhAPv8qLDTyq53bBQzAobUB4I8XAV4HAGsBwPoAsDAXwGtAb2UA2AAA3gcAr+QCeC8eh3k1oNiR9TfHfJsuDhgDsNboUr8NgGwNkWyFzvRYo1Pd1iit2x6o1xLdHE1N5uTrBwObEjsCh8IbHCByXVFovRu0VK7Iu8YFCjA35F7liTyqfZBTJQ0YITdgxR/P/+dpmTkVnDtsunBt1IGdP+LAzh12YOcMObKzB53YF/up7Mw+Kvtsr/293MGwiu8fDXzMKY4GT6TEtTosxLd5sONaPdixzZ7sqEYvdngDjR3G8mYH1fmyaTW06aT2lOzpxzOrV/zx/F+e/wGWcaIzsxbfogAAAABJRU5ErkJggg==
-description: Exabeam user behavior integration
-configuration:
-- display: Server URL
- name: url
- defaultvalue: ""
- type: 0
- required: true
-- display: Credentials
- name: credentials
- defaultvalue: ""
- type: 9
- required: true
-- display: Use system proxy settings
- name: proxy
- defaultvalue: "true"
- type: 8
- required: false
-- display: Do not validate server certificate (insecure)
- name: insecure
- defaultvalue: "false"
- type: 8
- required: false
-script:
- script: |-
- ''' IMPORTS '''
- import requests
- from datetime import datetime, timedelta
- import urllib3
- urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
-
- if not demisto.params()['proxy']:
- del os.environ['HTTP_PROXY']
- del os.environ['HTTPS_PROXY']
- del os.environ['http_proxy']
- del os.environ['https_proxy']
-
- ''' GLOBALS '''
- URL = demisto.params()['url']
- if URL[-1] != '/':
- URL += '/'
- URL_LOGIN = URL + 'api/'
- URL_UBA = URL + 'uba/api/'
- SESSION = requests.session()
- SESSION.headers.update({'Accept': 'application/json'})
- if demisto.params()['insecure']:
- SESSION.verify = False
-
- ''' HELPERS '''
- def convert_unix_to_date(d):
- ''' Convert millise since epoch to date formatted MM/DD/YYYY HH:MI:SS '''
- if d:
- dt = datetime.utcfromtimestamp(d / 1000)
- return dt.strftime('%m/%d/%Y %H:%M:%S')
- return 'N/A'
-
- def convert_date_to_unix(d):
- ''' Convert a given date to millis since epoch '''
- return int((d - datetime.utcfromtimestamp(0)).total_seconds() * 1000)
-
- def login():
- ''' Login using the credentials and store the cookie '''
- http_request('POST', URL_LOGIN + 'auth/login', data={
- 'username': demisto.params()['credentials']['identifier'],
- 'password': demisto.params()['credentials']['password']
- })
-
- def logout():
- ''' Logout from the session '''
- http_request('GET', URL_LOGIN + 'auth/logout', None)
-
- def http_request(method, path, data):
- ''' Do the actual HTTP request '''
- if method == 'GET':
- respone = SESSION.get(path, params=data)
- else:
- respone = SESSION.post(path, data=data)
- if respone.status_code != requests.codes.ok:
- text = respone.text
- if text:
- try:
- res = respone.json()
- text = 'Code: [%s], Error: [%s]' % (res.get('_apiErrorCode'), res.get('internalError'))
- except:
- pass
- return_error('Error in API call to Exabeam [%d] - %s' % (respone.status_code, text))
- if not respone.text:
- return {}
- return respone.json()
-
- def get_watchlist_id():
- ''' Return watchlist id based on given parameters '''
- if not demisto.args()['id'] and not demisto.args()['title']:
- logout()
- return_error('Please provide either ID or title')
- wid = demisto.args()['id']
- if not wid:
- watchlist = http_request('GET', URL_UBA + 'watchlist', None)
- for item in watchlist:
- if item.get('title').lower() == demisto.args()['title'].lower():
- wid = item.get('watchlistId')
- break
- if not wid:
- logout()
- return_error('Unable to find watchlist with the given title')
- return wid
-
- ''' FUNCTIONS '''
- def exabeam_users():
- ''' Return user statistics '''
- res = http_request('GET', URL_UBA + 'kpi/count/users', None)
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res,
- 'HumanReadable': tableToMarkdown('User statistics', [res], ['highRisk', 'recent', 'total'])
- })
-
- def exabeam_assets():
- ''' Return asset statistics '''
- res = http_request('GET', URL_UBA + 'kpi/count/assets', None)
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res,
- 'HumanReadable': tableToMarkdown('Asset statistics', [res], ['highRisk', 'recent', 'total'])
- })
-
- def exabeam_sessions():
- ''' Return session statistics '''
- res = http_request('GET', URL_UBA + 'kpi/count/sessions', None)
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res,
- 'HumanReadable': tableToMarkdown('Session statistics', [res], ['highRisk', 'recent', 'total'])
- })
-
- def exabeam_events():
- ''' Return event statistics '''
- res = http_request('GET', URL_UBA + 'kpi/count/events', None)
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res,
- 'HumanReadable': tableToMarkdown('Event statistics', [res], ['recent', 'total'])
- })
-
- def exabeam_anomalies():
- res = http_request('GET', URL_UBA + 'kpi/count/anomalies', None)
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res,
- 'HumanReadable': tableToMarkdown('Anomalies statistics', [res], ['recent', 'total'])
- })
-
- def exabeam_notable():
- ''' Return notable users in a specific period of time '''
- res = http_request(
- 'GET',
- URL_UBA + 'users/notable',
- {
- 'numberOfResults': demisto.args()['number-of-results'],
- 'unit': demisto.args()['unit'],
- 'num': demisto.args()['num']
- }
- )
-
- if res.get('users'):
- users = [{
- 'Highest': u['highestRiskScore'],
- 'Name': u['userFullName'],
- 'Username': demisto.get(u, 'user.username'),
- 'Email': demisto.get(u, 'user.info.email'),
- 'Department': demisto.get(u, 'user.info.department'),
- 'DN': demisto.get(u, 'user.info.dn'),
- 'RiskScore': demisto.get(u, 'user.riskScore'),
- 'NotableSessionIDs': u.get('notableSessionIds', [])
- } for u in res['users']]
-
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res,
- 'HumanReadable': tableToMarkdown('Notables', users, ['Name', 'Username', 'Email', 'Department', 'DN', 'RiskScore', 'Highest', 'NotableSessionIDs']),
- 'EntryContext': {'Exabeam.Notable': res['users']}
- })
-
- else:
- demisto.results('No notable users found in the requested period')
-
- def exabeam_lockouts():
- ''' Return lockouts '''
- res = http_request(
- 'GET',
- URL_UBA + 'lockouts/accountLockouts',
- {
- 'numberOfResults': demisto.getArg('number-of-results'),
- 'unit': demisto.getArg('unit'),
- 'num': demisto.getArg('num')
- })
- if res.get('lockouts'):
- lockouts = [{
- 'Name': demisto.get(l, 'user.info.fullName'),
- 'Username': demisto.get(l, 'user.username'),
- 'Email': demisto.get(l, 'user.info.email'),
- 'Department': demisto.get(l, 'user.info.department'),
- 'DN': demisto.get(l, 'user.info.dn'),
- 'Title': demisto.get(l, 'user.info.title'),
- 'RiskScore': demisto.get(l, 'user.riskScore'),
- 'Executive': demisto.get(l, 'isUserExecutive'),
- 'LockoutTime': convert_unix_to_date(demisto.get(l, 'firstLockoutEvent.time')),
- 'Host': demisto.get(l, 'firstLockoutEvent.host'),
- 'LockoutRisk': demisto.get(l, 'lockoutInfo.riskScore'),
- 'LoginHost': demisto.get(l, 'lockoutInfo.loginHost')
- } for l in res['lockouts']]
-
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res,
- 'HumanReadable': tableToMarkdown('Lockouts', lockouts, ['Name', 'Username', 'Email', 'Department', 'DN', 'Title', 'RiskScore', 'Executive', 'LockoutTime', 'Host', 'LockoutRisk', 'LoginHost']),
- 'EntryContext': {'Exabeam.Lockout': res['lockouts']}
- })
- else:
- demisto.results('No lockouts found in the requested period')
-
- def exabeam_timeline():
- ''' Returns session, triggered rules and events of a user '''
- res = http_request('GET', URL_UBA + 'user/%s/timeline/entities/all' % demisto.args()['username'], None)
- risk_score = 0
- session = ''
- for entity in res.get('entities', []):
- if entity.get('tp') == 'session' and entity.get('rs', 0) > risk_score:
- risk_score = entity.get('rs', 0)
- session = entity.get('id')
- if session:
- session_info = http_request('GET', URL_UBA + 'session/%s/info' % session, None)
- si = session_info.get('sessionInfo')
- if not si:
- return_error('Unable to find session info')
- session_data = {
- 'Username': si.get('username'),
- 'RiskScore': si.get('riskScore'),
- 'InitialRiskScore': si.get('initialRiskScore'),
- 'NumOfReasons': si.get('numOfReasons'),
- 'LoginHost': si.get('loginHost'),
- 'Zones': ','.join(si.get('zones', [])),
- 'Assets': si.get('numOfAssets'),
- 'Events': si.get('numOfEvents'),
- 'SecurityEvents': si.get('numOfSecurityEvents')
- }
- md = tableToMarkdown(
- 'Session %s from %s to %s' % (session, convert_unix_to_date(si.get('startTime')), convert_unix_to_date(si.get('endTime'))),
- [session_data],
- ['Username', 'RiskScore', 'InitialRiskScore', 'NumOfReasons', 'LoginHost', 'Zones', 'Assets', 'Events', 'SecurityEvents'])
-
- triggered_rules_data = [{
- 'ID': tr.get('ruleId'),
- 'Type': tr.get('ruleType'),
- 'Name': demisto.get(session_info, 'rules.%s.ruleName' % (tr.get('ruleId'))),
- 'EventID': tr.get('eventId'),
- 'SessionID': tr.get('sessionId'),
- 'Source': demisto.get(session_info, 'triggeredRuleEvents.%s.fields.source' % (tr.get('eventId'))),
- 'Domain': demisto.get(session_info, 'triggeredRuleEvents.%s.fields.domain' % (tr.get('eventId'))),
- 'Host': demisto.get(session_info, 'triggeredRuleEvents.%s.fields.host' % (tr.get('eventId'))),
- 'DestIP': demisto.get(session_info, 'triggeredRuleEvents.%s.fields.dest_ip' % (tr.get('eventId'))),
- 'EventType': demisto.get(session_info, 'triggeredRuleEvents.%s.fields.event_type' % (tr.get('eventId')))
- } for tr in session_info.get('triggeredRules')]
-
- md += '\n' + tableToMarkdown('Triggered Rules',
- triggered_rules_data,
- ['ID', 'Type', 'Name', 'EventID', 'SessionID', 'EventType', 'Source', 'Domain', 'Host', 'DestIP'])
- session_data['TriggeredRules'] = triggered_rules_data
- events = http_request(
- 'GET',
- URL_UBA + 'timeline/events/start',
- {
- 'username': demisto.args()['username'],
- 'sequenceTypes': 'session',
- 'startSequenceType': 'session',
- 'startSequenceId': session,
- 'preferredNumberOfEvents': 200
- }
- )
-
- events_data = [{
- 'Type': ev.get('tp'),
- 'Count': ev.get('c'),
- 'Start': convert_unix_to_date(ev.get('ts')),
- 'End': convert_unix_to_date(ev.get('te')),
- 'Sources': [es.get('fields', {}).get('source') for es in ev.get('es')]
- } for ev in events.get('aggregatedEvents', [])]
-
- md += '\n' + tableToMarkdown('Timeline', events_data, ['Type', 'Count', 'Start', 'End'])
- session_data['Events'] = events_data
-
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': events,
- 'HumanReadable': md,
- 'EntryContext': {'Exabeam.Timeline': session_data}
- })
- else:
- demisto.results('No risk score exists for the given user')
-
- def exabeam_session_entities():
- ''' Returns session entities for a given user, can be filtered by container-type, container-id '''
- res = http_request(
- 'GET',
- URL_UBA + 'user/%s/timeline/entities' % demisto.args()['username'],
- {
- 'numberOfResults': demisto.args()['number-of-results'],
- 'unit': demisto.args()['unit'],
- 'num': demisto.args()['num'],
- 'endContainerType': demisto.args()['container-type'],
- 'endContainerId': demisto.args()['container-id']
- }
- )
-
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res
- })
-
- def exabeam_user_info():
- ''' Returns user info '''
- username = demisto.args()['username']
- res = http_request('GET', URL_UBA + 'user/%s/info' % username, None)
- if res.get('username'):
- u = {
- 'Username': res['username'],
- 'AccountNames': ','.join(res.get('accountNames', [])),
- 'Executive': res['isExecutive'],
- 'WatchList': res['isOnWatchlist'],
- 'Name': demisto.get(res, 'userInfo.info.fullName'),
- 'ID': demisto.get(res, 'userInfo.info.accountId'),
- 'Department': demisto.get(res, 'userInfo.info.department'),
- 'DN': demisto.get(res, 'userInfo.info.dn'),
- 'Email': demisto.get(res, 'userInfo.info.email'),
- 'Type': demisto.get(res, 'userInfo.info.employeeType'),
- 'Groups': demisto.get(res, 'userInfo.info.group'),
- 'SID': demisto.get(res, 'userInfo.info.sid'),
- 'Title': demisto.get(res, 'userInfo.info.title'),
- 'RiskScore': demisto.get(res, 'userInfo.riskScore'),
- 'AverageRiskScore': demisto.get(res, 'userInfo.averageRiskScore'),
- 'Labels': demisto.get(res, 'userInfo.labels'),
- 'FirstSeen': convert_unix_to_date(demisto.get(res, 'userInfo.firstSeen')),
- 'LastSeen': convert_unix_to_date(demisto.get(res, 'userInfo.lastSeen')),
- 'LastSessionID': demisto.get(res, 'userInfo.lastSessionId'),
- 'PastScores': ','.join(map(str, demisto.get(res, 'userInfo.pastScores')))
- }
-
- md = tableToMarkdown('User info', [u], ['Name', 'Username', 'Email', 'Department', 'DN', 'Groups',
- 'Title', 'RiskScore', 'AverageRiskScore', 'Executive', 'WatchList', 'AccountNames', 'ID',
- 'Type', 'SID', 'Labels', 'FirstSeen', 'LastSeen', 'LastSessionID', 'PastScores'])
-
- if demisto.get(res, 'userInfo.info.photo'):
- md += '\n![User photo](' + demisto.get(res, 'userInfo.info.photo') + ')\n'
-
- # Let's get the sessions as well
- notable_res = http_request(
- 'GET',
- URL_UBA + 'users/notable',
- {
- 'numberOfResults': 100,
- 'unit': 'd',
- 'num': 7
- }
- )
- if notable_res.get('users'):
- for un in notable_res['users']:
- if demisto.get(un, 'user.username') == username:
- u['NotableList'] = True
- md += '\n## User is on the notable list\n'
- notable_session_ids = un.get('notableSessionIds', [])
- if notable_session_ids:
- u['NoteableSessionIDs'] = notable_session_ids
- session_res = http_request(
- 'GET',
- URL_UBA + 'user/%s/riskTimeline/data' % username,
- {
- 'unit': 'd',
- 'num': 7,
- 'endTimeSequenceType': 'session',
- 'endTimeSequenceId': notable_session_ids[0]
- }
- )
- if session_res.get('sessions'):
- md += '\n' + tableToMarkdown('Sessions', session_res['sessions'])
-
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res,
- 'HumanReadable': md,
- 'EntryContext': {'Account(val.Email && val.Email === obj.Email || val.ID && val.ID === obj.ID || val.Username && val.Username === obj.Username)': u}
- })
-
- else:
- demisto.results('No username with [' + username + '] found')
-
- def exabeam_triggered_rules():
- ''' Return triggered rules for a given container '''
- res = http_request(
- 'GET',
- URL_UBA + 'triggeredRules',
- {
- 'containerType': demisto.args()['container-type'],
- 'containerId': demisto.args()['container-id']
- }
- )
-
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res
- })
-
- def exabeam_watchlists():
- ''' Retrieve current list of watchlists '''
- res = http_request('GET', URL_UBA + 'watchlist', None)
-
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res,
- 'HumanReadable': tableToMarkdown('Watchlists', res, ['title', 'watchlistId']),
- 'EntryContext': {'Exabeam.Watchlists': res}
- })
-
- def exabeam_watchlist():
- watchlist_id = get_watchlist_id()
- res = http_request('GET', URL_UBA + 'watchlist/%s/' % watchlist_id, {'numberOfResults': demisto.args()['num']})
-
- users = [{
- 'Name': demisto.get(u, 'user.info.fullName'),
- 'Department': demisto.get(u, 'user.info.department'),
- 'Username': u.get('username'),
- 'RiskScore': demisto.get(u, 'user.riskScore'),
- 'IsExecutive': u.get('isExecutive')
- } for u in res.get('users', [])]
-
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res,
- 'HumanReadable': tableToMarkdown('Watchlist %s [%s] - %d users' % (res.get('title'), res.get('category'), res.get('totalNumberOfUsers')),
- users,
- ['Name', 'Department', 'Username', 'RiskScore', 'IsExecutive']),
- 'EntryContext': {'Exabeam.Watchlist.%s' % res.get('title'): users}
- })
-
- def exabeam_watchlist_add():
- ''' Adds a user to a given watchlist '''
- watchlist_id = get_watchlist_id()
- username = demisto.args()['username']
- res = http_request(
- 'PUT',
- URL_UBA + 'watchlist/%s/add' % watchlist_id,
- {
- 'items[]': username,
- 'category': 'Users'
- }
- )
-
- if res.get('numberAdded') == 1:
- md = 'User %s added to watchlist %s' % (username, res.get('title'))
- else:
- md = 'User %s was already on watchlist %s' % (username, res.get('title'))
-
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res,
- 'HumanReadable': md
- })
-
- def exabeam_watchlist_remove():
- watchlist_id = get_watchlist_id()
- username = demisto.args()['username']
- res = http_request('PUT', URL_UBA + 'watchlist/%s/remove' % watchlist_id, {
- 'items[]': username,
- 'category': 'Users',
- 'watchlistId': watchlist_id
- })
- if res.get('numberRemoved') == 1:
- md = 'User %s removed from watchlist %s' % (username, res.get('title'))
- else:
- md = 'User %s was not on watchlist %s' % (username, res.get('title'))
-
- demisto.results({
- 'Type': entryTypes['note'],
- 'ContentsFormat': formats['json'],
- 'Contents': res,
- 'HumanReadable': md
- })
-
- ''' EXECUTION '''
- login()
-
- LOG('command is %s' % (demisto.command(), ))
-
- try:
- if demisto.command() == 'test-module':
- demisto.results('ok')
-
- elif demisto.command() == 'xb-users':
- exabeam_users()
-
- elif demisto.command() == 'xb-assets':
- exabeam_assets()
-
- elif demisto.command() == 'xb-sessions':
- exabeam_sessions()
-
- elif demisto.command() == 'xb-events':
- exabeam_events()
-
- elif demisto.command() == 'xb-anomalies':
- exabeam_anomalies()
-
- elif demisto.command() == 'xb-notable':
- exabeam_notable()
-
- elif demisto.command() == 'xb-lockouts':
- exabeam_lockouts()
-
- elif demisto.command() == 'xb-timeline':
- exabeam_timeline()
-
- elif demisto.command() == 'xb-session-entities':
- exabeam_session_entities()
-
- elif demisto.command() == 'xb-userinfo':
- exabeam_user_info()
-
- elif demisto.command() == 'xb-triggered-rules':
- exabeam_triggered_rules()
-
- elif demisto.command() == 'xb-watchlists':
- exabeam_watchlists()
-
- elif demisto.command() == 'xb-watchlist':
- exabeam_watchlist()
-
- elif demisto.command() == 'xb-watchlist-add':
- exabeam_watchlist_add()
-
- elif demisto.command() == 'xb-watchlist-remove':
- exabeam_watchlist_remove()
-
- else:
- logout()
- return_error('Unrecognized command: ' + demisto.command())
-
-
- except Exception, e:
- LOG(e.message)
- LOG.print_log()
- return_error(e.message)
-
- logout()
- type: python
- commands:
- - name: xb-users
- arguments: []
- description: Return the total number of users managed by Exabeam
- - name: xb-assets
- arguments: []
- description: Return the total number of assets managed by Exabeam
- - name: xb-sessions
- arguments: []
- description: Return the total number of tracked sessions by Exabeam
- - name: xb-events
- arguments: []
- description: Return the total number of events processed by Exabeam
- - name: xb-anomalies
- arguments: []
- description: Display anomaly statistics
- - name: xb-notable
- arguments:
- - name: number-of-results
- default: true
- description: Number of records to return
- defaultValue: "100"
- - name: unit
- auto: PREDEFINED
- predefined:
- - d
- - M
- description: The unit of the num argument. Can be d for days, w for weeks, M
- for months
- defaultValue: d
- - name: num
- description: The number of units (days, weeks, etc.)
- defaultValue: "1"
- outputs:
- - contextPath: Exabeam.Notable.Highest
- description: Highest risk score of the user
- type: number
- - contextPath: Exabeam.Notable.Name
- description: User full name
- type: string
- - contextPath: Exabeam.Notable.Username
- description: User name
- type: string
- - contextPath: Exabeam.Notable.Email
- description: User email
- type: string
- - contextPath: Exabeam.Notable.Department
- description: User department
- type: string
- - contextPath: Exabeam.Notable.DN
-      description: User DN
- type: string
- - contextPath: Exabeam.Notable.RiskScore
- description: User risk score
- type: number
- - contextPath: Exabeam.Notable.NotableSessionIDs
-      description: User notable session IDs
- type: string
- description: Display the notable users
- - name: xb-lockouts
- arguments:
- - name: number-of-results
- default: true
- description: Number of records to return
- defaultValue: "100"
- - name: unit
- auto: PREDEFINED
- predefined:
-      - d
-      - w
-      - M
- description: The unit of the num argument. Can be d for days, w for weeks, M
- for months
- defaultValue: d
- - name: num
- description: The number of units (days, weeks, etc.)
- defaultValue: "1"
- outputs:
- - contextPath: Exabeam.Lockout.isUserExecutive
- description: Is the user an executive
- type: boolean
- - contextPath: Exabeam.Lockout.user.username
- description: Username of user
- type: string
- - contextPath: Exabeam.Lockout.user.riskScore
- description: Risk score of user
- type: number
- - contextPath: Exabeam.Lockout.user.firstSeen
- description: When did we first see the user
- type: date
- - contextPath: Exabeam.Lockout.user.lastSeen
- description: When did we last see the user
- type: date
- - contextPath: Exabeam.Lockout.user.lastSessionId
- description: Last session id of the user
- type: string
- - contextPath: Exabeam.Lockout.user.info.department
- description: User department
- type: string
- - contextPath: Exabeam.Lockout.user.info.dn
- description: User DN
- type: string
- - contextPath: Exabeam.Lockout.user.info.email
- description: User email
- type: string
- - contextPath: Exabeam.Lockout.user.info.fullName
- description: User full name
- type: string
- - contextPath: Exabeam.Lockout.user.info.group
- description: User groups
- type: string
- - contextPath: Exabeam.Lockout.user.info.location
- description: User location
- type: string
- - contextPath: Exabeam.Lockout.user.info.manager
-      description: The user's manager
- type: string
- - contextPath: Exabeam.Lockout.user.info.sid
- description: User identifier
- type: string
- - contextPath: Exabeam.Lockout.user.info.title
- description: User title
- type: string
- - contextPath: Exabeam.Lockout.lockoutInfo.lockoutId
- description: ID of the lockout
- type: string
- - contextPath: Exabeam.Lockout.lockoutInfo.loginHost
- description: The login host for lockout
- - contextPath: Exabeam.Lockout.lockoutInfo.riskScore
- description: Risk score for lockout
- type: number
- - contextPath: Exabeam.Lockout.lockoutInfo.isRisky
- description: Is this risky
- type: boolean
- description: List all the Exabeam lockout users.
- - name: xb-timeline
- arguments:
- - name: username
- required: true
- default: true
- description: The username to act upon
- description: Display the timeline events for a given user
- - name: xb-session-entities
- arguments:
- - name: username
- required: true
- default: true
- description: The username to act upon
- - name: number-of-results
- default: true
- description: Number of records to return
- defaultValue: "100"
- - name: unit
- auto: PREDEFINED
- predefined:
- - d
- - w
- - M
- description: The unit of the num argument. Can be d for days, w for weeks, M
- for months
- defaultValue: d
- - name: num
- description: The number of units (days, weeks, etc.)
- defaultValue: "1"
- - name: container-type
- description: Container type for the filter - accepts container types like session,
- etc.
- defaultValue: session
- - name: container-id
- description: The container ID we want to filter by
- description: Display the session entities for a given user filter by container
- - name: xb-userinfo
- arguments:
- - name: username
- required: true
- default: true
- description: The username to act upon
- outputs:
- - contextPath: Account.Username
- description: Username of user
- type: string
- - contextPath: Account.AccountNames
- description: All account names we know about
- type: string
- - contextPath: Account.Executive
- description: Is this user an executive
- type: boolean
- - contextPath: Account.WatchList
- description: Is this user on a watch list
- type: boolean
- - contextPath: Account.Name
-      description: Name of the user
- type: string
- - contextPath: Account.ID
- description: Account ID of the user
- type: string
- - contextPath: Account.Department
- description: Department of user
- type: string
- - contextPath: Account.DN
- description: DN of user
- type: string
- - contextPath: Account.Email
- description: Email of user
- type: string
- - contextPath: Account.Type
- description: Type of account
- type: string
- - contextPath: Account.Groups
- description: Groups for the user
- type: string
- - contextPath: Account.SID
- description: SID of the user
- type: string
- - contextPath: Account.Title
- description: Title of the user
- type: string
- - contextPath: Account.RiskScore
- description: Risk score of the user
- type: number
- - contextPath: Account.AverageRiskScore
- description: Average risk score of the user
- type: number
- - contextPath: Account.Labels
- description: Any labels assigned to the user
- type: string
- - contextPath: Account.FirstSeen
- description: First time user was seen
- type: date
- - contextPath: Account.LastSeen
- description: Last time user was seen
- type: date
- - contextPath: Account.LastSessionID
- description: Last session ID of the user
- type: string
- - contextPath: Account.PastScores
- description: All past scores of the user
- type: number
- - contextPath: Account.LoginHost
- description: The last session login host
- type: string
- - contextPath: Account.LoginLabel
- description: Last session login label
- type: string
- - contextPath: Account.NotableList
- description: Is the user on the notable list
- type: boolean
- - contextPath: Account.NotableSessionIDs
- description: List of session IDs
- type: string
- description: Display information about the given user
- - name: xb-triggered-rules
- arguments:
- - name: container-type
- description: Container type for the filter - accepts container types like session,
- etc.
- defaultValue: session
- - name: container-id
- description: The container ID we want to filter by
- description: Display the triggered rules for a given container
- - name: xb-watchlists
- arguments: []
- outputs:
- - contextPath: Exabeam.Watchlists
- description: Watchlists
- description: Retrieve the list of watchlists we currently have
- - name: xb-watchlist
- arguments:
- - name: id
- default: true
- description: Watchlist ID to retrieve data from
- - name: title
- description: Watchlist title to retrieve data from
- - name: num
- description: Number of users to retrieve
- defaultValue: "100"
- outputs:
- - contextPath: Exabeam.Watchlists
- description: Watchlists
- description: Retrieve the users on a given watchlist. You must provide either
- id or title.
- - name: xb-watchlist-add
- arguments:
- - name: id
- default: true
- description: Watchlist ID to add user to
- - name: title
- description: Watchlist title to add user to
- - name: username
- required: true
- description: The username to act upon
- - name: watch-until-days
- description: How many days should we watch the given user
- defaultValue: "7"
- description: Add a user to the watchlist. You must provide either id or title.
- execution: true
- - name: xb-watchlist-remove
- arguments:
- - name: id
- default: true
- description: Watchlist ID to remove user from
- - name: title
- description: Watchlist title to remove user from
- - name: username
- required: true
- description: The username to act upon
- description: Remove a user from the watchlist. You must provide either id or title.
- execution: true
- runonce: false
-releaseNotes: "New integration"
-tests:
-- No test
\ No newline at end of file
diff --git a/Beta_Integrations/integration-Signal_Sciences_WAF.yml b/Beta_Integrations/integration-Signal_Sciences_WAF.yml
deleted file mode 100644
index c06af4f04ff8..000000000000
--- a/Beta_Integrations/integration-Signal_Sciences_WAF.yml
+++ /dev/null
@@ -1,506 +0,0 @@
-commonfields:
- id: Signal Sciences WAF
- version: -1
-name: Signal Sciences WAF
-display: Signal Sciences WAF
-category: Network Security
-image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHgAAAAyCAYAAACXpx/YAAAAAXNSR0IArs4c6QAADodJREFUeAHtWHt0VdWZ//be53HPuTcIyCMvHgEFUZAqIjOFUXmHV0ISotYZHKqrvmdWO9WqWG1cbUWd6bhY2lKwD63VgmkSCBBaqkCoWGoFeQsojxhyEwjySnLPvefsx3z7xrBiYLDLha1d6+w/7rl7f9+3H7/vuTdA2EIEQgRCBEIEQgRCBEIEQgRCBEIEQgRCBEIEQgRCBEIEQgRCBEIEQgRCBEIEQgRCBEIEPhcCRydC3+YCyC4DoJ9rglDo744A6bqD+unREcwKConBplJFRkgAqSSvo1T9haeCX+augj91lQn7X14Eziq4odCYxFjkfkr5LEooC7h6h4OspVRemmGwOxIBPQ6QmCVanQbm8inyVGRF9oYWHAvblxkBundsr4zGIutly7DWUhDXC8EX+ikYnVXljYHTsYUGGKN8DkdkAFMzK2EzccW/G6b5M7O3/05DoXXLZx1Oz3/oJoh8Ft8XSY8XRPMbi605r5eCdaF1tl8N0e1zIXohnn80GmmY4vQjGeJD3HjF6VP+fwx7Ez7WhzhcCnlRGalRhPRIJsjM/jWJd/X4u3eBmXXcuT7CxMpAstWZFd5cPd611RVGJjlMfosDuZoSdoYStSEhvBcGVsH7TSXOI0qJoeK0/2DuJ+t1lb+Y/cYSZ7MCOSj1cWpI3gY41XXug/nW1RkZ9EGpYJySRDGmNic5X9Kvitd25e3crytwxjo2fFuAtzCrHC7I21nub/mftrRFTxGlzhDJGjuUeygfBjoyskoB6+lxb0aHcvXGrlsCAanr8Z7PiSckNJ1vs/FipyhqqRoBbBgQtkYCTxiU3GcJ61rkJwrEbFT67Spq5ZxP/qKPEQiUAv988zYUwDWxmPEHqcgMKUUtKPKhSdltFtBp5+PvPGZSuC5qsiIljXGdx79M/42lm4633V1soaLIEL0xrdxo1FxJKe3tB23TmvvAjqY57lMWqBuTXKyK9/H/J3Us0cdFuhS8ruthyrDiJko+ICVJJhjcNLg88dGhgRDxrrGvG1ydeEvz01N+od/d7tav2v+gq/zfvG/Yd5hU9Wnz1MTclcE6gAC0ZzbY/tbP2stJ01tEA2vdScr3fRbv34tulGGVfA/Ah+hll+kcZDvuEkJkZiCC6SdN2Jl73FqGhVahr8hGw7CeyjqmjlHatoMQy6RGcOC8GyfUBVBApYdfgLzDkITDLWnlpvlj0a8YQmStHwj14zUNW0MxKzCJNUVJkVA+/Cpw2BAz8M3MFWJZvAiGWaY7IaUSbzDhTGMMhgQpsekt2196czkIFKf1BdZwZsiZlFiDlVT7vZS3dFANnGOA6fU/9aNiuotR7GzuHVDtberMcqQYck1p3QWUDVQE9kjprcjGVIMCORFC/8kVaBUAe7WMdhA3Zt2rFMsGFbzvJfhreb+Dw5oWL7FKiLAYl/5eNKqvM9MIghR5Pbu6Pf1pnsZiczRj9HYpmUtE6s8qIZZnroVjZ2nUmIe6cqiALW0ng1c7Uk7zWMgQfeyvE0pGMwL1COqbb5PkhvT9VipjDyEwIGsgDAGqxqU4LPojC7b2ErFlJoMCX5BbMdeO5zKop8z8qlDySrw7CU7sczywTF+rgLxkGzTDJc6GxiLnR02FMAYTuKk3qZuk/CH081+MGowFDTLHi5wFUcNagWGymBBWKCxSYyn1ijLY42l+4t7gMPUC4VY1Tn4vRodbMxzj1zdw8z81/XARDLUMoxaV+20FwRhG4ftRN7L+6IxLBmn6hRoL6CtcQtKIkGVHi81fNhQYk3Vh2CFzZKY50qLmerw2Po5XxtGEqCcJWL/QtYgtrFGmyZaYyr1R8x/DcB+NWhtBGnfgWTLRCR50o5E3Dk+BvPR8kj1MDf4SM1g5UHMqmtVD6CRr4iVwhaY3zHa+ZjKyXik6D410rGHai5VrPJymFcZuMQjdoCRMVYL2B8aec3q6rzfN7Zs2zCDTWkwpWSCU6o0W/69UkB+POAHd2x8wmNpCqZGBhO5Eyj/hIvfdIMxthPCZbYE/N7fKK28qpj+0GesHgahkeFBUctOJfd0b0xvv8pNdmVjSloR7MFbXmQb5L9NyN+cU2dX64aSdlXkAojUeBT9+s3st8jyS5FCdSiWu+kmldxWaQJlBiAOStWl+QwWYQwkQgO373vNGCpkY63HZggCVYmXMDlXBB5yI248lE5dlVQTDA98vNSjNU3bLjC5bO6ebuTK5zlPe10DCW5KY86IRe23PzLbaplJrhGamFn3GIEZeIP3CPzJvuPLkKDCsO3UtoiiTnAv0fpnO75zZz6HBgs8SX0EMJnt+YhIjaoAdte7TcxHK21BJDsacJzMrE8N9Sb6TYdu9iHDH182AHqig54Wix4KEHLW4yhueTKlxJMGf2TcroxdjcqFUbNOeE97wrKrUJJ+LO10TJouWMwW78XaA2MxAnWzMrkzlHyDJK5Ipb6quqdIKTvjmDqUURgXzSm5geAC+Cj2pwQvU9P7L4TfxIuMHrhWZnxL8B1nVqdWSsHEU6NaRO46mFaA336Wp7BWJxbvKk2MD9KgU55XdIjQ/yDAf6czXeAZBEmJMBHchRfCTAavhZBkAf7EqeCklZB0WY4bmF+jl6DkIkHhFh/TsKrWXK3lEUdVtdDOY41GGSHqyjx17uqnYXUtM9gRGGNSZ0afzev/f//4VsLxvZXJKkOQjWgN/YcQk10hBnm0sjfXGdcd6Av6SUymqdTrIXO3vyipv3dN5Lowacndp7xhGP/0wZBjc/Wm8yP49ppVnFHZx56M1PxZjLMXJ0ZNWrBq7kvG2jVKiBBFRasLImAmXcinLc2tS+8vwTDmYKnR4jhA+1GDQVyieM7SnVREvtn+HNdLdjFK0J379VeWYIhSssZiR31Tibr9MRp+2ZfvVNA2g2nOmXo5wMNyaU/qVJ36Mi8/TxYZuDUXWkzHbeKzV9xdkV/LHjxQ7uaiPKzCkL00zXOBHAw/lwTsNt8I3z6ScGfiAMrIz+0DsKGlxotMoMzFf8zQZ4x0+ojGKnov+gOdHhPRXERsZErAeS3MMkygEBHNQqmG2eafF5CIuRA2GqGVofN0VI9e2hyct+de1/qg85PxmvMgsxPMNwZe7SxBYSdOrX3iOPvFmEvR1MTXy0yi7Ex3GxLyOdgi1mAKPAKT0BEQR7lutzZ/aGlVCEdl+xvOtopWEkQCljTgotUMbPgGZauVkLS6D2S9Qiyv92+4qZNXAxBxU/L3EtYsaptCx6YUu/xBXV2SdQYLxWoEdizTMjpRlWPSJREo9jcqdnx4XfAKGcBuUt6GDr/P37VJwtFxjkXn/B9OgG9IoSbH8qKlsfNH+VLXJHbCYqd5KCPQXCo8czLeHHroJug8ttL7jGNBfYsLSc1PMCekWiPNCTQi9WeCBVdL6Rs7y4Oe6unPQBNA50gbyifQ5Hx3e44Xu3UfRiLW3aoa6OZGJlqH6Yh6MJ/ek6
tG23rYZjI7PcYqRTPBMgw+UOv07T0bApL03AaYMsdsE1k2xxMs5Vf5DwL0fSRXEPzLblnXiP+cMipmmiMG21gA+xhB+c10hXKn5df7XxRO12f6k5M0GSDNwvGf13IHBXxMpeaDfisSaMoTonmI2k6NOsiv9Ys7VT2Mm60ciQd5ZS0KL+zVjLFDSuyw9+Wzruxk2/Z7ny//OrPLSyq2fCj1N05jvC761xYP3NF/XdvkZzONU3pZhmy9gPtvVWGLuiJj2Esyx9YSmnm/nFw7aZCzqgYXh7v1AyB+aIEc7rtrp9nLfx0rwftxIK8bYds8FfIEy8HiMpyNO+xzEBaWrdd3oapOaMeIEb+Mjynqs3x/VwQuNxk5TsepEy4/S6Ke95J/1vAxKYrb1BKaIPRjet0UUWYthRWItskAbvhDq4UCqIxYhFfjit6tbNLItKvhr6zEUS7zoG+jiilBLr4P5eD6uiQRnWyOGSsNy9xNm/u/ApDtM09EAHPThKA3a96HjFtUhQgh3wGtwUsngW4yQLNt03sWXt52RCH0v6OPMR4yalZCPYY4ebaXcA01F7o6IjGwFAx6rL4Ue80phAO7hZ5bh7sZCsZYa5ButAd8l2/jus4C9uMLb/G8z4Mb61bA3Xmw8ilZblvDV05lV/qN6ax9N7pUdiZ36FaaOXHwzmHT5mvaYo2mdW+/fQ+PBif6/sJ6kmBE6AatBM5C8ojVFXh1cA/vTvAKeU5S/jkGrFfsKLfJ7WGnXEGZMRLczia8qMG6vQU9Me7Af2LUylXwglRTbtXwtbmKIpN/V/8vQq7KrvOfjhc5xw5STuGCNYLAHTyeDUZI6O7DkAcnls0TRbrslxvdOrV85eLtLE8U06cwyqJwmQF3KgPwhKVMVuStgs2bFu/oOLIAmWLYzB5UzDN+53sDn3N/qPQzyrS3cTDxAVbBR8/av5huPFPMxqOASwuBqfD2raE2opYMxp2o6Vr5PKU67benhY5GJhuDCARkkH0DH/LPeZ/by4JW6AuOga5Cpgoi8QDiVMhW8qnmzq8SL9dPpZsPxioBGBghKfp484S8d9CacQPLJphlkgopIjDLG5ZgY1iV9/yWkpV8ltfzZdmSOPbPlFlcdnRM5ePCTqrdxNtzUXOLs+7jU8fRd7izzRfoTL4D8Q6WQ2TEdWmX+8VJXofcv6hgLv58PgXPywYFSuMQR7sM206FJ7MXSvNYg6m5krG/z+R0DV/J1n2+p80sdmG4PcSPqXSwgsBLmGyUxIuhNk/BN+LifpJP71bTtPL9kOPrXIHCOgjuEGmaxQmbaC/ChYxg+BLzMW9TjOWu9+g76xfrqB4Oc5thX8fVsLlafo/DKwRmRW1qSdNGgVUE6JF+stcJ5uiCgL9+NM83ruwx/YV1duOjK9gtbIJw4RCBEIEQgRCBEIEQgRCBEIEQgRCBEIEQgRCBEIEQgRCBEIEQgRCBEIEQgRCBEIEQgRCBEIEQgRCBEIEQgROAfAYH/A5jMbWXPdlUHAAAAAElFTkSuQmCC
-description: Protect your web application using Signal Sciences
-configuration:
-- display: Email
- name: Email
- defaultvalue: ""
- type: 0
- required: true
-- display: Token
- name: Token
- defaultvalue: ""
- type: 4
- required: true
-- display: Corporation Name
- name: corpName
- defaultvalue: ""
- type: 0
- required: true
-script:
- script: |-
- ''' IMPORTS '''
- import os
- import json
- import requests
- import datetime
-
- ''' GLOBAL VARS '''
- USE_SSL = True
-
- EMAIL = demisto.params()['Email']
- TOKEN = demisto.params()['Token']
- CORPNAME = demisto.params()['corpName']
-
- SERVER_URL = 'https://dashboard.signalsciences.net/api/v0/'
-
- '''SUFFIX ENDPOINTS'''
- GET_SITES_SUFFIX = 'corps/{0}/sites'
- WHITELIST_SUFFIX = 'corps/{0}/sites/{1}/whitelist'
- BLACKLIST_SUFFIX = 'corps/{0}/sites/{1}/blacklist'
- DELETE_WHITELIST_IP_SUFFIX = 'corps/{0}/sites/{1}/whitelist/{2}'
- DELETE_BLACKLIST_IP_SUFFIX = 'corps/{0}/sites/{1}/blacklist/{2}'
-
- '''TABLE TITLES'''
- WHITELIST_TITLE = 'Signal Sciences - Whitelist'
- BLACKLIST_TITLE = 'Signal Sciences - Blacklist'
- SITES_LIST_TITLE = "Sites list"
- ADD_IP_TO_WHITELIST_TITLE = 'Signal Sciences - Adding an IP to Whitelist'
- ADD_IP_TO_BLACKLIST_TITLE = 'Signal Sciences - Adding an IP to Blacklist'
-
- '''TABLE HEADERS'''
- ADD_IP_HEADERS = ['Source', 'Note', 'Expiration data']
- WHITELIST_HEADERS = ['ID', 'Source', 'ExpiryDate', 'Note', 'CreatedDate', 'CreatedBy']
-
- ''' HELPER FUNCTIONS '''
- def http_request(method, url, params_dict=None, data=None):
- LOG('running %s request with url=%s\nparams=%s' % (method, url, json.dumps(params_dict)))
-
- headers = {
- 'Content-Type': 'application/json',
- 'x-api-user': EMAIL,
- 'x-api-token': TOKEN
- }
-
- try:
- res = requests.request(method,
- url,
- verify=USE_SSL,
- params=params_dict,
- headers=headers,
- data=data
- )
- res.raise_for_status()
-
- if 'whitelist/' in url or 'blacklist/' in url:
- return None
-
- return res.json()
-
- except Exception, e:
- LOG(e)
- raise(e)
-
-
- '''COMMANDS'''
- def test_module():
- try:
- url = SERVER_URL + 'corps'
- res = http_request('GET', url)
- except Exception, e:
- raise Exception(e.message)
-
- demisto.results('ok')
-
-
- def get_whitelist(siteName):
- """Get the whitelist data for siteName"""
- url = SERVER_URL + WHITELIST_SUFFIX.format(CORPNAME, siteName)
- site_whitelist = http_request('GET', url)
- data = site_whitelist.get('data', [])
-
- outputs = []
- for item in data:
- output = {}
-
- output['ID'] = item.get('id', '')
- output['Source'] = item.get('source', '')
- output['ExpiryDate'] = item.get('expires', '')
- output['Note'] = item.get('note', '')
- output['CreatedDate'] = item.get('created', '')
- output['CreatedBy'] = item.get('createdBy', '')
-
- outputs.append(output)
-
- sidedata = "Number of IPs in the Whitelist {0}".format(len(data))
-
- return {
- 'Type': entryTypes['note'],
- 'Contents': site_whitelist,
- 'ContentsFormat': formats['json'],
- 'ReadableContentsFormat': formats['markdown'],
- 'HumanReadable': tableToMarkdown(WHITELIST_TITLE, outputs, WHITELIST_HEADERS, metadata=sidedata),
- 'EntryContext': {
- 'SigSciences.Whitelist(val.ID==obj.ID)': outputs,
- }
- }
-
-
- def get_blacklist(siteName):
- """Get blacklist data for siteName"""
- url = SERVER_URL + BLACKLIST_SUFFIX.format(CORPNAME, siteName)
- site_blacklist = http_request('GET', url)
- data = site_blacklist.get('data', [])
-
- outputs = []
- for item in data:
- output = {}
-
- output['ID'] = item.get('id', '')
- output['Source'] = item.get('source', '')
- output['ExpiryDate'] = item.get('expires', '')
- output['Note'] = item.get('note', '')
- output['CreatedDate'] = item.get('created', '')
- output['CreatedBy'] = item.get('createdBy', '')
-
- outputs.append(output)
-
- sidedata = "Number of IPs in the Blacklist {0}".format(len(data))
-
- return {
- 'Type': entryTypes['note'],
- 'Contents': site_blacklist,
- 'ContentsFormat': formats['json'],
- 'ReadableContentsFormat': formats['markdown'],
- 'HumanReadable': tableToMarkdown(BLACKLIST_TITLE, outputs, WHITELIST_HEADERS, metadata=sidedata),
- 'EntryContext': {
- 'SigSciences.Blacklist(val.ID==obj.ID)': outputs,
- }
- }
-
-
- def add_ip_to_whitelist(siteName, ip, note, expires=None):
- """Add an ip to the whitelist"""
- url = SERVER_URL + WHITELIST_SUFFIX.format(CORPNAME, siteName)
- data = {
- 'source': ip,
- 'note': note
- }
- if expires is not None:
- data['expires'] = expires
-
- res = http_request('PUT', url, data=json.dumps(data))
-
- output = {}
- human_readable = {}
- output['ID'] = res.get('id', '')
- output['Note'] = res.get('note', '')
- output['Source'] = res.get('source', '')
- output['CreatedBy'] = res.get('createdBy', '')
- output['CreatedDate'] = res.get('created', '')
- output['ExpiryDate'] = res.get('expires', '')
-
- human_readable['Note'] = output['Note']
- human_readable['Source'] = output['Source']
- human_readable['Expiration data'] = output['ExpiryDate'] if output['ExpiryDate'] else "Not Set"
-
- sidedata = "The IP has been successfully added to whitelist."
-
- return {
- 'Type': entryTypes['note'],
- 'Contents': res,
- 'ContentsFormat': formats['json'],
- 'ReadableContentsFormat': formats['markdown'],
- 'HumanReadable': tableToMarkdown(ADD_IP_TO_WHITELIST_TITLE, human_readable, ADD_IP_HEADERS, metadata=sidedata),
- 'EntryContext': {
- 'SigSciences.Whitelist(val.ID==obj.ID)': output,
- }
- }
-
-
- def add_ip_to_blacklist(siteName, ip, note, expires=None):
- """Add an ip to the blacklist"""
- url = SERVER_URL + BLACKLIST_SUFFIX.format(CORPNAME, siteName)
- data = {
- 'source': ip,
- 'note': note
- }
- if expires is not None:
- data['expires'] = expires
-
- res = http_request('PUT', url, data=json.dumps(data))
-
- output = {}
- human_readable = {}
- output['ID'] = res.get('id', '')
- output['Note'] = res.get('note', '')
- output['Source'] = res.get('source', '')
- output['CreatedBy'] = res.get('createdBy', '')
- output['CreatedDate'] = res.get('created', '')
- output['ExpiryDate'] = res.get('expires', '')
-
- human_readable['Note'] = output['Note']
- human_readable['Source'] = output['Source']
- human_readable['Expiration data'] = output['ExpiryDate'] if output['ExpiryDate'] else "Not Set"
-
- sidedata = "The IP has been successfully added to blacklist."
-
- return {
- 'Type': entryTypes['note'],
- 'Contents': res,
- 'ContentsFormat': formats['json'],
- 'ReadableContentsFormat': formats['markdown'],
- 'HumanReadable': tableToMarkdown(ADD_IP_TO_BLACKLIST_TITLE, human_readable, ADD_IP_HEADERS, metadata=sidedata),
- 'EntryContext': {
- 'SigSciences.Blacklist(val.ID==obj.ID)': output,
- }
- }
-
-
- def whitelist_remove_ip(siteName, IP):
- """Remove an ip from the whitelist"""
- url = SERVER_URL + WHITELIST_SUFFIX.format(CORPNAME, siteName)
- site_whitelist = http_request('GET', url)
- data = site_whitelist.get('data', [])
-
- for item in data:
- if item.get('source', '') == IP:
- url = SERVER_URL + DELETE_WHITELIST_IP_SUFFIX.format(CORPNAME, siteName, item.get('id', ''))
- res = http_request('DELETE', url)
-
- if 'res' not in locals():
- raise Exception("The IP {0} was not found on the WhiteList".format(IP))
-
- else:
- human_readable = '### Signal Sciences - Removing an IP from Whitelist \n\n The IP has been successfully removed from Whitelist.'
-
- return {
- 'Type': entryTypes['note'],
- 'Contents': res,
- 'ContentsFormat': formats['json'],
- 'ReadableContentsFormat': formats['markdown'],
- 'HumanReadable': human_readable,
- 'EntryContext': {
- 'SigSciences.Whitelist(val.ID==obj.ID)': {},
- }
- }
-
-
- def blacklist_remove_ip(siteName, IP):
- """Remove an ip from the blacklist"""
- url = SERVER_URL + BLACKLIST_SUFFIX.format(CORPNAME, siteName)
- site_blacklist = http_request('GET', url)
- data = site_blacklist.get('data', [])
-
- for item in data:
- if item.get('source', '') == IP:
- url = SERVER_URL + DELETE_BLACKLIST_IP_SUFFIX.format(CORPNAME, siteName, item.get('id', ''))
- res = http_request('DELETE', url)
-
- if 'res' not in locals():
- raise Exception("The IP {0} was not found on the BlackList".format(IP))
-
- else:
- human_readable = '### Signal Sciences - Removing an IP from Blacklist \n\n The IP has been successfully removed from Blacklist.'
-
- return {
- 'Type': entryTypes['note'],
- 'Contents': res,
- 'ContentsFormat': formats['json'],
- 'ReadableContentsFormat': formats['markdown'],
- 'HumanReadable': human_readable,
- 'EntryContext': {
- 'SigSciences.Blacklist(val.ID==obj.ID)': {},
- }
- }
-
-
- def get_sites():
- """Get the sites list"""
- url = SERVER_URL + GET_SITES_SUFFIX.format(CORPNAME)
- res = http_request('GET', url)
- data = res.get('data', [])
-
- outputs = []
- for item in data:
- output = {}
- output['Name'] = item.get('name', '')
- output['CreatedDate'] = item.get('created', '')
-
- outputs.append(output)
-
- return {
- 'Type': entryTypes['note'],
- 'Contents': res,
- 'ContentsFormat': formats['json'],
- 'ReadableContentsFormat': formats['markdown'],
- 'HumanReadable': tableToMarkdown(SITES_LIST_TITLE, outputs, ['Name', 'CreatedDate']),
- 'EntryContext': {
- 'SigSciences.Sites(val.Name==obj.Name)': outputs,
- }
- }
-
- ''' EXECUTION CODE '''
-
- LOG('command is %s' % (demisto.command(), ))
-
- try:
- if demisto.command() == 'test-module':
- test_module()
- elif demisto.command() == 'sigsci-get-whitelist':
- demisto.results(get_whitelist(**demisto.args()))
- elif demisto.command() == 'sigsci-get-blacklist':
- demisto.results(get_blacklist(**demisto.args()))
- elif demisto.command() == 'sigsci-whitelist-add-ip':
- demisto.results(add_ip_to_whitelist(**demisto.args()))
- elif demisto.command() == 'sigsci-blacklist-add-ip':
- demisto.results(add_ip_to_blacklist(**demisto.args()))
- elif demisto.command() == 'sigsci-whitelist-remove-ip':
- demisto.results(whitelist_remove_ip(**demisto.args()))
- elif demisto.command() == 'sigsci-blacklist-remove-ip':
- demisto.results(blacklist_remove_ip(**demisto.args()))
- elif demisto.command() == 'sigsci-get-sites':
- demisto.results(get_sites(**demisto.args()))
-
-
- except Exception, e:
- LOG(e.message)
- LOG.print_log()
- raise
- type: python
- commands:
- - name: sigsci-get-whitelist
- arguments:
- - name: siteName
- required: true
-      description: 'The siteName you want to see the whitelist for, e.g., siteName='
- outputs:
- - contextPath: SigSciences.Whitelist.ID
- description: ID for this particular entry.
- type: string
- - contextPath: SigSciences.Whitelist.Source
- description: IP Address present in the whitelist.
- type: string
- - contextPath: SigSciences.Whitelist.ExpiryDate
- description: Expiration Timestamp.
- type: date
- - contextPath: SigSciences.Whitelist.Note
- description: Note associated with the tag.
- type: string
- - contextPath: SigSciences.Whitelist.CreatedDate
- description: Created Date Timestamp
- type: date
- - contextPath: SigSciences.Whitelist.CreatedBy
- description: User who added this source.
- type: string
- description: Fetch whitelist residing on Signal Sciences' Platform.
- - name: sigsci-get-blacklist
- arguments:
- - name: siteName
- required: true
-      description: 'The siteName you want to see the blacklist for, e.g., siteName='
- outputs:
- - contextPath: SigSciences.Blacklist.ID
- description: ID for this particular entry.
- type: string
- - contextPath: SigSciences.Blacklist.Source
- description: IP Address present in the blacklist.
- type: string
- - contextPath: SigSciences.Blacklist.ExpiryDate
- description: Expiration Timestamp.
- type: date
- - contextPath: SigSciences.Blacklist.Note
- description: Note associated with the tag.
- type: string
- - contextPath: SigSciences.Blacklist.CreatedDate
- description: Created Date Timestamp
- type: date
- - contextPath: SigSciences.Blacklist.CreatedBy
- description: User who added this source.
- type: string
- description: Fetch blacklist residing on Signal Sciences' Platform.
- - name: sigsci-whitelist-add-ip
- arguments:
- - name: siteName
- required: true
-      description: 'The siteName whose whitelist the IP should be added to, e.g., siteName='
- - name: ip
- required: true
- description: IP Address to be added.
- - name: note
- required: true
- description: Note associated with the tag.
- - name: expires
-      description: Optional RFC3339-formatted datetime in the future. Omit this parameter
-        if it does not expire.
- outputs:
- - contextPath: SigSciences.Whitelist.Source
- description: IP Address present in the whitelist.
- type: string
- - contextPath: SigSciences.Whitelist.Note
- description: Note associated with the tag.
- type: string
- - contextPath: SigSciences.Whitelist.ID
- description: ID for this particular entry.
- type: string
- - contextPath: SigSciences.Whitelist.CreatedBy
- description: User who added this source.
- type: string
- - contextPath: SigSciences.Whitelist.CreatedDate
- description: Created Date Timestamp
- type: date
- - contextPath: SigSciences.Whitelist.ExpiryDate
- description: Expiration Timestamp.
- type: date
- description: Add an IP to the whitelist residing on Signal Sciences' Platform.
- - name: sigsci-blacklist-add-ip
- arguments:
- - name: siteName
- required: true
-      description: 'The siteName whose blacklist the IP should be added to, e.g., siteName='
- - name: ip
- required: true
- description: IP Address to be added.
- - name: note
- required: true
- description: Note associated with the tag.
- - name: expires
-      description: Optional RFC3339-formatted datetime in the future. Omit this parameter
-        if it does not expire.
- outputs:
- - contextPath: SigSciences.Blacklist.Source
- description: IP Address present in the Blacklist.
- type: string
- - contextPath: SigSciences.Blacklist.Note
- description: Note associated with the tag.
- type: string
- - contextPath: SigSciences.Blacklist.ID
- description: ID for this particular entry.
- type: string
- - contextPath: SigSciences.Blacklist.CreatedBy
- description: User who added this source.
- type: string
- - contextPath: SigSciences.Blacklist.CreatedDate
- description: Created Date Timestamp.
- type: date
- - contextPath: SigSciences.Blacklist.ExpiryDate
- description: Expiration Timestamp.
- type: date
- description: Add an IP to the blacklist residing on Signal Sciences' Platform.
- - name: sigsci-whitelist-remove-ip
- arguments:
- - name: siteName
- required: true
-      description: 'The siteName whose whitelist the IP should be removed from, e.g., siteName='
- - name: IP
- required: true
- description: IP Address entry to be removed.
- description: Remove an IP from whitelist residing on Signal Sciences' Platform.
- - name: sigsci-blacklist-remove-ip
- arguments:
- - name: siteName
- required: true
-      description: 'The siteName whose blacklist the IP should be removed from, e.g., siteName='
- - name: IP
- required: true
- description: IP Address entry to be removed.
- description: Remove an IP from blacklist residing on Signal Sciences' Platform.
- - name: sigsci-get-sites
- arguments: []
- outputs:
- - contextPath: SigSciences.Sites.Name
- description: Site name
- type: string
- - contextPath: SigSciences.Sites.CreatedDate
- description: Site creation date
- type: date
-    description: Get all the site names from Signal Sciences
- runonce: false
-releaseNotes: "New integration with SignalSciences."
-tests:
- - SignalSciences Test
diff --git a/Beta_Integrations/integration-Symantec_Deepsight.yml b/Beta_Integrations/integration-Symantec_Deepsight.yml
new file mode 100644
index 000000000000..0ce86f9bb6ad
--- /dev/null
+++ b/Beta_Integrations/integration-Symantec_Deepsight.yml
@@ -0,0 +1,933 @@
+commonfields:
+ id: Symantec Deepsight Intelligence
+ version: -1
+name: Symantec Deepsight Intelligence
+display: Symantec Deepsight Intelligence (Beta)
+category: Data Enrichment & Threat Intelligence
+image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHgAAAAgCAYAAADZubxIAAASu0lEQVR42u1aCXSc1XUe75tsaTSa5d+X2TUaabRvtiFAIBQMLkRtCZRS1rD2hBPApNCYAE0xGGgA+5BS41KSQw0mNbaWWbTalm1wodiGkEDsYGxsZHnROpJGo9vvzYwkS5aKKT1AOLrnvDMz73/v/ffd797v3vckw1css/7w7/6rByNljW0bbkszTMm3S6zzDZdf4Z7xcizoqaDGc2capuTbIZ4s8ZwcNSek2czPPnmj9SfHG7Of+GRD+TzDlPzpS2Bu2rk/DBhr/vGWylazee4Sejtf6WouuGL384WzDFPyzRB6+07z4S1lF/Q0Lr0hGil+MBYOrIqF/P8UC+beQzVlf0nBCwtpJU0/fY5NshWruucq3wLLtT/wW9+sfu76lbqQ+SM8mgL2myJtoeLAQH1gTbzGvq8r6InGWvOJduQRbc8l2hbAZwlRczFR2N9GDd7mvrDrjuM1pYsSAKuqqml5P5Ms3uc4s/SAJDies0uW5Xg0lXu/bqEN5Zn91bk/PdXoOhlttRM1uamvxUPt293U0eKjzsZC6o0U00CwgKgOrTYfQAdosN4fj0fOezu0IrvKa8lcJXDupxTJ/oDFIjyZkWH5c8OUfP3yUdAudVU7/pMiuRRtyKHuxmyikI9i9dnUs9VFA9s0irdoNNTgoaG6QqItxXheQj1NJdTZUE4frXfSc1fPji+2p5Fm8X2YLSyt0cy+WsOUfP3S3ur3nNrmeKt/u4t6awooFlxCQ+FSogY/QPTGB+r9XZ1N+fs6mrLD0XB2y+Dm8o9pc2GU6nxErWX00Zbz6abzDPTqo5V0SaFGzgwHBcx5n+Wmu28wTMnXK7teN5jaWnzbosivHfWF1LWZAVtBA5EA9TQGdvY0OW7rfbNY+rDm4jm7n795Fu2+eRY76lD9khKq9j5N1f6Dr67KGcr3TSeP1Ux55kI63yXRfX/F/8pQVTXDMCVfo6xcOb27UXoh1hxAbi2nDgAbB+VSvffIQMR/e9t7Kz/31unW842PLvHPGsr3zqB7rqqgK/PstPrWbDreWnqyr9F1wWTzMufNE02mRbdlZRnXmK2mdaYs41qjxXjvItOiUsOU/D/l3U1LKnuCzh6KFFEstJR6W8qpN5R9YrDBfdnZzOd5RwGXbntLFcy0OHs2PXW9SM/ebKR3f5VLtNdP0SDfQoj88fMkiVui6dIeRREGdbtMukMlza6SoiukOdSjgiA8+G2x8bx580qNxkW3m0wmdpL46hiNXRlGQ4tfoO3lNIh8GwuXUV9DafzEZtftZzM/w5CR4eIcb+fzfsqWvVQgLYrdWLSo89NNuUPRrToNtOTQULMzSjW+a8dcW1qtFodD/R0AJgBMmi7/1uXSd3k8zr1ut7NTUSTiOO5bU3ljP/8qydinpuzAz7lfHcDNS7199aWdsYYyijWUA4wy6q7/zn/QhqoZXrPudPDudboorzAbDBPStGqRH3JbXFTAFZFdzSU7l7HvxTsKb4tuDRzqaXZSf1OAqDWHKJL/Br1XNXsU4MwbnC6N7A6FeN6y0mSaJ6B7gcWywGqxWCpFkbtJFMVvy3XmTJ63BlVVZgC3fqWXPSdf893dHyqkvnAxUUslxRvKB7veKD+HPRMWCIEcztNewDnIz8lB1aR6Tp+bPjddV23yATfvJZ9YRLLiPanomdczCjpeX/Zi37Y8sIGfiIEcKj5AkcX68FyLJfMhBjCoGQBnuSdliIwM1WIxLbdasy7LzMwUx7JAusbL/DIZbR4cRBAseRi3LGXAmWZzRr4kCXfLsnxfVlZWwbCxOS6rUFH4e+BA95jN5nz0TTsdDFm26pIkXYX1HuQ46wOZlswrFy5cmDUmLUFnVZUuFxYKphQj6QDxRpvNsjIzM/1KrJuW7Df64azXANw/YDzYSvwADnyFpmkXZkBG01UW73Bo1/C87aeiyN8lqEJgEiqfiY0GsObfYexDaHdgvVyfz4fgmUA6tuTWRuuKUC2XINIWU7y58u3Oza6RzeRb9FvLs/wDJdYiyuEK9vp1vz/1aLpoFu9ziHZEbjZyZoBkTv758LzPwgUXRZt06sP5eShYSNRQNkitcJyU2GzGOxxONQEwJ5gfMRqN6RPpJwjWSzVNjjkcOsmy+BB77/AzGG2t3Y41dHXAZrMVa5q63sZZCN+vxdhVsiIOKKrE5pEo8e0A4AeSxD+uaVIMczBPYc/azebM7yffJZgAwhpVFTtVTSbWQK2JBqDfMhoX+Eedy/wImy9I0k8w7zpFFdvYu/BOEkQO63IvMSq227X1LpeDNIyFsyV0AVUznaOLFi0qSe5DXa7r2u91Vn9oEnsvmE2LiqKN7XeUxWAj6PEk9tCf0EtNjoV9MFb8+YQO0RssOhKrLyXWaOdiouYlrxCN8WhDbkbuygCfSzmKi7yaZ6/O67JxnlFWreoRt+YGSDkkuAJdvKRdODznjxu93Img+2RfvZf6a8qwbin1R4qvGYn+9HRdtyvHkIOxYR6bt+0EVf8DotQ3FmDBBFr7LQMYn008Qif1aAbA2MP6AXQzIsyk6/KLAJyNO8EAlBXpGKLhYxiUgcR+DyRBk9th6IOYRxjPDNXEDKkoCoe+jwAKG38ULSzL0h6AwwxOgmBrwLg5CYA588NYi615XJLFIbwjKinC7wBwP9ZmAPYhqi8HC/y1JIm1GNeNZwzcdkTf64LAr0PhBXKxVGLsCafTTnh+yGbLWoNn25SkbnE4698YUsIJttXDDgtnacdau/DuI9gv+/3UhADH6kuGRgDeVkbxxvJnDWeIL80paI0eLOSSnORRPL92Ca4nPIqbkgD7iNN9q3HeGomuPVsWGz8N5b83gJuwvrpKosYyGmgqu+v0VWVZuFbV+A5VE4k1u1Mjl8vZjo1tYhE5GqnqehgBY5QoqC2P9Ymi0a8AwBTAK9A1G568HgVawqux9lo4h8jzJg/G7QNIDPg4LPo860dzqaryPtZl/Qd43ign1xWvxphbQOkcA5NRMyK8OuVgRxmlDwPMAGO0K4pCKxwvHy1LkLn7UwATGCMRVVjLDTAOML1EgALnNqYKrelwmldSLNTPUg0bD+ZW4Ji/ZzpDxyYf6J7pKyv8ERURDlD3w3lKYZcMI/SGzncN0/1EANMIwC2l1FOT/4uJxllNynka7+lwSdnkFJ2s9XtUN7kQ1XZNP+rOdY/JowcOLM84tLX4v/sbvNRbW07sCNZdXXg3yrox7ACgLsbGNiEntsFYCcMwSgMo7YJg/gs2BrnpMnjuIIssgHhzYp7M3Yy+AfT122xigvqx+YQjwJif8io/XC/M0h1aCBHCAPpE0/gRPbFWC/rYuw5bLOn28XtG2pA5zryEF6xvMGMjqo6jsr8oEU0MYBkAK3Ify6mj1bJwCaNipisoNmFLAC8zUBQWwYq4Y5gFTCZRgK7vYywD8h11TE4WX08xRC+cXYUjfJ+Biz2zdPOM4SwFABefTAKMFsmjY6+5X5q01
Le5nnHKXnLIABjNDXDdANkuaY+PH/vu67mWthr/0f6Il6J1xUS4HYtWF1w32dqiaMllxYUoCXuxCWYgAK0ftNkSlD1X1eWPdIAB+tqUpG7bumSu0vewI1eKstcjmpkBDnMK500tPdvpcobcHhdzjoPKaP805NCtAHc8wAsB0A+dbmedw6ntR17sY06TAvgEouV7SYCtiQjWwCoAeLkhJUgTl7J+FvGizP9zcqymACgALAHI0WMSUkI+y916kl1O4nsTmCeiaHIYezuClngvnGopbLNCSUSvEGP6nTXAA5Hi1jiOSFFWCLWUAeiKFmr0pU18oaG5dcnZMQZg2XVCsSraGffaG88pHqj1UzTkoYFgLg1F8qm7Ju/Cz9NnIapSnuf+BUCxSGa5+cdJQLknWcQIIsDjMmAwYVvq2PFyauqcyQB2AWCPd2KAVTUJMKPFdDndiAIsjDUHWc5FxLTBmDsA7AeJFDEGYO5hzMOa6pkAy2IqgrnxAI85B2OtEvSfQB/TLQbH6MLvLtQI3djDCbQ29B3jOMtFkoT3JYs4VmBdf9YA924OrBpqLAN9BhJR3B8sONEfKc2dZPgsyaauccruBMBe1UNOwbEmUdmOk/6acx+heh/1hL1EAHkolN9Gu//MfzY6LVgw57scb0sYCYVXgh1gxAth7EEFFS4Ms4LRLX7HkM9uSk2b+0UBxhpbE05iVw7bbBkqDHkLixoYMg7wNyJFBJJUrqyGLmcJsH4po2jMZzn4fwXY4ZCzkX8PpSj6XVDxxcjxS5Gzz8ERb6nZbFzCvmNoJhz8R6qWADgOB7r3i0Rwcby+NDbYVEEdmxFpDcV0PFj2ZCJXTiC2LNs5umiPgppZBA94eHvF+DGnNl6gR3+Tv7+3Npu6wzmgZwcNVvsaWlurRkp+XuErQLNrM7i5SlXVmOpvhtFkXIENETO+yWpckQLYCuD2IZqGZBQsbKM4LhydP9/AnxXA2ZMDDDo9hDzplqQk7SedyDLi5ADmGYxhYJ5Q1STA0O9hNhcUHuX5MwHGuwh5OgGwpnEKxu5PVe0AeGS/C1Bf7EJjAIMtpBzDqDAnUobP05zIfQ9OEmfRDvbaMpzHmaSlpeVMejvWBjqORcprqaGQBhorqTNURJ81lnYfeGfxuZNFMW/jH3Mq+js6r6wZvzCtNEyPv+FZR3WI3kgOdTfnUDykDcVr/StOu6ZcgBusVuQ4UjThGLz9NVUV7seh/cfIsa+wIwaOUAzgI+lZ8wtGCw9hnZKMsEQTZWx0VOYC+PXJ45A4BmAYPcQoF54/FuBUDsb7D5nNogPRuSoVwYMA8jFUrmXsogQRdjRVRZ8BsKaPBVgWBFA0iixdHQFYUbI4gJio2NEOoUqvwr30xTgHZxqNC+9l67Bn0OUtGcUjuxjBe1+EXj2I6gfZHvBpBnXvdjh18vrcg8jJL3CciV2MPAZHP4bPVQx0s8Gc5nLJuqZZrHCYZF0x2Lz0snjEH+2JFFB7Qwkd35pPBxul/R/UirmGyWXORJ3xRv0+qtX7qc6Da8psGnzLR9FG597OxmVZowWVyCjnaRisl+VZKI0oGtuw2ZOgyJEzYMqLr0SR0QUAEwCDAa4bB/BLjO5wn32U0d8wwDBMhEUVov+wLNuG+6eh8NqOiALw0lENygDQAOYfG9aBGZ1V33ZQOADqcDr1LsWuXJyi7UfRWJE3IIqjd+ag5WU61nR7nGSzWYer3Zlut/6Cx+skl9uReMb0xHp/q6pgHhzv2HucLjt7nmjsN4BndliH+fNTheV3oOsh9DGWYs9HGnRoZsE8PyuLhxOuRC3wGIJobzLqPrxzTkdIr+ne4aHOhlyK1eVSV1imUzWOPVRfct7oxcfk8sEm98KudypWRrfn9Q6ECmiwpoKoLkDU4B7qaC66diIHAS3izll4muNsu2y85VOOt7Yh976PCvWXiIozqN8KShMlDlV24izYBfpynvZ4Npzmfmx+Jza7kdHiMOOgbzX6diH6Xz2tfxqjXgC/0+V0bvR6vVzqYuUSK2cJQofPoMsHvGh7XMWRC8D9AlETRCSVpFjoBhSD2wFymOfNi0euYAVLeWaWcauVt+wwmUx3jjq1SQCDYA3hj4LIHUM6eRM0viyZix2LRNF6I/TfjnaUpRgwWcRsNt3Ou/hxV6R8AY6Vv4St9kOfdui/FwXYo4qSLHR9PsNsi8VYyeoCTuLuHpn4SXWhv70l++OhFhdROJ9iYVxfvuEmqvX3UOPiJ7pDFYFjmy5bONbkNO3gtnz+WNh5SW8of/vA9lLqbi2j9roCikcqiYIBitf4n/+wxjHn8y7jGUCpNgttQodit1XIP/+VuDCQxN9gs/PHDZmeWmsGW+Ms+mew/qqqqgn6U7qMyrTh+ZOsOX7czImKz9Sas1PPp01uB6wxuUwbXQefZyunGssuiEf0g7TNR/FwJe6nSwFUAaIQd9R1eaeG6vQWqnG+TOGKNRSpeH4wbN/YtVXaF222A8xCigUr6CSOQ13NcIwWLw1uzq7r/DePyfAlBN49Jy1t/t/Ds9fC43eryfzbb+H55YYp+eLSF/F+d6je/jG1AGRcTvRFyqg/DLoF2NRUnDgrE4oxRDW+43ezh6jeiUjPAcj4vZX9/VfEd/zj3q9LrV9WH3Y+BT0m7mqRr1m+6QelrwUZzTZMyf9N2mqK8vsi7jpq8AGw4sR/TA6EAWxdBQ1uKaTB2hKiarRaAB/MA6XnAuQCAF7A/tvyWKxa/CntXgb6/PKSlTWfR9SuRUGyGYXQS8ilVyeob0q+nBxoVOf21jqv6goWBHuaC+NUnweA0UJ+gIpWhxbKTf7G2TkaXnKkO1LxdE+woNAwJX86svPl0kUDDYWlVF20mrYU7qK6nFMUZHTsjVMo+zB+b6FQ0S0na89VN2zYMPXfk98w+R8/ODLJ/SnkLAAAAABJRU5ErkJggg==
+description: Leverage Symantec Deepsight Intelligence
+detaileddescription: |-
+ Note: This is a beta Integration, which lets you implement and test pre-release software. Since the integration is beta, it might contain bugs. Updates to the integration during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the integration to help us identify issues, fix them, and continually improve.
+configuration:
+- display: Base URL
+ name: url
+ defaultvalue: https://deepsightapi.symantec.com
+ type: 0
+ required: true
+- display: API Key
+ name: apikey
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Trust any certificate (unsecure)
+ name: unsecure
+ defaultvalue: ""
+ type: 8
+ required: false
+script:
+ script: |
+    import os
+    import requests
+    import json
+
+ # disable insecure warnings
+ requests.packages.urllib3.disable_warnings()
+
+
+ ''' Global Variables '''
+    APIKEY = demisto.params()['apikey']
+    VERIFY = not demisto.params().get('unsecure', False)
+    BASE_URL = demisto.params().get('url', 'https://deepsightapi.symantec.com').rstrip('/') + '/v1/'
+ DOMAIN_ACCESS_SUFFIX = 'domains/{0}'
+ IP_ACCESS_SUFFIX = 'ips/{0}'
+ URL_ACCESS_SUFFIX = 'urls/{0}'
+ FILE_ACCESS_SUFFIX = 'files/{0}'
+ USAGE_LIMIT_STATUS_SUFFIX = 'application/usage_limit_status'
+ HEADERS = {'API-KEY': APIKEY, 'Accept': 'application/json'}
+
+
+    if not demisto.getParam('proxy'):
+        # Drop proxy settings inherited from the system so requests connects directly
+        # (os.environ.pop avoids a KeyError when a variable is not set).
+        for env_var in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
+            os.environ.pop(env_var, None)
+
+
+ ''' Helper Functions'''
+
+
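+    # Deepsight timestamps end in 'Z'; e.g. '2019-01-01T00:00:00Z' -> '2019-01-01T00:00:00'.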
+ def convert_deepsight_date_to_demisto_format(date_from_deepsight):
+ """Remove 'Z' from the end of the date and return it. If empty date - return unchanged.
+ """
+ if date_from_deepsight == "":
+ return ""
+ demisto_format_date = date_from_deepsight[:-1]
+ return demisto_format_date
+
+
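+    # Map Deepsight HTTP status codes to readable messages; any unlisted code falls through to the 503 text.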
+ def return_code_match(statusCode):
+ if statusCode == 200:
+ return "200: Success"
+ if statusCode == 400:
+ return "400: Invalid Input. Input was incorrect format."
+ if statusCode == 403:
+ return "403: Access Denied. API key successful, but license does not permit access to the requested resource."
+ if statusCode == 404:
+ return "404: Data not found"
+ if statusCode == 429:
+ return "429: License count usage has been exceeded."
+ return "503: Server is overloaded. Try again later"
+
+
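+    # DBot scoring for IPs/domains/URLs: reputation > 5 -> 3 (bad), any other reputation -> 2 (suspicious),
+    # no reputationValues in the response -> 0 (unknown).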
+ def calc_dbot_score(url_data_json):
+ dbotscore = 0
+ if 'reputationValues' in url_data_json:
+ reputation_values_data = url_data_json["reputationValues"]
+ reputation_score = reputation_values_data["reputation"]
+ if reputation_score > 5:
+ dbotscore = 3
+ else:
+ dbotscore = 2
+ return dbotscore
+
+
+ def create_behaviour_context(behaviour_data_json):
+ behavior_context = {
+ "Type" : behaviour_data_json.get("type", ""),
+ "Behaviour" : behaviour_data_json.get("behaviour", ""),
+ "Description" : behaviour_data_json.get("description", ""),
+ }
+ return behavior_context
+
+
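+    # Collect every entry under the response's 'behaviours' list into context-shaped dicts.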
+ def create_behaviours_context(data_json):
+ all_behaviours_contexts = []
+ list_of_behaviour_data_jsons = data_json.get("behaviours", [])
+ for behaviour_data_json in list_of_behaviour_data_jsons:
+ cur_behaviour_context = create_behaviour_context(behaviour_data_json)
+ all_behaviours_contexts.append(cur_behaviour_context)
+
+ return all_behaviours_contexts
+
+
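+    # Build the standard Malicious sub-context (Description + Vendor) attached when DBot scores an indicator 3.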
+ def create_malicious_data(data_from_api_json):
+ malicious_description = ""
+ if 'reputationValues' in data_from_api_json:
+            malicious_description = 'Reputation: {}'.format(data_from_api_json['reputationValues'].get("reputation", "Unknown"))
+ malicious_data = {
+ 'Description': malicious_description,
+ 'Vendor': 'Symantec Deepsight Intelligence'
+ }
+ return malicious_data
+
+
+ def create_reputation_values_context(data_from_api_json):
+ reputation_data = data_from_api_json.get("reputationValues", None)
+ reputation_values_context = {
+ "Reputation": "",
+ "Confidence": "",
+ "Hostility": ""
+ }
+
+ if reputation_data:
+ reputation_values_context["Reputation"] = reputation_data.get("reputation", "")
+ reputation_values_context["Confidence"] = reputation_data.get("confidence", "")
+ reputation_values_context["Hostility"] = reputation_data.get("hostility", "")
+
+ return reputation_values_context
+
+
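+    # Shape the response's 'matiReports' references into {ID, Title, Date} context entries.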
+ def return_mati_report_entry_context(data_from_api_json):
+ mati_reports = data_from_api_json.get('matiReports', [])
+ all_mati_reports_contexts = []
+
+ for report in mati_reports:
+ mati_report_context = {
+ "ID": report.get('id', ""),
+ "Title": report.get('title', ""),
+ "Date": report.get('date', "")
+ }
+ all_mati_reports_contexts.append(mati_report_context)
+ return all_mati_reports_contexts
+
+
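+    # Shallow merge that leaves both inputs untouched; values from dict_b win on key collisions.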
+ def merge_two_dicts(dict_a, dict_b):
+ merged_dict = dict_a.copy()
+ merged_dict.update(dict_b)
+ return merged_dict
+
+
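+    # Extend the generic domain context with Deepsight-specific fields (reputation, MATI reports, behaviours).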
+ def create_deepsight_domain_entry_context(generic_domain_entry_context, domain_data_json):
+ network_data = domain_data_json.get("network", None)
+ first_seen_date = domain_data_json.get("firstSeen", "")
+ first_seen_date = convert_deepsight_date_to_demisto_format(first_seen_date)
+ last_seen_date = domain_data_json.get("lastSeen", "")
+ last_seen_date = convert_deepsight_date_to_demisto_format(last_seen_date)
+ deepsight_domain_entry_context = {
+ 'Whitelisted': domain_data_json.get("whitelisted", ""),
+ 'FirstSeen': first_seen_date,
+ 'LastSeen': last_seen_date,
+ 'ReputationValues': create_reputation_values_context(domain_data_json),
+ 'Report': return_mati_report_entry_context(domain_data_json),
+ 'ProxyType': "",
+ 'Behaviour': create_behaviours_context(domain_data_json),
+ 'Domain': domain_data_json.get("domain", ""),
+ 'TargetCountries': domain_data_json.get("targetCountries", ""),
+ }
+
+ if network_data:
+            deepsight_domain_entry_context['ProxyType'] = network_data.get("proxyType", "")
+
+ deepsight_domain_entry_context = merge_two_dicts(deepsight_domain_entry_context, generic_domain_entry_context)
+ return deepsight_domain_entry_context
+
+
+ def create_registrant_context(whois_data_from_api):
+ registrant_data_dict = {
+ 'Name': whois_data_from_api.get('person', ""),
+ 'Email': whois_data_from_api.get('email', "")
+ }
+ return registrant_data_dict
+
+
+ def create_registrar_context(whois_data_from_api):
+ registrar_data_dict = {
+ 'Name': whois_data_from_api.get('registrar', ""),
+ }
+ return registrar_data_dict
+
+
+ def get_nameservers_data(whois_data_from_api):
+ string_of_name_servers = ""
+ array_of_name_servers = whois_data_from_api.get('nameServers', None)
+ if array_of_name_servers:
+ string_of_name_servers = " ".join(array_of_name_servers)
+ return string_of_name_servers
+
+
+ #generate whois data dict
+ def gen_whois_data_dict(domain_data_json):
+ whois_data_for_entry_context = {}
+ whois_data_from_api = domain_data_json.get("whois", None)
+
+ if whois_data_from_api:
+ created_date = whois_data_from_api.get('created', "")
+ created_date = convert_deepsight_date_to_demisto_format(created_date)
+ updated_date = whois_data_from_api.get('updated', "")
+ updated_date = convert_deepsight_date_to_demisto_format(updated_date)
+ expiration_date = whois_data_from_api.get('expires', "")
+ expiration_date = convert_deepsight_date_to_demisto_format(expiration_date)
+
+ whois_data_for_entry_context = {
+ "CreationDate": created_date,
+ "UpdatedDate": updated_date,
+ "ExpirationDate": expiration_date,
+ "NameServers": get_nameservers_data(whois_data_from_api),
+ "Registrant": create_registrant_context(whois_data_from_api),
+ "Registrar": create_registrar_context(whois_data_from_api)
+ }
+ return whois_data_for_entry_context
+
+
+ #generate domain data dict for context in Demisto standard
+ def create_generic_domain_entry_context(domain_data_json):
+ domain_entry_context = {
+ 'WHOIS': gen_whois_data_dict(domain_data_json),
+ 'Name': domain_data_json.get('domain', "")
+ }
+ return domain_entry_context
+
+
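+    # File reputation strings map to DBot scores: Clean/Trending Clean -> 1, Trending Bad -> 2,
+    # Malicious -> 3, anything else -> 0.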
+ def calc_file_dbot_score(file_data_json):
+ dbotscore = 0
+ if 'reputation' in file_data_json:
+ file_rep = file_data_json['reputation']
+ if file_rep == "Clean" or file_rep == "Trending Clean":
+ dbotscore = 1
+ elif file_rep == "Trending Bad":
+ dbotscore = 2
+ elif file_rep == "Malicious":
+ dbotscore = 3
+ return dbotscore
+
+
+ def create_deepsight_file_entry_context(generic_file_entry_context, file_data_json):
+ deepsight_file_entry_context = {
+ "Report": return_mati_report_entry_context(file_data_json),
+ }
+ deepsight_file_entry_context = merge_two_dicts(deepsight_file_entry_context, generic_file_entry_context)
+ return deepsight_file_entry_context
+
+
+ def get_generic_file_entry_context(file_data_json):
+ entry_context = {
+ 'MD5': file_data_json.get('MD5', ""),
+ 'SHA256': file_data_json.get('SHA256', "")
+ }
+ return entry_context
+
+
+ def gen_file_malicious_data(file_data_json, dbotscore):
+ malicious_data = None
+ if dbotscore == 3:
+            malicious_description = 'Reputation of file: {}'.format(file_data_json.get('reputation', 'Not found'))
+ malicious_data = {
+ 'Description': malicious_description,
+ 'Vendor': 'Symantec Deepsight Intelligence'
+ }
+ return malicious_data
+
+
+ def create_deepsight_ip_entry_context(generic_entry_context, ip_data_json):
+ network_data = ip_data_json.get("network", None)
+ first_seen_date = ip_data_json.get("firstSeen", "")
+ first_seen_date = convert_deepsight_date_to_demisto_format(first_seen_date)
+ last_seen_date = ip_data_json.get("lastSeen", "")
+ last_seen_date = convert_deepsight_date_to_demisto_format(last_seen_date)
+
+ deepsight_ip_entry_context = {
+ 'Whitelisted': ip_data_json.get("whitelisted", ""),
+            'FirstSeen': first_seen_date,
+            'LastSeen': last_seen_date,
+ 'ReputationValues': create_reputation_values_context(ip_data_json),
+ 'Report': return_mati_report_entry_context(ip_data_json),
+ 'ProxyType': "",
+ 'Behaviour': create_behaviours_context(ip_data_json),
+ 'Domain': ip_data_json.get("domain", ""),
+ 'TargetCountries': ip_data_json.get("targetCountries", ""),
+ }
+
+ if network_data:
+ deepsight_ip_entry_context['ProxyType'] = network_data.get("proxyType", "")
+
+ deepsight_ip_entry_context = merge_two_dicts(deepsight_ip_entry_context, generic_entry_context)
+ return deepsight_ip_entry_context
+
+
+ def gen_geo_data_dict(data_from_api):
+ geo_data_dict = {}
+ geo_data_from_api = data_from_api.get('geolocation', None)
+ if geo_data_from_api:
+ keys = geo_data_from_api.keys()
+ geo_data_dict = {
+ 'Country': geo_data_from_api.get("country", ""),
+ 'City': geo_data_from_api.get("city", "")
+ }
+ if 'latitude' in keys and 'longtitude' in keys:
+ geo_data_dict['Location'] = "{0}, {1}".format(geo_data_from_api['latitude'], geo_data_from_api['longtitude'])
+ return geo_data_dict
+
+
+ def create_generic_ip_entry_context(ip_data_json, ip):
+ """Returns an entry context dict with the relevant values for the general !ip context standards.
+ """
+ entry_context = {
+ 'Geo': gen_geo_data_dict(ip_data_json),
+ 'Address': ip,
+ 'ASN': "",
+ }
+
+ network_data = ip_data_json.get("network", None)
+ if network_data:
+ entry_context['ASN'] = network_data.get("asn", "")
+
+ return entry_context
+
+
+ ''' Commands '''
+
+
+ # Build Markdown contextual data for human readable data in war room output
+ def build_md(jsondata, searchItem):
+ mdOutput = "## Symantec Deepsight Intelligence: " + str(searchItem).upper()
+ for key in jsondata.keys():
+            if isinstance(jsondata[key], dict):
+ mdOutput += "\n\n__" + key.upper() + "__"
+ for k in jsondata[key]:
+ if str(k) != 'uri':
+ mdOutput += "\n- __" + (str(k)).upper() + "__: " + str(jsondata[key][k])
+ elif isinstance(jsondata[key], list):
+ mdOutput += "\n\n__" + key.upper() + "__"
+ for i in range(len(jsondata[key])):
+ if isinstance(jsondata[key][i], dict):
+ for x in jsondata[key][i]:
+ if str(x) != 'uri':
+ mdOutput += "\n- __" + (str(x)).upper() + "__: " + str(jsondata[key][i][x])
+ else:
+ mdOutput += "\n- __" + (str(i)).upper() + "__: " + str(jsondata[key][i])
+ else:
+ mdOutput += "\n\n__" + key.upper() + "__: " + str(jsondata[key])
+ return mdOutput
+
+
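+    # GET /v1/domains/{domain} and return the decoded JSON body.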
+ def get_domain_data(domain):
+ request_url = BASE_URL + DOMAIN_ACCESS_SUFFIX.format(domain)
+ dom_req = requests.get(request_url, headers=HEADERS, verify=VERIFY)
+ domain_data_json = json.loads(dom_req.content)
+ return domain_data_json
+
+
+ def get_domain_data_command():
+ domain = demisto.args()['domain']
+ domain_data_json = get_domain_data(domain)
+
+ dbotscore = calc_dbot_score(domain_data_json)
+ dbotscore_context = {
+ 'Indicator' : domain,
+ 'Score' : dbotscore,
+ 'Type' : 'domain',
+ 'Vendor' : 'Symantec Deepsight Intelligence'
+ }
+
+ generic_domain_entry_context = create_generic_domain_entry_context(domain_data_json)
+ if dbotscore == 3:
+ generic_domain_entry_context["Malicious"] = create_malicious_data(domain_data_json)
+
+ deepsight_domain_entry_context = create_deepsight_domain_entry_context(generic_domain_entry_context, domain_data_json)
+ md = build_md(domain_data_json, domain)
+ entry_context = {
+ 'DBotScore': dbotscore_context,
+ 'Domain(val.Domain && val.Domain == obj.Domain)':generic_domain_entry_context,
+ 'Deepsight.Domain(val.Domain && val.Domain == obj.Domain)':deepsight_domain_entry_context
+ }
+ demisto.results({
+ 'Type' : entryTypes['note'],
+ 'Contents' : domain_data_json,
+ 'ContentsFormat' : formats['json'],
+            'HumanReadable' : md,
+ 'ReadableContentsFormat' : formats['markdown'],
+ 'EntryContext' : entry_context
+ })
+
+
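+    # GET /v1/ips/{ip} and return the decoded JSON body.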
+ def get_ip_data(ip):
+ request_url = BASE_URL + IP_ACCESS_SUFFIX.format(ip)
+ ip_data_from_api = requests.get(request_url, headers=HEADERS, verify=VERIFY)
+ ip_data_json = json.loads(ip_data_from_api.content)
+ return ip_data_json
+
+
+ def get_ip_data_command():
+ ip = demisto.args()['ip']
+ ip_data_json = get_ip_data(ip)
+
+ dbotscore = calc_dbot_score(ip_data_json)
+ dbotscore_context = {
+ 'Indicator' : ip,
+ 'Score' : dbotscore,
+ 'Type' : 'ip',
+ 'Vendor' : 'Symantec Deepsight Intelligence'
+ }
+ generic_ip_entry_context = create_generic_ip_entry_context(ip_data_json, ip)
+
+ if dbotscore == 3:
+ generic_ip_entry_context['Malicious'] = create_malicious_data(ip_data_json)
+
+ deepsight_ip_entry_context = create_deepsight_ip_entry_context(generic_ip_entry_context, ip_data_json)
+
+ md = build_md(ip_data_json, ip)
+ entry_context = {
+ 'DBotScore': dbotscore_context,
+ 'IP(val.Address && val.Address == obj.Address)': generic_ip_entry_context,
+ 'Deepsight.IP(val.Address && val.Address == obj.Address)': deepsight_ip_entry_context,
+ }
+
+ demisto.results({
+ 'Type' : entryTypes['note'],
+ 'Contents' : ip_data_json,
+ 'ContentsFormat' : formats['json'],
+            'HumanReadable' : md,
+ 'ReadableContentsFormat' : formats['markdown'],
+ 'EntryContext' : entry_context
+ })
+
+
+ def get_file_data(file_hash):
+ request_url = BASE_URL + FILE_ACCESS_SUFFIX.format(file_hash)
+ file_data_from_api = requests.get(request_url, headers=HEADERS, verify=VERIFY)
+ file_data_json = json.loads(file_data_from_api.content)
+ return file_data_json
+
+
+ # Return data based on a sha256 or md5 hash search
+ def get_file_data_command():
+ filehash = demisto.args()['file']
+ file_data_json = get_file_data(filehash)
+
+ dbotscore = calc_file_dbot_score(file_data_json)
+ dbotscore_context = {
+ 'Indicator' : filehash,
+ 'Type' : 'hash',
+ 'Vendor' : 'Symantec Deepsight Intelligence',
+ 'Score' : dbotscore
+ }
+
+ generic_file_entry_context = get_generic_file_entry_context(file_data_json)
+
+ if dbotscore == 3:
+            generic_file_entry_context['Malicious'] = gen_file_malicious_data(file_data_json, dbotscore)
+
+ deepsight_file_entry_context = create_deepsight_file_entry_context(generic_file_entry_context, file_data_json)
+
+ md = tableToMarkdown(filehash, file_data_json, headers=None, headerTransform=None, removeNull=False, metadata=None)
+ entry_context = {
+ 'DBotScore': dbotscore_context,
+ 'File(val.File && val.File == obj.File)': generic_file_entry_context,
+ 'Deepsight.File(val.File && val.File == obj.File)': deepsight_file_entry_context
+ }
+ demisto.results({
+ 'Type' : entryTypes['note'],
+ 'Contents' : file_data_json,
+ 'ContentsFormat' : formats['json'],
+            'HumanReadable' : md,
+ 'ReadableContentsFormat' : formats['markdown'],
+ 'EntryContext' : entry_context
+ })
+
+
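+    # GET /v1/urls/{url} and return the decoded JSON body.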
+ def get_url_data(url):
+ url_to_send_request_to = BASE_URL + URL_ACCESS_SUFFIX.format(url)
+ url_data_from_request = requests.get(url_to_send_request_to, headers=HEADERS, verify=VERIFY)
+ url_data_json = json.loads(url_data_from_request.content)
+ return url_data_json
+
+
+    # Search for intel based on a URL
+    # if behaviour has a value other than SPAM => malicious (possible values are Attack, Bot, CnC, Fraud, Malware, Phish_host or SPAM)
+ # else if behaviour is SPAM => suspicious
+ # if no behaviour, then unknown
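+    # A minimal sketch of that behaviour-based scoring, assuming the response carries a
+    # 'behaviours' list as it does for IPs and domains (not wired in; this command currently
+    # reuses the reputation-based calc_dbot_score above):
+    #
+    #     def calc_url_behaviour_dbot_score(url_data_json):
+    #         behaviours = {b.get('behaviour') for b in url_data_json.get('behaviours', [])}
+    #         if not behaviours:
+    #             return 0  # unknown
+    #         if behaviours == {'SPAM'}:
+    #             return 2  # suspicious
+    #         return 3  # malicious: Attack, Bot, CnC, Fraud, Malware or Phish_host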
+ def get_url_data_command():
+ url = demisto.args()['url']
+ url_data_json = get_url_data(url)
+
+ md = build_md(url_data_json, url)
+ dbotscore = calc_dbot_score(url_data_json)
+
+ generic_url_entry_context = {
+ 'Data': url
+ }
+
+ if dbotscore == 3:
+            generic_url_entry_context["Malicious"] = create_malicious_data(url_data_json)
+
+ dbotscore_context = {
+ 'Indicator' : url,
+ 'Score' : dbotscore,
+ 'Type' : 'url',
+ 'Vendor' : 'Symantec Deepsight Intelligence'
+ }
+
+ entry_context = {
+ 'DBotScore': dbotscore_context,
+ 'URL(val.Data && val.Data == obj.Data)': generic_url_entry_context,
+ 'Deepsight.URL(val.Data && val.Data == obj.Data)': generic_url_entry_context
+ }
+
+ demisto.results({
+ 'Type' : entryTypes['note'],
+ 'Contents' : url_data_json,
+ 'ContentsFormat' : formats['json'],
+            'HumanReadable' : md,
+ 'ReadableContentsFormat' : formats['markdown'],
+ 'EntryContext' : entry_context
+ })
+
+
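+    # GET /v1/application/usage_limit_status; the body echoes the X-License-Limit-* counters as JSON.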
+ def get_request_status():
+ request_url = BASE_URL + USAGE_LIMIT_STATUS_SUFFIX
+ status_data = requests.get(request_url, headers=HEADERS, verify=VERIFY)
+ status_data_json = json.loads(status_data.content)
+ return status_data_json
+
+ # get results of request status - determine current limit usage of the api license
+ def get_request_status_command():
+ status_data_json = get_request_status()
+ md = build_md(status_data_json, "Requests Limit Status")
+
+ entry_context = {
+ 'Deepsight.RequestLimitPerDay': status_data_json.get("X-License-Limit-Limit", ""),
+ 'Deepsight.RequestsRemaining': status_data_json.get("X-License-Limit-Remaining", ""),
+ 'Deepsight.SecondsToLimitReset': status_data_json.get("X-License-Limit-Reset", "")
+ }
+
+ demisto.results({
+ 'Type' : entryTypes['note'],
+ 'Contents' : status_data_json,
+ 'ContentsFormat' : formats['json'],
+ 'ReadableContentsFormat' : formats['markdown'],
+ 'HumanReadable' : md,
+ 'EntryContext' : entry_context
+ })
+
+
+    def test_module():
+        try:
+            result = get_ip_data("5.79.86.16")
+            result["ip"]  # sanity check: an error payload has no 'ip' field
+        except Exception as e:
+            raise Exception("Test failed: API request did not succeed: {}".format(e))
+        demisto.results('ok')
+
+ LOG('command is %s' % (demisto.command(), ))
+ try:
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() == 'domain':
+ get_domain_data_command()
+ elif demisto.command() == 'ip':
+ get_ip_data_command()
+ elif demisto.command() == 'file':
+ get_file_data_command()
+ elif demisto.command() == 'url':
+ get_url_data_command()
+ elif demisto.command() == 'deepsight-get-request-status':
+            get_request_status_command()
+ except Exception as e:
+ return_error(str(e))
+ type: python
+ commands:
+ - name: ip
+ arguments:
+ - name: ip
+ required: true
+ default: true
+      description: The IP address to enrich.
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator value
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator's type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: The indicator's vendor
+ type: string
+ - contextPath: DBotScore.Score
+ description: The indicator's score
+ type: number
+ - contextPath: IP.Address
+ description: The IP Address
+ type: string
+ - contextPath: Deepsight.IP.Address
+ description: The IP Address
+ type: string
+ - contextPath: IP.ASN
+ description: The IP's ASN
+ type: number
+ - contextPath: Deepsight.IP.ASN
+ description: The IP's ASN
+ type: number
+ - contextPath: IP.Geo.Country
+ description: The IP's country
+ type: string
+ - contextPath: Deepsight.IP.Geo.Country
+ description: The IP's country
+ type: string
+ - contextPath: IP.Geo.City
+ description: The IP's city
+ type: string
+ - contextPath: Deepsight.IP.Geo.City
+ description: The IP's city
+ type: string
+ - contextPath: IP.Geo.Location
+      description: The IP's latitude and longitude
+ type: string
+ - contextPath: Deepsight.IP.Geo.Location
+      description: The IP's latitude and longitude
+ type: string
+ - contextPath: Deepsight.IP.Whitelisted
+ description: Is the IP whitelisted
+ type: boolean
+ - contextPath: Deepsight.IP.FirstSeen
+ description: When was the IP first seen
+ type: date
+ - contextPath: Deepsight.IP.LastSeen
+ description: When was the IP last seen
+ type: date
+ - contextPath: Deepsight.IP.ProxyType
+ description: The type of the IP's proxy
+ type: string
+    - contextPath: Deepsight.IP.Behaviour
+      description: Observed behaviours of the IP
+ - contextPath: Deepsight.IP.Domain
+ description: The domain associated with the IP address
+ type: string
+ - contextPath: Deepsight.IP.TargetCountries
+ description: Three-letter ISO3 code representing a country associated with the
+ IP address's activity
+ - contextPath: Deepsight.IP.Report.ID
+ description: The MATI report identification number
+ type: string
+ - contextPath: Deepsight.IP.Report.Title
+ description: The MATI report title
+ type: string
+ - contextPath: Deepsight.IP.Report.Date
+ description: The MATI report date
+ type: date
+ - contextPath: Deepsight.IP.ReputationValues.Reputation
+ description: A value that combines an item’s exhibited behaviors, confidence
+ in the listing, hostility of the item, consecutive listings, and listing ratio;
+ values range from 1 to 10, with higher values representing a worse reputation
+ type: number
+ - contextPath: Deepsight.IP.ReputationValues.Confidence
+ description: Confidence in whether the listing is correct or a false positive;
+ values range from 1 to 5, with higher values representing more confidence in
+ the listing
+ type: number
+ - contextPath: Deepsight.IP.ReputationValues.Hostility
+ description: The hostility level of the IP address; values range from 1 to 5,
+ with higher values representing a more hostile item
+ type: number
+ - contextPath: IP.Malicious.Description
+ description: Description of the malicious IP
+ type: string
+ - contextPath: Deepsight.IP.Malicious.Description
+ description: Description of the malicious IP
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: Vendor of the data about the malicious IP
+ type: string
+ - contextPath: Deepsight.IP.Malicious.Vendor
+ description: Vendor of the data about the malicious IP
+ type: string
+ description: Enrich an IP address from Symantec Deepsight Intelligence
+ - name: domain
+ arguments:
+ - name: domain
+ required: true
+ default: true
+ description: Domain to enrich
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator value
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator's type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: The indicator's vendor
+ type: string
+ - contextPath: DBotScore.Score
+ description: The indicator's score
+ type: number
+ - contextPath: Domain.Name
+ description: The domain itself
+ type: string
+ - contextPath: Deepsight.Domain.Name
+ description: The domain itself
+ type: string
+ - contextPath: Domain.WHOIS.CreationDate
+ description: The creation date of the domain
+ type: date
+ - contextPath: Deepsight.Domain.WHOIS.CreationDate
+ description: The creation date of the domain
+ type: date
+ - contextPath: Domain.WHOIS.UpdatedDate
+ description: The last update date of the domain
+ type: date
+ - contextPath: Deepsight.Domain.WHOIS.UpdatedDate
+ description: The last update date of the domain
+ type: date
+ - contextPath: Domain.WHOIS.ExpirationDate
+ description: The expiration date of the domain
+ type: date
+ - contextPath: Deepsight.Domain.WHOIS.ExpirationDate
+ description: The expiration date of the domain
+ type: date
+ - contextPath: Domain.WHOIS.NameServers
+ description: An array of name servers associated with the registered person
+ in the WHOIS record
+ type: string
+ - contextPath: Deepsight.Domain.WHOIS.NameServers
+ description: An array of name servers associated with the registered person
+ in the WHOIS record
+ type: string
+ - contextPath: Domain.WHOIS.Registrar.Name
+ description: The name of the domain's registrar
+ type: string
+ - contextPath: Deepsight.Domain.WHOIS.Registrar.Name
+ description: The name of the domain's registrar
+ type: string
+ - contextPath: Domain.WHOIS.Registrant.Name
+ description: The name of the domain's registrant
+ type: string
+ - contextPath: Deepsight.Domain.WHOIS.Registrant.Name
+ description: The name of the domain's registrant
+ type: string
+ - contextPath: Domain.WHOIS.Registrant.Email
+ description: The email of the domain's registrant
+ type: string
+ - contextPath: Deepsight.Domain.WHOIS.Registrant.Email
+ description: The email of the domain's registrant
+ type: string
+ - contextPath: Deepsight.Domain.Whitelisted
+ description: Indicates whether the domain is whitelisted
+ type: boolean
+ - contextPath: Deepsight.Domain.FirstSeen
+ description: The time and date that the domain first appeared in the database
+ type: date
+ - contextPath: Deepsight.Domain.LastSeen
+ description: The time and date that the domain last appeared in the database
+ type: date
+ - contextPath: Deepsight.Domain.ReputationValues.Reputation
+ description: A value that combines an item’s exhibited behaviors, confidence
+ in the listing, hostility of the item, consecutive listings, and listing ratio;
+ values range from 1 to 10, with higher values representing a worse reputation
+ type: number
+ - contextPath: Deepsight.Domain.ReputationValues.Confidence
+ description: Confidence in whether the listing is correct or a false positive;
+ values range from 1 to 5, with higher values representing more confidence in
+ the listing
+ type: number
+ - contextPath: Deepsight.Domain.ReputationValues.Hostility
+ description: The hostility level of the domain; values range from 1 to 5,
+ with higher values representing a more hostile item
+ type: string
+ - contextPath: Deepsight.Domain.Domain
+ description: The domain itself
+ type: string
+ - contextPath: Deepsight.Domain.TargetCountries
+ description: Three-letter ISO3 code representing a country associated with the
+ domain's activity
+ type: unknown
+ - contextPath: Deepsight.Domain.Report.ID
+ description: The MATI report identification number
+ type: number
+ - contextPath: Deepsight.Domain.Report.Title
+ description: The MATI report title
+ type: string
+ - contextPath: Deepsight.Domain.Report.Date
+ description: The MATI report creation date
+ type: date
+ - contextPath: Deepsight.Domain.Behaviour.Type
+ description: A subcategory of behaviour
+ type: string
+ - contextPath: Deepsight.Domain.Behaviour.Behaviour
+ description: 'Indicates that the domain has been observed as part of a specific
+ activity: Attack, Bot, CnC, Fraud, Malware, Phish_host, or Spam'
+ type: string
+ - contextPath: Deepsight.Domain.Behaviour.Description
+ description: A description of the activity observed
+ - contextPath: Domain.Malicious.Description
+ description: Description of the malicious domain
+ type: string
+ - contextPath: Deepsight.Domain.Malicious.Description
+ description: Description of the malicious domain
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: Vendor of the malicious domain
+ type: string
+ - contextPath: Deepsight.Domain.Malicious.Vendor
+ description: Vendor of the malicious domain
+ type: string
+ description: Enrich a domain from Symantec Deepsight Intelligence
+ - name: file
+ arguments:
+ - name: file
+ required: true
+ default: true
+ description: SHA256 or MD5 hash only
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator value
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator's type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: The indicator's vendor
+ type: string
+ - contextPath: DBotScore.Score
+ description: The indicator's score
+ type: number
+ - contextPath: File.MD5
+ description: The MD5 hash string
+ type: string
+ - contextPath: Deepsight.File.MD5
+ description: The MD5 hash string
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash string
+ type: string
+ - contextPath: Deepsight.File.SHA256
+ description: The SHA256 hash string
+ type: string
+ - contextPath: Deepsight.File.Report.ID
+ description: The MATI report ID
+ type: string
+ - contextPath: Deepsight.File.Report.Title
+ description: The MATI report title
+ type: string
+ - contextPath: Deepsight.File.Report.Date
+ description: The MATI report date
+ type: date
+ - contextPath: File.Malicious.Description
+ description: The description of the malicious file, including its reputation
+ (if found in Deepsight data)
+ type: string
+ - contextPath: Deepsight.File.Malicious.Description
+ description: The description of the malicious file, including its reputation
+ (if found in Deepsight data)
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: The vendor of the data about the malicious file
+ type: string
+ - contextPath: Deepsight.File.Malicious.Vendor
+ description: The vendor of the data about the malicious file
+ type: string
+ description: Enrich a file from Symantec Deepsight Intelligence
+ - name: url
+ arguments:
+ - name: url
+ required: true
+ default: true
+ description: URL to enrich; should include the HTTP(S):// prefix
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator value
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator's type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: The indicator's vendor
+ type: string
+ - contextPath: DBotScore.Score
+ description: The indicator's score
+ type: number
+ - contextPath: URL.Data
+ description: The URL itself
+ type: string
+ - contextPath: Deepsight.URL.Data
+ description: The URL itself
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: Description of the malicious URL
+ type: string
+ - contextPath: Deepsight.URL.Malicious.Description
+ description: Description of the malicious URL
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: The vendor of the data about the malicious URL
+ type: string
+ - contextPath: Deepsight.URL.Malicious.Vendor
+ description: The vendor of the data about the malicious URL
+ type: string
+ description: Enrich a URL from Symantec Deepsight Intelligence
+ - name: deepsight-get-request-status
+ arguments: []
+ description: Get the API usage status for the current license period
+ dockerimage: demisto/python:1.3-alpine
+ runonce: false
+beta: true
+tests:
+- Symantec Deepsight Test
\ No newline at end of file
diff --git a/Beta_Integrations/integration-Symantec_Deepsight_CHANGELOG.md b/Beta_Integrations/integration-Symantec_Deepsight_CHANGELOG.md
new file mode 100644
index 000000000000..5c9b25d3cdba
--- /dev/null
+++ b/Beta_Integrations/integration-Symantec_Deepsight_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+#### New Integration
+Leverage Symantec Deepsight Intelligence
\ No newline at end of file
diff --git a/Beta_Integrations/integration-ThreatQ.yml b/Beta_Integrations/integration-ThreatQ.yml
index 1919fcb9c110..4e970a4b1965 100644
--- a/Beta_Integrations/integration-ThreatQ.yml
+++ b/Beta_Integrations/integration-ThreatQ.yml
@@ -1,8 +1,8 @@
commonfields:
- id: ThreatQ
+ id: ThreatQ_Beta
version: -1
-name: ThreatQ
-display: ThreatQ
+name: ThreatQ_Beta
+display: ThreatQ (Beta)
category: Data Enrichment & Threat Intelligence
image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAOEAAADhCAMAAAAJbSJIAAAAb1BMVEX///8/g8QvfME5gMM1fsIyfcLs8vkresD1+fzw9fpmms7S4PD6/P4necBAhcWHrtfB1ep9qNWjwOB0otKsxuNQjchvoNHY5fKrxeKbut22zeZJiceUttzV4vHj7PZZk8zF2Oy60egLcb2OsdlZkMqLtOAXAAAL90lEQVR4nNVd6bqquBKVJBBBAREBBQXsfd7/GVtwYiYFmVy/7tm3v5hFhporm40g7O2kTM/XIHaKLDMe8LKscOLgekzLxN6L+lkpsPP7JSiwS5FJ8ANGA49/EhNRFxfB5Z7bqqcKxzZJfcdzEWnzGsCDKaJe4afJVvWk2ZGc44ygWW5tnohk8TlRPXUGbA+BYZoQcg2apmkEB62X0kodipax+7BE1Ekt1USGYaexuZLem6QZa0hyF3hc6L1JesFONaUmtueI8KP3Ikmisy5HMvfdhVfLDEfT9XPV5B7Yha4Iei+SNFS9WXcOx9M3zNFRyVE4v5ojUsYxd6h4fjVH6qg4j5Yv8Pz1OLq+bAG5P2IijV8Fgo9SDa1dhKTyq4AiecdxG0i4YPrAKJCkAhy46y/MHMldAj87dBXxq+CGwt0BZSb3humCZAexBP+U7dA3MPoTyM+KTMX8KpiRMNmo7oppAxNBO/UiXwaOgV4E8NvH+hB8iP+Yu2i0IrV3aBeE92HcIb0IVoeRrxJ3+CsMOu/Algru9421OwYRoqY+NNGJM8VNFUfanf4cz6WI6LCg9Mif4hPbvEwvQfE4C0jtkgqRGk3s890p+GcoFCP0KphiDauMOusoUfeRQ3GTtFcRH/9ReRRFb9Qn4taioXJzN0TLz8/5F3fdNHFvrRmpPuuf2NOJ/fD9DV0BQqOHfesk4rD6WynyliWPhQveFJFgo7hG2VxEXNR/yzNxFMl5s7Gzz79keOH+Wvv0+Tere8fyQ71Nzp+zTmT4i4PGuXNff7M8YRTJY/jtl3Akw894/VJ03w6xRJjXw60iGX+fD0hiCQw3d+NzgX8CKSdRgrG+XZKvYxNJEYtW4HYZbgpB+5TUUtD7/oHKuFArb2q9Vek38ycR5D/GQTX6paFXmJKiU+dKCrqNHwvELCJ26u/XOAQ4ksNwYz8uVbfx75sg9c2rR29sU8MU6Spu4fD69RcEnURcDx40vx8pZVG0Wvfa8T0JvkxJHTBNm9ovztRkcSZPhvhy5SoccS3jrdaYJFTC0H7++sOk4rpfnwz37TFdGfHFPl4KsrXJeUqOJ8PuVU2UJIrV5jH+t2mLr7V4nsPNsb31n2JSNmpepPKn7L3h2S7B8y7d7DpqIVKRXXRAn18+8TP8s+fgeeds40hB9n+teOD6h7fZ4GwX4KnTDIyIpLhtOnC/5+OP13WK/dfg/7ojYgXpxdFX89/x2qbm+TW402VI/NGJCMNj4V43X8O3shK0fA3u93aFKz/N7+B+v2vvky/Ex3zpS6D3EZWJo3t7/8/+J1+G7D3gua8LUgUSI/lc4Uc+Qp98JHvaH1DFIn4xMKElQOl7wPvA3aVE7PNm+BEJhwGGSheRzy5tUBhiqOQkvsHnpiHnz4CDDLEaQ7EGHxsRf0XeIMOmP1Myci52Pm54uIcPNlag2Dyxv/CIgKPyO+Jp+Ju56mqmdhxiblljvBGr2jyPzkA47HDtTiXN2V+HP5g0//Ag4pUSAzWN3DGfupSo6Rj26yKnbRt3TJVX47F5I18Vc/Nal8jo1/KUFtquCSvStDnSuMGJ0rFfl4J48T7FRcvVlI//h1LCwqOwlhJsycJNLx2rCVkBxREMGT0s6F4gE+Mo3qabcGHfhU54acKNrtYQfuzTRUKxF3qZ+lBUcbX7ecF92guf7adsFSnZYFOAG1I4666KNTWGWqG/mUknwmjg/+1rYrfpjaCCVhPX8aOIivuptzwDIYkR2+kFU3W/m+2oIeVWmQBdh8dQDHvaKaLShHpi2ANhYKPejWV7B+JsYIRoiqBqtabCkPKGzVdBs9Xij40BFcWaiZv3bibp6M0QIyN4RwDa9yQesvdGNsEHRPVBfKgkFL9BCHWzIP3qLK01RINZFnPhSNWK2wN7P3SKB5zQv6RlWyNrhujp8J0xF6tT53JjQkMSjBRVzB1D5arpDPyPuEQjhTHzBoqndTe/T2rKGEGGzE51vm8GfHJHx0ub5kPmwxeUJng7QscJJvMGGJFTFbUMr+lPpKgzROqUmxcTODw3KZkoamLIeniV72iJ5/TNCSO2m/A1TFHbFrC3ShhidJv4T5iS5FxtxUWlk+Ns0h/I5HOlU99IJXZmVekzaRmwubKU+2rG8DiFdKa4IGXyuJoyyi8X4E6xO2cWsOWPERXJmPPYYuLNWXY2W9K4ptZF6IazxvmJzaOsMu9kHOl/DFuL0duqpf2UeAwB6pw1sqM0nj+MnKmzHnNhgzc/lmRYbAJs2o3Y3KaC5wvGlk3L2jFHrqjgCYsCewKgOz+YjrBZ+f0sQ0DtzY8yBIQef5MhpED8NxlC6sN/8i7dQio1tZOHLDhC0jf102kYAKqd0lAvncUBkomjpW0xB1BxmJ724TRgPW80tfEnAatJ0dRPMwUblkmlq69tAsDCKW39peMAlvRr6/MexR2Y0ahv3GIM0EIGfWNPI5hLEeoR1Dh+OAxoTqrOMeBBlND8d63j+EMAV/NrnYsxgBu4hEHrfJoBwHu/6Z0T1YMFzu7/NdsJ3oTp1ywLKD8t8kshSOAlKBrkCEOwoB2D+jxvEOCtbTTI1QcB3p5Ifb0FDPD6aOU1M0AsOIeqpwwEYwbNFz9nWLTbsTLg93w0UIGouoZ0AWD2768ppRVgjSV/TGWrMVkT24PievxlgLRi+DWF5glIbPQXNymsdeb63iYqPCDjfT76m3StuE/C/0oec4YB0Bx0ZY+hW0ixCncye5fedX2iyrh+VL3TR0UGLObI0xrD6VC8Wq7hf9KjHjmzZrq8X9s9+r7JLb/t8kA71pFNutDJtk8z2pS50h09zI6MZb7u7TnrKPednk3CMdPnozGxJTkm9iXri1vJnaWZvfoL+pfaV2PwCEh9boHZqw+3myyfjl1iSOJRZF5CaB/h3HfHh8byWtvN9hdYuIS3YLrjKBb30nkHYk7hLTbnxsWeHB18xywLAUtYFpThu2E5lhizk4a9r35ZTJy/FqgjfhmPrKYhqwTb35nW7wVM/gSfRob+Ce+5MM1kf2qon2xfDgcim76yN3VlUpb35wzIr/52ZnYVRjJgvmYY3pmxL2hpn19MaXxOBOiq7B0IWYR9sOrdKWxS4lzvOVeHOnvuOpN3Zru63zbGiEbhZccrl2XHPiO2N7sOXB4PwwR5zpGHFEnYn05mNQSgUaxxlibJjmvFyEQf3S6Y386zeT7GbLrhKvMDkI4IeP+w5PqyJqbxciky3dyyDcgblnwfY3xorg7gx5uAvNAOe4eU94vaGP1b4A2wC8BmAvqAl/UxnpwANU5AXeA+a7k1AX19HJxvzA
BkHAEi0gpAYXv4m84XARQN4l1Yv3QKe3t2ybvcoZCnik2Dyc5KHNgHXvS2+spXKMYnY/pze9UOALZpBRwt0oOBVWKA+biT62hfWH0LH0BvmTdEPald2cujHO3LsO95crjFGgW0BgcyKeM6tLGsiwf/rGuyu9a8JTIHYvSCmNYfXrBt6KoiyqNAigbyWh8/CcHnrwJdGcW8iKRouPHbgNzeHeD9+cJIi2EArkIpYnKsVDnrmC3jN9G/lR1iV9EgRZ4E8OvzBR4EhVPEBKRgt7B+iz5x5PnqO0+svWS+4PhgOE8gjrX2oGYbskC4pnHf2D16koB5P45nRXotI+Efet6HOh1GNN2BdyEESw0IeEmJLg48XknlAMz3jmnCivj6UZfBFJr9cV2uf3ACRiDHLxxlpvZOJZnwYiZ7XQR1JdxQRoH9gahaRkwkJQpugwWJDBz4IaYOw3ywi+SLfxRJfcN4fzbkblWCj7Lz5q2pnEzewK6vohAtX+g7gvOTkTc3jJ0jQY/DyFH5iPhO9Do+1k8lvwq3ZX5cVn6han4Vct8Voq1i0/V1aYK0P0fcOWISnbWqNt8FHkdFByNPaLrqMtj30ORCEiMzTjWtw7bS2F2cnPqmRx1d6T2xPQSGufBQYtM0goNWh28EySnMCAJ5WDFGJIvPP9RPZpukfuFRRGZ5YkwQ9Qo/TX5h8Tqw8/slKLD7IEqq93DbxDAxEXVxEVzu+W91xutiv03K9HwNYudfllWVyl6WFU4cXI9pmdgSTaL/ARRFqdYCUJUeAAAAAElFTkSuQmCC
description: ThreatQ Integration
@@ -10,7 +10,9 @@ detaileddescription: "You must have a ThreatQ user account to retrieve an api to
The api token is required for all api requests. \nThreatQ provides indicator scoring
weighting for indicators and their contextual information, such as sources, attributes,
and indicator types, as they are added to ThreatQ.\nFor detailed information on
- ThreatQ scoring please refer to https://helpcenter.threatq.com/\n"
+ ThreatQ scoring please refer to https://helpcenter.threatq.com/\n
+
+ Note: This is a beta Integration, which lets you implement and test pre-release software. Since the integration is beta, it might contain bugs. Updates to the integration during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the integration to help us identify issues, fix them, and continually improve."
configuration:
- display: TQ API URL E.g. https://192.168.1.136/api
name: apiUrl
@@ -22,7 +24,7 @@ configuration:
defaultvalue: ""
type: 0
required: true
-- display: 'Email '
+- display: Email
name: credentials
defaultvalue: ""
type: 9
@@ -70,15 +72,53 @@ script:
PROXY = load_proxy()
+ def get_errors_string_from_bad_request(bad_request_results):
+ errors_list = bad_request_results.json().get("errors", [])
+ errors_string = ""
+ error_num = 1
+ if errors_list:
+ errors_string = "Errors from server: \n"
+ for error in errors_list:
+ errors_string += "Error #{0}: {1}\n".format(error_num, error)
+ error_num += 1
+ return errors_string
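+ # For example, an assumed error payload {"errors": ["Invalid credentials"]}
+ # yields: "Errors from server: \nError #1: Invalid credentials\n"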
+
# ThreatQ auth based on OAuth 2.0 credential grant method
def tq_access():
data = {'grant_type': 'password','email': EMAIL, 'password': PASSWORD, 'client_id': CLIENT_ID}
access_token_response = requests.post(API_TOKEN_URL, data=data, verify=False, allow_redirects=False)
+
tokens = json.loads(access_token_response.text)
+ if int(access_token_response.status_code) >= 400:
+ errors_string = get_errors_string_from_bad_request(access_token_response)
+ error_message = "Authentication failed, unable to retrieve an access token.\n {}".format(errors_string)
+ return_error(error_message)
+
+ new_integration_context = {
+ "access_token": tokens['access_token'],
+ "access_token_creation_time": int(time.time()) -1, # decrementing one second to be on the safe side
+ "access_token_expires_in": tokens['expires_in']
+ }
+ demisto.setIntegrationContext(new_integration_context)
token = tokens['access_token']
return token
- ACCESS_TOKEN = tq_access()
+
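+ # Token caching sketch: tq_access() persists the token and its creation time
+ # in the integration context, which survives across command runs. The two
+ # helpers below reuse that cached token until it expires, avoiding a fresh
+ # OAuth round trip on every command.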
+ def access_token_not_expired():
+ epoch_time_now = time.time()
+ epoch_time_when_token_granted = demisto.getIntegrationContext().get("access_token_creation_time")
+ token_time_until_expiration = demisto.getIntegrationContext().get("access_token_expires_in")
+ return int(epoch_time_now) - int(epoch_time_when_token_granted) < int(token_time_until_expiration)
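+ # e.g. a token granted at epoch second 1000 with expires_in=3600 is treated
+ # as valid while now - 1000 < 3600, i.e. until epoch second 4600.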
+
+
+ def get_access_token():
+ existing_access_token = demisto.getIntegrationContext().get("access_token")
+ if existing_access_token and access_token_not_expired():
+ return existing_access_token
+ else:
+ new_access_token = tq_access()
+ return new_access_token
+
# remove html tags from ThreatQ description field
def cleanhtml(raw_html):
@@ -86,14 +126,17 @@ script:
cleantext = re.sub(cleanr, '', raw_html)
return cleantext
+
''' Catch-all function for all commands '''
def query_tq(keyword):
'''
This function handles all the querying of threatq
'''
tq_url = API_URL + "/search?query=" + keyword
- api_call_headers = {'Authorization': 'Bearer ' + ACCESS_TOKEN}
+ access_token = get_access_token()
+ api_call_headers = {'Authorization': 'Bearer ' + access_token}
api_call_response = requests.get(tq_url, headers=api_call_headers, verify=False)
+
response = json.loads(api_call_response.text)
# Find ThreatQ object type and object id based on keyword search results
@@ -125,7 +168,8 @@ script:
tq_url = API_URL + "/" + tq_obj_type + "/" + tq_obj_id
# get ThreatQ response
- api_call_headers = {'Authorization': 'Bearer ' + ACCESS_TOKEN}
+ access_token = get_access_token()
+ api_call_headers = {'Authorization': 'Bearer ' + access_token}
api_call_response = requests.get(tq_url, headers=api_call_headers, verify=False)
response = json.loads(api_call_response.text)
@@ -197,7 +241,8 @@ script:
tq_url = API_URL + "/indicators/?value=" + indicator + "&with=score,attributes,sources"
# get ThreatQ response on indicators attributes
- api_call_headers = {'Authorization': 'Bearer ' + ACCESS_TOKEN}
+ access_token = get_access_token()
+ api_call_headers = {'Authorization': 'Bearer ' + access_token}
api_call_response = requests.get(tq_url, headers=api_call_headers, verify=False)
try:
response = json.loads(api_call_response.text)
@@ -276,7 +321,7 @@ script:
'ReadableContentsFormat': formats['markdown'],
'HumanReadable': md,
'EntryContext': {'ThreatQ(val.name && val.name == obj.name)': createContext(tq_attr_context, removeNull=True),
- 'DbotScore(val.Vendor && val.Indicator && val.Vendor == obj.Vendor && val.Indicator == obj.Indicator)' :
+ 'DBotScore(val.Vendor && val.Indicator && val.Vendor == obj.Vendor && val.Indicator == obj.Indicator)' :
createContext(dbot_score, removeNull=True),
}
}
@@ -285,10 +330,6 @@ script:
'dbotscore': dbot_score }
- # You can use demisto.args()[argName] to get a specific arg. args are strings.
- # You can use demisto.params()[paramName] to get a specific params.
-
-
''' EXECUTION CODE '''
LOG('command is %s' % (demisto.command(), ))
try:
@@ -390,6 +431,6 @@ script:
description: DBotScore Malicious status
description: Run file check against ThreatQ
runonce: false
-releaseNotes: "ThreatQ beta integration"
+beta: true
tests:
-- No test
\ No newline at end of file
+- No test
diff --git a/Beta_Integrations/integration-ThreatQ_CHANGELOG.md b/Beta_Integrations/integration-ThreatQ_CHANGELOG.md
new file mode 100644
index 000000000000..37854940c8ed
--- /dev/null
+++ b/Beta_Integrations/integration-ThreatQ_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+#### New Integration
+ThreatQ Integration
\ No newline at end of file
diff --git a/Beta_Integrations/integration-TrendMicroDDA.yml b/Beta_Integrations/integration-TrendMicroDDA.yml
index f70eac139331..c49913bd5e4d 100644
--- a/Beta_Integrations/integration-TrendMicroDDA.yml
+++ b/Beta_Integrations/integration-TrendMicroDDA.yml
@@ -1,8 +1,8 @@
commonfields:
- id: Trend Micro Deep Discovery Analyzer
+ id: Trend Micro Deep Discovery Analyzer Beta
version: -1
-name: Trend Micro Deep Discovery Analyzer
-display: Trend Micro Deep Discovery Analyzer
+name: Trend Micro Deep Discovery Analyzer Beta
+display: Trend Micro Deep Discovery Analyzer (Beta)
category: Forensics & Malware Analysis
image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHgAAAAyCAMAAACgee/qAAACplBMVEUAAAADBQUAAAACBQUAAAAAAAACBQYAAAADBQUDBQUEBQUDBQUDBAUCBQUAAAACBQYDBAUDBgYEBgYCBQUDBQUCBQUCAwMAAAAAAAACBgYBAwMAAAAAAAAAAAADBAUCBgYDBQYCBQUCBQUCBAQCAwUAAgMEBAUDBQUAAwQDBAUCBQUCBQUDBAUCAwMABAQABATcFxzYFx4AAADTOk4ASiX/X1/eHicCBAYCBQUCBQUDAwUDBQYCBAQCBAQCBAQDAwUABATaGCH52dvhHyjdGyMCBAUDBAYCBQUEBAXeHyfeHSQDBAXbGSMAAADUDx7mpqbcGyPdGyPcGyQEBAXcFR7dICYDBgYDBgYCBQXdGiIEBgbgOEDaGCIDAwbjQ0naGR8ABATaLzbfV1feICngISvfJy7eJC3dFx/hNTzcGyPhMznbGyIEBgbdGyICBATgPEPgOULbGiDdMDndSk4AAwT97+/kVFoAAADeISnaEBn////eGyTcGyL0xsn//v7fGyTkSlDfKDH////eKzPeNDvxsrXxsLL//f3aGyP64+XdJi7bGyICBAYEBgb////cHyfdOD/bMTnbMDbdGCbcQknvur7f///cHCQDBQb////aDxjcGCHkHSfbFR3gHCTdHCThHSbbExvbERrgNDvdHyjeGyPvnaDtjpL//f364eLcFh7dIyz99fXqfYDnZ23hQkngOUDjHSb+9/f97/D77O376er2y83yq67peHzlW2H++fn64+T639/1xsf0wMLys7XvoqXul5vtkpbrgYXncHTmX2XeKDDlHSbhEBn75eb30dP1yMr0vb/zuLvwpanrhYrkV13jT1biSU/iR07gPUTfLzfhFyD/3+D53N70wsT4r7Hwe4DncXbnbHHkWmDsWWDmJy/ZCxBTAkaxAAAAl3RSTlMAXwOqJgndGfbwjPz6zivgu7GKavjUTi4j2kcVDwXr17aonXxRM8KQOr9uZ2NMQD08JQwFBALp5NCmlYB/dnNXQhj+/vjo5s/LqWxcMh8NCuze0cW6srCuoJmHelpaSUU+IRf49dnOrqufmpSLgXloXk5MPDgtKQf69fTy7u3l5ODSzMK6t7eloJSGhoWEdXVyY1VKQj8QLmikJQAABh9JREFUWMPtl1VXG1EQgCchJIRAUiGQ4qWlRVukRkuBuru7u7u7u3fvJpsQEpxSoE7d3d3ln3RWswu0T4Vzeg7fA3fnLocvM3d2lkA99dQdU5ctPjxu3KKJKwdBHbJy3LZ23RiGecB0mzZj7sS6ci+bPZ1x5FE82Q6m25ZFdaGeOnd1RTalwMHMnAi1zfEZD5Ram91ltT4s3psKtcri6Q6FNcfmfll6O//WnR+zB0ItcqSbQ66lyu/m37h+4eplmqY/7KjFg146TZFv+b3Tr9y5djvlLi1C9a62UEtMaSf3lpSXo9TGpW59dwHNB6F2aDuCoWTkYqkl7OXnaHrtCuDxzlSJxABk8VeBGmBRNxdvBWEUrVJNSMFVp1Jp+BA3ggcoxUed1J+x3sSUdwrFDvEjIk0BIgmPTySr8m5PBPZj1BLXzrhmERInhIg+TQMeUmc6/iJ2fUPxxUmCONbf32wgen//XukADQmJwKsehGhDUKwlAR38kV7x+JuNUNNDB9CKkMYY+hISbuqJeyajrLOc1F+wV6L48yzgUavVGh/SABdgxX5BeKWLJSSREzdJUbOAICYd5eLMlJBWwwhp0EYS96mQJCUl1cQ5p1Gcv2YKiFhQjAsvNgLSjJDmvBidiCT2CZKJE3HR9SSGaGlUii1tc5UWvcipdsYFKL70sC+IaKqKQ1oQEs2JTdFGRKPmxHrcHgmTJXEgIKPZUGCS4M3JzS88U1ZcrdSfaPpM2fs+NYpJ79jYFthUEd6cmBhYTKGc2Cc4nBhaLVeKsThjQeAQw2nt3x9jE1mreovLTtH0BZtjaGpNYgF9IHBig54lghcH6NDSIkspXkhIIxAYw+CcyK18VkjTp97+qlpqaz5baWve4C41iQ1RYxv1JH6JwIu1Fh2LmhdbQk341PkpxC3Zh0xghNPmzu9Ks5wvKCjNVRa67CxNfyyxZXdPrvGMLVz15gviJmEggmINxBNELlb7ExIMAsOdrjuPztJI4dWiolt2pfgp7n91UdlU/xrFODkG6InewosjQCn27i0To1CNH1Ib6hFTJdRTGrlG5ebalIW+g9vXce+PYr5TfXlx+IH0TkimKIZEmTgt3XcYRhNAZFYFlVNZyCZ82lr1gCuxs869xSLIS40tAwg3MllxFkGJfGQ24A8T99SRRDEySXsVSMxjKPt1GumaW3Vavj4vfhxZc+nSvJoBR2evpgPYVDt6eeEACcOFJwM3+3ml6XAJwpA9VBV3o6NKB2GrQKAvY39zmUYKrFXyffNI2nXi4/RvCE6IMyYAS1IeDifk8mu70vsS86Vv8pvMHPhH6CbHpfNP1MAh76/RSFGJ8p+uS6dw87mN77aK8SAQ1ob/IaLGSwlvnbcnUIeFhSmcIWxDh2qMwdHCBPl5hUYeK9J9d5FG8ovRqzjiaLO/BTLMaSDiG6kGkTiTjzZeiuZHaLUdMqS7MbH68JEWkJHkPsf1lkebY7tbhDtnpFeGp9KtCekHw0hvEPEySX86k3hlepFAMRwZHh/fkMQI0YDwiH7xPcwp4KHtdm58nCpz8UW22u5xtb9yT+y27O79JXGAKSokvL1H3NTzLow1rYKUsYliOMoM0Bw/Jk8GiWbD5oqU+YH5xG1Fit2lF7jwmdtFCTC7QRL7RJlVPRv61yAOM0WCnKYB5g76cI0QRekxWQtOajl7aI5zN24X3HyC6SNFd4ulN6Sj3RSP2C+hR6/Ijh1qEKsj2MGi0UmHoPfVBsSIUUufEHyscYzJObmVVnL+9n3PQ53tGA8eMYkxk4yWSrGYlF8MJBoSZKUOJgvFKJA0a+MdZQgCBSs2yrVXC9xWm8fLLAAPk4lxNFke5XkdjPI0l6WJn9nQIlQMI7UADfyMQtQmijQxkXSowokNZ3hp4ZUbp++jVuad11b+LKpCjSrvmEAQaSVrF12cb4LkhSy8Y0kwemZWp2atoRpJm7/kX3x+69Kr+64c1Eo4HPvQW5skD3+IX0tddrTK0203Hmqb1L6bHjgoOXkVeX2SoQ7osmCIk3HmZfOt7GTW9UmCOmLgkjFD11MOpzOv+5ARfZOhLknt0n/SkmNJyQOhnnr+T34DnwO1l/mBsm0AAAAASUVORK5CYII=
description: Deep Discovery Analyzer is a turnkey appliance that uses virtual images
@@ -14,8 +14,10 @@ detaileddescription: |-
3) Paste this value into the ip_address parameter in the integration
4) Copy the API key to the clipboard from Help --> About
5) Paste this value into the apiKey parameter in the integration
+
+ Note: This is a beta Integration, which lets you implement and test pre-release software. Since the integration is beta, it might contain bugs. Updates to the integration during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the integration to help us identify issues, fix them, and continually improve.
configuration:
-- display: DDA Server and Port (e.g https://192.168.10.23:20002)
+- display: DDA Server and Port (e.g. https://192.168.0.1:<port>)
name: server
defaultvalue: ""
type: 0
@@ -76,7 +78,7 @@ script:
''' loads the host url from the configuration or strips the server url to get valid host url '''
host = demisto.params()['ip_address']
if host:
- #strip https://www. of the server address
+ #strip https://www. off the server address //disable-secrets-detection
url = re.compile(r"https?://(www\.)?")
host = url.sub('', demisto.params()['server']).strip().strip('/')
#strip :{port} of the server address
@@ -814,7 +816,7 @@ script:
predefined:
- "0"
- "1"
- description: '0 (not encrypted) or 1 (Encrypted with password “virusâ€) '
+ description: '0 (not encrypted) or 1 (Encrypted with password "virus")'
defaultValue: "0"
- name: archive_name
description: A name for the retrieved archive
@@ -916,7 +918,7 @@ script:
description: Report file type e.g. "PE"
type: string
- contextPath: InfoFile.Size
- description: 'Report file size '
+ description: Report file size
type: number
- contextPath: File.Malicious.Vendor
description: For malicious files, the vendor that made the decision
@@ -1036,7 +1038,6 @@ script:
defaultValue: "1"
description: Retrieves a brief XML report of a given submission
runonce: false
-releaseNotes: "member to also add tasks to the generic detonation playbooks."
+beta: true
tests:
-- detonate_file_-_generic_test
-- detonate_url_-_generic_test
\ No newline at end of file
+- No Test - run "Test Playbook TrendMicroDDA" manually
diff --git a/Beta_Integrations/integration-TrendMicroDDA_CHANGELOG.md b/Beta_Integrations/integration-TrendMicroDDA_CHANGELOG.md
new file mode 100644
index 000000000000..5566b3b8118f
--- /dev/null
+++ b/Beta_Integrations/integration-TrendMicroDDA_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+#### New Integration
+Deep Discovery Analyzer is a turnkey appliance that uses virtual images of endpoint configurations to analyze and detect targeted attacks.
\ No newline at end of file
diff --git a/Beta_Integrations/integration-cisco-ise.yml b/Beta_Integrations/integration-cisco-ise.yml
deleted file mode 100644
index 555978b5d8ed..000000000000
--- a/Beta_Integrations/integration-cisco-ise.yml
+++ /dev/null
@@ -1,624 +0,0 @@
-commonfields:
- id: Cisco pxGrid ISE
- version: -1
-name: Cisco pxGrid ISE
-display: Cisco pxGrid ISE
-category: Network Security
-image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEkAAAAyCAYAAAAQlvbeAAAGx0lEQVR42u1Za2xURRTeOzPbYikPeWgRWx7y8JXwQ/3HDw2JxldMBDXGBIhiFF+AihDFhJDwA8KjElhLd/c+lkIRQhtRjPKIKIokCshD9s7c3S0FSgVFnilQlPW77ez2lr3VLs0uNt6TnGxzz8ycc78558w3tz5PPPHEE0888cQTTzz5T4lini5jYb6IBPaPy4c/svz4WKYfWqys/mNYtwGJ1tRNZypPUt38nFRxJdf+WDCxnmlmkhqHZ3cbkFgoGmEaT0KPMNUsyKkvwyRMN0WrP3N99wFJM8MSpDhTo7kFSecEfn6R/tZ4IHU3kJSTF4uZJl6hmrgvG5AUPT6QRaJTybLfR2bVnCPx4SitqUr5uZJsQKJqbAyLiFd9FaJP/pvzuvgc9Bs7sDqqix6dBQnAaraNquL7rPxFxJbWwyBe3VmQaFXMz3TBW2xBMT//JRU2y2VgJxB8n86DxDdK28Gs/Kn8R9mcv+o0SCrviWdHbZtf4ytzWPOiyL3v8MUysEa8QO8sQKqVtn3u68aKO/C3S877otMgabwIz+olSAF3+mAWdY2kaYeegoMGqlobSDDuzzVIROfT7MwkRnRmzkHad4rSDXVrWMg8Tqri47uSRZp0ftFviJJcg0Q1sd22Uc36IdcgsUqzH1rGOTlP78JRzivlImcKInkASRWbZVPfnnOQNN4P+pucF+wC3Td1uUhTgSEGXWNbJm2nsCPZgLRJNuBoZibx1kwK8Z0uIO2xbSiPrVk0bhukxhaQVB5yAemsbUPFXH8m0W3HJiNLLlJd7FAisUKnjXzZ+BzAuUxXWZuU2nraWZBomM9kutlMVyYqM/yFjsxj4egVUlu/IMMWjn4Ef1fop/UfdhYkpcok1OA1eN5MDGtyO9ta4YdtK2yX6MfRKV3jQ4Y1mEbMm9zvaKKUVNcVuJDJVC9L4KUz7dW8jCw9QdyzN1rmSl6D+xVaaWbYALgN0iHpb23GYbDGAhhWqTtB5YXYtNt9+RbZgCta05gnsFsst5fpuMJUgUwy0csso/t8Tyo/PZyFogHyTeOj+fBHIsfHoa8ElKW7R/k88cST/7XQtYlCNOk7QBVGM8O6uf2lNKrYXAXaE3a/+5dFPhDz74IOw0lT9M+XXDGAaeJOrDWE6ryg43Exmx+NxPgRYO89bhg4RKvrhWa5iBnCZrR/Qq9CT0Ofd1CBUnkLP+nX+evtj/QDJZi/ATZ5NcB8jFUCP5W5nJYPws82x9hmALXbt1IUt4uppqE/C/EVsDdA/5JxHcXYxcRI9M7v6bWsaQBTrT0AwQ74Wp3hAGmYDDQJkGal56+wCA2LzY45l6EXoEkltHd0uxff1jCJhfnVDD8gub6KeN/0mocvDGZVsShT3WICPdCtvT7twoD8lZgki5LSJ1hEvI/fl1AKS/FsggOkoTajlSDNbCsxMVTudJIaYiPIXn/7mgBONUZZuCOdHUq4bjiuP02Ol11HVfNlAPQmSrgSmZQGiYVFbRogne/F70yMm50mmlDMieSnzEL8Njhskrt5hKjWIPeRHYMEBl8GptySYQDsAPrGw3RVLIPV09XxeekX1OILOy59MSoFOjZrD10dK3LY+lKNR+U6l4gaL805SHD0UDrwsFjig2QFkhQarF/NdEdp6LyRRsSsa75+bkyVI0Ac2pEfZNf4tmwTUzLt/N10zFXWI7kHaV18XNqhYS24XpCUJacLmWHOha3e0WeSxDj8hmNDPktlAPpShxlAdfOZdEy6mOTydWF62l6dyP0tgFYfHtLSaFUbpFiMVB/rkz1IzheI+QH2E/JkRPnxXY4MWJgCj0ZiczqMqfLg3ehHV5GNSYD5HQ1y0lZuvDD1mQXaTLTYsPw07gCvcZweaJJiMvQx/D0Lp8jj/wYSCe4rpmHzAezwLZhXzHTrXvSSRll236bHreT3sJB5JU0RjNhyNPonMW8C+th8X6h1g8iGOoWtsbY4Trav4fsFgD8R6+1s62uiJn8UYMeZUpxocTh2pwDtQWqWIL2Xeu7XrFFy7AXoqfRcvCSJ1L/Yrilv+nUGelPShQI0+SqsNgpQe34EaMkxaXfTuBL4uTS/TLs6VoLgNTg/4wjkCnSyA6Qh0n4RIM1oA0nY/On8NS/RQMOJd9x7jvU01tqbOsGkWuBJvdoBqteVoT99kuJcUs9CI0S3Snw3QgDSrfZ/cZnewnIrqc4/AIca4biW9EZfmQZ9G/b7HeD1sjMOWg4NQZeghJ4l4Xp/x9wsOhbj5kKDyKJyGrEm+gIxlkEHDA6gxFsYVwENUFW8hrgG+TzxxBNPPPHEE0888eRGyN8R98jGg6pa+wAAAABJRU5ErkJggg==
-description: Next-generation secure network access
-configuration:
-- display: Cisco ISE server URL. For example, https://123.123.123.65
- name: serverURL
- defaultvalue: ""
- type: 0
- required: true
-- display: Server port. For example, 9060
- name: serverPort
- defaultvalue: ""
- type: 0
- required: true
-- display: Cisco ISE username
- name: credentials
- defaultvalue: ""
- type: 9
- required: true
-- display: Use system proxy settings
- name: proxy
- defaultvalue: "true"
- type: 8
- required: false
-- display: Trust any certificate (unsecure)
- name: insecure
- defaultvalue: "false"
- type: 8
- required: false
-script:
- script: |-
- ''' IMPORTS '''
- import requests
- from requests.auth import HTTPBasicAuth
- from requests.packages.urllib3.exceptions import InsecureRequestWarning
-
- if not demisto.params()['proxy']:
- del os.environ['HTTP_PROXY']
- del os.environ['HTTPS_PROXY']
- del os.environ['http_proxy']
- del os.environ['https_proxy']
-
- # disable insecure warnings
- requests.packages.urllib3.disable_warnings()
-
- ''' GLOBAL VARS '''
-
- BASE_URL=re.sub("/[\/]+$/", "", demisto.params().get('serverURL'))
- SERVER_PORT=demisto.params().get('serverPort')
- SERVER_URL=BASE_URL + ':' + SERVER_PORT
-
- USERNAME=demisto.params().get('credentials').get('identifier')
- PASSWORD=demisto.params().get('credentials').get('password')
-
- USE_SSL = not demisto.params().get('insecure', False)
-
- ISE=requests.session()
- ISE.auth = (USERNAME, PASSWORD)
- ISE.verify = USE_SSL
- ISE.disable_warnings = True
- ISE.timeout = 5
-
- ''' HELPER FUNCTIONS '''
-
- def is_mac(mac):
- """
- Test for valid mac address
- :param mac: MAC address in the form of AA:BB:CC:00:11:22
- :return: True/False
- """
-
- if re.search(r'([0-9A-F]{2}[:]){5}([0-9A-F]){2}', mac.upper()) is not None:
- return True
- else:
- return False
-
- def http_request(method, url_suffix, params_dict, data=None, headers={}):
-
- url = SERVER_URL + url_suffix
- LOG('running %s request with url=%s' % (method, url))
- try:
- if method == 'GET':
- ISE.headers.update(headers)
- result = ISE.get(url, auth=HTTPBasicAuth(USERNAME, PASSWORD), verify=USE_SSL)
- if result.status_code == 200:
- return result
- elif result.status_code == 404:
- pass
- else:
- raise Exception("Got status code: " + str(result.status_code) + " For the request to the " + url_suffix + " endpoint. " + str(result.text))
- elif method == 'PUT':
- ISE.headers.update({'Accept':'application/json', 'Content-Type':'application/json'})
- result = ISE.put(url, auth=HTTPBasicAuth(USERNAME, PASSWORD), verify=USE_SSL, data=json.dumps(data))
- return result
-
- except Exception, e:
- LOG(e)
- raise Exception(str(e))
-
- def translate_group_id(group_id):
-
- """
- Translates group ID to group name
- """
- headers = {
- 'Accept':'application/json',
- }
- api_endpoint = "/ers/config/identitygroup/1"
- identity_group = http_request('GET', api_endpoint, {}, {}, headers).json()['IdentityGroup']
- return identity_group['name']
-
- ''' COMMANDS FUNCTIONS '''
-
- def get_endpoint_id(mac_address=None, group_name=None):
- """
- Returns endpoint id by specific mac address
- """
- headers = None
- if mac_address is not None:
- headers = {
- 'Accept':'application/json',
- 'Connection': 'keep_alive'
- }
- api_endpoint = "/ers/config/endpoint?filter=mac.EQ.{}".format(mac_address)
- if group_name is not None:
- api_endpoint = "/ers/config/endpointgroup?filter=name.EQ.{}".format(group_name)
- headers = {
- "Content-Type": "application/vnd.com.cisco.ise.identity.endpoint.1.0+xml; charset=utf-8",
- 'Accept':'application/json'
- }
- return json.loads(http_request('GET', api_endpoint, {}, '', headers).text)
-
- def get_endpoint_id_command():
- """
- corresponds to 'cisco-ise-get-endpoint-id' command. Returns endpoint's id
- """
- mac_address = demisto.args().get('macAddress')
-
- if not is_mac(mac_address):
- return_error('Given MAC address is invalid')
-
- endpoint_data = get_endpoint_id(mac_address)
- endpoint_id = endpoint_data.get('SearchResult', {}).get('resources', [])[0].get('id', None)
-
- ec = {
- 'Endpoint(val.ID === obj.ID)': {
- 'ID': endpoint_id,
- 'MACAddress': mac_address
- }
- }
-
- return {
- 'Type': entryTypes['note'],
- 'Contents': endpoint_id,
- 'ContentsFormat': formats['json'],
- 'ReadableContentsFormat': formats['markdown'],
- 'HumanReadable': "The endpoint's ID is: " + endpoint_id,
- 'EntryContext': ec
- }
-
- def get_endpoint_details(endpoint_id):
- """
- Gets endpoint details by specific id
- """
- headers = {
- 'Accept':'application/json',
- 'Connection': 'keep_alive'
- }
- api_endpoint = '/ers/config/endpoint/{}'.format(endpoint_id)
- response = http_request('GET', api_endpoint, {}, {}, headers)
- if response:
- return json.loads(response.text)
- else:
- return_error('Endpoint was not found.')
-
- def get_endpoint_details_command():
- """
- corresponds to 'cisco-ise-get-endpoint-details' command. Returns information about a specific endpoint
- """
-
- endpoint_id = demisto.args().get('endpointID')
- endpoint_mac_address = demisto.args().get('macAddress')
-
- if endpoint_mac_address and not is_mac(endpoint_mac_address):
- return_error('Given MAC address is invalid')
-
- if not endpoint_id and not endpoint_mac_address:
- return_error('Either endpoint ID or MAC address should be provided')
-
- if endpoint_mac_address and not endpoint_id:
- endpoint_id = get_endpoint_id(endpoint_mac_address).get('SearchResult', {}).get('resources', [])[0].get('id', None)
-
- endpoint_data = get_endpoint_details(endpoint_id)
-
- endpoint_details = endpoint_data.get('ERSEndPoint')
-
- if endpoint_details:
- custom_attributes = endpoint_details.get('customAttributes')
- if custom_attributes:
- custom_attributes = custom_attributes.get('customAttributes')
- portal_user = endpoint_details.get('portalUser')
- description = endpoint_details.get('description')
- group_name = translate_group_id(endpoint_details['groupId'])
- hr = {
- 'ID': endpoint_details['id'],
- 'MACAddress': endpoint_details['mac'],
- 'Group': group_name,
- 'Link': '[{0}]({0})'.format(endpoint_details['link'].get('href')),
- 'CustomAttributes': custom_attributes,
- 'StaticGroupAssignment': endpoint_details['staticGroupAssignment'],
- 'StaticProfileAssignment': endpoint_details['staticProfileAssignment']
- }
- detailed_ec = {
- 'ID': endpoint_details['id'],
- 'MACAddress': endpoint_details['mac'],
- 'Group': group_name,
- 'StaticGroupAssignment': endpoint_details['staticGroupAssignment'],
- 'StaticProfileAssignment': endpoint_details['staticProfileAssignment']
- }
- if custom_attributes:
- detailed_ec['CustomAttributes'] = {}
- for attribute in custom_attributes:
- detailed_ec['CustomAttributes'][attribute] = custom_attributes[attribute]
- if portal_user:
- hr['User'] = portal_user
- detailed_ec['User'] = portal_user
- if description:
- hr['Description'] = description
- detailed_ec['Description'] = description
- ec = {
- 'Endpoint(val.ID === obj.ID)': {
- 'ID': endpoint_details['id'],
- 'MACAddress': endpoint_details['mac']
- },
- 'CiscoISE.Endpoint(val.ID === obj.ID)': detailed_ec
- }
-
- title = 'Endpoint details - ' + (endpoint_id or endpoint_mac_address)
- return {
- 'Type': entryTypes['note'],
- 'Contents': endpoint_details,
- 'ContentsFormat': formats['json'],
- 'ReadableContentsFormat': formats['markdown'],
- 'HumanReadable': tableToMarkdown(title, hr, removeNull=True),
- 'EntryContext': ec
- }
- else:
- return 'No results found'
-
- def reauthenticate_endpoint(mac_address, psn_address):
- """
- Reauthenticates an endpoint
- """
- api_endpoint = "/admin/API/mnt/CoA/Reauth/{}/{}/1".format(psn_address, mac_address)
- response = http_request('GET', api_endpoint, {}, {}, {})
- return response
-
- def get_psn_for_mac(mac_address):
- """
- Retrieves psn for an endpoint
- """
- api_endpoint = "/admin/API/mnt/AuthStatus/MACAddress/{}/86400/0/0".format(mac_address)
- response = http_request('GET', api_endpoint, {}, {}, {})
- if response:
- return response
- else:
- return_error('Could not reauthenticate the endpoint')
-
- def reauthenticate_endpoint_command():
- """
- corresponds to 'cisco-ise-reauthenticate-endpoint' command. Reauthenticates an endpoint
- """
- mac_address = demisto.args().get('macAddress').upper()
- if not is_mac(mac_address):
- return "Please enter a valid mac address"
- mac_address = (':').join([x.upper() for x in mac_address.split(':')])
- mac_address_psn = get_psn_for_mac(mac_address)
- if not mac_address_psn:
- return "Couldn't find psn address for mac: " + mac_address
- psn_address = json.loads(xml2json(mac_address_psn)).get('restAuthStatusOutputList', {}).get('authStatusList',{}).get('authStatusElements', {})[0].get('acs_server')
- if not psn_address:
- return "Couldn't find psn address for mac: " + mac_address + " response from psn endpoint was: " + json.dumps(mac_address_psn)
- activation_result = reauthenticate_endpoint(mac_address, psn_address).text
- json_activation_result = json.loads(xml2json(activation_result)).get('remoteCoA').get('results')
- activation_result_boolean = 'true' in json_activation_result
- return {
- 'Type': entryTypes['note'],
- 'Contents': activation_result,
- 'ContentsFormat': formats['json'],
- 'ReadableContentsFormat': formats['markdown'],
- 'HumanReadable': 'Activation result was : ' + activation_result_boolean,
- 'EntryContext': {
- "CiscoISE.Endpoint(val.MACAddress==obj.MACAddress)": {
- 'MACAddress': json_activation_result,
- 'reauthenticateResult': activation_result_boolean
- }
- }
- }
-
- def get_endpoints():
- """
- Gets data about existing endpoints
- """
- headers = {
- 'Accept':'application/json',
- 'Connection': 'keep_alive'
- }
- api_endpoint = "/ers/config/endpoint"
- return json.loads(http_request('GET', api_endpoint, {}, {}, headers).text)
-
- def get_endpoints_command():
- """
- corresponds to 'ise-get-endpoints' command. Get data about the existing endpoints
- """
-
- endpoints_data = get_endpoints().get('SearchResult', {})
-
- if endpoints_data.get('total',0) < 1:
- return 'No endpoints were found.'
-
- endpoints = endpoints_data.get('resources', [])
-
- context = []
- hr = []
-
- for endpoint in endpoints:
- context_dict = {
- 'ID': endpoint.get('id'),
- 'MACAddress': endpoint.get('name')
- }
- hr_dict = dict(context_dict)
- link_data = endpoint.get('link')
- if link_data:
- href = link_data.get('href')
- hr_dict['Link'] = '[{0}]({0})'.format(href)
- context.append(context_dict)
- hr.append(hr_dict)
-
- ec = {
- 'Endpoint(val.ID == obj.ID)': context,
- 'CiscoISE.Endpoint(val.ID == obj.ID)': context
- }
-
- return {
- 'Type': entryTypes['note'],
- 'Contents': endpoints,
- 'ContentsFormat': formats['json'],
- 'ReadableContentsFormat': formats['markdown'],
- 'HumanReadable': tableToMarkdown('Cisco pxGrid ISE Endpoints', hr, ['ID', 'MACAddress', 'Link'], removeNull=True),
- 'EntryContext': ec
- }
-
- def update_endpoint_by_id(endpoint_id, endpoint_details):
- """
- Updates endpoint status
- """
- headers = {
- "Content-Type": "application/vnd.com.cisco.ise.identity.endpoint.1.0+xml; charset=utf-8"
- }
- api_endpoint = "/ers/config/endpoint/{}".format(endpoint_id)
- return http_request('PUT', api_endpoint, {}, endpoint_details, headers)
-
- def update_endpoint_custom_attribute_command():
- """
- corresponds to 'cisco-ise-update-endpoint-custom-attribute' command. Blocks endpoint using predefined custom fields
- """
-
- endpoint_id = demisto.args().get('id')
- endpoint_mac_address = demisto.args().get('macAddress')
-
- if endpoint_mac_address and not is_mac(endpoint_mac_address):
- return "Please enter a valid mac address"
-
- if not endpoint_id and not endpoint_mac_address:
- return 'Please enter either endpoint id or endpoint mac address'
-
- if endpoint_mac_address and not endpoint_id:
- endpoint_id = get_endpoint_id(endpoint_mac_address).get('SearchResult', {}).get('resources', [])[0].get('id', None)
-
- endpoint_details = get_endpoint_details(endpoint_id)
-
- if "ERSEndPoint" not in endpoint_details:
- return 'Failed to get endpoint %s' % endpoint_id
-
- attribute_names = demisto.args().get('attributeName').split(',')
- attribute_values = demisto.args().get('attributeValue').split(',')
-
- attributes_dic = {}
- for couple in zip(attribute_names, attribute_values):
- attributes_dic[couple[0]] = couple[1]
- try:
- del endpoint_details['ERSEndPoint']['link']
- if endpoint_details['ERSEndPoint'].get('customAttributes'):
- endpoint_details['ERSEndPoint']['customAttributes']['customAttributes'] = attributes_dic
- else:
- endpoint_details['ERSEndPoint']['customAttributes'] = {'customAttributes': attributes_dic}
-
- update_result = update_endpoint_by_id(endpoint_id, endpoint_details)
- if update_result.status_code != 200:
- return "Update failed for endpoint " + endpoint_id + ". Please check if the custom fields are defined in the system. Got the following response: " + \
- json.dumps(json.loads(update_result.text).get('ERSResponse',{}).get('messages',[]))
-
- update_json = json.loads(update_result.text)
-
- updated_fields_dict_list = update_json.get('UpdatedFieldsList', {}).get('updatedField',[])
-
- if len(updated_fields_dict_list) > 0:
- updated_fields_string = ' the new custom fields are: ' + json.dumps(updated_fields_dict_list[0].get('newValue'))
- else:
- updated_fields_string = ", but the fields that you've tried to update already had that specific value or do not exist"
-
- return 'Successfully updated endpoint %s' % endpoint_id + updated_fields_string
-
- except Exception as e:
- raise Exception("Exception: Failed to update endpoint {}: ".format(endpoint_id) + str(e))
-
- def update_endpoint_group_command():
- """
- corresponds to 'cisco-ise-update-endpoint-group' command. Updates endpoint status
- """
-
- endpoint_group_name = demisto.args().get('groupName')
- endpoint_group_id = demisto.args().get('groupId')
-
- if not endpoint_group_name and not endpoint_group_id:
- return 'Please enter either group id or group name'
-
- if endpoint_group_name and not endpoint_group_id:
- endpoint_group_data = get_endpoint_id(None, endpoint_group_name).get('SearchResult', {})
- if endpoint_group_data.get('total',0) < 1:
- return 'No endpoints were found. Please make sure you entered the correct group name'
-
- endpoint_group_id = endpoint_group_data.get('resources')[0].get('id')
-
- endpoint_id = demisto.args().get('id')
- endpoint_mac_address = demisto.args().get('macAddress')
-
- if endpoint_mac_address and not is_mac(endpoint_mac_address):
- return "Please enter a valid mac address"
-
- if not endpoint_id and not endpoint_mac_address:
- return 'Please enter either endpoint id or endpoint mac address'
-
- if endpoint_mac_address and not endpoint_id:
- endpoint_id = get_endpoint_id(endpoint_mac_address).get('SearchResult', {}).get('resources', [])[0].get('id', None)
-
- endpoint_details = get_endpoint_details(endpoint_id)
-
- if "ERSEndPoint" not in endpoint_details:
- return 'Failed to get endpoint %s' % endpoint_id
-
- try:
- endpoint_details['ERSEndPoint']['groupId']= endpoint_group_id
- update_result = update_endpoint_by_id(endpoint_id, endpoint_details)
-
- # Create result
- msg = "Endpoint " + endpoint_id + " updated successfully" if update_result.status_code == 200 else "Update failed for endpoint " + endpoint_id + ", got the following response: " + \
- json.dumps(json.loads(update_result.text).get('ERSResponse',{}).get('messages',[]))
- result = [{'Update status': msg}]
-
- except Exception as e:
- raise Exception("Exception: Failed to update endpoint {}: ".format(endpoint_id) + str(e))
-
- return {
- 'Type': entryTypes['note'],
- 'Contents': result,
- 'ContentsFormat': formats['json'],
- 'ReadableContentsFormat': formats['markdown'],
- 'HumanReadable': msg,
- }
-
- def list_number_of_active_sessions():
- """
- This function is used for test-module to check connectivity
- """
- api_endpoint = "/admin/API/mnt/Session/ActiveCount"
-
- response = http_request('GET', api_endpoint, {}, {}, {})
-
- return response
-
- ''' EXECUTION CODE '''
- try:
- if demisto.command() == 'test-module':
- #This is the call made when pressing the integration test button.
- if get_endpoints_command():
- demisto.results('ok')
- elif list_number_of_active_sessions():
- demisto.results('ok')
- else:
- demisto.results('test failed')
- elif demisto.command() == 'cisco-ise-get-endpoint-id':
- demisto.results(get_endpoint_id_command())
- elif demisto.command() == 'cisco-ise-get-endpoint-details':
- demisto.results(get_endpoint_details_command())
- elif demisto.command() == 'cisco-ise-reauthenticate-endpoint':
- demisto.results(reauthenticate_endpoint_command())
- elif demisto.command() == 'cisco-ise-get-endpoints':
- demisto.results(get_endpoints_command())
- elif demisto.command() == 'cisco-ise-update-endpoint-custom-attribute':
- demisto.results(update_endpoint_custom_attribute_command())
- elif demisto.command() == 'cisco-ise-update-endpoint-group':
- demisto.results(update_endpoint_group_command())
-
- except Exception, e:
- LOG(e.message)
- LOG.print_log()
- raise Exception(str(e))
- type: python
- commands:
- - name: cisco-ise-get-endpoint-id
- arguments:
- - name: macAddress
- required: true
- description: MAC address of the endpoint (format 11:22:33:44:55:66)
- outputs:
- - contextPath: Endpoint.ID
- description: Endpoint ID
- type: string
- - contextPath: Endpoint.MACAddress
- description: Endpoint MAC address
- type: string
- description: Get endpoint's ID by its MAC
- - name: cisco-ise-get-endpoint-details
- arguments:
- - name: endpointID
- description: The id of the endpoint
- - name: macAddress
- description: MAC address of the endpoint (format 11:22:33:44:55:66)
- outputs:
- - contextPath: CiscoISE.Endpoint.ID
- description: Endpoint ID
- type: string
- - contextPath: CiscoISE.Endpoint.Description
- description: Endpoint description
- type: string
- - contextPath: CiscoISE.Endpoint.MACAddress
- description: Endpoint MAC address
- type: string
- - contextPath: CiscoISE.Endpoint.Group
- description: Endpoint group name
- type: string
- - contextPath: Endpoint.ID
- description: Endpoint ID
- type: string
- - contextPath: Endpoint.MACAddress
- description: Endpoint MAC address
- type: string
- - contextPath: CiscoISE.Endpoint.CustomAttributes
- description: Endpoint custom attributes
- type: string
- - contextPath: CiscoISE.Endpoint.StaticGroupAssignment
- description: Ture if endpoint has static group assignment
- type: boolean
- - contextPath: CiscoISE.Endpoint.StaticProfileAssignment
- description: Ture if endpoint has static profile assignment
- type: boolean
- - contextPath: CiscoISE.Endpoint.User
- description: Endpoint profile user
- type: string
- description: Get details about certain endpoint
- - name: cisco-ise-reauthenticate-endpoint
- arguments:
- - name: macAddress
- required: true
- description: MAC address of the endpoint (format 11:22:33:44:55:66)
- outputs:
- - contextPath: CiscoISE.Endpoint.MACAddress
- description: Mac address of the endpoint
- type: string
- - contextPath: CiscoISE.Endpoint.reauthenticateResult
- description: Reauthentication result
- type: boolean
- description: Change of Authorization (CoA), reauthenticating an endpoint
- - name: cisco-ise-get-endpoints
- arguments: []
- outputs:
- - contextPath: Endpoint.ID
- description: Endpoint ID
- type: string
- - contextPath: Endpoint.MACAddress
- description: Endpoint MAC address
- type: string
- - contextPath: CiscoISE.Endpoint.ID
- description: Endpoint ID
- type: string
- - contextPath: CiscoISE.Endpoint.MACAddress
- description: Endpoint MAC address
- type: string
- description: Get data about the existing endpoints
- - name: cisco-ise-update-endpoint-custom-attribute
- arguments:
- - name: id
- description: Endpoint ID
- - name: macAddress
- description: MAC address of the endpoint (format 11:22:33:44:55:66)
- - name: attributeName
- required: true
- description: The name of the attribute. Can be a comma separated list. For example,
- attributeName=firstAttribute,secondAttribute
- - name: attributeValue
- required: true
- description: The value of the attribute. Can be a comma separated list. For
- example, attributeValue=firstValue,secondValue
- description: Update an endpoint custom attribute
- - name: cisco-ise-update-endpoint-group
- arguments:
- - name: groupId
- description: The group ID to assign to this endpoint, e.g. 1
- - name: macAddress
- description: MAC address of the endpoint (format 11:22:33:44:55:66)
- - name: id
- description: Endpoint ID to update. E.g. 046f1250-bc6e-11e4-9baf-000c2916b229
- - name: groupName
- description: Name of the new group for the endpoint
- description: Updates the group of an endpoint
- runonce: false
-tests:
- - cisco-ise-test-playbook
\ No newline at end of file
diff --git a/Beta_Integrations/integrations-Awake_Security.yml b/Beta_Integrations/integrations-Awake_Security.yml
deleted file mode 100644
index 9981a3c57105..000000000000
--- a/Beta_Integrations/integrations-Awake_Security.yml
+++ /dev/null
@@ -1,586 +0,0 @@
-commonfields:
- id: Awake Security
- version: -1
-name: Awake Security
-display: Awake Security
-category: Network Security
-image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHgAAAAyCAYAAACXpx/YAAAAAXNSR0IArs4c6QAADwdJREFUeAHtWnt0ldWVP4/vcW/CU6DmNUgrFNPwygNERUthsIMKtGgSIIBtmbEPxZnSTtcoA70FtcvaKdNBqQXXtGuAjBoVRlBXixV5VjQPxERlFToO5IUOBDDJvfd7nDO/c+FL7osYbmGt+eM7a305r332d+7v7L3P3vsLIX7xEfAR8BHwEfAR8BHwEfAR8BHwEfAR8BHwEfAR8BHwEfAR8BHwEfAR8BG4ugjQv4R94dqnxwSprKaMmnZndNGRx/6+8S/h56+98ghkfMBFodAAg414gxv6ZCIJEbb9QVREpzWFVpy58tv0OWaKAMt0ISHXjKSUTpSuS6TrEM00C3Vqrsicn7/yaiCQ8QHzzuj/ECIbKeOxfbmOQ7jGHhi3dl3h1diozzMzBDI20ep1xWvXz6Xc2E6FAB9JuK4T27Kea1h1/4LMtnP5qwrKFk8nlE4gghxqrtt8KB2HvLKqWwjlE0mY72ht/M3JZJqionLjTJZRQVwq82jW83V1G+1kmrwJ5WOJmT1ThsO72xqf+yB5XvXzSyrv0pg5Khp232hv2vp+Ohpv7Nrxi76gB4yZjh3Z1X742Y+8ca/OKa6czTi/3rXCz586su1jb3xEUfkAwwxOh9kMUEJxOSYWSSQVRH7c1lC9V81krMFqcYN7eqd0nJ1c11SXuLZDGNfumfjor74aG7jKf0aOXzSUULJF081f4rc+M2rUNwLpXsklrTKN4FMkYC9PN386qE/SmL5Z07UtreL8xHQ0XDcfNnXjKW5o09LNjyhZPJoy4zmqG+u1LPovhIT6xFYL0OW6YW5kGrsrmV9e2aJlppn9KqNshXAHJ/DRdTaT6WyHZhg1WsB8IfnRA8EaztjLHs+Exd5gv+tQSEhOVruO3QUtwjKJinLmirWloVBWv/lkSCg0NoMzLd+xIhYh9EvOcGdqOlZCyDeEa0Oa5TQyffoFaYwj5JJNY9h/7OH6rXFTsWZBQXkQ/Cc7Vth2otaB5HnVN6icxzUty7GjUSLobXkTmsako/PGoHtcSuF1e+qC0iU3MqqvE444Bz4LP2n6bXvPJBqUc0PtUwirzra7v5P6WN8hVCzy1qT8WG+iv/XhlfcfLglt2KQFtX9wbTvmcCnP2rFGfAM8NvSXTyZ0kskFruvCIskXuW4sFG6kAnzeTOYFgndc1zoPeMbnnsvLbyME/kNcoeSm3p68Be11vX1CnHzj87pko4UrPhgQ7TwePxdrl96nU9pV4Th2J4ToFW4EKi0p52LuiRTaiwO41FLMa3bRHTmwSL/lnA10otGlLQ3VqVeOUGYZiwU72lK39deX4u+NX5YGTzqwd15x7YHlX9y9e7jHQNVRIh93LeskZRfZCUHgej1U+Oi63Hi6K9nOG7fgrxjRZiFA+5Og7hNw8jphP+bEzHbSi9oa7GYIQRPXjAGcsLKE6bFzBwKuGyGcbRCWMOArIxiLp2EWmcqVg6GJ/ceOvRaNn1PtXGXWqVZKqTzkSvGk4zqSMVleWlqqJ9Neql8KIRkaGPq0Zpg3wCI+gcPdfClaNY4990s5+33A4/bte5Cb+nZdD/5btqm/9IVduwZ7G2gK3d8uJX0E9iM2JNUBm2ZBQBr/5NFc6Zqa/E5YisFU0Fdba59tgFnar2lmgTTYl1PfVeMKSfcqj18ykmCCcwYOKISgFACwtyEEH8KJKCgYOCQxEuB8GpwX3EDyzVTehHDKyzVd50Swl1vrxh4kwmlESFHSQscWp6NPN9ZOulZrRnAeLPyrgbOnV6WjyWSsXwdceGD3OEOnj0ghidPdTXgwcOvA7GBCzGt2RDYL2zpEtYsOl4M7j/JlZWt+1e8f2f8foBwYWilcR7jUfUmtE46sUT4jTF9lOj6UOPukcJSPcFv8PQxGU5iuU/y0d3AnHtc0nQspe+9yeNhciqmuA9/YEu8k884rnZMlKfm6Y1thCMCrcK5wI7BtGtc5I+yeZPqUvku7c4qXzoaFeBg8jlKH3pfOSiSvg7i5yWPp+p99wFDNINXWMMMYSHDdqSKsKASdPDj+4MEbPKZ/XLciDGBWSVfgxSpqghfBebZLxE9IqG+P0uPR3zpn4tFCxtnNcJyass6djYHOhPU7xwl3wP/4asHEpfnJvDqIVe+6dgc2dkPemYLrvXk4WNBoQZiU+zD3rhqHnvd4yiOJMRoqOhoa/G77Yfekt663zpqm6QYcKrGnuX7LcTUuqL0dZtZmknz92gm3Z/fSJrXgZMGU36RpYiMQO21LeW/zu//RkkSVpqvsCS3KL636QV7p4h/2PFMW/2Ne6cK/JTD33qLPPOCJb++/k+p8rrDiQkOIO5yaIZy4P/EYqfrwjx/YJYT7Qk/YhAwX07S7JvHPKYfjihWus69xzTQAzzZP2pvfrWmBud6laYGhxHDvSH5Zd92EU1TSOoRUQUR1sXtYxZQw2ZPhHJ3hsvsIZXIfhEZirOceFlmkhONukpRCAGpStAYebwUCh4sW5ILj1Pp59wisRS0wGs20YRCg9EVCCQhlVepqcYS959Ql4vjk1QKCoWl8gm4Efm4Y5hM9jxb4GSP85yOtzgHemj4POK+2Nou5JIRNQKixmbiiDpxxeve4/W/OihsmjitCcHjOYU1Mi6FRFCZuTVHoqZ6XxtNfbnv06NkmDqoc5ixCbFkdv15ItlkKuDmSwZtOthohOMHufmVdXEZnqHV6kI+BOR0Fl7T2o8Pbz8pPI40Q0HbkCq7LGTQwZp2EYLfFNJyx3fHvUu28L85RzuadUNZPWMDa3jNfgzufsK0UGCCMK+8ZT2qoeSbkevyWWgjRfCRkfpREkraL+Ji4rrPHcsKzEh7bmgXcbz/xXvVZb2Gfntiw6LkyqgVKJRIYKQUHzkyTa0JUYW6XN/9eaPmHxWs2rNdN/Z9d20LY5BJumON1J/I90PzMo8u07h48ZLLG2QThuicgOeNzS6q+pHjhbsVl4gxD1NSFOHFa7tQPxra9RRIyThC2vULYUEZyY2wJoVOQMKBWJPwHxaPlw22nC0qr3tbMwDxcQ1MwVMuomIoDPBsN8wZFk1AGD5qJsDTHcax6GQ7eir3EFAaWRKmDgXEk6uXsnOLyEe0NNZ8krFUdbERQ8SfceZsgEG/o3HgcZvc8wp+nU2gTBtQVSFpba6tfTxhO0+lTgzXJurFTSU0dUQBPfGDnYH5VsuzTZL5WR2Qdkg/HvTw1tAJ3HP9B4conr0umvdw+ZL6CMo0iarlODwRqTDPwknpgprbpeuAZzrUBeALc1ual8O4KH4FgnIKvMDZ/YuUY3ONTlQACrr0erQJatXEn35JbvGgkpKEQHnpdcsIhRiPoAlXDMSsxgvr2nr0EzW2Gpv8CGSVsRctlJDBT0aUrgtJAc33NMVdEq2B8zuG3rc8rW7wwHW38GPbM4/uXavd5wA2v76mnjvNdabt7kYZs7H3cRuE4dVZX5yYr6j6azLxp3
Yoz8GbXKuRUUWETwojPBQN05YWRDP+Onj0ImjHHdawumLUf2lFkcqzId73Hika+bTvO4+o6gXKUE3jA8W9SGgo1r9d1U5Oc3y4dMgXO0IkW/cz7vXTWfseyBSR3HOPyb3TN1CADKeY5Z9KCUXjHDFipVqS3lscySvF7saL3IR7eGDPTXHzmgbXVP7sP9/A3YYlcGPZncqYund27p8xbfZpoeL+iPhT6NdhvLK3dgXSdV3JJJBh0moqKkCJMX/gJvVoUOPfC+/6KRMiEOwNqwZYWr3nyNw2rH/hj+lV9j+YNGv5lXddHOW50Z0vtVuR7U4u6o8ODh82nXJuUb/BSuKQJ70K8uxcCcAeSMpX4R4Ux0pUvkEOvIct1ocDRPsrMa48hpr0eB3ivCq1gS3s03KNTOWSuBwYhOtoCU/mkNx5fZ5XO3znMoRWSsq+ojwun3qv+c/x8crutfus2eMEPwBJtIq7ckluy+Gtt9Vvg3KUWCHqKw5dK1f+PDbKubE5371PW3dfhqhfVbfw21ECsQqxqKXVSWgWDZUKi10wPhfoWrHQ7xZhGZaVy3nAoL16ChCivGvfmDk0zoAgunK3EAs91D7xm9fnrFsY0XRA3ZpI9qlNHft+Fr2MHEQlkQUhugmCe1O2ORm/+Ql3Occ9X4LdJRFixODxx/kKvu+6lNni8r8NLHsgNnvJRId2a1rr/fMa2oiF8er0GofSzeaWVJal0OF5KrisoWTK/oHjx3fFPfnHVPfklC2d4a/o00R5RpvXh1csPQHW34CKKscCXJ9zb/K/P02vSJiP6ek9W4fxcnMrd+LzW2SEDv++L1nXYi7GkhqSVBGY9njaok/chbCcRLlGYZwcKmqDhilZ9nFBCyZmu4p6GE++9gvi5t+RPyh4P5+pWIZz/lux8yvpeStVizytejJIlCQmWC99lEkkv9lrrq9c6VnQDvnDlMaLvzB1X2ZNZg4IIRcY1/WZk8l7kpvFC/IOvSzX4ohdzGhVdoiYhIVEya8YyqesziOMY2Bh+X7+LUtOPLDfyy6Zps054qxxLIoVpz4VDM1zdxarg/ltd9P1fvKbuao/us+rhWiACl/QRaO/x7oZ/b+2LvnVI89t5n+bfjzdlXZs13D0VR3zs0Nbz+SVLvoUwrwgpp9PtQ9uOxk3HmiazX45E6DKN8Wzk7g4kz0tbnHE1JHCE+1Zr/Y7u5Pn4vhWJvAar8SNAmeiMSrIpakU+lC55JZ7+YlsMiVjfPyvkW8gWDUdOrFcRo5E3bWksg/QF4dykno+MBVE9YdJFN+gC20kH9z+mmfpDSuJiQXiaN/c1hMCeWF3de/73ZMvs5oqKsEc7ae2GhwxDf0x9bVIlRmdZDx9e9b2fejR+fXUQ6JUM8GeMPiQQ84oo4lckMi73ccLdKryYPDQ3Nzd+u50fO0/Zlt2Eb5mxYYQpii4hQRJP77evHAIJB4x/nnscnmUsAM/kFRRf1GAzajs4b4tff2z9g+dhnlfiXJH+QiSLSdD1eY/Gr/fbmSOQcMANLadWIr79McC/4PleBl8lGMKy9lld4SXNN9/cY549FshT/xf+t/bv4MKctu3o+o4/f/Kv3pxf+wj4CPgI+Aj4CPgI+Aj4CPgI+Aj4CPgI+Aj4CPgI+Aj4CPgI+Aj4CPgI+Aj4CPgI/D9F4P8A15R2TQhjgN0AAAAASUVORK5CYII=
-description: Network Traffic Analysis
-detaileddescription: 'All commands expect timestamps in the following format: "2000-01-01T00:00:00Z"'
-configuration:
-- display: Credentials
- name: credentials
- defaultvalue: ""
- type: 9
- required: false
-- display: Awake Security server address
- name: address
- defaultvalue: ""
- type: 0
- required: false
-- display: Verify server certificate
- name: verify
- defaultvalue: "True"
- type: 8
- required: false
-- display: Fetch incidents
- name: isFetch
- defaultvalue: ""
- type: 8
- required: false
-- display: Incident type
- name: incidentType
- defaultvalue: ""
- type: 13
- required: false
-- display: Comma-separated threat behaviors to generate incidents for
- name: threat_behaviors
- defaultvalue: ""
- type: 0
- required: false
-- display: Period (in minutes) between incident reports
- name: period
- defaultvalue: "60"
- type: 0
- required: false
-- display: Minimum Threshold
- name: min_threshold
- defaultvalue: "33"
- type: 0
- required: false
-- display: Maximum Threshold
- name: max_threshold
- defaultvalue: "66"
- type: 0
- required: false
-script:
- script: |
- import base64
- import datetime
- import re
- import requests
-
-
- params = demisto.params()
- address = params["address"]
- prefix = address + "/awakeapi/v1"
- verify = params["verify"]
- credentials = params["credentials"]
- identifier = credentials["identifier"]
- password = credentials["password"]
- authTokenRequest = {
- "loginUsername": identifier,
- "loginPassword": password
- }
- authTokenResponse = requests.post(prefix + "/authtoken", json=authTokenRequest, verify=verify)
- authToken = authTokenResponse.json()["token"]["value"]
- headers = {
- "Authentication": ("access " + authToken)
- }
-
- command = demisto.command()
- args = demisto.args()
- request = {}
-
- # Convenient utility to marshal command arguments into the request body
- def slurp(fields):
- for field in fields:
- if field in args:
- request[field] = args[field]
-
- # Render a subset of the fields of the Contents as a markdown table
- def displayTable(contents, fields):
- # We don't use a set() because we want to preserve field order
- #
- # The fields are ordered to put the most relevant information first
- presentFields = []
- # Omit table columns that are all empty
- for content in contents:
- for field in fields:
- if field in content and content[field] and field not in presentFields:
- presentFields.append(field)
- line0 = "| "
- line1 = "| "
- for field in presentFields:
- # Translate camel-case field names to title-case space-separated words
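- # e.g. "deviceScore" -> ["device", "Score"] -> "Device Score"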
- tokens = re.findall("[a-zA-Z][A-Z]*[^A-Z]*", field)
- name = " ".join(map(lambda token: token.title(), tokens))
- line0 += name + " | "
- line1 += "--- | "
- line0 += "\n"
- line1 += "\n"
- body = ""
- for content in contents:
- body += "| "
- for field in presentFields:
- if field in content:
- value = json.dumps(content[field])
- else:
- value = ""
- body += value + " | "
- body += "\n"
- if presentFields:
- return (line0 + line1 + body)
- else:
- return "Empty results"
-
- def returnResults(contents, humanReadable, dbotScore):
- machineReadable = {
- "AwakeSecurity": contents,
- }
- if dbotScore is not None:
- machineReadable["DBotScore"] = dbotScore
- results = {
- "Type": 1,
- "ContentsFormat": "json",
- "Contents": json.dumps(machineReadable),
- "HumanReadable": humanReadable,
- "ReadableContentsFormat": "markdown",
- }
- demisto.results(results)
-
- def toDBotScore(indicator_type, percentile, lookup_key):
- # Demisto's score - 0:unknown, 1:ok, 2:suspicious, 3:bad
- #
- # Our scores range from 0 to 100, where higher
- # scores mean more unusual behavior
- #
- # Our product does not yet assign meaning
- # to these scores since unusual behavior is
- # not necessarily malicious, but empirically
- # partitioning the scores by quartile appears to
- # map closely enough to Demisto's classification
- # scheme for now.
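- # With the default thresholds (min 33, max 66), a percentile of
- # 20 maps to 1 (ok), 50 to 2 (suspicious) and 80 to 3 (bad).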
- if percentile <= int(params.get('min_threshold', 33)):
- # People doing something out of the ordinary
- # compared to others (i.e. performing weird
- # searches, learning something exotic, making
- # an unusual purchase)
- score = 1
- elif percentile <= int(params.get('max_threshold', 66)):
- # Something doing multiple things out of the
- # ordinary, worth investigating
- score = 2
- else:
- # Probably bad or at least not compliant with
- # company policy. You have to make an effort
- # to score this high.
- score = 3
- return {
- "Vendor": "Awake Security",
- "Type": indicator_type,
- "Indicator": lookup_key,
- "Score": score
- }
-
- def lookup(lookup_type, lookup_key):
- path = "/lookup/" + lookup_type
- request["lookup_key"] = lookup_key
- if "lookback_minutes" in args:
- request["lookback_minutes"] = int(args["lookback_minutes"])
- else:
- request["lookback_minutes"] = 480
- response = requests.post(prefix + path, json=request, headers=headers, verify=verify)
- return response.json()
-
- def lookupDevice(lookup_key):
- contents = lookup("device", lookup_key)
- humanReadableFields = [
- "deviceScore",
- "deviceName",
- "deviceType",
- "os",
- "osVersion",
- "commonEmail",
- "commonUsername",
- "tags",
- "recentIP",
- "activeIP",
- "nSimilarDevices",
- "ipCount",
- "applicationCount",
- "protocols",
- "firstSeen",
- "lastSeen",
- ]
- if "deviceScore" in contents:
- dbotScore = toDBotScore("device", contents["deviceScore"], lookup_key)
- else:
- dbotScore = {
- "Vendor": "Awake Security",
- "Type": 'device',
- "Indicator": lookup_key,
- "Score": 0
- }
- humanReadable = displayTable([contents], humanReadableFields)
- returnResults(contents, humanReadable, dbotScore)
-
- def lookupDomain(lookup_key):
- contents = lookup("domain", lookup_key)
- humanReadableFields = [
- "notability",
- "isAlexaTopOneMillion",
- "isDGA",
- "intelSources",
- "numAssociatedDevices",
- "numAssociatedActivities",
- "approxBytesTransferred",
- "protocols",
- "firstSeen",
- "lastSeen",
- ]
- if "notability" in contents:
- dbotScore = toDBotScore("domain", contents["notability"], lookup_key)
- else:
- dbotScore = {
- "Vendor": "Awake Security",
- "Type": 'domain',
- "Indicator": lookup_key,
- "Score": 0
- }
- humanReadable = displayTable([contents], humanReadableFields)
- returnResults(contents, humanReadable, dbotScore)
-
- def lookupEmail(lookup_key):
- contents = lookup("email", lookup_key)
- humanReadableFields = [
- "notabilityPercentile",
- "deviceName",
- "os",
- "deviceType",
- "application",
- "numberSimilarDevices",
- "numberSessions",
- "firstSeen",
- "lastSeen",
- "duration",
- "deviceId",
- ]
- if "notabilityPercentile" in contents:
- dbotScore = toDBotScore("email", contents["notabilityPercentile"], lookup_key)
- else:
- dbotScore = {
- "Vendor": "Awake Security",
- "Type": 'email',
- "Indicator": lookup_key,
- "Score": 0
- }
- humanReadable = displayTable([contents], humanReadableFields)
- returnResults(contents, humanReadable, dbotScore)
-
- def lookupIp(lookup_key):
- contents = lookup("ip", lookup_key)
- humanReadableFields = [
- "deviceCount",
- "activityCount",
- "ipFirstSeen",
- "ipLastSeen",
- ]
- # Note: We return a DBotScore of 0 (unknown) for IP addresses
- # because our product scores devices rather than IP addresses.
- dbotScore = {
- "Vendor": "Awake Security",
- "Type": 'ip',
- "Indicator": lookup_key,
- "Score": 0
- }
- humanReadable = displayTable([contents], humanReadableFields)
- returnResults(contents, humanReadable, dbotScore)
-
- def query(lookup_type):
- # Default to an empty query if unset
- request["queryExpression"] = ""
- slurp(["queryExpression", "startTime", "endTime"])
- nameMappings = [
- ("ipAddress","device.ip == {}"),
- ("deviceName","device.name like r/{}/"),
- ("domainName", "domain.name like r/{}/"),
- ("protocol", "activity.protocol == \"{}\""),
- ("tags","\"{}\" in device.tags"),
- ]
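- # e.g. ipAddress=10.0.0.1 combined with protocol=TLS produces:
- # device.ip == 10.0.0.1 && activity.protocol == "TLS"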
- for (name, mapping) in nameMappings:
- if name in args:
- if "queryExpression" in request and request["queryExpression"]:
- request["queryExpression"] = request["queryExpression"] + " && " + mapping.format(args[name])
- else:
- request["queryExpression"] = mapping.format(args[name])
- path = "/query/" + lookup_type
- response = requests.post(prefix + path, json=request, headers=headers, verify=verify)
- contents = response.json()
- return contents
-
- def queryActivities():
- contents = query("activities")
- humanReadableFields = [
- "sourceIP",
- "sourceHost",
- "sourcePort",
- "destIP",
- "destHost",
- "destPort",
- "activityDeviceName",
- "activityStart",
- "activityEnd",
- "protocols",
- ]
- humanReadable = displayTable(contents, humanReadableFields)
- returnResults(contents, humanReadable, None)
-
- def queryDevices():
- contents = query("devices")
- humanReadableFields = [
- "notabilityPercentile",
- "deviceName",
- "os",
- "deviceType",
- "application",
- "numberSimilarDevices",
- "numberSessions",
- "firstSeen",
- "lastSeen",
- "duration",
- "deviceId",
- ]
- humanReadable = displayTable(contents, humanReadableFields)
- returnResults(contents, humanReadable, None)
-
- def queryDomains():
- contents = query("domains")
- humanReadableFields = [
- "name",
- "notability",
- "created",
- "lastUpdated",
- "expiration",
- "registrantOrg",
- "registrantCountry",
- "registrarName",
- "nameservers",
- "deviceCount",
- "intelCount",
- "lastSeen",
- ]
- humanReadable = displayTable(contents, humanReadableFields)
- returnResults(contents, humanReadable, None)
-
- def pcapDownload():
- slurp(["monitoringPointID"])
- session = {}
- for field in [ "hostA", "hostB", "startTimeRFC3339Nano", "endTimeRFC3339Nano" ]:
- if field in args:
- session[field] = args[field]
- if "startTimeRFC3339Nano" in args:
- session["startTimeRFC3339Nano"] = args["startTime"]
- if "endTimeRFC3339Nano" in args:
- session["endTimeRFC3339Nano"] = args["endTime"]
- for field in [ "protocol", "portA", "portB" ]:
- if field in args:
- session[field] = int(args[field])
- request["sessions"] = [ session ]
- path = "/pcap/download"
- response = requests.post(prefix + path, json=request, headers=headers, verify=verify)
- b64 = response.json()["pcap"]
- bytes = base64.b64decode(b64)
- demisto.results(fileResult("download.pcap", bytes))
-
- def fetchIncidents():
- threatBehaviorsString = params.get("threat_behaviors") or ""
- threatBehaviors = [ threatBehavior.strip() for threatBehavior in threatBehaviorsString.split(",")]
- if threatBehaviors == [""]:
- threatBehaviors = []
- lastRun = demisto.getLastRun()
- formatString = "%Y-%m-%d %H:%M:%S+0000"
- earlyTimeString = "1970-01-01 00:00:00+0000"
- startTimeString = lastRun.get("time") or earlyTimeString
- startTime = datetime.datetime.strptime(startTimeString, formatString)
- endTime = datetime.datetime.utcnow()
- endTimeString = datetime.datetime.strftime(endTime, formatString)
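- # Only query the API once per configured reporting period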
- if datetime.timedelta(minutes=int(params['period'])) <= endTime - startTime:
- jsonRequest = {
- "startTime": startTimeString,
- "endTime": endTimeString,
- "threatBehaviors": threatBehaviors
- }
- response = requests.post(prefix + "/threat-behavior/matches", json=jsonRequest, headers=headers, verify=verify)
- jsonResponse = response.json()
- matchingThreatBehaviors = jsonResponse.get("matchingThreatBehaviors", [])
- def toIncident(matchingThreatBehavior):
- # Currently the threat behavior API doesn't allow us to retrieve metadata for
- # the behaviors that matched, which is why this incident record is mostly empty
- #
- # However, we can provide the original query that the threat behavior corresponded
- # to plus the date range so that a playbook can feed them back into
- # `awake-query-{devices,activities}` to retrieve the matching devices or
- # activities that triggered the match to the threat behavior.
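- # For example, a playbook could pass the incident's Query, StartTime and
- # EndTime back as the queryExpression, startTime and endTime arguments of
- # awake-query-devices to fetch the devices behind the match.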
- return {
- "Name": matchingThreatBehavior["name"],
- "Query": matchingThreatBehavior["query"],
- "StartTime": startTimeString,
- "EndTime": endTimeString,
- }
- demisto.incidents(map(toIncident, matchingThreatBehaviors))
- # Don't increase the low-water-mark until we actually find incidents
- #
- # This is a precaution because incidents sometimes appear in an old time
- # bucket after a delay
- if 0 < len(matchingThreatBehaviors):
- lastRun = { "time": endTimeString }
- else:
- demisto.incidents([])
- demisto.setLastRun(lastRun)
-
- ''' EXECUTION '''
- LOG('command is %s' % (command))
-
- try:
- if command == "test-module":
- # If we got this far we already successfully authenticated against the server
- demisto.results('ok')
-
- elif command == "fetch-incidents":
- fetchIncidents()
-
- elif command == "awake-query-devices":
- queryDevices()
-
- elif command == "awake-query-activities":
- queryActivities()
-
- elif command == "awake-query-domains":
- queryDomains()
-
- elif command == "awake-pcap-download":
- pcapDownload()
-
- elif command == "domain":
- lookupDomain(args["domain"])
-
- elif command == "email":
- lookupDomain(args["email"])
-
- elif command == "ip":
- lookupIp(args["ip"])
-
- elif command == "device":
- lookupDevice(args["device"])
-
- except Exception, e:
- LOG(e.message)
- LOG.print_log()
- return_error(e.message)
- type: python
- commands:
- - name: awake-query-devices
- arguments:
- - name: queryExpression
- description: A query expression in the Awake Query Language
- - name: startTime
- required: true
- description: Beginning of the time range to query
- - name: endTime
- required: true
- description: End of the time range to query
- - name: ipAddress
- description: IP address (Exact match)
- - name: deviceName
- description: Name of the device (Regular expression)
- - name: domainName
- description: Name of the domain (Regular expression)
- - name: protocol
- description: Protocol (all uppercase, e.g. "TLS")
- - name: tag
- description: Tag to match (Regular expression)
- description: Query devices
- - name: awake-query-activities
- arguments:
- - name: queryExpression
- description: A query expression in the Awake Query Language
- - name: startTime
- required: true
- description: Beginning of the time range to query
- - name: endTime
- required: true
- description: End of the time range to query
- - name: ipAddress
- description: IP address (Exact match)
- - name: deviceName
- description: Name of the device (Regular expression)
- - name: domainName
- description: Name of the domain (Regular expression)
- - name: protocol
- description: Protocol (all uppercase, e.g. "TLS")
- - name: tag
- description: Tag to match (Regular expression)
- description: Query activities
- - name: awake-query-domains
- arguments:
- - name: queryExpression
- description: A query expression in the Awake Query Language
- - name: startTime
- required: true
- description: Beginning of the time range to query
- - name: endTime
- required: true
- description: End of the time range to query
- - name: ipAddress
- description: IP address (Exact match)
- - name: deviceName
- description: Name of the device (Regular expression)
- - name: domainName
- description: Name of the domain (Regular expression)
- - name: protocol
- description: Protocol (all uppercase, e.g. "TLS")
- - name: tag
- description: Tag to match (Regular expression)
- description: Query domains
- - name: awake-pcap-download
- arguments:
- - name: protocol
- description: Protocol (all uppercase, e.g. "TLS")
- - name: hostA
- description: First host's address
- - name: hostB
- description: Second host's address
- - name: portA
- description: First host's port
- - name: portB
- description: Second host's port
- - name: startTime
- description: Beginning of the time range to query
- - name: endTime
- description: End of the time range to query
- description: Download a PCAP
- - name: domain
- arguments:
- - name: domain
- required: true
- description: The domain name
- - name: lookback_minutes
- description: How many minutes of history to query
- description: Lookup a domain
- - name: ip
- arguments:
- - name: ip
- required: true
- description: The IP address
- - name: lookback_minutes
- description: How many minutes of history to query
- description: Lookup an IP address
- - name: email
- arguments:
- - name: email
- description: The email address
- - name: lookback_minutes
- description: How many minutes of history to query
- description: Lookup an email address
- - name: device
- arguments:
- - name: device
- required: true
- description: The device ID
- - name: lookback_minutes
- description: How many minutes of history to query
- description: Lookup a device
- isfetch: true
- runonce: false
-releaseNotes: "-"
diff --git a/Beta_Integrations/integrations-proofpoint_threat_response.yml b/Beta_Integrations/integrations-proofpoint_threat_response.yml
deleted file mode 100644
index 9be8592a6edb..000000000000
--- a/Beta_Integrations/integrations-proofpoint_threat_response.yml
+++ /dev/null
@@ -1,352 +0,0 @@
-commonfields:
- id: Proofpoint Threat Response
- version: -1
-name: Proofpoint Threat Response
-display: Proofpoint Threat Response
-category: Email Gateway
-image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAX8AAACECAMAAABPuNs7AAAAgVBMVEX///8AAADy8vL7+/vKyspnZ2e6urpOTk6oqKhkZGSBgYEiIiI5OTng4OBKSkrDw8MPDw+fn5+wsLDo6OjS0tKIiIjZ2dns7Ox3d3erq6tTU1PGxsY+Pj5dXV3V1dVERESWlpYrKysgICCNjY1xcXE0NDQODg4qKiqYmJgYGBh7e3vubSIFAAAJPklEQVR4nO2c12LiOhCGXYgpG0NcgNAhJJS8/wMey7I1xbIhWQLenPnurK5ffSRwHEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQBEEQhF/GdHQaK/az26fdKZncPu3fQfzhlnzcPnWT9ur2af8GvKELLG+fvkn76fZp/wY+3Fbq3xsXbG9fpDaxd9up/6qMtrt9kdqE21L9+2W059sXqUWkVP8fqKzo30QE2h+Wy+cf2KOI/k1sjTzBD+VgMph/Kdr/RP9BWc3jo0vC+L/pv3l0SRj3099fj2bx4vLpPJlMPOoyTYMoimajC3HDziwLFncSi981+vtpkBUwbMoiq0MvinpB6jWFqsELw2q0px/Tv7SHLPJMg26ZUX+BQ02p2SSYqyDIPOMHpoQZy22NPEnURcFWM1TRdZZ4ahL5SFVmqUrZWGx8FSzemAW0Y89jPT5CFi+vC+rL7D+J+c4/J/uljrf7NKmrIKkp9i4v2PSSqlfjm6JmBQrJ6fMjhWBB6fiualhUMDaJ0GOTomtRZ7KqBHs1NdlU/PRhxzNfCeSs2VhGWrDjiZwj7G+c9d4KNlxZV0hJGY5Fy0VulbiS73fB+gc8mzFUywiWdUFeipjH0xXko/jVGuxU+M4tfur8BfpPnVld5JLk2ZbFC+pIxlHvP3vm23M+ebw9CwLcbncG+icV+bNJyKL/yPgW+r/b6qxYE2n+1ITq+lfq71Xkz7sDwlIDzfYK/bvVaKd76m/txWX9TM0+QY9Cf0u5S1C/S+pDHa/U31rAMaqLpX0qwYwL0/8wtEUL76i/nZjpP5gzvydbrBKzx/GaQm2u09/OyFRl1BSsnKmMA+//VlYP19/1qP5n5KX0t8/9JcZaaFMXUL3Ttv4er9Df9a9pYjMZmu+r9Fe1v9f6m/M26MWnPnb5pPqzUpBKL6NFmgZkmS26HYn+FHfSzoy0SDbOkzAME7OUvE1D9V2VdTeexVuyj9oWNSGlHo/SdLR9Qy5/rtH/NYp7ZBkO8vNAmJgcP1Q5LQeEm+h/LjaN/gA5erX6z9CBKZPMzPa4Bj6tdbakl2UP0V6lNMaY5P6YAhL9d0UeHm5kncUEuZgdJy52fFH/slejDcW+cPqx8y/Wf+cbZzStRLwiWdmDcJrh49goMh6yJxYbr5doySsWigv6L61ZaNnQOoSOHiG46stkqEP+ifWHAznMhaUF9i76Y4sAdOy80lj/DwiHmokcCaEH5ULCXNOtyXvPcrXr71mzmLO0ejgPtCjnp7V6/Tu2SGV576H/gHjQWiP98aMEmIi3JDISLeRfCGi+YqFu1v9Uk4VPCshuCKHtt6RaXP+hNe3S9R76U23Gxn1BqufiQ//BuLIVCWaDbJHomI8uDYUaWQ+fZv2p1QUGwBrHpN0f9+UhyZHrT6I9Qv8D9QDRVLcD/bFlMrG6OiT8O56rmTaomfQ2vlF/dhs8IqmC5YrbxoyHS764/sReZfrVHfV/px5Qb7UDBT23KAxIsKeRUcsMcU/lBjPeMo36j2lcyEJlbj7OvIawyk+dBv3J6H95gP5cQmOvUesb6I+PfjB/Vw4kxueIp2C+b4ZBpsVt1J8PHiwlhOJTHGr91Gmz/vyx5RJnCfrjcRpZXXOMj4t7IA+1Nj569DXqzw/9xmOOBwMbxnwha6/+vHpGNbXhuaR/yiIj/X1oyYr+cGi6Qv8Ri2wGaBfrz2Yp/KRr5Pxa/Rv6/4/pb6wLpP8PWKh/Rn8+vRrzgJpR7fqDwZdrQ9ZDsFDzUPDeSsvWqD9fY4xH33Gm+IMCO9OO02b9+cA1ZVB3AHb9wTVikSHZJT6l8Tt3SECfrRr153ngtoNQlTfrYJcLnTbrzx4kQY9SGyO7/rB80nso7LPCMwC7DEdro579GvVnL9bAtKMaBkydPg1GZsI2688mB9hbKmns+nt1kZ2T8djipPjkvGPVb9Sf7ezp4gNjjG0FYGXI79larD9dgMEGqCYNu/7o1MkWYHinsCBGSBoKeehe22x/oHnQEy+0BhuK8KQxX+JbrD8xXMHJKO92NfrDzILNV8RcpISFyYHO4WB+KFbNZv2JAYIZ3FBTkkWG968W649nB+9AXWv0R5ceWFmkWV5N9DwIVxO1UjH2LtiftxB3yl2hjUkzoWuBfIi1WX9ogAS9c8ptNjX6O+gOC/avUxQ7n42RWKie+E6hcLp0/2VsJCG6h9b9HVny0bN1dBWn56W/1f/Wv3+h97/nnqrMZIycdAnq9O+gkHO98nkn5FbYRfF14VjrNcEPJ8q2u6S/e4xV7DW+4SzVhhHrHmK9nIx2KJy2i5rPr+kPha159fhdLr5/0KLW6e/QJ4Xz/oo8YSxrxURc9ecv2MHs2S/qb6EUjl6RblYr+qKnGDrm+2v6owY/Pw+fK8fNb3NJ/ydeOaZ/szzbMljtyzRa92/o/2lC9puClU1sHL6mP3snkS1XnV7cm3VmWQF7jW+xr9b/xbVw9i/oT94dcNCJqfpCl9TmGv0Ptrj4hxrWx58FpeWble1a/dc0tazEaezug4E7yna+f/MaC9nfjm6V6uCuzH+pJZqGmILrGwAVv9n+82aJjG+7fNsbrpyDCWacvqa/Q/POi/ySOuPPrnPs30b/kWWkm0I16O8kO3ulmb2mbgrCr3Qb9e9U35AemEHJ/sTanYNNwrh9UX9a+lzxQ+oMguHsfXAj/R3+BHgO11VN+mNrA4pcmRU921NRavUz+oOtAdufvSWNvOK2HqdDl/+cF6yPcdUnvhr9jSvca5MVQB/lOs4gHrnh6630d5wYPRL/wIv8YtPVbCo3LXkqEav3+9oWLBzQUG9bdiF5GhbZwBae2v9naJVacmueLin7kceQmVWKDIa64YPye0lG0rwMhabQZAx1zE3h/YkTBU7fOVnLcSVU/2w236ut4Xm+/er/syTxYK4mycPz+8naRppJ9LpUGp6Hg9lV+wZ+/9LZd7OF+Lw61cb2F/unnYpwXI2D273UVCSTNJ0kXmXYfR+uf+uov//6FYj+j0X0fyyi/2MR/R+L6P9YRP/HIvo/FtH/sYj+j0X0fyz1729bwi/X3wkKYtv/IbUAP255AQVBEARBEARBEARBEARBEARBEARBEARBEARBEARBEARBEARBEARBEIR/jP8A1b11uYKkC88AAAAASUVORK5CYII=
-description: Proofpoint Threat Response builds lists of indicators for enforcement actions.
-detaileddescription: |-
- To create an API key, navigate to:
- Settings -> API Key -> Add API Key
-configuration:
-- display: Server URL (e.g. https://192.168.0.1)
- name: url
- defaultvalue: ""
- type: 0
- required: true
-- display: API Key
- name: apikey
- defaultvalue: ""
- type: 4
- required: true
-- display: Trust any certificate (unsecure)
- name: insecure
- defaultvalue: "true"
- type: 8
- required: false
-- display: Use system proxy
- name: proxy
- defaultvalue: "false"
- type: 8
- required: false
-- display: ID of IPs blacklist
- name: blacklist_ip
- defaultvalue: ""
- type: 0
- required: false
-- display: ID of Domains blacklist
- name: blacklist_domain
- defaultvalue: ""
- type: 0
- required: false
-- display: ID of URLs blacklist
- name: blacklist_url
- defaultvalue: ""
- type: 0
- required: false
-- display: ID of hashes blacklist
- name: blacklist_hash
- defaultvalue: ""
- type: 0
- required: false
-script:
- script: |
- ''' IMPORTS '''
- import requests
- import json
- requests.packages.urllib3.disable_warnings()
-
- if not demisto.getParam('proxy'):
- os.environ.pop('HTTP_PROXY', None)
- os.environ.pop('HTTPS_PROXY', None)
- os.environ.pop('http_proxy', None)
- os.environ.pop('https_proxy', None)
-
- ''' GLOBAL VARS '''
- BASE_URL = demisto.params().get('url')
- if BASE_URL[-1] != '/':
- BASE_URL += '/'
- API_KEY = demisto.params().get('apikey')
- VERIFY_CERTIFICATE = False
-
- ''' COMMAND FUNCTIONS '''
- def get_list(list_id):
- fullurl = BASE_URL + 'api/lists/{}/members.json'.format(list_id)
- res = requests.get(
- fullurl,
- headers = {
- 'Content-Type': 'application/json',
- 'Authorization': API_KEY
- },
- verify=VERIFY_CERTIFICATE
- )
-
- if res.status_code < 200 or res.status_code >= 300:
- return_error('Get list failed. URL: {}, StatusCode: {}, Response: {}'.format(fullurl, res.status_code, res.text))
-
- return res.json()
-
- def get_list_command():
- ''' Retrieves all indicators of the given list ID in Threat Response '''
- list_id = demisto.args().get('list-id')
- list_items = get_list(list_id)
-
- demisto.results({ 'list' : list_items })
-
-
- def add_to_list(list_id, indicator, comment, expiration):
- fullurl = BASE_URL + 'api/lists/{}/members.json'.format(list_id)
-
- indicator = {
- 'member': indicator
- }
- if comment:
- indicator['description'] = comment
-
- if expiration:
- indicator['expiration'] = expiration
-
- res = requests.post(
- fullurl,
- headers = {
- 'Authorization': API_KEY
- },
- verify=VERIFY_CERTIFICATE,
- json=indicator
- )
-
- if res.status_code < 200 or res.status_code >= 300:
- return_error('Add to list failed. URL: {}, Request Body: {}, StatusCode: {}, Response: {}'.format(fullurl, json.dumps(indicator), res.status_code, res.content))
-
- return res.json()
-
- def add_to_list_command():
- ''' Adds given indicators to the given list ID in Threat Response '''
- list_id = demisto.args().get('list-id')
- indicators = argToList(demisto.args().get('indicator'))
- comment = demisto.args().get('comment')
- expiration = demisto.args().get('expiration')
-
- message = ''
- for indicator in indicators:
- add_to_list(list_id, indicator, comment, expiration)
- message += '{} added successfully to {}\n'.format(indicator, list_id)
-
- demisto.results(message)
-
-
- def block_ip_command():
- ''' Adds given IPs to the relevant blacklist in Threat Response '''
- list_id = demisto.params().get('blacklist_ip')
- ips = argToList(demisto.args().get('ip'))
- expiration = demisto.args().get('expiration')
-
- message = ''
- for ip in ips:
- add_to_list(list_id, ip, None, expiration)
- message += '{} added successfully to block_ip list\n'.format(ip)
-
- demisto.results(message)
-
-
- def block_domain_command():
- ''' Adds given domains to the relevant blacklist in Threat Response '''
- list_id = demisto.params().get('blacklist_domain')
- domains = argToList(demisto.args().get('domain'))
- expiration = demisto.args().get('expiration')
-
- message = ''
- for domain in domains:
- add_to_list(list_id, domain, None, expiration)
- message += '{} added successfully to block_domain list\n'.format(domain)
-
- demisto.results(message)
-
-
- def block_url_command():
- ''' Adds given URLs to the relevant blacklist in Threat Response '''
- list_id = demisto.params().get('blacklist_url')
- urls = argToList(demisto.args().get('url'))
- expiration = demisto.args().get('expiration')
-
- message = ''
- for url in urls:
- add_to_list(list_id, url, None, expiration)
- message += '{} added successfully to block_url list\n'.format(url)
-
- demisto.results(message)
-
-
- def block_hash_command():
- ''' Adds given hashes to the relevant blacklist in Threat Response '''
- list_id = demisto.params().get('blacklist_hash')
- hashes = argToList(demisto.args().get('hash'))
- expiration = demisto.args().get('expiration')
-
- message = ''
- for h in hashes:
- add_to_list(list_id, h, None, expiration)
- message += '{} added successfully to block_hash list\n'.format(h)
-
- demisto.results(message)
-
-
- def search_indicators(list_id, indicator_filter):
- list_indicators = get_list(list_id)
- found_items = []
- for item in list_indicators:
- item_indicator = demisto.get(item, 'host.host')
- if indicator_filter in item_indicator:
- found_items.append(item)
-
- return found_items
-
- def search_indicator_command():
- ''' Retrieves indicators of a list, using a filter '''
- list_id = demisto.args().get('list-id')
- indicator_filter = demisto.args().get('filter')
- found = search_indicators(list_id, indicator_filter)
-
- demisto.results({ 'indicators': found })
-
-
- def delete_indicator(list_id, indicator_filter):
- indicator = search_indicators(list_id, indicator_filter)
- if len(indicator) == 0:
- return_error('{} does not exist in {}'.format(indicator_filter, list_id))
-
- indicator_id = indicator[0].get('id')
- fullurl = BASE_URL + 'api/lists/{}/members/{}.json'.format(list_id, indicator_id)
- res = requests.delete(
- fullurl,
- headers = {
- 'Authorization': API_KEY
- },
- verify=VERIFY_CERTIFICATE
- )
- if res.status_code < 200 or res.status_code >= 300:
- return_error('Delete indicator failed. URL: {}, StatusCode: {}, Response: {}'.format(fullurl, res.status_code, res.text))
-
- def delete_indicator_command():
- ''' Deletes an indicator from a list '''
- list_id = demisto.args().get('list-id')
- indicator = demisto.args().get('indicator')
- delete_indicator(list_id, indicator)
-
- demisto.results('{} deleted successfully from list {}'.format(indicator, list_id))
-
-
- def test():
- get_list(demisto.params().get('blacklist_ip'))
-
-
- ''' EXECUTION CODE '''
- LOG('command is %s' % (demisto.command(), ))
- if demisto.command() == 'test-module':
- test()
- demisto.results('ok')
-
- elif demisto.command() == 'proofpoint-tr-get-list':
- get_list_command()
-
- elif demisto.command() == 'proofpoint-tr-add-to-list':
- add_to_list_command()
-
- elif demisto.command() == 'proofpoint-tr-block-ip':
- block_ip_command()
-
- elif demisto.command() == 'proofpoint-tr-block-domain':
- block_domain_command()
-
- elif demisto.command() == 'proofpoint-tr-block-url':
- block_url_command()
-
- elif demisto.command() == 'proofpoint-tr-block-hash':
- block_hash_command()
-
- elif demisto.command() == 'proofpoint-tr-delete-indicator':
- delete_indicator_command()
-
- elif demisto.command() == 'proofpoint-tr-search-indicator':
- search_indicator_command()
- type: python
- commands:
- - name: proofpoint-tr-get-list
- arguments:
- - name: list-id
- required: true
- description: ID of the list
- description: Get list items
- - name: proofpoint-tr-add-to-list
- arguments:
- - name: list-id
- required: true
- - name: indicator
- required: true
- description: 'An IP, URL, domain, or hash. For example: "192.168.1.1,192.168.1.2"'
- isArray: true
- - name: comment
- description: Comment regarding the member
- - name: expiration
- description: Expiration of the member
- description: Add member to list
- execution: true
- - name: proofpoint-tr-block-ip
- arguments:
- - name: ip
- required: true
- description: List of IPs
- isArray: true
- - name: expiration
- description: Expiration of the IP
- description: Block IP
- execution: true
- - name: proofpoint-tr-block-domain
- arguments:
- - name: domain
- required: true
- description: List of domains
- isArray: true
- - name: expiration
- description: Expiration of the Domain
- description: Block Domain
- - name: proofpoint-tr-search-indicator
- arguments:
- - name: list-id
- required: true
- description: ID of the list
- - name: filter
- required: true
- description: Filter for the search. For example, "1.1" will return [1.1.1.1,
- 22.22.1.1,1.1.22.22]
- description: Return a list of indicators
- - name: proofpoint-tr-delete-indicator
- arguments:
- - name: list-id
- required: true
- description: ID of the list
- - name: indicator
- required: true
- description: 'An IP, URL, domain, or hash. For example: "demisto.com"'
- description: Delete an indicator from a list
- - name: proofpoint-tr-block-url
- arguments:
- - name: url
- required: true
- description: List of URLs
- isArray: true
- - name: expiration
- description: Expiration of the URLs
- description: Block URL
- - name: proofpoint-tr-block-hash
- arguments:
- - name: hash
- required: true
- description: List of hashes
- isArray: true
- - name: expiration
- description: Expiration of the hash
- description: Block hash
- runonce: false
-releaseNotes: DO NOT RELEASE
\ No newline at end of file
diff --git a/Beta_Integrations/playbook-TrendMicro_DDA_DetonateFile.yml b/Beta_Integrations/playbook-TrendMicro_DDA_DetonateFile.yml
index af2c84e0edc9..4be5b3bed5dd 100644
--- a/Beta_Integrations/playbook-TrendMicro_DDA_DetonateFile.yml
+++ b/Beta_Integrations/playbook-TrendMicro_DDA_DetonateFile.yml
@@ -1,6 +1,6 @@
-id: Detonate File - Trend Micro Deep Discovery Analyzer
+id: Detonate File - Trend Micro Deep Discovery Analyzer Beta
version: -1
-name: Detonate File - Trend Micro Deep Discovery Analyzer
+name: Detonate File - Trend Micro Deep Discovery Analyzer Beta
fromversion: 4.0.0
description: "Detonates a File using the Trend Micro Deep Discovery Analyzer sandbox.\nDeep
Discovery Analyzer (version 6.0.0) supports the following File Types:\nbat, cell,
@@ -448,7 +448,6 @@ outputs:
- contextPath: IP.Address
description: IPs relevant to the submission
type: string
-releaseNotes: "-"
+beta: true
tests:
-- Test Playbook TrendMicroDDA
-- detonate_file_-_generic_test
+- No Test - run "Test Playbook TrendMicroDDA" manually
diff --git a/Beta_Integrations/playbook-TrendMicro_DDA_DetonateURL.yml b/Beta_Integrations/playbook-TrendMicro_DDA_DetonateURL.yml
index 1a43bb7f24d1..972047cd2ae6 100644
--- a/Beta_Integrations/playbook-TrendMicro_DDA_DetonateURL.yml
+++ b/Beta_Integrations/playbook-TrendMicro_DDA_DetonateURL.yml
@@ -1,6 +1,6 @@
-id: Detonate URL - Trend Micro Deep Discovery Analyzer
+id: Detonate URL - Trend Micro Deep Discovery Analyzer Beta
version: -1
-name: Detonate URL - Trend Micro Deep Discovery Analyzer
+name: Detonate URL - Trend Micro Deep Discovery Analyzer Beta
fromversion: 4.0.0
description: Detonates a URL using the Trend Micro Deep Discovery Analyzer sandbox.
starttaskid: "0"
@@ -368,7 +368,6 @@ outputs:
- contextPath: File.Name
description: File name
type: string
-releaseNotes: "-"
+beta: true
tests:
-- Test Playbook TrendMicroDDA
-- detonate_url_-_generic_test
+- No Test - run "Test Playbook TrendMicroDDA" manually
diff --git a/Beta_Integrations/remedy_SR/remedy_SR.py b/Beta_Integrations/remedy_SR/remedy_SR.py
new file mode 100644
index 000000000000..60ab61c4901b
--- /dev/null
+++ b/Beta_Integrations/remedy_SR/remedy_SR.py
@@ -0,0 +1,413 @@
+import base64
+import hashlib
+import json
+import os
+import demistomock as demisto
+import requests
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+if not demisto.params().get('proxy', False):
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+VERIFY_CERTIFICATE = not demisto.params().get('insecure', False)
+URL = demisto.params()['server']
+XML_NS = demisto.params()['xml_ns']
+USERNAME = demisto.params()['username']
+PASSWORD = demisto.params()['password']
+
+HEADERS = {
+ 'Content-Type': 'text/xml',
+ 'SOAPAction': ''
+}
+
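+# SOAP request templates. Each request carries a WS-Security UsernameToken
+# (username, password digest, base64-encoded nonce and creation time) followed
+# by the service request fields in the envelope body.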
+GET_TICKET_BODY = """
+
+
+
+ sams
+
+ {password_digest}
+ {base64_binary}
+ {req_time}
+
+
+
+
+
+
+
+
+ ?
+
+ Remedy
+ sams
+
+
+ {date}
+ {time}
+ UTC
+
+ 1
+
+
+
+
+ ?
+ ?
+ ?
+ ?
+
+
+ {service_request_id}
+
+
+
+
+ """
+
+CREATE_TICKET_BODY = """
+
+
+ sams
+ {password_digest}
+ {base64_binary}
+ {req_time}
+
+
+
+
+
+
+
+
+ ?
+
+ Remedy
+ !svcautomationdev
+
+
+ {date}
+ {time}
+ UTC
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {details}
+ True
+
+ test
+
+ New
+
+ 47103
+
+
+
+ {requester_ntid}
+
+ {requester_pernr}
+
+
+
+
+
+
+ {contact_email}
+
+ {contact_name}
+
+ {contact_phone}
+
+
+ {requester_email}
+
+
+
+ {requester_name}
+
+ {requester_phone}
+
+ {requester_work_city}
+
+ {requester_work_location}
+
+ {requester_work_street}
+
+
+
+ Web
+ Demisto
+
+ Pending
+
+
+
+ test
+
+ test
+
+
+
+
+
+ """
+
+
+def http_request(body=''):
+ ''' Makes an API call with the given arguments '''
+ response = requests.post(URL, data=body, headers=HEADERS, verify=VERIFY_CERTIFICATE)
+
+ if response.status_code < 200 or response.status_code >= 300:
+ if response.status_code == 404:
+            return_error('Request failed with status 404. Cannot find the requested resource. Check your Server URL.')
+        elif response.status_code == 500:
+            json_result = json.loads(xml2json(response.content))
+            return_error('Request failed with status ' + str(response.status_code) + '. Reason: ' + str(
+                json_result['Envelope']['Body']['Fault']['faultstring']))
+        else:
+            return_error(
+                'Request failed with status ' + str(response.status_code) + '. Reason: ' + str(response.reason))
+
+ json_result = json.loads(xml2json(response.content))
+
+ if 'Envelope' in json_result:
+ if 'Body' in json_result['Envelope']:
+ if 'Fault' in json_result['Envelope']['Body']:
+                return_error('Request failed. Reason: ' + json_result['Envelope']['Body']['Fault']['faultstring'])
+
+ return json_result
+
+
+def prettify_get_ticket(json_result):
+ ticket = json_result['Envelope']['Body']['getResponse']['return']['Body']
+
+ if not ticket:
+ return_error(json_result['Envelope']['Body']['getResponse']['return']['Header'])
+
+ pretty_ticket = {
+ 'ServiceRequestId': ticket['ServiceRequestId'],
+ 'ServiceRequestStatus': ticket['ServiceRequestStatus'],
+ 'Priority': ticket['Priority']
+ }
+ if 'Created' in ticket:
+ if 'When' in ticket['Created']:
+ pretty_ticket['Date'] = ticket['Created']['When']['Date']
+ pretty_ticket['Time'] = ticket['Created']['When']['Time']
+
+ if 'Details' in ticket:
+ pretty_ticket['Details'] = ticket['Details']
+ if 'SourceReference' in ticket:
+ pretty_ticket['SourceReference'] = ticket['SourceReference']
+
+ if 'RequesterContactInformation' in ticket:
+
+ if 'RequesterEmail' in ticket['RequesterContactInformation']:
+ pretty_ticket['RequesterEmail'] = ticket['RequesterContactInformation']['RequesterEmail']
+ if 'RequesterName' in ticket['RequesterContactInformation']:
+ pretty_ticket['RequesterName'] = ticket['RequesterContactInformation']['RequesterName']
+ if 'RequesterPhone' in ticket['RequesterContactInformation']:
+ pretty_ticket['RequesterPhone'] = ticket['RequesterContactInformation']['RequesterPhone']
+ if 'RequesterWorkCity' in ticket['RequesterContactInformation']:
+ pretty_ticket['RequesterWorkCity'] = ticket['RequesterContactInformation']['RequesterWorkCity']
+ if 'RequesterWorkLocation' in ticket['RequesterContactInformation']:
+ pretty_ticket['RequesterWorkLocation'] = ticket['RequesterContactInformation']['RequesterWorkLocation']
+ if 'RequesterWorkStreet' in ticket['RequesterContactInformation']:
+ pretty_ticket['RequesterWorkStreet'] = ticket['RequesterContactInformation']['RequesterWorkStreet']
+
+ if 'ContactInformation' in ticket['RequesterContactInformation']:
+ if 'ContactEmail' in ticket['RequesterContactInformation']['ContactInformation']:
+ pretty_ticket['ContactEmail'] = ticket['RequesterContactInformation']['ContactInformation'][
+ 'ContactEmail']
+ if 'ContactPhone' in ticket['RequesterContactInformation']['ContactInformation']:
+ pretty_ticket['ContactPhone'] = ticket['RequesterContactInformation']['ContactInformation'][
+ 'ContactPhone']
+ if 'ContactName' in ticket['RequesterContactInformation']['ContactInformation']:
+ pretty_ticket['ContactName'] = ticket['RequesterContactInformation']['ContactInformation'][
+ 'ContactName']
+
+ return pretty_ticket
+
+
+@logger
+def remedy_get_ticket(service_request_id):
+ now = datetime.utcnow()
+ req_time = now.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'
+ date = now.strftime('%Y-%m-%d')
+ time = now.strftime('%H:%M:%S')
+
+ nonce = os.urandom(16)
+ base64_binary = base64.b64encode(nonce).decode("ascii")
+ # Password_Digest = Base64 (SHA-1 (nonce + createtime + password))
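+    # This is the standard WS-Security UsernameToken digest: the server, which
+    # knows the shared password, recomputes the digest from the nonce and
+    # creation time to verify the request without sending the password itself.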
+ hash_object = hashlib.sha1(nonce + req_time.encode("utf-8") + PASSWORD.encode("utf-8"))
+ digest_string = hash_object.digest()
+ password_digest = base64.b64encode(digest_string).decode("ascii")
+
+ body = GET_TICKET_BODY.format(xml_ns=XML_NS, password_digest=password_digest, base64_binary=base64_binary,
+ req_time=str(req_time), date=date, time=time,
+ service_request_id=service_request_id)
+ response = http_request(body)
+
+ return response
+
+
+def remedy_get_ticket_command():
+ service_request_id = demisto.args()['service_request_id']
+ response = remedy_get_ticket(service_request_id)
+ pretty_ticket = prettify_get_ticket(response)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': response,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Ticket:', pretty_ticket,
+ ['ServiceRequestId', 'Priority', 'ServiceRequestStatus', 'RequesterEmail',
+ 'RequesterName', 'RequesterPhone']),
+ 'EntryContext': {
+ "Remedy.Ticket(val.ServiceRequestId == obj.ServiceRequestId)": pretty_ticket
+ }
+ })
+
+
+def prettify_create_ticket(json_result, requester_phone, requester_name, requester_email):
+ ticket = json_result['Envelope']['Body']['createResponse']['return']['Body']
+
+ if not ticket:
+ return_error(json_result['Envelope']['Body']['createResponse']['return']['Header'])
+
+ pretty_ticket = {'ServiceRequestId': ticket['ServiceRequestId']}
+ pretty_ticket['RequesterPhone'] = requester_phone
+ pretty_ticket['RequesterName'] = requester_name
+ pretty_ticket['RequesterEmail'] = requester_email
+
+ return pretty_ticket
+
+
+@logger
+def remedy_create_ticket(details, requester_ntid, requester_email, requester_name,
+ requester_phone, requester_work_city, requester_work_location,
+ requester_work_street, requester_pernr='?',
+ contact_email='?', contact_name='?', contact_phone='?'):
+ now = datetime.utcnow()
+ req_time = now.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'
+ date = now.strftime('%Y-%m-%d')
+ time = now.strftime('%H:%M:%S')
+
+ nonce = os.urandom(16)
+ base64_binary = base64.b64encode(nonce).decode("ascii")
+ # Password_Digest = Base64 (SHA-1 (nonce + createtime + password))
+ hash_object = hashlib.sha1(nonce + req_time.encode("utf-8") + PASSWORD.encode("utf-8"))
+ digest_string = hash_object.digest()
+ password_digest = base64.b64encode(digest_string).decode("ascii")
+
+ body = CREATE_TICKET_BODY.format(xml_ns=XML_NS, password_digest=password_digest, base64_binary=base64_binary,
+ req_time=str(req_time), date=date, time=time, details=details,
+ requester_ntid=requester_ntid, requester_email=requester_email,
+ requester_name=requester_name, requester_phone=requester_phone,
+ requester_work_city=requester_work_city,
+ requester_work_location=requester_work_location,
+ requester_work_street=requester_work_street, requester_pernr=requester_pernr,
+ contact_email=contact_email, contact_name=contact_name,
+ contact_phone=contact_phone)
+ response = http_request(body)
+
+ return response
+
+
+def remedy_create_ticket_command():
+ args = demisto.args()
+ details = args['details']
+ requester_ntid = args['requester_ntid']
+ requester_pernr = args['requester_pernr'] if 'requester_pernr' in args else None
+ contact_email = args['contact_email'] if 'contact_email' in args else None
+ contact_name = args['contact_name'] if 'contact_name' in args else None
+ contact_phone = args['contact_phone'] if 'contact_phone' in args else None
+ requester_email = args['requester_email']
+ requester_name = args['requester_name']
+ requester_phone = args['requester_phone']
+ requester_work_city = args['requester_work_city']
+ requester_work_location = args['requester_work_location']
+ requester_work_street = args['requester_work_street']
+
+ response = remedy_create_ticket(details, requester_ntid, requester_email, requester_name,
+ requester_phone, requester_work_city, requester_work_location,
+ requester_work_street, requester_pernr,
+ contact_email, contact_name, contact_phone)
+
+ pretty_ticket = prettify_create_ticket(response, requester_phone, requester_name, requester_email)
+
+ ec_create = {
+ 'ServiceRequestId': response['Envelope']['Body']['createResponse']['return']['Body']['ServiceRequestId'],
+ 'Details': details,
+ 'RequesterNTID': requester_ntid,
+ 'RequesterPERNR': requester_pernr,
+ 'RequesterEmail': requester_email,
+ 'RequesterName': requester_name,
+ 'RequesterPhone': requester_phone,
+ 'RequesterWorkCity': requester_work_city,
+ 'RequesterWorkLocation': requester_work_location,
+ 'RequesterWorkStreet': requester_work_street,
+ 'ContactEmail': contact_email,
+ 'ContactName': contact_name,
+ 'ContactPhone': contact_phone
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': response,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Ticket:', pretty_ticket,
+ ['ServiceRequestId', 'RequesterEmail', 'RequesterName', 'RequesterPhone']),
+ 'EntryContext': {
+ "Remedy.Ticket(val.ServiceRequestId == obj.ServiceRequestId)": ec_create
+ }
+ })
+
+
+def remedy_update_ticket_command():
+ demisto.log('TODO')
+
+
+''' EXECUTION CODE '''
+LOG('command is %s' % (demisto.command(),))
+try:
+ if demisto.command() == 'test-module':
+ remedy_get_ticket('SR000552078')
+ demisto.results('ok')
+
+ elif demisto.command() == 'remedy-get-ticket':
+ remedy_get_ticket_command()
+
+ elif demisto.command() == 'remedy-create-ticket':
+ remedy_create_ticket_command()
+
+    elif demisto.command() == 'remedy-update-ticket':
+ remedy_update_ticket_command()
+
+except Exception as e:
+    return_error(str(e))
diff --git a/Beta_Integrations/remedy_SR/remedy_SR.yml b/Beta_Integrations/remedy_SR/remedy_SR.yml
new file mode 100644
index 000000000000..9ddbfa940cc1
--- /dev/null
+++ b/Beta_Integrations/remedy_SR/remedy_SR.yml
@@ -0,0 +1,235 @@
+category: Case Management
+commonfields:
+ id: remedy_sr_beta
+ version: -1
+configuration:
+- display: Server (e.g. http://192.168.0.1)
+ name: server
+ required: true
+ type: 0
+- display: xml_ns (e.g. http://.com/tts/servicerequest/v1_6)
+ name: xml_ns
+ required: true
+ type: 0
+- display: Username
+ name: username
+ required: true
+ type: 0
+- display: Password
+ name: password
+ required: true
+ type: 4
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (unsecure)
+ name: insecure
+ required: false
+ type: 8
+description: |-
+ The BMC Service Request Management application enables an IT department and other business departments to easily define available services, publish those services in a service catalog, and automate fulfillment of those services for the user community, enabling users to help themselves.
+  This integration uses the SOAP API and supports SRM version 9.0.
+display: Remedy SR (Beta)
+name: remedy_sr_beta
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Service request ID
+ isArray: false
+ name: service_request_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Get ticket details
+ execution: false
+ name: remedy-get-ticket
+ outputs:
+ - contextPath: Remedy.Ticket.RequesterEmail
+ description: Email of requester
+ type: string
+ - contextPath: Remedy.Ticket.RequesterName
+ description: Name of requester
+ type: string
+ - contextPath: Remedy.Ticket.RequesterPhone
+ description: Phone of requester
+ type: string
+ - contextPath: Remedy.Ticket.RequesterNTID
+ description: NTID of requester
+ type: string
+ - contextPath: Remedy.Ticket.RequesterWorkLocation
+ description: Work location of requester
+ type: string
+ - contextPath: Remedy.Ticket.RequesterWorkCity
+ description: Work city of requester
+ type: string
+ - contextPath: Remedy.Ticket.RequesterWorkStreet
+ description: Work street of requester
+ type: string
+ - contextPath: Remedy.Ticket.Details
+ description: Ticket details
+ type: string
+ - contextPath: Remedy.Ticket.Priority
+ description: Ticket priority
+ type: string
+ - contextPath: Remedy.Ticket.ServiceRequestId
+ description: Ticket Service request ID
+ type: string
+ - contextPath: Remedy.Ticket.SourceReference
+ description: Source reference of the ticket
+ type: string
+ - contextPath: Remedy.Ticket.Date
+ description: Date the ticket was created
+ type: string
+ - contextPath: Remedy.Ticket.Time
+ description: Time the ticket was created
+ type: string
+ - contextPath: Remedy.Ticket.ContactEmail
+ description: Contact Email
+ type: string
+ - contextPath: Remedy.Ticket.ContactName
+ description: Contact Name
+ type: string
+ - contextPath: Remedy.Ticket.ContactPhone
+ description: Contact Phone
+ type: string
+ - contextPath: Remedy.Ticket.RequesterPERNR
+ description: Requester PERNR
+ type: string
+ - arguments:
+ - default: false
+ description: Ticket details
+ isArray: false
+ name: details
+ required: true
+ secret: false
+ - default: false
+ description: Requester NTID
+ isArray: false
+ name: requester_ntid
+ required: true
+ secret: false
+ - default: false
+ description: Requester PERNR
+ isArray: false
+ name: requester_pernr
+ required: false
+ secret: false
+ - default: false
+ description: Contact Email
+ isArray: false
+ name: contact_email
+ required: false
+ secret: false
+ - default: false
+ description: Contact Name
+ isArray: false
+ name: contact_name
+ required: false
+ secret: false
+ - default: false
+ description: Contact Phone
+ isArray: false
+ name: contact_phone
+ required: false
+ secret: false
+ - default: false
+      description: Requester Email
+ isArray: false
+ name: requester_email
+ required: true
+ secret: false
+ - default: false
+ description: Requester First/Last Name
+ isArray: false
+ name: requester_name
+ required: true
+ secret: false
+ - default: false
+      description: Requester Phone
+ isArray: false
+ name: requester_phone
+ required: true
+ secret: false
+ - default: false
+ description: Requester City
+ isArray: false
+ name: requester_work_city
+ required: true
+ secret: false
+ - default: false
+ description: Requester Office
+ isArray: false
+ name: requester_work_location
+ required: true
+ secret: false
+ - default: false
+ description: Requester Street
+ isArray: false
+ name: requester_work_street
+ required: true
+ secret: false
+ deprecated: false
+ description: Create a ticket
+ execution: false
+ name: remedy-create-ticket
+ outputs:
+ - contextPath: Remedy.Ticket.RequesterEmail
+ description: Email of requester
+ type: string
+ - contextPath: Remedy.Ticket.RequesterName
+ description: Name of requester
+ type: string
+ - contextPath: Remedy.Ticket.RequesterPhone
+ description: Phone of requester
+ type: string
+ - contextPath: Remedy.Ticket.RequesterNTID
+ description: NTID of requester
+ type: string
+ - contextPath: Remedy.Ticket.RequesterWorkLocation
+ description: Work location of requester
+ type: string
+ - contextPath: Remedy.Ticket.RequesterWorkCity
+ description: Work city of requester
+ type: string
+ - contextPath: Remedy.Ticket.RequesterWorkStreet
+ description: Work street of requester
+ type: string
+ - contextPath: Remedy.Ticket.Details
+ description: Ticket details
+ type: string
+ - contextPath: Remedy.Ticket.Priority
+ description: Ticket priority
+ type: string
+ - contextPath: Remedy.Ticket.ServiceRequestId
+ description: Ticket Service request ID
+ type: string
+ - contextPath: Remedy.Ticket.RequesterPERNR
+ description: Requester PERNR
+ type: string
+ - contextPath: Remedy.Ticket.ContactEmail
+ description: Contact Email
+ type: string
+ - contextPath: Remedy.Ticket.ContactName
+ description: Contact Name
+ type: string
+ - contextPath: Remedy.Ticket.ContactPhone
+ description: Contact Phone
+ type: string
+ - deprecated: false
+ description: Update a ticket
+ execution: false
+ name: remedy-update-ticket
+ isfetch: false
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- No Test
+beta: true
diff --git a/Beta_Integrations/remedy_SR/remedy_SR_description.md b/Beta_Integrations/remedy_SR/remedy_SR_description.md
new file mode 100644
index 000000000000..e91ea3697156
--- /dev/null
+++ b/Beta_Integrations/remedy_SR/remedy_SR_description.md
@@ -0,0 +1 @@
+Note: This is a beta Integration, which lets you implement and test pre-release software. Since the integration is beta, it might contain bugs. Updates to the integration during the beta phase might include non-backward compatible features. We appreciate your feedback on the quality and usability of the integration to help us identify issues, fix them, and continually improve.
\ No newline at end of file
diff --git a/Beta_Integrations/remedy_SR/remedy_SR_image.png b/Beta_Integrations/remedy_SR/remedy_SR_image.png
new file mode 100644
index 000000000000..6303d583c151
Binary files /dev/null and b/Beta_Integrations/remedy_SR/remedy_SR_image.png differ
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 8be2aeb723bf..a5e1ad9bc5fb 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -6,6 +6,20 @@ Welcome to Demisto content repo!
## How to contribute
+To get you started, refer to our [Getting Started article](https://github.com/demisto/content/blob/master/docs/README.md)
+
+After you finish developing, there are a few steps left before you can create a pull request:
+
+ - Run linting and test checks as detailed [here](https://github.com/demisto/content/tree/master/docs/linting)
+
+ - Validate that files are formatted correctly by running the following from the Content root directory: ```PYTHONPATH="`pwd`:${PYTHONPATH}" python2 Tests/scripts/validate_files.py```
+
+ - Document your changes in the relevant changelog file as detailed [here](https://github.com/demisto/content/tree/master/docs/release_notes).
+
+You should now be ready to push your changes to the Content GitHub repository. To do so, follow the steps below.
+
+## Push changes to GitHub
+
Demisto content is MIT Licensed and accepts contributions via GitHub pull requests.
If you are a first-time GitHub contributor, please look at these links explaining how to create a Pull Request to a GitHub repo:
* https://guides.github.com/activities/forking/
@@ -13,6 +27,13 @@ If you are a first time GitHub contributor, please look at these links explainin
**Working on your first Pull Request?** You can learn how from this *free* series [How to Contribute to an Open Source Project on GitHub](https://egghead.io/series/how-to-contribute-to-an-open-source-project-on-github)
+## Contributor License Agreement
+Before merging any PRs, we need all contributors to sign a contributor license agreement. By signing a contributor license agreement, we ensure that the community is free to use your contributions.
+
+When you contribute a new pull request, a bot will evaluate whether you have signed the CLA. If required, the bot will comment on the pull request, including a link to accept the agreement. The CLA document is available for review as a [PDF](docs/cla.pdf).
+
+If the `license/cla` status check remains on *Pending*, even though all contributors have accepted the CLA, you can recheck the CLA status by visiting the following link (replace **[PRID]** with the ID of your PR): https://cla-assistant.io/check/demisto/content?pullRequest=[PRID] .
+
## Contributing Playbooks
You can edit or create playbooks visually inside the Demisto Platform and then export to a yaml file.
diff --git a/Classifiers/classifier-EWS_v2.json b/Classifiers/classifier-EWS_v2.json
index 1bbf7528026e..c89c446931ac 100644
--- a/Classifiers/classifier-EWS_v2.json
+++ b/Classifiers/classifier-EWS_v2.json
@@ -6,6 +6,10 @@
"mapping": {
"Phishing": {
"internalMapping": {
+ "Email HTML": {
+ "complex": null,
+ "simple": "Html"
+ },
"Attachment Count": {
"simple": "",
"complex": {
diff --git a/Classifiers/classifier-EWS_v2_CHANGELOG.md b/Classifiers/classifier-EWS_v2_CHANGELOG.md
new file mode 100644
index 000000000000..f732f9654866
--- /dev/null
+++ b/Classifiers/classifier-EWS_v2_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+Added "Email HTML" mapping.
\ No newline at end of file
diff --git a/Classifiers/classifier-Gsuite-gmail.json b/Classifiers/classifier-Gsuite-gmail.json
index 338eaad11beb..57d483db28ce 100644
--- a/Classifiers/classifier-Gsuite-gmail.json
+++ b/Classifiers/classifier-Gsuite-gmail.json
@@ -1,11 +1,15 @@
{
"id": "1bb6fa72-f964-4011-8a48-f70338082b68",
- "version": 3,
+ "version": -1,
"modified": "2018-07-25T15:49:02.222234104+03:00",
"defaultIncidentType": "",
"mapping": {
"Phishing": {
"internalMapping": {
+ "Email HTML": {
+ "complex": null,
+ "simple": "Html"
+ },
"Attachment Count": {
"simple": "",
"complex": {
diff --git a/Classifiers/classifier-Gsuite-gmail_CHANGELOG.md b/Classifiers/classifier-Gsuite-gmail_CHANGELOG.md
new file mode 100644
index 000000000000..f732f9654866
--- /dev/null
+++ b/Classifiers/classifier-Gsuite-gmail_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+Added "Email HTML" mapping.
\ No newline at end of file
diff --git a/Classifiers/classifier-Mail-listener.json b/Classifiers/classifier-Mail-listener.json
index cef4130cdb0f..c53fbf813a4b 100644
--- a/Classifiers/classifier-Mail-listener.json
+++ b/Classifiers/classifier-Mail-listener.json
@@ -6,6 +6,10 @@
"mapping": {
"Phishing": {
"internalMapping": {
+ "Email HTML": {
+ "complex": null,
+ "simple": "HTML"
+ },
"Attachment Name": {
"simple": "attachments",
"complex": null
diff --git a/Classifiers/classifier-Mail-listener_CHANGELOG.md b/Classifiers/classifier-Mail-listener_CHANGELOG.md
new file mode 100644
index 000000000000..f732f9654866
--- /dev/null
+++ b/Classifiers/classifier-Mail-listener_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+Added "Email HTML" mapping.
\ No newline at end of file
diff --git a/Classifiers/classifier-OnboardingIntegration.json b/Classifiers/classifier-OnboardingIntegration.json
new file mode 100644
index 000000000000..fe540d6a3bab
--- /dev/null
+++ b/Classifiers/classifier-OnboardingIntegration.json
@@ -0,0 +1,125 @@
+{
+ "id": "6f0fa57c-f3ea-4267-883b-e43d55086223",
+ "version": -1,
+ "modified": "2019-04-24T14:13:16.362251+03:00",
+ "defaultIncidentType": "Phishing",
+ "mapping": {
+ "Phishing": {
+ "internalMapping": {
+ "Email BCC": {
+ "simple": "BCC",
+ "complex": null
+ },
+ "Email Body": {
+ "simple": "Text",
+ "complex": null
+ },
+ "Email HTML": {
+ "simple": "Body",
+ "complex": null
+ },
+ "Email Body HTML": {
+ "simple": "Body",
+ "complex": null
+ },
+ "Email CC": {
+ "simple": "CC",
+ "complex": null
+ },
+ "Email From": {
+ "simple": "From",
+ "complex": null
+ },
+ "Email Message ID": {
+ "simple": "Message-ID",
+ "complex": null
+ },
+ "Email Received": {
+ "simple": "Received",
+ "complex": null
+ },
+ "Email Reply To": {
+ "simple": "Reply-To",
+ "complex": null
+ },
+ "Email Sender IP": {
+ "simple": "",
+ "complex": {
+ "root": "Received",
+ "filters": [],
+ "accessor": "",
+ "transformers": [
+ {
+ "operator": "substringFrom",
+ "args": {
+ "from": {
+ "value": {
+ "simple": "(",
+ "complex": null
+ },
+ "isContext": false
+ }
+ }
+ },
+ {
+ "operator": "substringTo",
+ "args": {
+ "to": {
+ "value": {
+ "simple": ")",
+ "complex": null
+ },
+ "isContext": false
+ }
+ }
+ }
+ ]
+ }
+ },
+ "Email Subject": {
+ "simple": "Subject",
+ "complex": null
+ },
+ "Email To": {
+ "simple": "To",
+ "complex": null
+ },
+ "Email To Count": {
+ "simple": "",
+ "complex": {
+ "root": "To",
+ "filters": [],
+ "accessor": "",
+ "transformers": [
+ {
+ "operator": "splitAndTrim",
+ "args": {
+ "delimiter": {
+ "value": {
+ "simple": ",",
+ "complex": null
+ },
+ "isContext": false
+ }
+ }
+ },
+ {
+ "operator": "count",
+ "args": {}
+ }
+ ]
+ }
+ }
+ },
+ "dontMapEventToLabels": false
+ }
+ },
+ "unclassifiedCases": {},
+ "custom": true,
+ "transformer": {
+ "simple": "",
+ "complex": null
+ },
+ "keyTypeMap": {},
+ "brandName": "OnboardingIntegration"
+}
\ No newline at end of file
diff --git a/Classifiers/classifier-OnboardingIntegration_CHANGELOG.md b/Classifiers/classifier-OnboardingIntegration_CHANGELOG.md
new file mode 100644
index 000000000000..f732f9654866
--- /dev/null
+++ b/Classifiers/classifier-OnboardingIntegration_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+Added "Email HTML" mapping.
\ No newline at end of file
diff --git a/Classifiers/classifier-PaloAltoNetworks_Cortex.json b/Classifiers/classifier-PaloAltoNetworks_Cortex.json
new file mode 100644
index 000000000000..1b2ea0e42040
--- /dev/null
+++ b/Classifiers/classifier-PaloAltoNetworks_Cortex.json
@@ -0,0 +1,439 @@
+{
+ "brandName": "Palo Alto Networks Cortex",
+ "custom": true,
+ "defaultIncidentType": "",
+ "fromVersion": "5.0.0",
+ "id": "5c83f473-c618-4c41-85d8-251eb1f90566",
+ "keyTypeMap": {
+ "threat": "Traps"
+ },
+ "mapping": {
+ "Traps": {
+ "dontMapEventToLabels": false,
+ "internalMapping": {
+ "Agent ID": {
+ "complex": null,
+ "simple": "agentId"
+ },
+ "Blocked Action": {
+ "complex": {
+ "accessor": "block",
+ "filters": [],
+ "root": "messageData",
+ "transformers": [
+ {
+ "args": {
+ "limit": {
+ "isContext": false,
+ "value": null
+ },
+ "replaceWith": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "File was not blocked"
+ }
+ },
+ "toReplace": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "0"
+ }
+ }
+ },
+ "operator": "replace"
+ },
+ {
+ "args": {
+ "limit": {
+ "isContext": false,
+ "value": null
+ },
+ "replaceWith": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "File was blocked"
+ }
+ },
+ "toReplace": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "1"
+ }
+ }
+ },
+ "operator": "replace"
+ }
+ ]
+ },
+ "simple": ""
+ },
+ "Command Line": {
+ "complex": {
+ "accessor": "processes.commandLine",
+ "filters": [],
+ "root": "messageData",
+ "transformers": []
+ },
+ "simple": ""
+ },
+ "Device Name": {
+ "complex": null,
+ "simple": "endPointHeader.deviceName"
+ },
+ "Event Type": {
+ "complex": null,
+ "simple": "eventType"
+ },
+ "File Hash": {
+ "complex": null,
+ "simple": "messageData.sourceProcess.sha256"
+ },
+ "File Name": {
+ "complex": {
+ "accessor": "files.fileName",
+ "filters": [],
+ "root": "messageData",
+ "transformers": []
+ },
+ "simple": ""
+ },
+ "File Path": {
+ "complex": {
+ "accessor": "files.rawFullPath",
+ "filters": [],
+ "root": "messageData",
+ "transformers": []
+ },
+ "simple": ""
+ },
+ "File Size": {
+ "complex": {
+ "accessor": "files.fileSize",
+ "filters": [],
+ "root": "messageData",
+ "transformers": []
+ },
+ "simple": ""
+ },
+ "Involved Users": {
+ "complex": {
+ "accessor": "userName",
+ "filters": [],
+ "root": "endPointHeader",
+ "transformers": []
+ },
+ "simple": ""
+ },
+ "PID": {
+ "complex": {
+ "accessor": "processes.pid",
+ "filters": [],
+ "root": "messageData",
+ "transformers": []
+ },
+ "simple": ""
+ },
+ "Parent Process ID": {
+ "complex": {
+ "accessor": "processes.parentId",
+ "filters": [],
+ "root": "messageData",
+ "transformers": []
+ },
+ "simple": ""
+ },
+ "Src": {
+ "complex": null,
+ "simple": "endPointHeader.agentIp"
+ },
+ "Src NT Domain": {
+ "complex": null,
+ "simple": "endPointHeader.deviceDomain"
+ },
+ "Src OS": {
+ "complex": {
+ "accessor": "osType",
+ "filters": [],
+ "root": "endPointHeader",
+ "transformers": [
+ {
+ "args": {
+ "limit": {
+ "isContext": false,
+ "value": null
+ },
+ "replaceWith": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "Windows"
+ }
+ },
+ "toReplace": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "1"
+ }
+ }
+ },
+ "operator": "replace"
+ },
+ {
+ "args": {
+ "limit": {
+ "isContext": false,
+ "value": null
+ },
+ "replaceWith": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "OS X/macOS"
+ }
+ },
+ "toReplace": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "2"
+ }
+ }
+ },
+ "operator": "replace"
+ },
+ {
+ "args": {
+ "limit": {
+ "isContext": false,
+ "value": null
+ },
+ "replaceWith": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "Android"
+ }
+ },
+ "toReplace": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "3"
+ }
+ }
+ },
+ "operator": "replace"
+ },
+ {
+ "args": {
+ "limit": {
+ "isContext": false,
+ "value": null
+ },
+ "replaceWith": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "Linux"
+ }
+ },
+ "toReplace": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "4"
+ }
+ }
+ },
+ "operator": "replace"
+ }
+ ]
+ },
+ "simple": ""
+ },
+ "Src User": {
+ "complex": null,
+ "simple": "${messageData.users.[0].userName}@${messageData.users.[0].userDomain}"
+ },
+ "Subtype": {
+ "complex": null,
+ "simple": "eventType"
+ },
+ "Terminated Action": {
+ "complex": {
+ "accessor": "terminate",
+ "filters": [],
+ "root": "messageData",
+ "transformers": [
+ {
+ "args": {
+ "limit": {
+ "isContext": false,
+ "value": null
+ },
+ "replaceWith": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "Traps did not terminate the file"
+ }
+ },
+ "toReplace": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "0"
+ }
+ }
+ },
+ "operator": "replace"
+ },
+ {
+ "args": {
+ "limit": {
+ "isContext": false,
+ "value": null
+ },
+ "replaceWith": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "Traps terminated the file"
+ }
+ },
+ "toReplace": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "1"
+ }
+ }
+ },
+ "operator": "replace"
+ }
+ ]
+ },
+ "simple": ""
+ },
+ "Traps ID": {
+ "complex": {
+ "accessor": "",
+ "filters": [],
+ "root": "uuid",
+ "transformers": [
+ {
+ "args": {
+ "limit": {
+ "isContext": false,
+ "value": null
+ },
+ "replaceWith": {
+ "isContext": false,
+ "value": null
+ },
+ "toReplace": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "-"
+ }
+ }
+ },
+ "operator": "replace"
+ }
+ ]
+ },
+ "simple": ""
+ },
+ "Triggered Security Profile": {
+ "complex": null,
+ "simple": "messageData.profile"
+ },
+ "Vendor Product": {
+ "complex": null,
+ "simple": "Traps ${endPointHeader.agentVersion} - Content ${endPointHeader.contentVersion}"
+ },
+ "details": {
+ "complex": null,
+ "simple": "messageData.description"
+ },
+ "name": {
+ "complex": {
+ "accessor": "eventCategory",
+ "filters": [],
+ "root": "messageData",
+ "transformers": [
+ {
+ "args": {
+ "prefix": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": "TRAPS THREAT: "
+ }
+ },
+ "suffix": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": " (ID: "
+ }
+ }
+ },
+ "operator": "concat"
+ },
+ {
+ "args": {
+ "prefix": {
+ "isContext": false,
+ "value": null
+ },
+ "suffix": {
+ "isContext": true,
+ "value": {
+ "complex": null,
+ "simple": "id"
+ }
+ }
+ },
+ "operator": "concat"
+ },
+ {
+ "args": {
+ "prefix": {
+ "isContext": false,
+ "value": null
+ },
+ "suffix": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": ")"
+ }
+ }
+ },
+ "operator": "concat"
+ }
+ ]
+ },
+ "simple": ""
+ },
+ "severity": {
+ "complex": null,
+ "simple": "messageData.trapsSeverity"
+ }
+ }
+ }
+ },
+ "transformer": {
+ "complex": null,
+ "simple": "messageData.class"
+ },
+ "unclassifiedCases": {},
+ "version": -1
+}
\ No newline at end of file
diff --git a/Classifiers/classifier-PaloAltoNetworks_CortexXDR.json b/Classifiers/classifier-PaloAltoNetworks_CortexXDR.json
new file mode 100644
index 000000000000..f4bffb50c28f
--- /dev/null
+++ b/Classifiers/classifier-PaloAltoNetworks_CortexXDR.json
@@ -0,0 +1,138 @@
+{
+ "brandName": "Cortex XDR - IR",
+ "defaultIncidentType": "",
+ "fromVersion": "5.0.0",
+ "id": "32f26072-9a69-41a5-8db8-0d1226431078",
+ "custom": true,
+ "keyTypeMap": {
+ "XDR Incident": "Cortex XDR Incident"
+ },
+ "mapping": {
+ "Cortex XDR Incident": {
+ "dontMapEventToLabels": false,
+ "internalMapping": {
+ "XDR Alert Count": {
+ "complex": null,
+ "simple": "alert_count"
+ },
+ "XDR Assigned User Email": {
+ "complex": null,
+ "simple": "assigned_user_mail"
+ },
+ "XDR Assigned User Pretty Name": {
+ "complex": null,
+ "simple": "assigned_user_pretty_name"
+ },
+ "XDR Description": {
+ "complex": null,
+ "simple": "description"
+ },
+ "XDR Detection Time": {
+ "complex": {
+ "accessor": "",
+ "filters": [],
+ "root": "detection_time",
+ "transformers": [
+ {
+ "args": {},
+ "operator": "TimeStampToDate"
+ }
+ ]
+ },
+ "simple": ""
+ },
+ "XDR High Severity Alert Count": {
+ "complex": null,
+ "simple": "high_severity_alert_count"
+ },
+ "XDR Incident ID": {
+ "complex": null,
+ "simple": "incident_id"
+ },
+ "XDR Low Severity Alert Count": {
+ "complex": null,
+ "simple": "low_severity_alert_count"
+ },
+ "XDR Medium Severity Alert Count": {
+ "complex": null,
+ "simple": "med_severity_alert_count"
+ },
+ "XDR Notes": {
+ "complex": null,
+ "simple": "notes"
+ },
+ "XDR Resolve Comment": {
+ "complex": null,
+ "simple": "resolve_comment"
+ },
+ "XDR Severity": {
+ "complex": null,
+ "simple": "severity"
+ },
+ "XDR Status": {
+ "complex": null,
+ "simple": "status"
+ },
+ "XDR URL": {
+ "complex": null,
+ "simple": "xdr_url"
+ },
+ "XDR User Count": {
+ "complex": null,
+ "simple": "user_count"
+ },
+ "occurred": {
+ "complex": {
+ "accessor": "",
+ "filters": [],
+ "root": "creation_time",
+ "transformers": [
+ {
+ "args": {},
+ "operator": "TimeStampToDate"
+ }
+ ]
+ },
+ "simple": ""
+ },
+ "severity": {
+ "complex": null,
+ "simple": "severity"
+ }
+ }
+ },
+ "CortextXDRIncident": {
+ "dontMapEventToLabels": false,
+ "internalMapping": {
+ "XDR Severity": {
+ "complex": null,
+ "simple": "severity"
+ }
+ }
+ }
+ },
+ "transformer": {
+ "complex": {
+ "accessor": "",
+ "filters": [],
+ "root": "severity",
+ "transformers": [
+ {
+ "args": {
+ "dt": {
+ "isContext": false,
+ "value": {
+ "complex": null,
+ "simple": ".=\"XDR Incident\""
+ }
+ }
+ },
+ "operator": "DT"
+ }
+ ]
+ },
+ "simple": ""
+ },
+ "unclassifiedCases": {},
+ "version": -1
+}
\ No newline at end of file
diff --git a/Classifiers/classifier-PaloAltoNetworks_CortexXDR_CHANGELOG.md b/Classifiers/classifier-PaloAltoNetworks_CortexXDR_CHANGELOG.md
new file mode 100644
index 000000000000..364a8b865e80
--- /dev/null
+++ b/Classifiers/classifier-PaloAltoNetworks_CortexXDR_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+Added a new mapping for the Cortex XDR integration. The integration converts an incident in XDR to an incident in Demisto, with the incident type **Cortex XDR Incident**. **(Available from Demisto 5.0)**
diff --git a/Classifiers/classifier-PaloAltoNetworks_Cortex_CHANGELOG.md b/Classifiers/classifier-PaloAltoNetworks_Cortex_CHANGELOG.md
new file mode 100644
index 000000000000..83cd2697a2bc
--- /dev/null
+++ b/Classifiers/classifier-PaloAltoNetworks_Cortex_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+New classifier for Traps incidents from the Palo Alto Networks Cortex integration.
\ No newline at end of file
diff --git a/Dashboards/dashboard-Home-old.json b/Dashboards/dashboard-Home-old.json
index 3cd02b510e3e..ecff8fe23c87 100644
--- a/Dashboards/dashboard-Home-old.json
+++ b/Dashboards/dashboard-Home-old.json
@@ -243,7 +243,7 @@
"id": "my-mean-time-to-resolution",
"version": 2,
"modified": "2018-02-28T15:55:31.609278+02:00",
- "name": "My Mean Time To Resolution",
+ "name": "My Mean Time to Resolution",
"dataType": "incidents",
"widgetType": "duration",
"query": "-category:job and status:closed and owner:{me}",
diff --git a/Dashboards/dashboard-Home.json b/Dashboards/dashboard-Home.json
index c55da685564b..e5ecbf289e3f 100644
--- a/Dashboards/dashboard-Home.json
+++ b/Dashboards/dashboard-Home.json
@@ -242,7 +242,7 @@
"id": "my-mean-time-to-resolution",
"version": 2,
"modified": "2018-02-28T15:55:31.609278+02:00",
- "name": "My Mean Time To Resolution",
+ "name": "My Mean Time to Resolution",
"dataType": "incidents",
"widgetType": "duration",
"query": "-category:job and status:closed and owner:\"{me}\"",
diff --git a/Dashboards/dashboard-SLA.json b/Dashboards/dashboard-SLA.json
new file mode 100644
index 000000000000..d4df522a69a6
--- /dev/null
+++ b/Dashboards/dashboard-SLA.json
@@ -0,0 +1,260 @@
+{
+ "id": "sla-dashboard",
+ "description": "A new dashboard to give you a good overview of your SLAs.",
+ "version": -1,
+ "fromVersion": "4.1.0",
+ "fromDate": "0001-01-01T00:00:00Z",
+ "toDate": "0001-01-01T00:00:00Z",
+ "period": {
+ "byTo": "",
+ "byFrom": "days",
+ "toValue": null,
+ "fromValue": 30,
+ "field": ""
+ },
+ "fromDateLicense": "0001-01-01T00:00:00Z",
+ "name": "SLA",
+ "layout": [
+ {
+ "id": "25a2e8f0-fd4e-11e8-a656-2b6c8cbabaee",
+ "forceRange": false,
+ "x": 6,
+ "y": 0,
+ "i": "25a2e8f0-fd4e-11e8-a656-2b6c8cbabaee",
+ "w": 3,
+ "h": 1,
+ "widget": {
+ "id": "fddd62ff-a411-4e6a-8213-e0277a9b95b5",
+ "version": 1,
+ "name": "Mean Time to Detection",
+ "dataType": "incidents",
+ "widgetType": "duration",
+ "query": "-category:job and detectionsla.runStatus:ended",
+ "sort": null,
+ "isPredefined": false,
+ "description": "The mean time (average time) to detection across all incidents that their severity was determined. The widget takes into account incidents from the last 30 days by default.",
+ "dateRange": {
+ "fromDate": "0001-01-01T00:00:00Z",
+ "toDate": "0001-01-01T00:00:00Z",
+ "period": {
+ "byTo": "",
+ "byFrom": "days",
+ "toValue": null,
+ "fromValue": 30,
+ "field": ""
+ },
+ "fromDateLicense": "0001-01-01T00:00:00Z"
+ },
+ "params": {
+ "keys": [
+ "avg|detectionsla.totalDuration"
+ ]
+ },
+ "size": 0,
+ "category": ""
+ }
+ },
+ {
+ "id": "3747f820-fd4e-11e8-a656-2b6c8cbabaee",
+ "forceRange": false,
+ "x": 0,
+ "y": 0,
+ "i": "3747f820-fd4e-11e8-a656-2b6c8cbabaee",
+ "w": 3,
+ "h": 3,
+ "widget": {
+ "id": "1e54092d-1ed0-47a6-862d-893adc05e612",
+ "version": 1,
+ "name": "Detection SLA by Status",
+ "dataType": "incidents",
+ "widgetType": "pie",
+ "query": "-category:job and -detectionsla.runStatus:idle",
+ "sort": null,
+ "isPredefined": false,
+ "description": "The detection SLA status of all incidents that their severity was determined. The widget takes into account incidents from the last 30 days by default, and inherits new time range when the dashboard time changes.",
+ "dateRange": {
+ "fromDate": "0001-01-01T00:00:00Z",
+ "toDate": "0001-01-01T00:00:00Z",
+ "period": {
+ "byTo": "",
+ "byFrom": "days",
+ "toValue": null,
+ "fromValue": 30,
+ "field": ""
+ },
+ "fromDateLicense": "0001-01-01T00:00:00Z"
+ },
+ "params": {
+ "groupBy": [
+ "detectionsla.slaStatus"
+ ]
+ },
+ "size": 0,
+ "category": ""
+ }
+ },
+ {
+ "id": "3de5b1e0-fd4e-11e8-a656-2b6c8cbabaee",
+ "forceRange": false,
+ "x": 3,
+ "y": 0,
+ "i": "3de5b1e0-fd4e-11e8-a656-2b6c8cbabaee",
+ "w": 3,
+ "h": 3,
+ "widget": {
+ "id": "1767dee0-7f8c-48a5-8988-c58b9e713ab6",
+ "version": 1,
+ "name": "Remediation SLA by Status",
+ "dataType": "incidents",
+ "widgetType": "pie",
+ "query": "-category:job and -remediationsla.runStatus:idle",
+ "sort": null,
+ "isPredefined": false,
+ "description": "The remediation SLA status of all incidents that started a remediation process. The widget takes into account incidents from the last 30 days by default, and inherits new time range when the dashboard time changes.",
+ "dateRange": {
+ "fromDate": "0001-01-01T00:00:00Z",
+ "toDate": "0001-01-01T00:00:00Z",
+ "period": {
+ "byTo": "",
+ "byFrom": "days",
+ "toValue": null,
+ "fromValue": 30,
+ "field": ""
+ },
+ "fromDateLicense": "0001-01-01T00:00:00Z"
+ },
+ "params": {
+ "groupBy": [
+ "remediationsla.slaStatus"
+ ]
+ },
+ "size": 0,
+ "category": ""
+ }
+ },
+ {
+ "id": "a48c1670-fdf1-11e8-a2fa-df5e7de7d45d",
+ "forceRange": false,
+ "x": 9,
+ "y": 0,
+ "i": "a48c1670-fdf1-11e8-a2fa-df5e7de7d45d",
+ "w": 3,
+ "h": 1,
+ "widget": {
+ "id": "mean-time-to-resolution",
+ "version": 169,
+ "name": "Mean Time To Resolution",
+ "dataType": "incidents",
+ "widgetType": "duration",
+ "query": "-category:job and status:closed",
+ "sort": null,
+ "isPredefined": true,
+ "dateRange": {
+ "fromDate": "0001-01-01T00:00:00Z",
+ "toDate": "0001-01-01T00:00:00Z",
+ "period": {
+ "byTo": "",
+ "byFrom": "days",
+ "toValue": null,
+ "fromValue": 7,
+ "field": ""
+ },
+ "fromDateLicense": "0001-01-01T00:00:00Z"
+ },
+ "params": {
+ "keys": [
+ "avg|openDuration",
+ "count|1"
+ ]
+ },
+ "size": 0,
+ "category": ""
+ }
+ },
+ {
+ "id": "d2bbe430-02a1-11e9-878d-4fff182656eb",
+ "forceRange": false,
+ "x": 6,
+ "y": 1,
+ "i": "d2bbe430-02a1-11e9-878d-4fff182656eb",
+ "w": 6,
+ "h": 5,
+ "widget": {
+ "id": "mttd-by-type",
+ "version": 1,
+ "name": "MTTD by Type",
+ "dataType": "incidents",
+ "widgetType": "line",
+ "query": "-category:job and detectionsla.runStatus:ended",
+ "sort": null,
+ "isPredefined": false,
+ "dateRange": {
+ "fromDate": "0001-01-01T00:00:00Z",
+ "toDate": "0001-01-01T00:00:00Z",
+ "period": {
+ "byTo": "",
+ "byFrom": "days",
+ "toValue": null,
+ "fromValue": 7,
+ "field": ""
+ },
+ "fromDateLicense": "0001-01-01T00:00:00Z"
+ },
+ "params": {
+ "groupBy": [
+ "occurred(d)",
+ "type"
+ ],
+ "keys": [
+ "avg|detectionsla.totalDuration / 60"
+ ]
+ },
+ "size": 0,
+ "category": ""
+ }
+ },
+ {
+ "id": "e30f9430-02a1-11e9-878d-4fff182656eb",
+ "forceRange": false,
+ "x": 0,
+ "y": 3,
+ "i": "e30f9430-02a1-11e9-878d-4fff182656eb",
+ "w": 6,
+ "h": 3,
+ "widget": {
+ "id": "mttr-by-type",
+ "version": 168,
+ "name": "MTTR by Type",
+ "dataType": "incidents",
+ "widgetType": "line",
+ "query": "-category:job and status:closed",
+ "sort": null,
+ "isPredefined": true,
+ "dateRange": {
+ "fromDate": "0001-01-01T00:00:00Z",
+ "toDate": "0001-01-01T00:00:00Z",
+ "period": {
+ "byTo": "",
+ "byFrom": "days",
+ "toValue": null,
+ "fromValue": 7,
+ "field": ""
+ },
+ "fromDateLicense": "0001-01-01T00:00:00Z"
+ },
+ "params": {
+ "groupBy": [
+ "occurred(d)",
+ "type"
+ ],
+ "keys": [
+ "avg|openDuration / (3600*24)"
+ ]
+ },
+ "size": 0,
+ "category": ""
+ }
+ }
+ ],
+ "isPredefined": false
+}
\ No newline at end of file
diff --git a/Documentation/.gitignore b/Documentation/.gitignore
new file mode 100644
index 000000000000..7bdba0b3965d
--- /dev/null
+++ b/Documentation/.gitignore
@@ -0,0 +1,3 @@
+commonServerJsDoc.js
+commonServerJsDoc.json
+doc-CommonServer.json
diff --git a/Documentation/common_server_docs.py b/Documentation/common_server_docs.py
index db412833643f..93d6883bba8b 100644
--- a/Documentation/common_server_docs.py
+++ b/Documentation/common_server_docs.py
@@ -2,17 +2,27 @@
import json
import sys
import yaml
+import os
from parinx import parser
+from package_creator import clean_python_code
-jsPrivateFuncs = ["dqQueryBuilder", "toArray", "indent", "formatTableValuesRecursive", "string_to_array", "array_to_hex_string",
- "SHA256_init", "SHA256_write", "SHA256_finalize", "SHA256_hash", "HMAC_SHA256_init", "HMAC_SHA256_write",
- "HMAC_SHA256_finalize", "HMAC_SHA256_MAC"]
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+CONTENT_DIR = os.path.abspath(SCRIPT_DIR + '/..')
+sys.path.append(CONTENT_DIR + '/Tests/demistomock')
+
+import demistomock # noqa: E402
+
+# The PrivateFuncs lists name helper functions that the documentation generator skips
+jsPrivateFuncs = ["dqQueryBuilder", "toArray", "indent", "formatTableValuesRecursive", "string_to_array",
+ "array_to_hex_string", "SHA256_init", "SHA256_write", "SHA256_finalize", "SHA256_hash",
+ "HMAC_SHA256_init", "HMAC_SHA256_write", "HMAC_SHA256_finalize", "HMAC_SHA256_MAC"]
pyPrivateFuncs = ["raiseTable", "zoomField", "epochToTimestamp", "formatTimeColumns", "strip_tag", "elem_to_internal",
- "internal_to_elem", "json2elem", "elem2json", "json2xml", "OrderedDict", "datetime", "timedelta",
- "createContextSingle", "IntegrationLogger", "tblToMd"]
+ "internal_to_elem", "json2elem", "elem2json", "json2xml", "OrderedDict", "datetime", "timedelta",
+ "createContextSingle", "IntegrationLogger", "tblToMd", "DemistoException", "BaseClient",
+ "BaseHTTPClient", "DemistoHandler", "DebugLogger"]
-pyIrregularFuncs = {"LOG" : {"argList" : ["message"]}}
+pyIrregularFuncs = {"LOG": {"argList": ["message"]}}
jsAutomationOnly = ["fileNameFromEntry", "closeInvestigation", "setSeverity", "setIncident", "createNewIncident",
"setPlaybookAccordingToType", "setOwner", "taskAssign", "setTaskDueDate", "setPlaybook", "addTask",
@@ -20,20 +30,20 @@
markdownDescFuncs = ["createEntry"]
+
def readJsonFile(filepath):
with open(filepath, 'r') as f:
out = json.load(f)
return out
- return []
+
def readYmlFile(filepath):
with open(filepath, 'r') as f:
- out = yaml.load(f)
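+ # safe_load avoids constructing arbitrary Python objects from YAML tags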
+ out = yaml.safe_load(f)
return out
- return []
-def reformatPythonOutput(output, origin, language):
+def reformatPythonOutput(output, origin, language):
res = []
isError = False
for a in output:
@@ -41,7 +51,7 @@ def reformatPythonOutput(output, origin, language):
continue
if a.get("description", "") == "":
- print "Description is missing for Python function", a["name"]
+ print("Description is missing for Python function", a["name"])
isError = True
# format arguments
@@ -55,7 +65,7 @@ def reformatPythonOutput(output, origin, language):
argInfo["type"] = argInfo["type_name"]
if argInfo.get("description", "") == "":
isError = True
- print "Missing description for argument", argName, "in python function", a["name"]
+ print("Missing description for argument", argName, "in python function", a["name"])
del argInfo["type_name"]
z.append(argInfo)
@@ -74,8 +84,8 @@ def reformatPythonOutput(output, origin, language):
return res, isError
-def createJsDocumentation(path, origin, language):
+def createJsDocumentation(path, origin, language):
isError = False
commonServerJs = readJsonFile(path)
x = []
@@ -86,11 +96,11 @@ def createJsDocumentation(path, origin, language):
y = {}
y["name"] = a.get("name", "")
if y["name"] == "":
- print "Error extracting function name for JS fucntion with the following data:\n", a
+ print("Error extracting function name for JS fucntion with the following data:\n", a)
isError = True
y["description"] = a.get("description", "")
if y["description"] == "":
- print "Description is missing for JS function", y["name"]
+ print("Description is missing for JS function", y["name"])
isError = True
for arg in a.get("params", []):
@@ -101,8 +111,8 @@ def createJsDocumentation(path, origin, language):
del arg["optional"]
if arg.get("name", "") == "" or arg.get("description", "") == "":
isError = True
- print "Missing name/description for argument in JS function", y["name"], ".\n Arg name is", \
- arg.get("name", ""), ", args description is", arg.get("description", "")
+ print("Missing name/description for argument in JS function", y["name"], ".\n Arg name is",
+ arg.get("name", ""), ", args description is", arg.get("description", ""))
y["arguments"] = a.get("params", [])
returns = a.get("returns", None)[0]
@@ -118,41 +128,46 @@ def createJsDocumentation(path, origin, language):
x.append(y)
return x, isError
+
def createPyDocumentation(path, origin, language):
isErrorPy = False
- # create commonServerPy json doc
- commonServerPython = readYmlFile(path)
- pyScript = commonServerPython.get("script", "")
+
+ with open(path, 'r') as file:
+ pyScript = clean_python_code(file.read(), remove_print_future=False)
code = compile(pyScript, '', 'exec')
- ns = {}
- exec code in ns
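+ # CommonServerPython expects the server-injected `demisto` object, so the
+ # exec namespace is seeded with the demistomock stub (from Tests/demistomock);
+ # this lets the module run outside a live server so its functions can be inspected.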
+ ns = {'demisto': demistomock}
+ exec(code, ns) # guardrails-disable-line
x = []
for a in ns:
- if callable(ns.get(a)) and a not in pyPrivateFuncs:
+ if a != 'demisto' and callable(ns.get(a)) and a not in pyPrivateFuncs:
docstring = inspect.getdoc(ns.get(a))
if not docstring:
- print "docstring for function " + a + " is empty"
+ print("docstring for function {} is empty".format(a))
isErrorPy = True
else:
y = parser.parse_docstring(docstring)
y["name"] = a
- y["argList"] = list(inspect.getargspec(ns.get(a)))[0] if pyIrregularFuncs.get(a, None) == None else pyIrregularFuncs[a]["argList"]
+ y["argList"] = list(inspect.getargspec(ns.get(a)))[0] if pyIrregularFuncs.get(a, None) is None \
+ else pyIrregularFuncs[a]["argList"]
+
x.append(y)
if isErrorPy:
return None, isErrorPy
return reformatPythonOutput(x, origin, language)
+
def main(argv):
jsDoc, isErrorJS = createJsDocumentation('./Documentation/commonServerJsDoc.json', 'CommonServerJs', 'javascript')
- pyDoc, isErrorPy = createPyDocumentation('./Scripts/script-CommonServerPython.yml', 'CommonServerPython', 'python')
+ pyDoc, isErrorPy = createPyDocumentation('./Scripts/CommonServerPython/CommonServerPython.py',
+ 'CommonServerPython', 'python')
finalDoc = readJsonFile('./Documentation/commonServerConstants.json')
if isErrorJS or isErrorPy or not finalDoc:
- print "Errors found in common server docs."
+ print("Errors found in common server docs.")
sys.exit(1)
with open('./Documentation/doc-CommonServer.json', 'w') as fp:
finalDoc += jsDoc
@@ -161,4 +176,4 @@ def main(argv):
if __name__ == "__main__":
- main(sys.argv[1:])
+ main(sys.argv[1:])
diff --git a/Documentation/extract_common_server_js.py b/Documentation/extract_common_server_js.py
index f896d6c7389e..bd8246256f6d 100644
--- a/Documentation/extract_common_server_js.py
+++ b/Documentation/extract_common_server_js.py
@@ -1,12 +1,14 @@
import sys
import yaml
+
def readFile(filepath):
with open(filepath, 'r') as f:
- out = yaml.load(f)
+ out = yaml.safe_load(f)
return out
- return []
+
def main(argv):
# create commonServer js file to extract doc from
commonServer = readFile('./Scripts/script-CommonServer.yml')
diff --git a/IncidentFields/incidentfield-Agent_ID.json b/IncidentFields/incidentfield-Agent_ID.json
new file mode 100644
index 000000000000..7ee3a43fbfcd
--- /dev/null
+++ b/IncidentFields/incidentfield-Agent_ID.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "agentid",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "Agent ID",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_agentid",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Agent ID",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Agent_ID_CHANGELOG.md b/IncidentFields/incidentfield-Agent_ID_CHANGELOG.md
new file mode 100644
index 000000000000..80ba190ca259
--- /dev/null
+++ b/IncidentFields/incidentfield-Agent_ID_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Agent ID
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-App.json b/IncidentFields/incidentfield-App.json
new file mode 100644
index 000000000000..0303380560ed
--- /dev/null
+++ b/IncidentFields/incidentfield-App.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_app",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.389092425+03:00",
+ "name": "App",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "app",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Access"
+ ],
+ "systemAssociatedTypes": [
+ "Access"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-App_CHANGELOG.md b/IncidentFields/incidentfield-App_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-App_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Asset_ID.json b/IncidentFields/incidentfield-Asset_ID.json
new file mode 100644
index 000000000000..8168bc093481
--- /dev/null
+++ b/IncidentFields/incidentfield-Asset_ID.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_assetid",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.427700464+03:00",
+ "name": "Asset ID",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "assetid",
+ "type": "shortText",
+ "closeForm": true,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Vulnerability"
+ ],
+ "systemAssociatedTypes": [
+ "Vulnerability"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Asset_ID_CHANGELOG.md b/IncidentFields/incidentfield-Asset_ID_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Asset_ID_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_Count.json b/IncidentFields/incidentfield-Attachment_Count.json
new file mode 100644
index 000000000000..233ab93a54a1
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_Count.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_attachmentcount",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.313509514+03:00",
+ "name": "Attachment Count",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "attachmentcount",
+ "type": "number",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_Count_CHANGELOG.md b/IncidentFields/incidentfield-Attachment_Count_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_Count_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_Extension.json b/IncidentFields/incidentfield-Attachment_Extension.json
new file mode 100644
index 000000000000..a478a07df136
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_Extension.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_attachmentextension",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.316669039+03:00",
+ "name": "Attachment Extension",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "attachmentextension",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_Extension_CHANGELOG.md b/IncidentFields/incidentfield-Attachment_Extension_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_Extension_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_Hash.json b/IncidentFields/incidentfield-Attachment_Hash.json
new file mode 100644
index 000000000000..8440aa750f6b
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_Hash.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_attachmenthash",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.320408213+03:00",
+ "name": "Attachment Hash",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "attachmenthash",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_Hash_CHANGELOG.md b/IncidentFields/incidentfield-Attachment_Hash_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_Hash_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_ID.json b/IncidentFields/incidentfield-Attachment_ID.json
new file mode 100644
index 000000000000..123b19d10ca5
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_ID.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_attachmentid",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.323188662+03:00",
+ "name": "Attachment ID",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "attachmentid",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_ID_CHANGELOG.md b/IncidentFields/incidentfield-Attachment_ID_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_ID_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_Name.json b/IncidentFields/incidentfield-Attachment_Name.json
new file mode 100644
index 000000000000..9efae1d80fbd
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_Name.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_attachmentname",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.326381385+03:00",
+ "name": "Attachment Name",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "attachmentname",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_Name_CHANGELOG.md b/IncidentFields/incidentfield-Attachment_Name_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_Name_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_size.json b/IncidentFields/incidentfield-Attachment_size.json
new file mode 100644
index 000000000000..007d8ca98912
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_size.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_attachmentsize",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.329260341+03:00",
+ "name": "Attachment size",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "attachmentsize",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_size_CHANGELOG.md b/IncidentFields/incidentfield-Attachment_size_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_size_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_type.json b/IncidentFields/incidentfield-Attachment_type.json
new file mode 100644
index 000000000000..057e8c3079ec
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_type.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_attachmenttype",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.332108621+03:00",
+ "name": "Attachment type",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "attachmenttype",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Attachment_type_CHANGELOG.md b/IncidentFields/incidentfield-Attachment_type_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Attachment_type_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Blocked_Action.json b/IncidentFields/incidentfield-Blocked_Action.json
new file mode 100644
index 000000000000..eadf194cdd04
--- /dev/null
+++ b/IncidentFields/incidentfield-Blocked_Action.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "blockedaction",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "Blocked Action",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_blockedaction",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Blocked Action",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Blocked_Action_CHANGELOG.md b/IncidentFields/incidentfield-Blocked_Action_CHANGELOG.md
new file mode 100644
index 000000000000..c86ef99122c1
--- /dev/null
+++ b/IncidentFields/incidentfield-Blocked_Action_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Blocked Action
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Bugtraq.json b/IncidentFields/incidentfield-Bugtraq.json
new file mode 100644
index 000000000000..9766ab2db479
--- /dev/null
+++ b/IncidentFields/incidentfield-Bugtraq.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_bugtraq",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.430765635+03:00",
+ "name": "Bugtraq",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "bugtraq",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Vulnerability"
+ ],
+ "systemAssociatedTypes": [
+ "Vulnerability"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Bugtraq_CHANGELOG.md b/IncidentFields/incidentfield-Bugtraq_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Bugtraq_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-CVE.json b/IncidentFields/incidentfield-CVE.json
new file mode 100644
index 000000000000..3c071c9b173e
--- /dev/null
+++ b/IncidentFields/incidentfield-CVE.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_cve",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.433685165+03:00",
+ "name": "CVE",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "cve",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Vulnerability"
+ ],
+ "systemAssociatedTypes": [
+ "Vulnerability"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-CVE_CHANGELOG.md b/IncidentFields/incidentfield-CVE_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-CVE_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-CVSS.json b/IncidentFields/incidentfield-CVSS.json
new file mode 100644
index 000000000000..e9ef11f043c5
--- /dev/null
+++ b/IncidentFields/incidentfield-CVSS.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_cvss",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.436656554+03:00",
+ "name": "CVSS",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "cvss",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Vulnerability"
+ ],
+ "systemAssociatedTypes": [
+ "Vulnerability"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-CVSS_CHANGELOG.md b/IncidentFields/incidentfield-CVSS_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-CVSS_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Command_Line.json b/IncidentFields/incidentfield-Command_Line.json
new file mode 100644
index 000000000000..8fd3eceb455e
--- /dev/null
+++ b/IncidentFields/incidentfield-Command_Line.json
@@ -0,0 +1,40 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Crowdstrike",
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "commandline",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "Command Line",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_commandline",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Command Line",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Command_Line_CHANGELOG.md b/IncidentFields/incidentfield-Command_Line_CHANGELOG.md
new file mode 100644
index 000000000000..98761433eb93
--- /dev/null
+++ b/IncidentFields/incidentfield-Command_Line_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Command Line
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Dest.json b/IncidentFields/incidentfield-Dest.json
new file mode 100644
index 000000000000..acea7c8de09b
--- /dev/null
+++ b/IncidentFields/incidentfield-Dest.json
@@ -0,0 +1,37 @@
+{
+ "id": "incident_dest",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.39174697+03:00",
+ "name": "Dest",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "Destination",
+ "cliName": "dest",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Access",
+ "Malware"
+ ],
+ "systemAssociatedTypes": [
+ "Access",
+ "Malware"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Dest_CHANGELOG.md b/IncidentFields/incidentfield-Dest_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Dest_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Dest_NT_Domain.json b/IncidentFields/incidentfield-Dest_NT_Domain.json
new file mode 100644
index 000000000000..b39a9e73bb39
--- /dev/null
+++ b/IncidentFields/incidentfield-Dest_NT_Domain.json
@@ -0,0 +1,37 @@
+{
+ "id": "incident_destntdomain",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.394843894+03:00",
+ "name": "Dest NT Domain",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "Destination NT Domain",
+ "cliName": "destntdomain",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Access",
+ "Malware"
+ ],
+ "systemAssociatedTypes": [
+ "Access",
+ "Malware"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Dest_NT_Domain_CHANGELOG.md b/IncidentFields/incidentfield-Dest_NT_Domain_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Dest_NT_Domain_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Device_Name.json b/IncidentFields/incidentfield-Device_Name.json
new file mode 100644
index 000000000000..16b6b9f80b7e
--- /dev/null
+++ b/IncidentFields/incidentfield-Device_Name.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "devicename",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "Device Name",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_devicename",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Device Name",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Device_Name_CHANGELOG.md b/IncidentFields/incidentfield-Device_Name_CHANGELOG.md
new file mode 100644
index 000000000000..2a066285b43d
--- /dev/null
+++ b/IncidentFields/incidentfield-Device_Name_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Device Name
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Duration.json b/IncidentFields/incidentfield-Duration.json
new file mode 100644
index 000000000000..8a51e66a7c31
--- /dev/null
+++ b/IncidentFields/incidentfield-Duration.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_duration",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.397705995+03:00",
+ "name": "Duration",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "duration",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Access"
+ ],
+ "systemAssociatedTypes": [
+ "Access"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Duration_CHANGELOG.md b/IncidentFields/incidentfield-Duration_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Duration_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_BCC.json b/IncidentFields/incidentfield-Email_BCC.json
new file mode 100644
index 000000000000..805db276152b
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_BCC.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailbcc",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.335010031+03:00",
+ "name": "Email BCC",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailbcc",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_BCC_CHANGELOG.md b/IncidentFields/incidentfield-Email_BCC_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_BCC_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Body.json b/IncidentFields/incidentfield-Email_Body.json
new file mode 100644
index 000000000000..d7584577541b
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Body.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailbody",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.337643992+03:00",
+ "name": "Email Body",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailbody",
+ "type": "longText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Body_CHANGELOG.md b/IncidentFields/incidentfield-Email_Body_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Body_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Body_Format.json b/IncidentFields/incidentfield-Email_Body_Format.json
new file mode 100644
index 000000000000..1ff7be40ebd0
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Body_Format.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailbodyformat",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.34104065+03:00",
+ "name": "Email Body Format",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailbodyformat",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Body_Format_CHANGELOG.md b/IncidentFields/incidentfield-Email_Body_Format_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Body_Format_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Body_HTML.json b/IncidentFields/incidentfield-Email_Body_HTML.json
new file mode 100644
index 000000000000..1b4f517d078f
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Body_HTML.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailbodyhtml",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.344156391+03:00",
+ "name": "Email Body HTML",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailbodyhtml",
+ "type": "html",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": true
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Body_HTML_CHANGELOG.md b/IncidentFields/incidentfield-Email_Body_HTML_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Body_HTML_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_CC.json b/IncidentFields/incidentfield-Email_CC.json
new file mode 100644
index 000000000000..10946e3d4409
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_CC.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailcc",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.347108901+03:00",
+ "name": "Email CC",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailcc",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_CC_CHANGELOG.md b/IncidentFields/incidentfield-Email_CC_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_CC_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Client_Name.json b/IncidentFields/incidentfield-Email_Client_Name.json
new file mode 100644
index 000000000000..7acda2ca69dc
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Client_Name.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailclientname",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.349792637+03:00",
+ "name": "Email Client Name",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailclientname",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Client_Name_CHANGELOG.md b/IncidentFields/incidentfield-Email_Client_Name_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Client_Name_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_From.json b/IncidentFields/incidentfield-Email_From.json
new file mode 100644
index 000000000000..ff86767c98d0
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_From.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailfrom",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.35251857+03:00",
+ "name": "Email From",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailfrom",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_From_CHANGELOG.md b/IncidentFields/incidentfield-Email_From_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_From_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_In_Reply_To.json b/IncidentFields/incidentfield-Email_In_Reply_To.json
new file mode 100644
index 000000000000..30badcf854cb
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_In_Reply_To.json
@@ -0,0 +1,33 @@
+{
+ "id": "incident_emailinreplyto",
+ "version": -1,
+ "modified": "2018-09-02T10:53:05.262517498+03:00",
+ "name": "Email In Reply To",
+ "ownerOnly": false,
+ "placeholder": "emailinreplyto",
+ "description": "The mail to whom you reply",
+ "cliName": "emailinreplyto",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_In_Reply_To_CHANGELOG.md b/IncidentFields/incidentfield-Email_In_Reply_To_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_In_Reply_To_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Keywords.json b/IncidentFields/incidentfield-Email_Keywords.json
new file mode 100644
index 000000000000..1450cb56868f
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Keywords.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailkeywords",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.355250331+03:00",
+ "name": "Email Keywords",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailkeywords",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Keywords_CHANGELOG.md b/IncidentFields/incidentfield-Email_Keywords_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Keywords_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Message_ID.json b/IncidentFields/incidentfield-Email_Message_ID.json
new file mode 100644
index 000000000000..bc951f5a865d
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Message_ID.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailmessageid",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.358269902+03:00",
+ "name": "Email Message ID",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailmessageid",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Message_ID_CHANGELOG.md b/IncidentFields/incidentfield-Email_Message_ID_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Message_ID_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Received.json b/IncidentFields/incidentfield-Email_Received.json
new file mode 100644
index 000000000000..4aed1d75abb9
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Received.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailreceived",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.360983117+03:00",
+ "name": "Email Received",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailreceived",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Received_CHANGELOG.md b/IncidentFields/incidentfield-Email_Received_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Received_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Reply_To.json b/IncidentFields/incidentfield-Email_Reply_To.json
new file mode 100644
index 000000000000..3e2db905bcb2
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Reply_To.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailreplyto",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.36346759+03:00",
+ "name": "Email Reply To",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailreplyto",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Reply_To_CHANGELOG.md b/IncidentFields/incidentfield-Email_Reply_To_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Reply_To_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Return_Path.json b/IncidentFields/incidentfield-Email_Return_Path.json
new file mode 100644
index 000000000000..2e33737256ca
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Return_Path.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailreturnpath",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.366098374+03:00",
+ "name": "Email Return Path",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailreturnpath",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Return_Path_CHANGELOG.md b/IncidentFields/incidentfield-Email_Return_Path_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Return_Path_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Sender_IP.json b/IncidentFields/incidentfield-Email_Sender_IP.json
new file mode 100644
index 000000000000..63fc1a2f8c81
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Sender_IP.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailsenderip",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.368675488+03:00",
+ "name": "Email Sender IP",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailsenderip",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Sender_IP_CHANGELOG.md b/IncidentFields/incidentfield-Email_Sender_IP_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Sender_IP_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Size.json b/IncidentFields/incidentfield-Email_Size.json
new file mode 100644
index 000000000000..81d2908e4e50
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Size.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailsize",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.37171586+03:00",
+ "name": "Email Size",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailsize",
+ "type": "number",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Size_CHANGELOG.md b/IncidentFields/incidentfield-Email_Size_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Size_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Source.json b/IncidentFields/incidentfield-Email_Source.json
new file mode 100644
index 000000000000..bccae43342ff
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Source.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailsource",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.375343721+03:00",
+ "name": "Email Source",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailsource",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Source_CHANGELOG.md b/IncidentFields/incidentfield-Email_Source_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Source_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Subject.json b/IncidentFields/incidentfield-Email_Subject.json
new file mode 100644
index 000000000000..28384cc1a1b9
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Subject.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailsubject",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.378224888+03:00",
+ "name": "Email Subject",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailsubject",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_Subject_CHANGELOG.md b/IncidentFields/incidentfield-Email_Subject_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_Subject_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_To.json b/IncidentFields/incidentfield-Email_To.json
new file mode 100644
index 000000000000..dc89e6ae5810
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_To.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailto",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.380974651+03:00",
+ "name": "Email To",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailto",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_To_CHANGELOG.md b/IncidentFields/incidentfield-Email_To_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_To_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_To_Count.json b/IncidentFields/incidentfield-Email_To_Count.json
new file mode 100644
index 000000000000..9bd15968d4aa
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_To_Count.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailtocount",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.383761983+03:00",
+ "name": "Email To Count",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailtocount",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_To_Count_CHANGELOG.md b/IncidentFields/incidentfield-Email_To_Count_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_To_Count_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_URL_Clicked.json b/IncidentFields/incidentfield-Email_URL_Clicked.json
new file mode 100644
index 000000000000..74ad59ce4fad
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_URL_Clicked.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_emailurlclicked",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.386379045+03:00",
+ "name": "Email URL Clicked",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailurlclicked",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": [
+ "Phishing"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Email_URL_Clicked_CHANGELOG.md b/IncidentFields/incidentfield-Email_URL_Clicked_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Email_URL_Clicked_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-File_Hash.json b/IncidentFields/incidentfield-File_Hash.json
new file mode 100644
index 000000000000..fec8f7dec143
--- /dev/null
+++ b/IncidentFields/incidentfield-File_Hash.json
@@ -0,0 +1,37 @@
+{
+ "id": "incident_filehash",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.405371418+03:00",
+ "name": "File Hash",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "filehash",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Malware",
+ "Traps"
+ ],
+ "systemAssociatedTypes": [
+ "Malware",
+ "Traps"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-File_Hash_CHANGELOG.md b/IncidentFields/incidentfield-File_Hash_CHANGELOG.md
new file mode 100644
index 000000000000..c9507117bf5a
--- /dev/null
+++ b/IncidentFields/incidentfield-File_Hash_CHANGELOG.md
@@ -0,0 +1,8 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+Associated with the Traps incident type.
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-File_Name.json b/IncidentFields/incidentfield-File_Name.json
new file mode 100644
index 000000000000..5a360561008e
--- /dev/null
+++ b/IncidentFields/incidentfield-File_Name.json
@@ -0,0 +1,37 @@
+{
+ "id": "incident_filename",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.407952489+03:00",
+ "name": "File Name",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "filename",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Malware",
+ "Traps"
+ ],
+ "systemAssociatedTypes": [
+ "Malware",
+ "Traps"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-File_Name_CHANGELOG.md b/IncidentFields/incidentfield-File_Name_CHANGELOG.md
new file mode 100644
index 000000000000..08cab6f77265
--- /dev/null
+++ b/IncidentFields/incidentfield-File_Name_CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+Associated with the Traps incident type.
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-File_Path.json b/IncidentFields/incidentfield-File_Path.json
new file mode 100644
index 000000000000..7c853e5e22e0
--- /dev/null
+++ b/IncidentFields/incidentfield-File_Path.json
@@ -0,0 +1,37 @@
+{
+ "id": "incident_filepath",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.410700747+03:00",
+ "name": "File Path",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "filepath",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Malware",
+ "Traps"
+ ],
+ "systemAssociatedTypes": [
+ "Malware",
+ "Traps"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-File_Path_CHANGELOG.md b/IncidentFields/incidentfield-File_Path_CHANGELOG.md
new file mode 100644
index 000000000000..08cab6f77265
--- /dev/null
+++ b/IncidentFields/incidentfield-File_Path_CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+Associated with the Traps incident type.
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-File_Size.json b/IncidentFields/incidentfield-File_Size.json
new file mode 100644
index 000000000000..f0399425cd8f
--- /dev/null
+++ b/IncidentFields/incidentfield-File_Size.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "filesize",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "File Size",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_filesize",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "File Size",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
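Unlike the older field definitions above, this file uses the newer schema (alphabetized keys, "caseInsensitive", "sla"/"threshold") and carries "fromVersion": "5.0.0", so the server only loads it on version 5.0.0 and later, matching SERVER_VERSION in the build configuration. A rough sketch of that gating logic, under the assumption that versions compare as dotted numbers:

    from distutils.version import LooseVersion

    def should_load(field_json, server_version="5.0.0"):
        # Fields without "fromVersion" are assumed to load on any server version.
        return LooseVersion(server_version) >= LooseVersion(field_json.get("fromVersion", "0"))
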
diff --git a/IncidentFields/incidentfield-File_Size_CHANGELOG.md b/IncidentFields/incidentfield-File_Size_CHANGELOG.md
new file mode 100644
index 000000000000..74037d936037
--- /dev/null
+++ b/IncidentFields/incidentfield-File_Size_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+File Size
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Infected_Hosts.json b/IncidentFields/incidentfield-Infected_Hosts.json
new file mode 100644
index 000000000000..ecfbfe2eed3c
--- /dev/null
+++ b/IncidentFields/incidentfield-Infected_Hosts.json
@@ -0,0 +1,64 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "infectedhosts",
+ "closeForm": false,
+ "columns": [
+ {
+ "displayName": "Infected Host IP",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "infectedhostip",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 600
+ },
+ {
+ "displayName": "Infected Hostname",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "infectedhostname",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ }
+ ],
+ "content": true,
+ "defaultRows": [],
+ "description": "Infected hosts found in the investigation",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_infectedhosts",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Infected Hosts",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "grid",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
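This is a grid (table) field: each entry in "columns" defines one table column, and the column "key" values are the keys that rows are written with. A hedged sketch of populating it from an automation (the host values are illustrative):

    import demistomock as demisto  # the repo's test shim; the server injects `demisto` at runtime

    rows = [
        {"infectedhostip": "10.0.0.4", "infectedhostname": "host-a"},
        {"infectedhostip": "10.0.0.7", "infectedhostname": "host-b"},
    ]
    demisto.executeCommand("setIncident", {"infectedhosts": rows})
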
diff --git a/IncidentFields/incidentfield-Infected_Hosts_CHANGELOG.md b/IncidentFields/incidentfield-Infected_Hosts_CHANGELOG.md
new file mode 100644
index 000000000000..e3da3efa6868
--- /dev/null
+++ b/IncidentFields/incidentfield-Infected_Hosts_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Infected Hosts
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Isolated.json b/IncidentFields/incidentfield-Isolated.json
new file mode 100644
index 000000000000..c555878131fe
--- /dev/null
+++ b/IncidentFields/incidentfield-Isolated.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "isolated",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "Isolated",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_isolated",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Isolated",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Isolated_CHANGELOG.md b/IncidentFields/incidentfield-Isolated_CHANGELOG.md
new file mode 100644
index 000000000000..587e3e1c796b
--- /dev/null
+++ b/IncidentFields/incidentfield-Isolated_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Isolated
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Malicious_Behavior.json b/IncidentFields/incidentfield-Malicious_Behavior.json
new file mode 100644
index 000000000000..e90104b74559
--- /dev/null
+++ b/IncidentFields/incidentfield-Malicious_Behavior.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "maliciousbehavior",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "Malicious Behavior",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_maliciousbehavior",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Malicious Behavior",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Malicious_Behavior_CHANGELOG.md b/IncidentFields/incidentfield-Malicious_Behavior_CHANGELOG.md
new file mode 100644
index 000000000000..6f421ce2389c
--- /dev/null
+++ b/IncidentFields/incidentfield-Malicious_Behavior_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Malicious Behavior
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Malware_Family.json b/IncidentFields/incidentfield-Malware_Family.json
new file mode 100644
index 000000000000..047d40e8af1d
--- /dev/null
+++ b/IncidentFields/incidentfield-Malware_Family.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_malwarefamily",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.416319236+03:00",
+ "name": "Malware Family",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "malwarefamily",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Malware"
+ ],
+ "systemAssociatedTypes": [
+ "Malware"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Malware_Family_CHANGELOG.md b/IncidentFields/incidentfield-Malware_Family_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Malware_Family_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-PID.json b/IncidentFields/incidentfield-PID.json
new file mode 100644
index 000000000000..cc8d73d1bc5b
--- /dev/null
+++ b/IncidentFields/incidentfield-PID.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "pid",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "PID",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_pid",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "PID",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
diff --git a/IncidentFields/incidentfield-PID_CHANGELOG.md b/IncidentFields/incidentfield-PID_CHANGELOG.md
new file mode 100644
index 000000000000..0940007e196c
--- /dev/null
+++ b/IncidentFields/incidentfield-PID_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+PID
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Parent_Process_ID.json b/IncidentFields/incidentfield-Parent_Process_ID.json
new file mode 100644
index 000000000000..6f77fb868b53
--- /dev/null
+++ b/IncidentFields/incidentfield-Parent_Process_ID.json
@@ -0,0 +1,40 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Crowdstrike",
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "parentprocessid",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "Parent Process ID",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_parentprocessid",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Parent Process ID",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Parent_Process_ID_CHANGELOG.md b/IncidentFields/incidentfield-Parent_Process_ID_CHANGELOG.md
new file mode 100644
index 000000000000..1a677744b045
--- /dev/null
+++ b/IncidentFields/incidentfield-Parent_Process_ID_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Parent Process ID
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Quarantined.json b/IncidentFields/incidentfield-Quarantined.json
new file mode 100644
index 000000000000..b849accb6894
--- /dev/null
+++ b/IncidentFields/incidentfield-Quarantined.json
@@ -0,0 +1,40 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "File",
+ "Host"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "quarantined",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "Whether the indicator is quarantined or isolated",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 2,
+ "hidden": false,
+ "id": "indicator_quarantined",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Quarantined",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "boolean",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
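Despite the file name, this entry defines an indicator field rather than an incident field: its id is prefixed "indicator_" (the incident fields above use "incident_"), its "group" is 2 rather than the 0 used elsewhere, and it is a boolean associated with the File and Host indicator types. A hedged sketch, assuming the built-in setIndicator command accepts field cliNames the way setIncident does:

    import demistomock as demisto  # the repo's test shim; the server injects `demisto` at runtime

    # Mark an indicator as quarantined; the indicator value below is illustrative.
    demisto.executeCommand("setIndicator", {"value": "10.0.0.4", "quarantined": True})
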
diff --git a/IncidentFields/incidentfield-Quarantined_CHANGELOG.md b/IncidentFields/incidentfield-Quarantined_CHANGELOG.md
new file mode 100644
index 000000000000..f0a759f7895f
--- /dev/null
+++ b/IncidentFields/incidentfield-Quarantined_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Quarantined
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Signature.json b/IncidentFields/incidentfield-Signature.json
new file mode 100644
index 000000000000..51a99a3473f9
--- /dev/null
+++ b/IncidentFields/incidentfield-Signature.json
@@ -0,0 +1,37 @@
+{
+ "id": "incident_signature",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.440145161+03:00",
+ "name": "Signature",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "signature",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Vulnerability",
+ "Malware"
+ ],
+ "systemAssociatedTypes": [
+ "Vulnerability",
+ "Malware"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Signature_CHANGELOG.md b/IncidentFields/incidentfield-Signature_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Signature_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Src.json b/IncidentFields/incidentfield-Src.json
new file mode 100644
index 000000000000..b2faad2b9aa7
--- /dev/null
+++ b/IncidentFields/incidentfield-Src.json
@@ -0,0 +1,39 @@
+{
+ "id": "incident_src",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.400366431+03:00",
+ "name": "Src",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "Source",
+ "cliName": "src",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Access",
+ "Malware",
+ "Traps"
+ ],
+ "systemAssociatedTypes": [
+ "Access",
+ "Malware",
+ "Traps"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Src_CHANGELOG.md b/IncidentFields/incidentfield-Src_CHANGELOG.md
new file mode 100644
index 000000000000..08cab6f77265
--- /dev/null
+++ b/IncidentFields/incidentfield-Src_CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+Associated with the Traps incident type.
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Src_NT_Domain.json b/IncidentFields/incidentfield-Src_NT_Domain.json
new file mode 100644
index 000000000000..4ad6ed7a793a
--- /dev/null
+++ b/IncidentFields/incidentfield-Src_NT_Domain.json
@@ -0,0 +1,37 @@
+{
+ "id": "incident_srcntdomain",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.402950395+03:00",
+ "name": "Src NT Domain",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "Source NT Domain",
+ "cliName": "srcntdomain",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Access",
+ "Traps"
+ ],
+ "systemAssociatedTypes": [
+ "Access",
+ "Traps"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Src_NT_Domain_CHANGELOG.md b/IncidentFields/incidentfield-Src_NT_Domain_CHANGELOG.md
new file mode 100644
index 000000000000..08cab6f77265
--- /dev/null
+++ b/IncidentFields/incidentfield-Src_NT_Domain_CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+Associated with the Traps incident type.
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Src_OS.json b/IncidentFields/incidentfield-Src_OS.json
new file mode 100644
index 000000000000..2349c7108170
--- /dev/null
+++ b/IncidentFields/incidentfield-Src_OS.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "srcos",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "Src OS",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_srcos",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Src OS",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Src_OS_CHANGELOG.md b/IncidentFields/incidentfield-Src_OS_CHANGELOG.md
new file mode 100644
index 000000000000..ed7429f401fe
--- /dev/null
+++ b/IncidentFields/incidentfield-Src_OS_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Src OS
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Src_User.json b/IncidentFields/incidentfield-Src_User.json
new file mode 100644
index 000000000000..c69a85eef0f9
--- /dev/null
+++ b/IncidentFields/incidentfield-Src_User.json
@@ -0,0 +1,37 @@
+{
+ "id": "incident_srcuser",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.42473716+03:00",
+ "name": "Src User",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "Source User",
+ "cliName": "srcuser",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Access",
+ "Traps"
+ ],
+ "systemAssociatedTypes": [
+ "Access",
+ "Traps"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Src_User_CHANGELOG.md b/IncidentFields/incidentfield-Src_User_CHANGELOG.md
new file mode 100644
index 000000000000..08cab6f77265
--- /dev/null
+++ b/IncidentFields/incidentfield-Src_User_CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+Associated with the Traps incident type.
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Subtype.json b/IncidentFields/incidentfield-Subtype.json
new file mode 100644
index 000000000000..7e14a0584324
--- /dev/null
+++ b/IncidentFields/incidentfield-Subtype.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "subtype",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "Subtype",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_subtype",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Subtype",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Subtype_CHANGELOG.md b/IncidentFields/incidentfield-Subtype_CHANGELOG.md
new file mode 100644
index 000000000000..efdae1a51102
--- /dev/null
+++ b/IncidentFields/incidentfield-Subtype_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Subtype
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Terminated_Action.json b/IncidentFields/incidentfield-Terminated_Action.json
new file mode 100644
index 000000000000..1b50ec7bc61e
--- /dev/null
+++ b/IncidentFields/incidentfield-Terminated_Action.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "terminatedaction",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "Terminated Action",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_terminatedaction",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Terminated Action",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Terminated_Action_CHANGELOG.md b/IncidentFields/incidentfield-Terminated_Action_CHANGELOG.md
new file mode 100644
index 000000000000..7f2768dbe6d6
--- /dev/null
+++ b/IncidentFields/incidentfield-Terminated_Action_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Terminated Action
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Traps_ID.json b/IncidentFields/incidentfield-Traps_ID.json
new file mode 100644
index 000000000000..2e2f40c228ec
--- /dev/null
+++ b/IncidentFields/incidentfield-Traps_ID.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "trapsid",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "Traps event ID",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_trapsid",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Traps ID",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Traps_ID_CHANGELOG.md b/IncidentFields/incidentfield-Traps_ID_CHANGELOG.md
new file mode 100644
index 000000000000..2494dafe4534
--- /dev/null
+++ b/IncidentFields/incidentfield-Traps_ID_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Traps ID
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Triggered_Security_Profile.json b/IncidentFields/incidentfield-Triggered_Security_Profile.json
new file mode 100644
index 000000000000..a4957f4e314c
--- /dev/null
+++ b/IncidentFields/incidentfield-Triggered_Security_Profile.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Traps"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "triggeredsecurityprofile",
+ "closeForm": false,
+ "columns": null,
+ "content": true,
+ "defaultRows": null,
+ "description": "Triggered Security Profile",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_triggeredsecurityprofile",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "Triggered Security Profile",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Triggered_Security_Profile_CHANGELOG.md b/IncidentFields/incidentfield-Triggered_Security_Profile_CHANGELOG.md
new file mode 100644
index 000000000000..6393eb24c4eb
--- /dev/null
+++ b/IncidentFields/incidentfield-Triggered_Security_Profile_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Triggered Security Profile
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-User.json b/IncidentFields/incidentfield-User.json
new file mode 100644
index 000000000000..196c8f622a1f
--- /dev/null
+++ b/IncidentFields/incidentfield-User.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_user",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.421943082+03:00",
+ "name": "User",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "The user involved",
+ "cliName": "user",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Malware"
+ ],
+ "systemAssociatedTypes": [
+ "Malware"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-User_CHANGELOG.md b/IncidentFields/incidentfield-User_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-User_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Vendor_ID.json b/IncidentFields/incidentfield-Vendor_ID.json
new file mode 100644
index 000000000000..c251fa95f4d2
--- /dev/null
+++ b/IncidentFields/incidentfield-Vendor_ID.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_vendorid",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.443496625+03:00",
+ "name": "Vendor ID",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "vendorid",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [],
+ "validationRegex": "",
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Vulnerability"
+ ],
+ "systemAssociatedTypes": [
+ "Vulnerability"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Vendor_ID_CHANGELOG.md b/IncidentFields/incidentfield-Vendor_ID_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Vendor_ID_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Vendor_Product.json b/IncidentFields/incidentfield-Vendor_Product.json
new file mode 100644
index 000000000000..c03060361ec5
--- /dev/null
+++ b/IncidentFields/incidentfield-Vendor_Product.json
@@ -0,0 +1,37 @@
+{
+ "id": "incident_vendorproduct",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.419089491+03:00",
+ "name": "Vendor Product",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "vendorproduct",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Malware",
+ "Traps"
+ ],
+ "systemAssociatedTypes": [
+ "Malware",
+ "Traps"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Vendor_Product_CHANGELOG.md b/IncidentFields/incidentfield-Vendor_Product_CHANGELOG.md
new file mode 100644
index 000000000000..08cab6f77265
--- /dev/null
+++ b/IncidentFields/incidentfield-Vendor_Product_CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+Associated with the Traps incident type.
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Vulnerability_Category.json b/IncidentFields/incidentfield-Vulnerability_Category.json
new file mode 100644
index 000000000000..6781900edf24
--- /dev/null
+++ b/IncidentFields/incidentfield-Vulnerability_Category.json
@@ -0,0 +1,35 @@
+{
+ "id": "incident_vulnerabilitycategory",
+ "version": -1,
+ "modified": "2018-08-23T18:31:06.446623086+03:00",
+ "name": "Vulnerability Category",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "vulnerabilitycategory",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Vulnerability"
+ ],
+ "systemAssociatedTypes": [
+ "Vulnerability"
+ ],
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-Vulnerability_Category_CHANGELOG.md b/IncidentFields/incidentfield-Vulnerability_Category_CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/IncidentFields/incidentfield-Vulnerability_Category_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_Alert_Count.json b/IncidentFields/incidentfield-XDR_Alert_Count.json
new file mode 100644
index 000000000000..f5f1f3fb97ea
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Alert_Count.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdralertcount",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdralertcount",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR Alert Count",
+ "neverSetAsRequired": false,
+ "ownerOnly": true,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "number",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_Alert_Count_CHANGELOG.md b/IncidentFields/incidentfield-XDR_Alert_Count_CHANGELOG.md
new file mode 100644
index 000000000000..3edff59c7b9e
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Alert_Count_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR Alert Count** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_Alerts.json b/IncidentFields/incidentfield-XDR_Alerts.json
new file mode 100644
index 000000000000..d76f18234276
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Alerts.json
@@ -0,0 +1,176 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdralerts",
+ "closeForm": false,
+ "columns": [
+ {
+ "displayName": "Alert Id",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "alertid",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Detection Timestamp",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "detectiontimestamp",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Severity",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "severity",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Name",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "name",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Category",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "category",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Action",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "action",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Action Pretty",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "actionpretty",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Description",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "description",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Host IP",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "hostip",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Host Name",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "hostname",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "User Name",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "username",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ }
+ ],
+ "content": false,
+ "defaultRows": [
+ {},
+ {},
+ {}
+ ],
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdralerts",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR Alerts",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "grid",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
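
The **XDR Alerts** field above is a `grid` (table) field: each entry in `columns` defines one column, keyed by `key`, and `defaultRows` seeds three empty rows in the layout. Scripts fill a grid by passing a list of row dicts keyed by those column keys. A minimal sketch, assuming the platform's built-in `setIncident` command; the row values are illustrative:

```python
import demistomock as demisto  # local stub; in-platform, `demisto` is injected

# One dict per table row; keys must match the grid's column keys above.
rows = [
    {
        "alertid": "4123",
        "detectiontimestamp": "2019-09-04T10:00:00Z",
        "severity": "high",
        "name": "WildFire Malware",
        "category": "Malware",
        "action": "BLOCKED",
        "actionpretty": "Blocked",
        "description": "Suspicious executable detected",
        "hostip": "10.0.0.4",
        "hostname": "host-1",
        "username": "jdoe",
    },
]

# Write the rows into the grid via the field's cliName.
demisto.executeCommand("setIncident", {"xdralerts": rows})
```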
diff --git a/IncidentFields/incidentfield-XDR_Alerts_CHANGELOG.md b/IncidentFields/incidentfield-XDR_Alerts_CHANGELOG.md
new file mode 100644
index 000000000000..7b807095829d
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Alerts_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR Alerts** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_Assigned_User_Email.json b/IncidentFields/incidentfield-XDR_Assigned_User_Email.json
new file mode 100644
index 000000000000..365b0189ea1c
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Assigned_User_Email.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrassigneduseremail",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrassigneduseremail",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR Assigned User Email",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_Assigned_User_Email_CHANGELOG.md b/IncidentFields/incidentfield-XDR_Assigned_User_Email_CHANGELOG.md
new file mode 100644
index 000000000000..5e30689ef48b
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Assigned_User_Email_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR Assigned User Email** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_Assigned_User_Pretty_Name.json b/IncidentFields/incidentfield-XDR_Assigned_User_Pretty_Name.json
new file mode 100644
index 000000000000..531c8e03ef72
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Assigned_User_Pretty_Name.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrassigneduserprettyname",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrassigneduserprettyname",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR Assigned User Pretty Name",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_Assigned_User_Pretty_Name_CHANGELOG.md b/IncidentFields/incidentfield-XDR_Assigned_User_Pretty_Name_CHANGELOG.md
new file mode 100644
index 000000000000..cdd0d45cd6d3
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Assigned_User_Pretty_Name_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR Assigned User Pretty Name** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_Description.json b/IncidentFields/incidentfield-XDR_Description.json
new file mode 100644
index 000000000000..1c8e1437d9b7
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Description.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrdescription",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrdescription",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR Description",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_Description_CHANGELOG.md b/IncidentFields/incidentfield-XDR_Description_CHANGELOG.md
new file mode 100644
index 000000000000..82846bb34400
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Description_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR Description** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_Detection_Time.json b/IncidentFields/incidentfield-XDR_Detection_Time.json
new file mode 100644
index 000000000000..172b8f795adb
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Detection_Time.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrdetectiontime",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrdetectiontime",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR Detection Time",
+ "neverSetAsRequired": false,
+ "ownerOnly": true,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "date",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_Detection_Time_CHANGELOG.md b/IncidentFields/incidentfield-XDR_Detection_Time_CHANGELOG.md
new file mode 100644
index 000000000000..b51ed1cdc03e
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Detection_Time_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR Detection Time** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_File_Artifacts.json b/IncidentFields/incidentfield-XDR_File_Artifacts.json
new file mode 100644
index 000000000000..4ab6cbeaf744
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_File_Artifacts.json
@@ -0,0 +1,116 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrfileartifacts",
+ "closeForm": false,
+ "columns": [
+ {
+ "displayName": "File Name",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "filename",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "File SHA256",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "filesha256",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Alert Count",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "alertcount",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "number",
+ "width": 150
+ },
+ {
+ "displayName": "File Wildfire Verdict",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "filewildfireverdict",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "File Signature Vendor Name",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "filesignaturevendorname",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "File Signature Status",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "filesignaturestatus",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ }
+ ],
+ "content": false,
+ "defaultRows": [
+ {},
+ {},
+ {}
+ ],
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrfileartifacts",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR File Artifacts",
+ "neverSetAsRequired": false,
+ "ownerOnly": true,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "grid",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_File_Artifacts_CHANGELOG.md b/IncidentFields/incidentfield-XDR_File_Artifacts_CHANGELOG.md
new file mode 100644
index 000000000000..eaa88661224e
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_File_Artifacts_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR File Artifacts** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_High_Severity_Alert_Count.json b/IncidentFields/incidentfield-XDR_High_Severity_Alert_Count.json
new file mode 100644
index 000000000000..66e713105f67
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_High_Severity_Alert_Count.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrhighseverityalertcount",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrhighseverityalertcount",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR High Severity Alert Count",
+ "neverSetAsRequired": false,
+ "ownerOnly": true,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "number",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_High_Severity_Alert_Count_CHANGELOG.md b/IncidentFields/incidentfield-XDR_High_Severity_Alert_Count_CHANGELOG.md
new file mode 100644
index 000000000000..140ec07fa398
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_High_Severity_Alert_Count_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR High Severity Alert Count** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_Incident_ID.json b/IncidentFields/incidentfield-XDR_Incident_ID.json
new file mode 100644
index 000000000000..00ef63924f60
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Incident_ID.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrincidentid",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrincidentid",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR Incident ID",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_Incident_ID_CHANGELOG.md b/IncidentFields/incidentfield-XDR_Incident_ID_CHANGELOG.md
new file mode 100644
index 000000000000..f68d45d6bb56
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Incident_ID_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR Incident ID** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_Low_Severity_Alert_Count.json b/IncidentFields/incidentfield-XDR_Low_Severity_Alert_Count.json
new file mode 100644
index 000000000000..352feab34d1a
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Low_Severity_Alert_Count.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrlowseverityalertcount",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrlowseverityalertcount",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR Low Severity Alert Count",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "number",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_Low_Severity_Alert_Count_CHANGELOG.md b/IncidentFields/incidentfield-XDR_Low_Severity_Alert_Count_CHANGELOG.md
new file mode 100644
index 000000000000..3db2a1cb9641
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Low_Severity_Alert_Count_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR Low Severity Alert Count** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_Medium_Severity_Alert_Count.json b/IncidentFields/incidentfield-XDR_Medium_Severity_Alert_Count.json
new file mode 100644
index 000000000000..f20d1ad1aa3d
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Medium_Severity_Alert_Count.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrmediumseverityalertcount",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrmediumseverityalertcount",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR Medium Severity Alert Count",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "number",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_Medium_Severity_Alert_Count_CHANGELOG.md b/IncidentFields/incidentfield-XDR_Medium_Severity_Alert_Count_CHANGELOG.md
new file mode 100644
index 000000000000..ca530bf29df1
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Medium_Severity_Alert_Count_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR Medium Severity Alert Count** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_Network_Artifacts.json b/IncidentFields/incidentfield-XDR_Network_Artifacts.json
new file mode 100644
index 000000000000..4801b52d171b
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Network_Artifacts.json
@@ -0,0 +1,128 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrnetworkartifacts",
+ "closeForm": false,
+ "columns": [
+ {
+ "displayName": "Type",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "type",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Alert Count",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "alertcount",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "number",
+ "width": 150
+ },
+ {
+ "displayName": "Is Manual",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "ismanual",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "boolean",
+ "width": 150
+ },
+ {
+ "displayName": "Network Domain",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "networkdomain",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Network Remote IP",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "networkremoteip",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Network Remote Port",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "networkremoteport",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ },
+ {
+ "displayName": "Network Country",
+ "fieldCalcScript": "",
+ "isDefault": true,
+ "isReadOnly": false,
+ "key": "networkcountry",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "type": "shortText",
+ "width": 150
+ }
+ ],
+ "content": false,
+ "defaultRows": [
+ {},
+ {},
+ {}
+ ],
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrnetworkartifacts",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR Network Artifacts",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "grid",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_Network_Artifacts_CHANGELOG.md b/IncidentFields/incidentfield-XDR_Network_Artifacts_CHANGELOG.md
new file mode 100644
index 000000000000..a5d6a8b5a780
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Network_Artifacts_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR Network Artifacts** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_Notes.json b/IncidentFields/incidentfield-XDR_Notes.json
new file mode 100644
index 000000000000..72c06a048a77
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Notes.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrnotes",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrnotes",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR Notes",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_Notes_CHANGELOG.md b/IncidentFields/incidentfield-XDR_Notes_CHANGELOG.md
new file mode 100644
index 000000000000..fac1c0d72a65
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Notes_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR Notes** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_Resolve_Comment.json b/IncidentFields/incidentfield-XDR_Resolve_Comment.json
new file mode 100644
index 000000000000..aeafd4ec4125
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Resolve_Comment.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrresolvecomment",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrresolvecomment",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR Resolve Comment",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_Resolve_Comment_CHANGELOG.md b/IncidentFields/incidentfield-XDR_Resolve_Comment_CHANGELOG.md
new file mode 100644
index 000000000000..f47f01d919ec
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Resolve_Comment_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR Resolve Comment** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_Status.json b/IncidentFields/incidentfield-XDR_Status.json
new file mode 100644
index 000000000000..aded594f7990
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Status.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrstatus",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrstatus",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR Status",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": [],
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "shortText",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_Status_CHANGELOG.md b/IncidentFields/incidentfield-XDR_Status_CHANGELOG.md
new file mode 100644
index 000000000000..7de5d0de4195
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_Status_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR Status** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_URL.json b/IncidentFields/incidentfield-XDR_URL.json
new file mode 100644
index 000000000000..2f0806f6708b
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_URL.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrurl",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrurl",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR URL",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "url",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_URL_CHANGELOG.md b/IncidentFields/incidentfield-XDR_URL_CHANGELOG.md
new file mode 100644
index 000000000000..99d6cc130475
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_URL_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR URL** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-XDR_User_Count.json b/IncidentFields/incidentfield-XDR_User_Count.json
new file mode 100644
index 000000000000..656042bb01db
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_User_Count.json
@@ -0,0 +1,39 @@
+{
+ "associatedToAll": false,
+ "associatedTypes": [
+ "Cortex XDR Incident"
+ ],
+ "breachScript": "",
+ "caseInsensitive": true,
+ "cliName": "xdrusercount",
+ "closeForm": false,
+ "columns": null,
+ "content": false,
+ "defaultRows": null,
+ "description": "",
+ "editForm": true,
+ "fieldCalcScript": "",
+ "group": 0,
+ "hidden": false,
+ "id": "incident_xdrusercount",
+ "isReadOnly": false,
+ "locked": false,
+ "name": "XDR User Count",
+ "neverSetAsRequired": false,
+ "ownerOnly": false,
+ "placeholder": "",
+ "required": false,
+ "script": "",
+ "selectValues": null,
+ "sla": 0,
+ "system": false,
+ "systemAssociatedTypes": null,
+ "threshold": 72,
+ "type": "number",
+ "unmapped": false,
+ "unsearchable": false,
+ "useAsKpi": false,
+ "validationRegex": "",
+ "version": -1,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-XDR_User_Count_CHANGELOG.md b/IncidentFields/incidentfield-XDR_User_Count_CHANGELOG.md
new file mode 100644
index 000000000000..6e15a3b274a9
--- /dev/null
+++ b/IncidentFields/incidentfield-XDR_User_Count_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+- Added the **XDR User Count** incident field, which is associated with the **Cortex XDR Incident** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentFields/incidentfield-affecteddata.json b/IncidentFields/incidentfield-affecteddata.json
new file mode 100644
index 000000000000..37a96f13fe25
--- /dev/null
+++ b/IncidentFields/incidentfield-affecteddata.json
@@ -0,0 +1,41 @@
+{
+ "id": "incident_affecteddata",
+ "version": -1,
+ "modified": "2019-04-17T13:40:40.223872634Z",
+ "name": "Affected data",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "\"‘personal data’ means any information relating to an identified or identifiable natural person (‘data subject’); an identifiable natural person is one who can be identified, directly or indirectly, in particular by reference to an identifier such as a name, an identification number, location data, an online identifier or to one or more factors specific to the physical, physiological, genetic, mental, economic, cultural or social identity of that natural person;\" - GDPR Art. 4",
+ "cliName": "affecteddata",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "Personal",
+ "sensitive (eg. health/genetic data)",
+ "Non-sensitive",
+ "Non-personal"
+ ],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-affecteddatatype.json b/IncidentFields/incidentfield-affecteddatatype.json
new file mode 100644
index 000000000000..9d46126bb816
--- /dev/null
+++ b/IncidentFields/incidentfield-affecteddatatype.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_affecteddatatype",
+ "version": -1,
+ "modified": "2019-04-14T11:32:24.494022702Z",
+ "name": "Affected Data Type",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "affecteddatatype",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-affectedindividualscontactinformation.json b/IncidentFields/incidentfield-affectedindividualscontactinformation.json
new file mode 100644
index 000000000000..747dab2d63ef
--- /dev/null
+++ b/IncidentFields/incidentfield-affectedindividualscontactinformation.json
@@ -0,0 +1,79 @@
+{
+ "id": "incident_affectedindividualscontactinformation",
+ "version": -1,
+ "modified": "2019-04-16T10:35:49.430259514Z",
+ "name": "Affected Individuals Contact Information",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "affectedindividualscontactinformation",
+ "type": "grid",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "columns": [
+ {
+ "key": "fullname",
+ "displayName": "Full Name",
+ "type": "shortText",
+ "required": false,
+ "script": "",
+ "width": 310,
+ "isDefault": true,
+ "fieldCalcScript": "",
+ "isReadOnly": false,
+ "selectValues": null
+ },
+ {
+ "key": "emailaddress",
+ "displayName": "E-mail Address",
+ "type": "shortText",
+ "required": false,
+ "script": "",
+ "width": 272,
+ "isDefault": true,
+ "fieldCalcScript": "",
+ "isReadOnly": false,
+ "selectValues": null
+ },
+ {
+ "key": "approximatenumberofdatarecordsbreached",
+ "displayName": "Approximate number of data records breached",
+ "type": "shortText",
+ "required": true,
+ "script": "",
+ "width": 150,
+ "isDefault": true,
+ "fieldCalcScript": "",
+ "isReadOnly": false,
+ "selectValues": null
+ }
+ ],
+ "defaultRows": [
+ {},
+ {},
+ {}
+ ],
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-approximatenumberofaffecteddatasubjects.json b/IncidentFields/incidentfield-approximatenumberofaffecteddatasubjects.json
new file mode 100644
index 000000000000..d39df479b8c3
--- /dev/null
+++ b/IncidentFields/incidentfield-approximatenumberofaffecteddatasubjects.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_approximatenumberofaffecteddatasubjects",
+ "version": -1,
+ "modified": "2019-04-16T10:36:19.006319903Z",
+ "name": "Approximate number of affected data subjects",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "approximatenumberofaffecteddatasubjects",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-city.json b/IncidentFields/incidentfield-city.json
new file mode 100644
index 000000000000..6ec7463b0e5e
--- /dev/null
+++ b/IncidentFields/incidentfield-city.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_city",
+ "version": -1,
+ "modified": "2019-04-14T11:15:16.200447742Z",
+ "name": "City",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "city",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-companyaddress.json b/IncidentFields/incidentfield-companyaddress.json
new file mode 100644
index 000000000000..37150e16a481
--- /dev/null
+++ b/IncidentFields/incidentfield-companyaddress.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_companyaddress",
+ "version": -1,
+ "modified": "2019-04-14T12:55:35.314175089Z",
+ "name": "Company Address",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "companyaddress",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-companycity.json b/IncidentFields/incidentfield-companycity.json
new file mode 100644
index 000000000000..985310fddcd0
--- /dev/null
+++ b/IncidentFields/incidentfield-companycity.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_companycity",
+ "version": -1,
+ "modified": "2019-04-14T13:01:47.329127394Z",
+ "name": "Company City",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "companycity",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-companycountry.json b/IncidentFields/incidentfield-companycountry.json
new file mode 100644
index 000000000000..acf49051a59b
--- /dev/null
+++ b/IncidentFields/incidentfield-companycountry.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_companycountry",
+ "version": -1,
+ "modified": "2019-04-14T13:02:05.26697112Z",
+ "name": "Company Country",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "companycountry",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-companyhasinsuranceforthebreach.json b/IncidentFields/incidentfield-companyhasinsuranceforthebreach.json
new file mode 100644
index 000000000000..0336503cb0b2
--- /dev/null
+++ b/IncidentFields/incidentfield-companyhasinsuranceforthebreach.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_companyhasinsuranceforthebreach",
+ "version": -1,
+ "modified": "2019-04-14T11:36:20.070426559Z",
+ "name": "Company has Insurance for the Breach",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "companyhasinsuranceforthebreach",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-companyname.json b/IncidentFields/incidentfield-companyname.json
new file mode 100644
index 000000000000..1436f54f6796
--- /dev/null
+++ b/IncidentFields/incidentfield-companyname.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_companyname",
+ "version": -1,
+ "modified": "2019-04-14T11:14:33.557604116Z",
+ "name": "Company Name",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "companyname",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-companypostalcode.json b/IncidentFields/incidentfield-companypostalcode.json
new file mode 100644
index 000000000000..cf824fcbc572
--- /dev/null
+++ b/IncidentFields/incidentfield-companypostalcode.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_companypostalcode",
+ "version": -1,
+ "modified": "2019-04-14T13:01:33.133942483Z",
+ "name": "Company Postal Code",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "companypostalcode",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-contactaddress.json b/IncidentFields/incidentfield-contactaddress.json
new file mode 100644
index 000000000000..4d5f9b0a4374
--- /dev/null
+++ b/IncidentFields/incidentfield-contactaddress.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_contactaddress",
+ "version": -1,
+ "modified": "2019-04-14T12:55:48.980552955Z",
+ "name": "Contact Address",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "contactaddress",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-contactname.json b/IncidentFields/incidentfield-contactname.json
new file mode 100644
index 000000000000..1d6a162a9eea
--- /dev/null
+++ b/IncidentFields/incidentfield-contactname.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_contactname",
+ "version": -1,
+ "modified": "2019-04-14T11:17:29.254822349Z",
+ "name": "Contact Name",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "contactname",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-country.json b/IncidentFields/incidentfield-country.json
new file mode 100644
index 000000000000..102035d5ccd3
--- /dev/null
+++ b/IncidentFields/incidentfield-country.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_country",
+ "version": -1,
+ "modified": "2019-04-14T11:15:28.698594118Z",
+ "name": "Country",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "country",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-country_CHANGELOG.md b/IncidentFields/incidentfield-country_CHANGELOG.md
new file mode 100644
index 000000000000..96a6b4bc4554
--- /dev/null
+++ b/IncidentFields/incidentfield-country_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.8.0] - 2019-08-06
+-
diff --git a/IncidentFields/incidentfield-countrywherebusinesshasitsmainestablishment.json b/IncidentFields/incidentfield-countrywherebusinesshasitsmainestablishment.json
new file mode 100644
index 000000000000..a201dd5d597b
--- /dev/null
+++ b/IncidentFields/incidentfield-countrywherebusinesshasitsmainestablishment.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_countrywherebusinesshasitsmainestablishment",
+ "version": -1,
+ "modified": "2019-04-17T12:51:19.238592047Z",
+ "name": "Country where business has its main establishment",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "\"‘main establishment’ means: as regards a controller with establishments in more than one Member State, the place of its central administration in the Union, unless the decisions on the purposes and means of the processing of personal data are taken in another establishment of the controller in the Union and the latter establishment has the power to have such decisions implemented, in which case the establishment having taken such decisions is to be considered to be the main establishment; as regards a processor with establishments in more than one Member State, the place of its central administration in the Union, or, if the processor has no central administration in the Union, the establishment of the processor in the Union where the main processing activities in the context of the activities of an establishment of the processor take place to the extent that the processor is subject to specific obligations under this Regulation;\" - GDPR Art. 4",
+ "cliName": "countrywherebusinesshasitsmainestablishment",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-countrywherethebreachtookplace.json b/IncidentFields/incidentfield-countrywherethebreachtookplace.json
new file mode 100644
index 000000000000..1ac0d0a3f34d
--- /dev/null
+++ b/IncidentFields/incidentfield-countrywherethebreachtookplace.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_countrywherethebreachtookplace",
+ "version": -1,
+ "modified": "2019-04-14T11:28:33.557006122Z",
+ "name": "Country where the breach took place",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "countrywherethebreachtookplace",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-criticalassets.json b/IncidentFields/incidentfield-criticalassets.json
new file mode 100644
index 000000000000..a7d5dc0bdb8a
--- /dev/null
+++ b/IncidentFields/incidentfield-criticalassets.json
@@ -0,0 +1,69 @@
+{
+ "id": "incident_criticalassets",
+ "version": -1,
+ "modified": "2019-10-06T12:42:27.040292745Z",
+ "name": "Critical Assets",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "A table of critical assets involved in the incident, including the name and asset type.",
+ "cliName": "criticalassets",
+ "type": "grid",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [],
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing",
+ "Malware",
+ "Access",
+ "Network"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "columns": [
+ {
+ "key": "assetname",
+ "displayName": "Asset Name",
+ "type": "shortText",
+ "required": false,
+ "script": "",
+ "width": 150,
+ "isDefault": true,
+ "fieldCalcScript": "",
+ "isReadOnly": false,
+ "selectValues": null
+ },
+ {
+ "key": "assettype",
+ "displayName": "Asset Type",
+ "type": "shortText",
+ "required": false,
+ "script": "",
+ "width": 150,
+ "isDefault": true,
+ "fieldCalcScript": "",
+ "isReadOnly": false,
+ "selectValues": null
+ }
+ ],
+ "defaultRows": [
+ {}
+ ],
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
diff --git a/IncidentFields/incidentfield-criticalassets_CHANGELOG.md b/IncidentFields/incidentfield-criticalassets_CHANGELOG.md
new file mode 100644
index 000000000000..b107765afbc3
--- /dev/null
+++ b/IncidentFields/incidentfield-criticalassets_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Incident Field
+Critical Assets
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-dataencryptionstatus.json b/IncidentFields/incidentfield-dataencryptionstatus.json
new file mode 100644
index 000000000000..1eabf569d70a
--- /dev/null
+++ b/IncidentFields/incidentfield-dataencryptionstatus.json
@@ -0,0 +1,40 @@
+{
+ "id": "incident_dataencryptionstatus",
+ "version": -1,
+ "modified": "2019-04-17T12:35:36.369158352Z",
+ "name": "Data Encryption Status",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "dataencryptionstatus",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "Full",
+ "Partial",
+ "None"
+ ],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-datetimeofthebreach.json b/IncidentFields/incidentfield-datetimeofthebreach.json
new file mode 100644
index 000000000000..384a364a4d55
--- /dev/null
+++ b/IncidentFields/incidentfield-datetimeofthebreach.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_datetimeofthebreach",
+ "version": -1,
+ "modified": "2019-04-14T11:29:16.341909673Z",
+ "name": "Date/time of the breach",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "datetimeofthebreach",
+ "type": "date",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-detectionsla.json b/IncidentFields/incidentfield-detectionsla.json
new file mode 100644
index 000000000000..4a319fb23d6c
--- /dev/null
+++ b/IncidentFields/incidentfield-detectionsla.json
@@ -0,0 +1,36 @@
+{
+ "closeForm": false,
+ "cliName": "detectionsla",
+ "fromVersion": "4.1.0",
+ "neverSetAsRequired": false,
+ "threshold": 0,
+ "id": "incident_detectionsla",
+ "group": 0,
+ "script": "",
+ "isReadOnly": true,
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "timer",
+ "editForm": false,
+ "description": "The time it took from incident creation until the maliciousness was determined.",
+ "associatedToAll": true,
+ "breachScript": "",
+ "associatedTypes": [],
+ "caseInsensitive": true,
+ "placeholder": "",
+ "useAsKpi": true,
+ "systemAssociatedTypes": null,
+ "locked": false,
+ "name": "Detection SLA",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2018-12-11T12:53:48.369705659Z",
+ "fieldCalcScript": "",
+ "selectValues": [],
+ "validationRegex": "",
+ "sla": 20
+}
diff --git a/IncidentFields/incidentfield-dpoemailaddress.json b/IncidentFields/incidentfield-dpoemailaddress.json
new file mode 100644
index 000000000000..be9c08b5bb1d
--- /dev/null
+++ b/IncidentFields/incidentfield-dpoemailaddress.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_dpoemailaddress",
+ "version": -1,
+ "modified": "2019-04-14T12:05:03.812194029Z",
+ "name": "DPO E-mail Address",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "dpoemailaddress",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-emailaddress.json b/IncidentFields/incidentfield-emailaddress.json
new file mode 100644
index 000000000000..87a442ca42c8
--- /dev/null
+++ b/IncidentFields/incidentfield-emailaddress.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_emailaddress",
+ "version": -1,
+ "modified": "2019-04-14T12:02:08.966283663Z",
+ "name": "E-mail Address",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailaddress",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-emailaddress_CHANGELOG.md b/IncidentFields/incidentfield-emailaddress_CHANGELOG.md
new file mode 100644
index 000000000000..96a6b4bc4554
--- /dev/null
+++ b/IncidentFields/incidentfield-emailaddress_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.8.0] - 2019-08-06
+-
diff --git a/IncidentFields/incidentfield-emailauthenticitycheck.json b/IncidentFields/incidentfield-emailauthenticitycheck.json
new file mode 100644
index 000000000000..b699e8a3581d
--- /dev/null
+++ b/IncidentFields/incidentfield-emailauthenticitycheck.json
@@ -0,0 +1,45 @@
+{
+ "id": "incident_emailauthenticitycheck",
+ "version": -1,
+ "modified": "2019-09-08T11:50:56.532385842Z",
+ "name": "Email Authenticity Check",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "Indicates the authenticity of the email. This check is done using the CheckEmailAuthenticity script.",
+ "cliName": "emailauthenticitycheck",
+ "type": "singleSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "",
+ "Pass",
+ "Undetermined",
+ "Suspicious",
+ "Fail"
+ ],
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": true,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "columns": null,
+ "defaultRows": null,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
diff --git a/IncidentFields/incidentfield-emailauthenticitycheck_CHANGELOG.md b/IncidentFields/incidentfield-emailauthenticitycheck_CHANGELOG.md
new file mode 100644
index 000000000000..cba7926b9efd
--- /dev/null
+++ b/IncidentFields/incidentfield-emailauthenticitycheck_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+Indicates the authenticity of the email. This is done using the CheckEmailAuthenticity script.
diff --git a/IncidentFields/incidentfield-emailclassification.json b/IncidentFields/incidentfield-emailclassification.json
new file mode 100644
index 000000000000..72f83ad90a1f
--- /dev/null
+++ b/IncidentFields/incidentfield-emailclassification.json
@@ -0,0 +1,44 @@
+{
+ "id": "incident_emailclassification",
+ "version": -1,
+ "modified": "2019-09-02T12:32:29.881674101Z",
+ "name": "Email Classification",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "Classification of the email. Can be \"Legitimate\", \"Spam\", or \"Malicious\".",
+ "cliName": "emailclassification",
+ "type": "singleSelect",
+ "closeForm": true,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "",
+ "Legitimate",
+ "Spam",
+ "Malicious"
+ ],
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "columns": null,
+ "defaultRows": null,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-emailclassification_CHANGELOG.md b/IncidentFields/incidentfield-emailclassification_CHANGELOG.md
new file mode 100644
index 000000000000..612623354f84
--- /dev/null
+++ b/IncidentFields/incidentfield-emailclassification_CHANGELOG.md
@@ -0,0 +1,2 @@
+## [Unreleased]
+Classification of the email. Can be "Legitimate", "Spam", or "Malicious".
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-emailhtml.json b/IncidentFields/incidentfield-emailhtml.json
new file mode 100644
index 000000000000..e865d40733e8
--- /dev/null
+++ b/IncidentFields/incidentfield-emailhtml.json
@@ -0,0 +1,39 @@
+{
+ "id": "incident_emailhtml",
+ "version": -1,
+ "modified": "2019-07-01T10:34:23.758886583Z",
+ "name": "Email HTML",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "emailhtml",
+ "type": "longText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "validationRegex": "",
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "columns": null,
+ "defaultRows": null,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-emailhtml_CHANGELOG.md b/IncidentFields/incidentfield-emailhtml_CHANGELOG.md
new file mode 100644
index 000000000000..16311e1a1656
--- /dev/null
+++ b/IncidentFields/incidentfield-emailhtml_CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+-
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-gdprnotifyauthorities.json b/IncidentFields/incidentfield-gdprnotifyauthorities.json
new file mode 100644
index 000000000000..d352b989ed69
--- /dev/null
+++ b/IncidentFields/incidentfield-gdprnotifyauthorities.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_gdprnotifyauthorities",
+ "version": -1,
+ "modified": "2019-04-17T13:32:10.330437058Z",
+ "name": "GDPR Notify Authorities",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "\"In the case of a personal data breach, the controller shall without undue delay and, where feasible, not later than 72 hours after having become aware of it, notify the personal data breach to the supervisory authority competent in accordance with Article 55, unless the personal data breach is unlikely to result in a risk to the rights and freedoms of natural persons.\" - GDPR Art. 33",
+ "cliName": "gdprnotifyauthorities",
+ "type": "timer",
+ "closeForm": false,
+ "editForm": false,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": true,
+ "selectValues": [],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 4500,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-isthedatasubjecttodpia.json b/IncidentFields/incidentfield-isthedatasubjecttodpia.json
new file mode 100644
index 000000000000..b4c7e091af03
--- /dev/null
+++ b/IncidentFields/incidentfield-isthedatasubjecttodpia.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_isthedatasubjecttodpia",
+ "version": -1,
+ "modified": "2019-04-14T11:34:16.232406809Z",
+ "name": "Is the Data Subject to DPIA ",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "isthedatasubjecttodpia",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-likelyimpact.json b/IncidentFields/incidentfield-likelyimpact.json
new file mode 100644
index 000000000000..18e947c8ddaa
--- /dev/null
+++ b/IncidentFields/incidentfield-likelyimpact.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_likelyimpact",
+ "version": -1,
+ "modified": "2019-04-17T13:37:07.009127127Z",
+ "name": "Likely Impact",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "\"A data protection impact assessment (...) shall in particular be required in the case of: (a) a systematic and extensive evaluation of personal aspects relating to natural persons which is based on automated processing, including profiling, and on which decisions are based that produce legal effects concerning the natural person or similarly significantly affect the natural person; (b) processing on a large scale of special categories of data referred to in Article 9(1), or of personal data relating to criminal convictions and offences referred to in Article 10; or (c) a systematic monitoring of a publicly accessible area on a large scale. - GDPR Art. 35",
+ "cliName": "likelyimpact",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-maliciouscauseifthecauseisamaliciousattack.json b/IncidentFields/incidentfield-maliciouscauseifthecauseisamaliciousattack.json
new file mode 100644
index 000000000000..d2f440c14f51
--- /dev/null
+++ b/IncidentFields/incidentfield-maliciouscauseifthecauseisamaliciousattack.json
@@ -0,0 +1,46 @@
+{
+ "id": "incident_maliciouscauseifthecauseisamaliciousattack",
+ "version": -1,
+ "modified": "2019-04-16T10:43:41.342957399Z",
+ "name": "Malicious Cause (If the cause is a malicious attack)",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "maliciouscauseifthecauseisamaliciousattack",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "Unknown vulnerability",
+ "Cryptolockers",
+ "Fire reconnaissance",
+ "Phishing",
+ "Distributed denial of service",
+ "Malware",
+ "Social engineering",
+ "Blackmail",
+ "Other"
+ ],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-measurestomitigate.json b/IncidentFields/incidentfield-measurestomitigate.json
new file mode 100644
index 000000000000..11332d5b879e
--- /dev/null
+++ b/IncidentFields/incidentfield-measurestomitigate.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_measurestomitigate",
+ "version": -1,
+ "modified": "2019-04-17T13:55:44.5688721Z",
+ "name": "Measures to Mitigate",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "\" (d) describe the measures taken or proposed to be taken by the controller to address the personal data breach, including, where appropriate, measures to mitigate its possible adverse effects.\" - GDPR Art. 33",
+ "cliName": "measurestomitigate",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-phishingsubtype.json b/IncidentFields/incidentfield-phishingsubtype.json
new file mode 100644
index 000000000000..54ab0fbec96f
--- /dev/null
+++ b/IncidentFields/incidentfield-phishingsubtype.json
@@ -0,0 +1,46 @@
+{
+ "id": "incident_phishingsubtype",
+ "version": -1,
+ "modified": "2019-09-02T12:35:58.575458134Z",
+ "name": "Phishing Sub-type",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "Phishing incident sub-type. Can be \"Spear Phishing\", \"Whaling\", \"Credential Harvesting\", \"Malware\", \"Scam\", or \"Automatic\".",
+ "cliName": "phishingsubtype",
+ "type": "multiSelect",
+ "closeForm": true,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "Spear Phishing",
+ "Whaling",
+ "Credential Harvesting",
+ "Malware",
+ "Scam",
+ "Automatic"
+ ],
+ "validationRegex": "",
+ "useAsKpi": true,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "Phishing"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "columns": null,
+ "defaultRows": null,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-phishingsubtype_CHANGELOG.md b/IncidentFields/incidentfield-phishingsubtype_CHANGELOG.md
new file mode 100644
index 000000000000..df7930a2fcae
--- /dev/null
+++ b/IncidentFields/incidentfield-phishingsubtype_CHANGELOG.md
@@ -0,0 +1,2 @@
+## [Unreleased]
+Phishing incident sub-type. Can be "Spear Phishing", "Whaling", "Credential Harvesting", "Malware", "Scam", or "Automatic".
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-possiblecauseofthebreach.json b/IncidentFields/incidentfield-possiblecauseofthebreach.json
new file mode 100644
index 000000000000..82bb84d38cd3
--- /dev/null
+++ b/IncidentFields/incidentfield-possiblecauseofthebreach.json
@@ -0,0 +1,42 @@
+{
+ "id": "incident_possiblecauseofthebreach",
+ "version": -1,
+ "modified": "2019-04-16T10:43:51.71269953Z",
+ "name": "Possible Cause of the Breach",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "possiblecauseofthebreach",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "Malicious attack - internal",
+ "Malicious attack - external",
+ "Accident (system failure)",
+ "Negligence (human error)",
+ "Other"
+ ],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-postalcode.json b/IncidentFields/incidentfield-postalcode.json
new file mode 100644
index 000000000000..6ed8817186c5
--- /dev/null
+++ b/IncidentFields/incidentfield-postalcode.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_postalcode",
+ "version": -1,
+ "modified": "2019-04-14T11:15:06.414769944Z",
+ "name": "Postal Code",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "postalcode",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-remediationsla.json b/IncidentFields/incidentfield-remediationsla.json
new file mode 100644
index 000000000000..7120f53cefb3
--- /dev/null
+++ b/IncidentFields/incidentfield-remediationsla.json
@@ -0,0 +1,36 @@
+{
+ "closeForm": false,
+ "fromVersion": "4.1.0",
+ "cliName": "remediationsla",
+ "neverSetAsRequired": false,
+ "threshold": 0,
+ "id": "incident_remediationsla",
+ "group": 0,
+ "script": "",
+ "isReadOnly": true,
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "timer",
+ "editForm": false,
+ "description": "The time it took since remediation of the incident began, and until it ended.",
+ "associatedToAll": true,
+ "breachScript": "",
+ "associatedTypes": [],
+ "caseInsensitive": true,
+ "placeholder": "",
+ "useAsKpi": true,
+ "systemAssociatedTypes": null,
+ "locked": false,
+ "name": "Remediation SLA",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2018-12-11T12:53:56.816268002Z",
+ "fieldCalcScript": "",
+ "selectValues": [],
+ "validationRegex": "",
+ "sla": 7200
+}
diff --git a/IncidentFields/incidentfield-sectorofaffectedparty.json b/IncidentFields/incidentfield-sectorofaffectedparty.json
new file mode 100644
index 000000000000..812ded601ad2
--- /dev/null
+++ b/IncidentFields/incidentfield-sectorofaffectedparty.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_sectorofaffectedparty",
+ "version": -1,
+ "modified": "2019-04-14T11:27:24.611690657Z",
+ "name": "Sector of Affected Party",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "sectorofaffectedparty",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-sizenumberofemployees.json b/IncidentFields/incidentfield-sizenumberofemployees.json
new file mode 100644
index 000000000000..5e184323e1d5
--- /dev/null
+++ b/IncidentFields/incidentfield-sizenumberofemployees.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_sizenumberofemployees",
+ "version": -1,
+ "modified": "2019-04-14T11:27:37.886534316Z",
+ "name": "Size - number of employees",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "sizenumberofemployees",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-sizeturnover.json b/IncidentFields/incidentfield-sizeturnover.json
new file mode 100644
index 000000000000..df43829a19f7
--- /dev/null
+++ b/IncidentFields/incidentfield-sizeturnover.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_sizeturnover",
+ "version": -1,
+ "modified": "2019-04-14T11:27:58.670504598Z",
+ "name": "Size - turnover",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "sizeturnover",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-telephoneno.json b/IncidentFields/incidentfield-telephoneno.json
new file mode 100644
index 000000000000..9e50b81b96ab
--- /dev/null
+++ b/IncidentFields/incidentfield-telephoneno.json
@@ -0,0 +1,36 @@
+{
+ "id": "incident_telephoneno",
+ "version": -1,
+ "modified": "2019-04-14T11:18:42.50105672Z",
+ "name": "Telephone no.",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "telephoneno",
+ "type": "shortText",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": null,
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfield-timetoassignment.json b/IncidentFields/incidentfield-timetoassignment.json
new file mode 100644
index 000000000000..2b4286a1fc7e
--- /dev/null
+++ b/IncidentFields/incidentfield-timetoassignment.json
@@ -0,0 +1,36 @@
+{
+ "closeForm": false,
+ "cliName": "timetoassignment",
+ "fromVersion": "4.1.0",
+ "neverSetAsRequired": false,
+ "threshold": 0,
+ "id": "incident_timetoassignment",
+ "group": 0,
+ "script": "",
+ "isReadOnly": true,
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "timer",
+ "editForm": false,
+ "description": "The time it took from when the incident was created until a user was assigned to it.",
+ "associatedToAll": true,
+ "breachScript": "",
+ "associatedTypes": null,
+ "caseInsensitive": true,
+ "placeholder": "",
+ "useAsKpi": true,
+ "systemAssociatedTypes": null,
+ "locked": false,
+ "name": "Time to Assignment",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2018-12-11T12:55:38.305896432Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0
+}
diff --git a/IncidentFields/incidentfield-whereisdatahosted.json b/IncidentFields/incidentfield-whereisdatahosted.json
new file mode 100644
index 000000000000..2ded90aa0bb7
--- /dev/null
+++ b/IncidentFields/incidentfield-whereisdatahosted.json
@@ -0,0 +1,39 @@
+{
+ "id": "incident_whereisdatahosted",
+ "version": -1,
+ "modified": "2019-04-14T11:30:51.376887853Z",
+ "name": "Where is data hosted",
+ "ownerOnly": false,
+ "placeholder": "",
+ "description": "",
+ "cliName": "whereisdatahosted",
+ "type": "multiSelect",
+ "closeForm": false,
+ "editForm": true,
+ "required": false,
+ "script": "",
+ "fieldCalcScript": "",
+ "neverSetAsRequired": false,
+ "isReadOnly": false,
+ "selectValues": [
+ "Onsite",
+ "Cloud"
+ ],
+ "useAsKpi": false,
+ "locked": false,
+ "system": false,
+ "content": false,
+ "group": 0,
+ "hidden": false,
+ "associatedTypes": [
+ "GDPR Data Breach"
+ ],
+ "systemAssociatedTypes": null,
+ "associatedToAll": false,
+ "unmapped": false,
+ "unsearchable": false,
+ "caseInsensitive": true,
+ "sla": 0,
+ "threshold": 72,
+ "breachScript": ""
+}
\ No newline at end of file
diff --git a/IncidentFields/incidentfields.json b/IncidentFields/incidentfields.json
deleted file mode 100644
index 2e06106ba701..000000000000
--- a/IncidentFields/incidentfields.json
+++ /dev/null
@@ -1,1656 +0,0 @@
-{
- "incidentFields": [
- {
- "id": "incident_app",
- "version": 2,
- "modified": "2018-08-23T18:31:06.389092425+03:00",
- "name": "App",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "app",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Access"
- ],
- "systemAssociatedTypes": [
- "Access"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_assetid",
- "version": 2,
- "modified": "2018-08-23T18:31:06.427700464+03:00",
- "name": "Asset ID",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "assetid",
- "type": "shortText",
- "closeForm": true,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": false,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Vulnerability"
- ],
- "systemAssociatedTypes": [
- "Vulnerability"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_attachmentcount",
- "version": 2,
- "modified": "2018-08-23T18:31:06.313509514+03:00",
- "name": "Attachment Count",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "attachmentcount",
- "type": "number",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_attachmentextension",
- "version": 2,
- "modified": "2018-08-23T18:31:06.316669039+03:00",
- "name": "Attachment Extension",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "attachmentextension",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_attachmenthash",
- "version": 4,
- "modified": "2018-08-23T18:31:06.320408213+03:00",
- "name": "Attachment Hash",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "attachmenthash",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_attachmentid",
- "version": 2,
- "modified": "2018-08-23T18:31:06.323188662+03:00",
- "name": "Attachment ID",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "attachmentid",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_attachmentname",
- "version": 2,
- "modified": "2018-08-23T18:31:06.326381385+03:00",
- "name": "Attachment Name",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "attachmentname",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_attachmentsize",
- "version": 2,
- "modified": "2018-08-23T18:31:06.329260341+03:00",
- "name": "Attachment size",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "attachmentsize",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_attachmenttype",
- "version": 2,
- "modified": "2018-08-23T18:31:06.332108621+03:00",
- "name": "Attachment type",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "attachmenttype",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_bugtraq",
- "version": 2,
- "modified": "2018-08-23T18:31:06.430765635+03:00",
- "name": "Bugtraq",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "bugtraq",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": false,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Vulnerability"
- ],
- "systemAssociatedTypes": [
- "Vulnerability"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_cve",
- "version": 2,
- "modified": "2018-08-23T18:31:06.433685165+03:00",
- "name": "CVE",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "cve",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": false,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Vulnerability"
- ],
- "systemAssociatedTypes": [
- "Vulnerability"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_cvss",
- "version": 2,
- "modified": "2018-08-23T18:31:06.436656554+03:00",
- "name": "CVSS",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "cvss",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": false,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Vulnerability"
- ],
- "systemAssociatedTypes": [
- "Vulnerability"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_dest",
- "version": 2,
- "modified": "2018-08-23T18:31:06.39174697+03:00",
- "name": "Dest",
- "ownerOnly": false,
- "placeholder": "",
- "description": "Destination",
- "cliName": "dest",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Access",
- "Malware"
- ],
- "systemAssociatedTypes": [
- "Access",
- "Malware"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_destntdomain",
- "version": 2,
- "modified": "2018-08-23T18:31:06.394843894+03:00",
- "name": "Dest NT Domain",
- "ownerOnly": false,
- "placeholder": "",
- "description": "Destination NT Domain",
- "cliName": "destntdomain",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Access",
- "Malware"
- ],
- "systemAssociatedTypes": [
- "Access",
- "Malware"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_duration",
- "version": 2,
- "modified": "2018-08-23T18:31:06.397705995+03:00",
- "name": "Duration",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "duration",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Access"
- ],
- "systemAssociatedTypes": [
- "Access"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailbcc",
- "version": 2,
- "modified": "2018-08-23T18:31:06.335010031+03:00",
- "name": "Email BCC",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailbcc",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailbody",
- "version": 2,
- "modified": "2018-08-23T18:31:06.337643992+03:00",
- "name": "Email Body",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailbody",
- "type": "longText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": true
- },
- {
- "id": "incident_emailbodyformat",
- "version": 2,
- "modified": "2018-08-23T18:31:06.34104065+03:00",
- "name": "Email Body Format",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailbodyformat",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailbodyhtml",
- "version": 2,
- "modified": "2018-08-23T18:31:06.344156391+03:00",
- "name": "Email Body HTML",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailbodyhtml",
- "type": "html",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": true
- },
- {
- "id": "incident_emailcc",
- "version": 2,
- "modified": "2018-08-23T18:31:06.347108901+03:00",
- "name": "Email CC",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailcc",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailclientname",
- "version": 2,
- "modified": "2018-08-23T18:31:06.349792637+03:00",
- "name": "Email Client Name",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailclientname",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailfrom",
- "version": 2,
- "modified": "2018-08-23T18:31:06.35251857+03:00",
- "name": "Email From",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailfrom",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailinreplyto",
- "version": 1,
- "modified": "2018-09-02T10:53:05.262517498+03:00",
- "name": "Email In Reply To",
- "ownerOnly": false,
- "placeholder": "emailinreplyto",
- "description": "The mail to whom you reply",
- "cliName": "emailinreplyto",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": false,
- "locked": false,
- "system": false,
- "content": false,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": null,
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailkeywords",
- "version": 2,
- "modified": "2018-08-23T18:31:06.355250331+03:00",
- "name": "Email Keywords",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailkeywords",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailmessageid",
- "version": 2,
- "modified": "2018-08-23T18:31:06.358269902+03:00",
- "name": "Email Message ID",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailmessageid",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailreceived",
- "version": 2,
- "modified": "2018-08-23T18:31:06.360983117+03:00",
- "name": "Email Received",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailreceived",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailreplyto",
- "version": 2,
- "modified": "2018-08-23T18:31:06.36346759+03:00",
- "name": "Email Reply To",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailreplyto",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailreturnpath",
- "version": 2,
- "modified": "2018-08-23T18:31:06.366098374+03:00",
- "name": "Email Return Path",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailreturnpath",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailsenderip",
- "version": 2,
- "modified": "2018-08-23T18:31:06.368675488+03:00",
- "name": "Email Sender IP",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailsenderip",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailsize",
- "version": 2,
- "modified": "2018-08-23T18:31:06.37171586+03:00",
- "name": "Email Size",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailsize",
- "type": "number",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailsource",
- "version": 2,
- "modified": "2018-08-23T18:31:06.375343721+03:00",
- "name": "Email Source",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailsource",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailsubject",
- "version": 2,
- "modified": "2018-08-23T18:31:06.378224888+03:00",
- "name": "Email Subject",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailsubject",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailto",
- "version": 2,
- "modified": "2018-08-23T18:31:06.380974651+03:00",
- "name": "Email To",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailto",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailtocount",
- "version": 2,
- "modified": "2018-08-23T18:31:06.383761983+03:00",
- "name": "Email To Count",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailtocount",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_emailurlclicked",
- "version": 2,
- "modified": "2018-08-23T18:31:06.386379045+03:00",
- "name": "Email URL Clicked",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "emailurlclicked",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Phishing"
- ],
- "systemAssociatedTypes": [
- "Phishing"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_filehash",
- "version": 2,
- "modified": "2018-08-23T18:31:06.405371418+03:00",
- "name": "File Hash",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "filehash",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Malware"
- ],
- "systemAssociatedTypes": [
- "Malware"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_filename",
- "version": 2,
- "modified": "2018-08-23T18:31:06.407952489+03:00",
- "name": "File Name",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "filename",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Malware"
- ],
- "systemAssociatedTypes": [
- "Malware"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_filepath",
- "version": 2,
- "modified": "2018-08-23T18:31:06.410700747+03:00",
- "name": "File Path",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "filepath",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": false,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Malware"
- ],
- "systemAssociatedTypes": [
- "Malware"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_malwarefamily",
- "version": 2,
- "modified": "2018-08-23T18:31:06.416319236+03:00",
- "name": "Malware Family",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "malwarefamily",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Malware"
- ],
- "systemAssociatedTypes": [
- "Malware"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_signature",
- "version": 2,
- "modified": "2018-08-23T18:31:06.440145161+03:00",
- "name": "Signature",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "signature",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": false,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Vulnerability",
- "Malware"
- ],
- "systemAssociatedTypes": [
- "Vulnerability",
- "Malware"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_src",
- "version": 2,
- "modified": "2018-08-23T18:31:06.400366431+03:00",
- "name": "Src",
- "ownerOnly": false,
- "placeholder": "",
- "description": "Source",
- "cliName": "src",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Access",
- "Malware"
- ],
- "systemAssociatedTypes": [
- "Access",
- "Malware"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_srcntdomain",
- "version": 2,
- "modified": "2018-08-23T18:31:06.402950395+03:00",
- "name": "Src NT Domain",
- "ownerOnly": false,
- "placeholder": "",
- "description": "Source NT Domain",
- "cliName": "srcntdomain",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Access"
- ],
- "systemAssociatedTypes": [
- "Access"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_srcuser",
- "version": 2,
- "modified": "2018-08-23T18:31:06.42473716+03:00",
- "name": "Src User",
- "ownerOnly": false,
- "placeholder": "",
- "description": "Source User",
- "cliName": "srcuser",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Access"
- ],
- "systemAssociatedTypes": [
- "Access"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_user",
- "version": 2,
- "modified": "2018-08-23T18:31:06.421943082+03:00",
- "name": "User",
- "ownerOnly": false,
- "placeholder": "",
- "description": "The user involved",
- "cliName": "user",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": false,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Malware"
- ],
- "systemAssociatedTypes": [
- "Malware"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_vendorid",
- "version": 2,
- "modified": "2018-08-23T18:31:06.443496625+03:00",
- "name": "Vendor ID",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "vendorid",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": [],
- "validationRegex": "",
- "useAsKpi": false,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Vulnerability"
- ],
- "systemAssociatedTypes": [
- "Vulnerability"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_vendorproduct",
- "version": 2,
- "modified": "2018-08-23T18:31:06.419089491+03:00",
- "name": "Vendor Product",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "vendorproduct",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": true,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Malware"
- ],
- "systemAssociatedTypes": [
- "Malware"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- },
- {
- "id": "incident_vulnerabilitycategory",
- "version": 2,
- "modified": "2018-08-23T18:31:06.446623086+03:00",
- "name": "Vulnerability Category",
- "ownerOnly": false,
- "placeholder": "",
- "description": "",
- "cliName": "vulnerabilitycategory",
- "type": "shortText",
- "closeForm": false,
- "editForm": true,
- "required": false,
- "script": "",
- "fieldCalcScript": "",
- "neverSetAsRequired": false,
- "isReadOnly": false,
- "selectValues": null,
- "validationRegex": "",
- "useAsKpi": false,
- "locked": false,
- "system": false,
- "content": true,
- "group": 0,
- "hidden": false,
- "associatedTypes": [
- "Vulnerability"
- ],
- "systemAssociatedTypes": [
- "Vulnerability"
- ],
- "associatedToAll": false,
- "unmapped": false,
- "unsearchable": false
- }
- ]
-}
-
diff --git a/IncidentTypes/incidenttype-Cortex_XDR_Incident.json b/IncidentTypes/incidenttype-Cortex_XDR_Incident.json
new file mode 100644
index 000000000000..5f7d55bd40fb
--- /dev/null
+++ b/IncidentTypes/incidenttype-Cortex_XDR_Incident.json
@@ -0,0 +1,24 @@
+{
+ "autorun": true,
+ "closureScript": "",
+ "color": "#6200EA",
+ "days": 0,
+ "daysR": 0,
+ "default": false,
+ "disabled": false,
+ "hours": 0,
+ "hoursR": 0,
+ "id": "Cortex XDR Incident",
+ "locked": false,
+ "name": "Cortex XDR Incident",
+ "playbookId": "Cortex XDR Incident Sync",
+ "preProcessingScript": "",
+ "readonly": false,
+ "reputationCalc": 0,
+ "sortValues": null,
+ "system": false,
+ "version": -1,
+ "weeks": 0,
+ "weeksR": 0,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentTypes/incidenttype-Cortex_XDR_Incident_CHANGELOG.md b/IncidentTypes/incidenttype-Cortex_XDR_Incident_CHANGELOG.md
new file mode 100644
index 000000000000..c93de5a44a4b
--- /dev/null
+++ b/IncidentTypes/incidenttype-Cortex_XDR_Incident_CHANGELOG.md
@@ -0,0 +1,2 @@
+## [Unreleased]
+Added the **Cortex XDR** incident type. **(Available from Demisto 5.0)**
diff --git a/IncidentTypes/incidenttype-GDPR_Data_Breach.json b/IncidentTypes/incidenttype-GDPR_Data_Breach.json
new file mode 100644
index 000000000000..b4592a0852a1
--- /dev/null
+++ b/IncidentTypes/incidenttype-GDPR_Data_Breach.json
@@ -0,0 +1,24 @@
+{
+ "autorun": true,
+ "closureScript": "",
+ "color": "#a3c9ff",
+ "days": 0,
+ "daysR": 0,
+ "default": false,
+ "disabled": false,
+ "hours": 0,
+ "hoursR": 0,
+ "id": "GDPR Data Breach",
+ "locked": false,
+ "name": "GDPR Data Breach",
+ "playbookId": "GDPR Breach Notification",
+ "preProcessingScript": "",
+ "readonly": false,
+ "reputationCalc": 0,
+ "sortValues": null,
+ "system": false,
+ "version": -1,
+ "weeks": 0,
+ "weeksR": 0,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IncidentTypes/incidenttype-GDPR_Data_Breach_CHANGELOG.md b/IncidentTypes/incidenttype-GDPR_Data_Breach_CHANGELOG.md
new file mode 100644
index 000000000000..9d377b7a09dc
--- /dev/null
+++ b/IncidentTypes/incidenttype-GDPR_Data_Breach_CHANGELOG.md
@@ -0,0 +1,2 @@
+## [Unreleased]
+Added the **GDPR Data Breach** incident type.
\ No newline at end of file
diff --git a/IncidentTypes/incidenttype-Traps.json b/IncidentTypes/incidenttype-Traps.json
new file mode 100644
index 000000000000..410e0fcdecea
--- /dev/null
+++ b/IncidentTypes/incidenttype-Traps.json
@@ -0,0 +1,24 @@
+{
+ "autorun": true,
+ "closureScript": "",
+ "color": "#32d296",
+ "days": 0,
+ "daysR": 0,
+ "default": false,
+ "disabled": false,
+ "hours": 0,
+ "hoursR": 0,
+ "id": "Traps",
+ "locked": false,
+ "name": "Traps",
+ "playbookId": "Palo Alto Networks - Endpoint Malware Investigation",
+ "preProcessingScript": "",
+ "readonly": false,
+ "reputationCalc": 0,
+ "sortValues": null,
+ "system": false,
+ "version": -1,
+ "weeks": 0,
+ "weeksR": 0,
+ "fromVersion": "5.0.0"
+}
\ No newline at end of file
diff --git a/IndicatorFields/incidentfield-accounttype.json b/IndicatorFields/incidentfield-accounttype.json
new file mode 100644
index 000000000000..13650b5a1819
--- /dev/null
+++ b/IndicatorFields/incidentfield-accounttype.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "accounttype",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_accounttype",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Account"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Account Type",
+ "locked": false,
+ "name": "Account Type",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.888212274Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-actor.json b/IndicatorFields/incidentfield-actor.json
new file mode 100644
index 000000000000..11e3ac791cf0
--- /dev/null
+++ b/IndicatorFields/incidentfield-actor.json
@@ -0,0 +1,49 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "actor",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_actor",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "multiSelect",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Actor",
+ "locked": false,
+ "name": "Actor",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.952150491Z",
+ "fieldCalcScript": "",
+ "selectValues": [
+ "",
+ ""
+ ],
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-admincountry.json b/IndicatorFields/incidentfield-admincountry.json
new file mode 100644
index 000000000000..97d1fb4cc431
--- /dev/null
+++ b/IndicatorFields/incidentfield-admincountry.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "admincountry",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_admincountry",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Admin Country",
+ "locked": false,
+ "name": "Admin Country",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.852448936Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-adminemail.json b/IndicatorFields/incidentfield-adminemail.json
new file mode 100644
index 000000000000..0eabca2f8157
--- /dev/null
+++ b/IndicatorFields/incidentfield-adminemail.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "adminemail",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_adminemail",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Admin Email",
+ "locked": false,
+ "name": "Admin Email",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.953431222Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-adminname.json b/IndicatorFields/incidentfield-adminname.json
new file mode 100644
index 000000000000..ac06efe128f4
--- /dev/null
+++ b/IndicatorFields/incidentfield-adminname.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "adminname",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_adminname",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Admin Name",
+ "locked": false,
+ "name": "Admin Name",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.987043849Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-adminphone.json b/IndicatorFields/incidentfield-adminphone.json
new file mode 100644
index 000000000000..0b2eb6d09229
--- /dev/null
+++ b/IndicatorFields/incidentfield-adminphone.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "adminphone",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_adminphone",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Admin Phone",
+ "locked": false,
+ "name": "Admin Phone",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.831030953Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-asn.json b/IndicatorFields/incidentfield-asn.json
new file mode 100644
index 000000000000..0efaf432298b
--- /dev/null
+++ b/IndicatorFields/incidentfield-asn.json
@@ -0,0 +1,47 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "asn",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_asn",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "IP",
+ "IP_tmp"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "ASN",
+ "locked": false,
+ "name": "ASN",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.869618253Z",
+ "fieldCalcScript": "",
+ "selectValues": [],
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-associatedfilenames.json b/IndicatorFields/incidentfield-associatedfilenames.json
new file mode 100644
index 000000000000..41507ab0f64b
--- /dev/null
+++ b/IndicatorFields/incidentfield-associatedfilenames.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "associatedfilenames",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_associatedfilenames",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "multiSelect",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": true,
+ "systemAssociatedTypes": null,
+ "prevName": "Associated File Names",
+ "locked": false,
+ "name": "Associated File Names",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.853697006Z",
+ "fieldCalcScript": "",
+ "selectValues": [],
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-biosversion.json b/IndicatorFields/incidentfield-biosversion.json
new file mode 100644
index 000000000000..d36a1f627dbe
--- /dev/null
+++ b/IndicatorFields/incidentfield-biosversion.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "biosversion",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_biosversion",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Host"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "BIOS Version",
+ "locked": false,
+ "name": "BIOS Version",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.957195324Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-creationdate.json b/IndicatorFields/incidentfield-creationdate.json
new file mode 100644
index 000000000000..c1e5ae0d89c0
--- /dev/null
+++ b/IndicatorFields/incidentfield-creationdate.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "creationdate",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_creationdate",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "date",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Creation Date",
+ "locked": false,
+ "name": "Creation Date",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.86043008Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-cvedescription.json b/IndicatorFields/incidentfield-cvedescription.json
new file mode 100644
index 000000000000..86001c4bcca9
--- /dev/null
+++ b/IndicatorFields/incidentfield-cvedescription.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "cvedescription",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_cvedescription",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "CVE"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "CVE Description",
+ "locked": false,
+ "name": "CVE Description",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.837856802Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-cvss.json b/IndicatorFields/incidentfield-cvss.json
new file mode 100644
index 000000000000..025ce2e92c96
--- /dev/null
+++ b/IndicatorFields/incidentfield-cvss.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "cvss",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_cvss",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": false,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "CVE"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "CVSS",
+ "locked": false,
+ "name": "CVSS",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-25T23:48:06.77753976Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-description.json b/IndicatorFields/incidentfield-description.json
new file mode 100644
index 000000000000..deb6c22c3f7f
--- /dev/null
+++ b/IndicatorFields/incidentfield-description.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "description",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_description",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": false,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "longText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "AWS Security Hub"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Description",
+ "locked": false,
+ "name": "Description",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-02-28T07:18:30.676828384Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-description_CHANGELOG.md b/IndicatorFields/incidentfield-description_CHANGELOG.md
new file mode 100644
index 000000000000..63439c17f377
--- /dev/null
+++ b/IndicatorFields/incidentfield-description_CHANGELOG.md
@@ -0,0 +1,2 @@
+## [Unreleased]
+Added the **Description** indicator field.
diff --git a/IndicatorFields/incidentfield-detectionengines.json b/IndicatorFields/incidentfield-detectionengines.json
new file mode 100644
index 000000000000..2eade6d7e569
--- /dev/null
+++ b/IndicatorFields/incidentfield-detectionengines.json
@@ -0,0 +1,44 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "detectionengines",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_detectionengines",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "number",
+ "breachScript": "",
+ "editForm": true,
+ "description": "Total number of engines that checked the indicator",
+ "associatedToAll": true,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": null,
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Detection Engines",
+ "locked": false,
+ "name": "Detection Engines",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.946829022Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-devicemodel.json b/IndicatorFields/incidentfield-devicemodel.json
new file mode 100644
index 000000000000..39a0bc6772d2
--- /dev/null
+++ b/IndicatorFields/incidentfield-devicemodel.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "devicemodel",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_devicemodel",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Host"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Device Model",
+ "locked": false,
+ "name": "Device Model",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.777067613Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-dhcpserver.json b/IndicatorFields/incidentfield-dhcpserver.json
new file mode 100644
index 000000000000..0f3fc37da87d
--- /dev/null
+++ b/IndicatorFields/incidentfield-dhcpserver.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "dhcpserver",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_dhcpserver",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Host"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "DHCP Server",
+ "locked": false,
+ "name": "DHCP Server",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.982671564Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-displayname.json b/IndicatorFields/incidentfield-displayname.json
new file mode 100644
index 000000000000..143b4945bcb6
--- /dev/null
+++ b/IndicatorFields/incidentfield-displayname.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "displayname",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_displayname",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Account"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Display Name",
+ "locked": false,
+ "name": "Display Name",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.788714027Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-dns.json b/IndicatorFields/incidentfield-dns.json
new file mode 100644
index 000000000000..acca85c42008
--- /dev/null
+++ b/IndicatorFields/incidentfield-dns.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "dns",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_dns",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "DNS",
+ "locked": false,
+ "name": "DNS",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.897424416Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-domainname.json b/IndicatorFields/incidentfield-domainname.json
new file mode 100644
index 000000000000..6112e9ded424
--- /dev/null
+++ b/IndicatorFields/incidentfield-domainname.json
@@ -0,0 +1,48 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "domainname",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_domainname",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Account",
+ "Email",
+ "Host"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Domain Name",
+ "locked": false,
+ "name": "Domain Name",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.782485969Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-domainstatus.json b/IndicatorFields/incidentfield-domainstatus.json
new file mode 100644
index 000000000000..bcde15ac7538
--- /dev/null
+++ b/IndicatorFields/incidentfield-domainstatus.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "domainstatus",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_domainstatus",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Domain Status",
+ "locked": false,
+ "name": "Domain Status",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.872350501Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-emailaddress.json b/IndicatorFields/incidentfield-emailaddress.json
new file mode 100644
index 000000000000..1ff61256bd88
--- /dev/null
+++ b/IndicatorFields/incidentfield-emailaddress.json
@@ -0,0 +1,47 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "emailaddress",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_emailaddress",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Account",
+ "Email"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Email Address",
+ "locked": false,
+ "name": "Email Address",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.984088642Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-entryid.json b/IndicatorFields/incidentfield-entryid.json
new file mode 100644
index 000000000000..2524e1c0f1bc
--- /dev/null
+++ b/IndicatorFields/incidentfield-entryid.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "entryid",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_entryid",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Entry ID",
+ "locked": false,
+ "name": "Entry ID",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.840427815Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-expirationdate.json b/IndicatorFields/incidentfield-expirationdate.json
new file mode 100644
index 000000000000..ed324b462f68
--- /dev/null
+++ b/IndicatorFields/incidentfield-expirationdate.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "expirationdate",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_expirationdate",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "date",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Expiration Date",
+ "locked": false,
+ "name": "Expiration Date",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.94414155Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-fileextension.json b/IndicatorFields/incidentfield-fileextension.json
new file mode 100644
index 000000000000..d69b2661bb25
--- /dev/null
+++ b/IndicatorFields/incidentfield-fileextension.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "fileextension",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_fileextension",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "File Extension",
+ "locked": false,
+ "name": "File Extension",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.918113205Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-filetype.json b/IndicatorFields/incidentfield-filetype.json
new file mode 100644
index 000000000000..eeb2987de072
--- /dev/null
+++ b/IndicatorFields/incidentfield-filetype.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "filetype",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_filetype",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "File Type",
+ "locked": false,
+ "name": "File Type",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.933135216Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-geocountry.json b/IndicatorFields/incidentfield-geocountry.json
new file mode 100644
index 000000000000..05fdbf038d68
--- /dev/null
+++ b/IndicatorFields/incidentfield-geocountry.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "geocountry",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_geocountry",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "IP"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Geo Country",
+ "locked": false,
+ "name": "Geo Country",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.934601473Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-geolocation.json b/IndicatorFields/incidentfield-geolocation.json
new file mode 100644
index 000000000000..549a2e7e25a8
--- /dev/null
+++ b/IndicatorFields/incidentfield-geolocation.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "geolocation",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_geolocation",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "IP"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Geo Location",
+ "locked": false,
+ "name": "Geo Location",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.988518454Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-groups.json b/IndicatorFields/incidentfield-groups.json
new file mode 100644
index 000000000000..7276c839c73f
--- /dev/null
+++ b/IndicatorFields/incidentfield-groups.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "groups",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_groups",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "longText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Account"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Groups",
+ "locked": false,
+ "name": "Groups",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.880275123Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-hostname.json b/IndicatorFields/incidentfield-hostname.json
new file mode 100644
index 000000000000..badf96abee84
--- /dev/null
+++ b/IndicatorFields/incidentfield-hostname.json
@@ -0,0 +1,47 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "hostname",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_hostname",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "IP",
+ "hostname"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Hostname",
+ "locked": false,
+ "name": "Hostname",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.95990462Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-id.json b/IndicatorFields/incidentfield-id.json
new file mode 100644
index 000000000000..37b15001e1e6
--- /dev/null
+++ b/IndicatorFields/incidentfield-id.json
@@ -0,0 +1,48 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "id",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_id",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Host",
+ "Account",
+ "CVE"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "ID",
+ "locked": false,
+ "name": "ID",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.784955234Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-internal.json b/IndicatorFields/incidentfield-internal.json
new file mode 100644
index 000000000000..8266d897906d
--- /dev/null
+++ b/IndicatorFields/incidentfield-internal.json
@@ -0,0 +1,51 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "internal",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_internal",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "boolean",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Account",
+ "Domain",
+ "Email",
+ "IP",
+ "URL",
+ "Host"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Internal",
+ "locked": false,
+ "name": "Internal",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.962741914Z",
+ "fieldCalcScript": "",
+ "selectValues": [],
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-ipaddress.json b/IndicatorFields/incidentfield-ipaddress.json
new file mode 100644
index 000000000000..d11785ffc334
--- /dev/null
+++ b/IndicatorFields/incidentfield-ipaddress.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "ipaddress",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_ipaddress",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Host"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "IP Address",
+ "locked": false,
+ "name": "IP Address",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.791332825Z",
+ "fieldCalcScript": "",
+ "selectValues": [],
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-macaddress.json b/IndicatorFields/incidentfield-macaddress.json
new file mode 100644
index 000000000000..a40a6bae8f7e
--- /dev/null
+++ b/IndicatorFields/incidentfield-macaddress.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "macaddress",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_macaddress",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Host"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "MAC Address",
+ "locked": false,
+ "name": "MAC Address",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.894840101Z",
+ "fieldCalcScript": "",
+ "selectValues": [],
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-malwarefamily.json b/IndicatorFields/incidentfield-malwarefamily.json
new file mode 100644
index 000000000000..a6d3aff96f80
--- /dev/null
+++ b/IndicatorFields/incidentfield-malwarefamily.json
@@ -0,0 +1,55 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "malwarefamily",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_malwarefamily",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": false,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "multiSelect",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Registry Key",
+ "CVE",
+ "Domain",
+ "Email",
+ "IP",
+ "File",
+ "URL"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Malware Family",
+ "locked": false,
+ "name": "Malware Family",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-25T23:48:06.811606449Z",
+ "fieldCalcScript": "",
+ "selectValues": [
+ "",
+ ""
+ ],
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-md5.json b/IndicatorFields/incidentfield-md5.json
new file mode 100644
index 000000000000..ad7dfed1544c
--- /dev/null
+++ b/IndicatorFields/incidentfield-md5.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "md5",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_md5",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "MD5",
+ "locked": false,
+ "name": "MD5",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.948154879Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-memory.json b/IndicatorFields/incidentfield-memory.json
new file mode 100644
index 000000000000..c4416a1d634d
--- /dev/null
+++ b/IndicatorFields/incidentfield-memory.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "memory",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_memory",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "number",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Host"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Memory",
+ "locked": false,
+ "name": "Memory",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.804871416Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-modified.json b/IndicatorFields/incidentfield-modified.json
new file mode 100644
index 000000000000..fba9f11d2838
--- /dev/null
+++ b/IndicatorFields/incidentfield-modified.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "modified",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_modified",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "date",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "CVE"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Modified",
+ "locked": false,
+ "name": "Modified",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.774473931Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-name.json b/IndicatorFields/incidentfield-name.json
new file mode 100644
index 000000000000..6b5eb685610c
--- /dev/null
+++ b/IndicatorFields/incidentfield-name.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "name",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_name",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Registry Key"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Name",
+ "locked": false,
+ "name": "Name",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.848389046Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-nameservers.json b/IndicatorFields/incidentfield-nameservers.json
new file mode 100644
index 000000000000..2e84561b8792
--- /dev/null
+++ b/IndicatorFields/incidentfield-nameservers.json
@@ -0,0 +1,49 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "nameservers",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_nameservers",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "multiSelect",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Name Servers",
+ "locked": false,
+ "name": "Name Servers",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.937388926Z",
+ "fieldCalcScript": "",
+ "selectValues": [
+ "",
+ ""
+ ],
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-operatingsystem.json b/IndicatorFields/incidentfield-operatingsystem.json
new file mode 100644
index 000000000000..d994d5029aef
--- /dev/null
+++ b/IndicatorFields/incidentfield-operatingsystem.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "operatingsystem",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_operatingsystem",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Host"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Operating System",
+ "locked": false,
+ "name": "Operating System",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.841713051Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-operatingsystemversion.json b/IndicatorFields/incidentfield-operatingsystemversion.json
new file mode 100644
index 000000000000..336c988d026f
--- /dev/null
+++ b/IndicatorFields/incidentfield-operatingsystemversion.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "operatingsystemversion",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_operatingsystemversion",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Host"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Operating System Version",
+ "locked": false,
+ "name": "Operating System Version",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.809174231Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-organization.json b/IndicatorFields/incidentfield-organization.json
new file mode 100644
index 000000000000..bf8aa5503c02
--- /dev/null
+++ b/IndicatorFields/incidentfield-organization.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "organization",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_organization",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Organization",
+ "locked": false,
+ "name": "Organization",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.786265318Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-organizationalunit(ou).json b/IndicatorFields/incidentfield-organizationalunit(ou).json
new file mode 100644
index 000000000000..a7623e2638af
--- /dev/null
+++ b/IndicatorFields/incidentfield-organizationalunit(ou).json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "organizationalunitou",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_organizationalunitou",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Account"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Organizational Unit (OU)",
+ "locked": false,
+ "name": "Organizational Unit (OU)",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.889595847Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-osversion.json b/IndicatorFields/incidentfield-osversion.json
new file mode 100644
index 000000000000..08e5d7aeaee9
--- /dev/null
+++ b/IndicatorFields/incidentfield-osversion.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "osversion",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_osversion",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": false,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Crowdstrike"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "OS Version",
+ "locked": false,
+ "name": "OS Version",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-28T00:54:02.710776811Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-osversion_CHANGELOG.md b/IndicatorFields/incidentfield-osversion_CHANGELOG.md
new file mode 100644
index 000000000000..63439c17f377
--- /dev/null
+++ b/IndicatorFields/incidentfield-osversion_CHANGELOG.md
@@ -0,0 +1,2 @@
+## [Unreleased]
+-
diff --git a/IndicatorFields/incidentfield-path.json b/IndicatorFields/incidentfield-path.json
new file mode 100644
index 000000000000..c4430869eac0
--- /dev/null
+++ b/IndicatorFields/incidentfield-path.json
@@ -0,0 +1,47 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "path",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_path",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File",
+ "Registry Key"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Path",
+ "locked": false,
+ "name": "Path",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.873603229Z",
+ "fieldCalcScript": "",
+ "selectValues": [],
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-positivedetections.json b/IndicatorFields/incidentfield-positivedetections.json
new file mode 100644
index 000000000000..5c3d07dd91b4
--- /dev/null
+++ b/IndicatorFields/incidentfield-positivedetections.json
@@ -0,0 +1,44 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "positivedetections",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_positivedetections",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "number",
+ "breachScript": "",
+ "editForm": true,
+ "description": "Number of engines that positively detected the indicator as malicious",
+ "associatedToAll": true,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": null,
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Positive Detections",
+ "locked": false,
+ "name": "Positive Detections",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.800745022Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-processor.json b/IndicatorFields/incidentfield-processor.json
new file mode 100644
index 000000000000..9b94622f3545
--- /dev/null
+++ b/IndicatorFields/incidentfield-processor.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "processor",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_processor",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "number",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Host"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Processor",
+ "locked": false,
+ "name": "Processor",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.949557833Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-processors.json b/IndicatorFields/incidentfield-processors.json
new file mode 100644
index 000000000000..e37df19449d1
--- /dev/null
+++ b/IndicatorFields/incidentfield-processors.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "processors",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_processors",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "number",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Host"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Processors",
+ "locked": false,
+ "name": "Processors",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.950915488Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-published.json b/IndicatorFields/incidentfield-published.json
new file mode 100644
index 000000000000..b52e68845892
--- /dev/null
+++ b/IndicatorFields/incidentfield-published.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "published",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_published",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "date",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "CVE"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Published",
+ "locked": false,
+ "name": "Published",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.792683357Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-registrantcountry.json b/IndicatorFields/incidentfield-registrantcountry.json
new file mode 100644
index 000000000000..2995b57f272d
--- /dev/null
+++ b/IndicatorFields/incidentfield-registrantcountry.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "registrantcountry",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_registrantcountry",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Registrant Country",
+ "locked": false,
+ "name": "Registrant Country",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.881612331Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-registrantemail.json b/IndicatorFields/incidentfield-registrantemail.json
new file mode 100644
index 000000000000..1bcc7f2f88cb
--- /dev/null
+++ b/IndicatorFields/incidentfield-registrantemail.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "registrantemail",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_registrantemail",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Registrant Email",
+ "locked": false,
+ "name": "Registrant Email",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.938649623Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-registrantname.json b/IndicatorFields/incidentfield-registrantname.json
new file mode 100644
index 000000000000..211ab874ddb2
--- /dev/null
+++ b/IndicatorFields/incidentfield-registrantname.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "registrantname",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_registrantname",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Registrant Name",
+ "locked": false,
+ "name": "Registrant Name",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.978277Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-registrantphone.json b/IndicatorFields/incidentfield-registrantphone.json
new file mode 100644
index 000000000000..d591c970fc73
--- /dev/null
+++ b/IndicatorFields/incidentfield-registrantphone.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "registrantphone",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_registrantphone",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Registrant Phone",
+ "locked": false,
+ "name": "Registrant Phone",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.890827286Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-registrarabuseemail.json b/IndicatorFields/incidentfield-registrarabuseemail.json
new file mode 100644
index 000000000000..77fd283fc0ab
--- /dev/null
+++ b/IndicatorFields/incidentfield-registrarabuseemail.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "registrarabuseemail",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_registrarabuseemail",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Registrar Abuse Email",
+ "locked": false,
+ "name": "Registrar Abuse Email",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.92524818Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-registrarabusephone.json b/IndicatorFields/incidentfield-registrarabusephone.json
new file mode 100644
index 000000000000..732fd945eb2c
--- /dev/null
+++ b/IndicatorFields/incidentfield-registrarabusephone.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "registrarabusephone",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_registrarabusephone",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Registrar Abuse Phone",
+ "locked": false,
+ "name": "Registrar Abuse Phone",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.979699038Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-registrarname.json b/IndicatorFields/incidentfield-registrarname.json
new file mode 100644
index 000000000000..0658b6807c99
--- /dev/null
+++ b/IndicatorFields/incidentfield-registrarname.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "registrarname",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_registrarname",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Registrar Name",
+ "locked": false,
+ "name": "Registrar Name",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.849685808Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-sha1.json b/IndicatorFields/incidentfield-sha1.json
new file mode 100644
index 000000000000..5afc0a76da2c
--- /dev/null
+++ b/IndicatorFields/incidentfield-sha1.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "sha1",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_sha1",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "SHA1",
+ "locked": false,
+ "name": "SHA1",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.866939952Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-sha256.json b/IndicatorFields/incidentfield-sha256.json
new file mode 100644
index 000000000000..1be35b835937
--- /dev/null
+++ b/IndicatorFields/incidentfield-sha256.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "sha256",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_sha256",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "SHA256",
+ "locked": false,
+ "name": "SHA256",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.911506918Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-sha512.json b/IndicatorFields/incidentfield-sha512.json
new file mode 100644
index 000000000000..95d979f5e4d6
--- /dev/null
+++ b/IndicatorFields/incidentfield-sha512.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "sha512",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_sha512",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "SHA512",
+ "locked": false,
+ "name": "SHA512",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.955892136Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-signatureauthentihash.json b/IndicatorFields/incidentfield-signatureauthentihash.json
new file mode 100644
index 000000000000..8339b1dd5c5f
--- /dev/null
+++ b/IndicatorFields/incidentfield-signatureauthentihash.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "signatureauthentihash",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_signatureauthentihash",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Signature Authentihash",
+ "locked": false,
+ "name": "Signature Authentihash",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.819101201Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-signaturecopyright.json b/IndicatorFields/incidentfield-signaturecopyright.json
new file mode 100644
index 000000000000..cf0f7635eb21
--- /dev/null
+++ b/IndicatorFields/incidentfield-signaturecopyright.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "signaturecopyright",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_signaturecopyright",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Signature Copyright",
+ "locked": false,
+ "name": "Signature Copyright",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.829624633Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-signaturedescription.json b/IndicatorFields/incidentfield-signaturedescription.json
new file mode 100644
index 000000000000..d69d61c54692
--- /dev/null
+++ b/IndicatorFields/incidentfield-signaturedescription.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "signaturedescription",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_signaturedescription",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Signature Description",
+ "locked": false,
+ "name": "Signature Description",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.868258939Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-signaturefileversion.json b/IndicatorFields/incidentfield-signaturefileversion.json
new file mode 100644
index 000000000000..1088a374d7f1
--- /dev/null
+++ b/IndicatorFields/incidentfield-signaturefileversion.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "signaturefileversion",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_signaturefileversion",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Signature File Version",
+ "locked": false,
+ "name": "Signature File Version",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.835218156Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-signatureinternalname.json b/IndicatorFields/incidentfield-signatureinternalname.json
new file mode 100644
index 000000000000..dad903d5f33b
--- /dev/null
+++ b/IndicatorFields/incidentfield-signatureinternalname.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "signatureinternalname",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_signatureinternalname",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Signature Internal Name",
+ "locked": false,
+ "name": "Signature Internal Name",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.862923979Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-signed.json b/IndicatorFields/incidentfield-signed.json
new file mode 100644
index 000000000000..6d8cd7ea5a3c
--- /dev/null
+++ b/IndicatorFields/incidentfield-signed.json
@@ -0,0 +1,47 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "signed",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_signed",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "boolean",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File",
+ "URL"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Signed",
+ "locked": false,
+ "name": "Signed",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.930479918Z",
+ "fieldCalcScript": "",
+ "selectValues": [],
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-size.json b/IndicatorFields/incidentfield-size.json
new file mode 100644
index 000000000000..fa5e2df3ae83
--- /dev/null
+++ b/IndicatorFields/incidentfield-size.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "size",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_size",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "number",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Size",
+ "locked": false,
+ "name": "Size",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.985574854Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-ssdeep.json b/IndicatorFields/incidentfield-ssdeep.json
new file mode 100644
index 000000000000..fc9412c1d27d
--- /dev/null
+++ b/IndicatorFields/incidentfield-ssdeep.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "ssdeep",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_ssdeep",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "SSDeep",
+ "locked": false,
+ "name": "SSDeep",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.912771358Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-subdomains.json b/IndicatorFields/incidentfield-subdomains.json
new file mode 100644
index 000000000000..07a70f6c1c52
--- /dev/null
+++ b/IndicatorFields/incidentfield-subdomains.json
@@ -0,0 +1,49 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "subdomains",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_subdomains",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "multiSelect",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Subdomains",
+ "locked": false,
+ "name": "Subdomains",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.836445241Z",
+ "fieldCalcScript": "",
+ "selectValues": [
+ "",
+ ""
+ ],
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-tags.json b/IndicatorFields/incidentfield-tags.json
new file mode 100644
index 000000000000..c87d4df9d74f
--- /dev/null
+++ b/IndicatorFields/incidentfield-tags.json
@@ -0,0 +1,49 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "tags",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_tags",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "multiSelect",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "File"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Tags",
+ "locked": false,
+ "name": "Tags",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.882925705Z",
+ "fieldCalcScript": "",
+ "selectValues": [
+ "",
+ ""
+ ],
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-updateddate.json b/IndicatorFields/incidentfield-updateddate.json
new file mode 100644
index 000000000000..8040d0723a03
--- /dev/null
+++ b/IndicatorFields/incidentfield-updateddate.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "updateddate",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_updateddate",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "date",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Domain"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Updated Date",
+ "locked": false,
+ "name": "Updated Date",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.794046235Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/IndicatorFields/incidentfield-username.json b/IndicatorFields/incidentfield-username.json
new file mode 100644
index 000000000000..02f80ed00962
--- /dev/null
+++ b/IndicatorFields/incidentfield-username.json
@@ -0,0 +1,46 @@
+{
+ "sortValues": null,
+ "closeForm": false,
+ "cliName": "username",
+ "neverSetAsRequired": false,
+ "threshold": 72,
+ "defaultRows": null,
+ "id": "indicator_username",
+ "validatedError": "",
+ "group": 2,
+ "script": "",
+ "isReadOnly": false,
+ "commitMessage": "",
+ "system": false,
+ "content": true,
+ "unsearchable": false,
+ "version": -1,
+ "unmapped": false,
+ "hidden": false,
+ "type": "shortText",
+ "breachScript": "",
+ "editForm": true,
+ "description": "",
+ "associatedToAll": false,
+ "shouldCommit": false,
+ "shouldPublish": false,
+ "associatedTypes": [
+ "Account"
+ ],
+ "caseInsensitive": true,
+ "columns": null,
+ "placeholder": "",
+ "useAsKpi": false,
+ "systemAssociatedTypes": null,
+ "prevName": "Username",
+ "locked": false,
+ "name": "Username",
+ "ownerOnly": false,
+ "required": false,
+ "modified": "2019-07-29T10:37:51.876223442Z",
+ "fieldCalcScript": "",
+ "selectValues": null,
+ "validationRegex": "",
+ "sla": 0,
+ "fromVersion": "5.0.0"
+}
diff --git a/Integrations/ANYRUN/ANYRUN.py b/Integrations/ANYRUN/ANYRUN.py
new file mode 100644
index 000000000000..94e7cc647e9d
--- /dev/null
+++ b/Integrations/ANYRUN/ANYRUN.py
@@ -0,0 +1,912 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+from typing import Optional
+
+''' IMPORTS '''
+
+import re
+import os
+import json
+import requests
+from base64 import b64encode
+
+''' GLOBAL VARS / INSTANCE CONFIGURATION '''
+
+PARAMS = demisto.params()
+USERNAME = PARAMS.get('credentials', {}).get('identifier', '')
+PASSWORD = PARAMS.get('credentials', {}).get('password', '')
+AUTH = (USERNAME + ':' + PASSWORD).encode('utf-8')
+BASIC_AUTH = 'Basic ' + b64encode(AUTH).decode()
+# Remove trailing slash to prevent wrong URL path to service
+SERVER = PARAMS.get('url', '')
+SERVER = SERVER[:-1] if (SERVER and SERVER.endswith('/')) else SERVER
+# Service base URL
+BASE_URL = SERVER + '/v1/'
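+# e.g. a configured URL of 'https://app.any.run/' (hypothetical address) is normalized to
+# 'https://app.any.run' above, giving BASE_URL 'https://app.any.run/v1/' rather than '...//v1/'.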
+# Should we use SSL
+USE_SSL = not PARAMS.get('insecure', False)
+PROXY = PARAMS.get('proxy', False)
+# Headers to be sent in requests
+HEADERS = {
+ 'Authorization': BASIC_AUTH
+}
+# Context fields that should always be uppercase
+ALWAYS_UPPER_CASE = {
+ 'md5', 'sha1', 'sha256', 'sha512', 'pcap', 'ip',
+ 'url', 'id', 'pid', 'ppid', 'uuid', 'asn', 'mime'
+}
+THREAT_TEXT_TO_DBOTSCORE = {
+ 'no threats detected': 1,
+ 'suspicious activity': 2,
+ 'malicious activity': 3
+}
+
+''' SETUP '''
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+# Remove proxy if not set to true in params
+if not PROXY:
+ os.environ.pop('HTTP_PROXY', '')
+ os.environ.pop('HTTPS_PROXY', '')
+ os.environ.pop('http_proxy', '')
+ os.environ.pop('https_proxy', '')
+
+''' HELPER FUNCTIONS '''
+
+
+def underscore_to_camel_case(s):
+ """
+    Convert an underscore-separated string to camel case, leaving one-word strings untouched
+
+ Parameters
+ ----------
+ s : str
+ The string to convert (e.g. heLLo_world) (required).
+
+ Returns
+ -------
+ str
+ The converted string (e.g. heLLoWorld).
+ """
+
+ if not isinstance(s, str):
+ return s
+ components = s.split('_')
+ return ''.join(x.title() if i != 0 else x for i, x in enumerate(components))
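+
+# Illustrative, per the docstring above: underscore_to_camel_case('heLLo_world') returns
+# 'heLLoWorld', while a one-word string such as 'verdict' is returned unchanged.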
+
+
+def make_upper(string):
+ """
+    Make argument uppercase if it is a member of the 'ALWAYS_UPPER_CASE' global set
+
+ Parameters
+ ----------
+ string : str
+ The string to check and potentially make uppercase.
+
+ Returns
+ -------
+ str
+ Uppercased string (or original string if it didn't match the criteria).
+ """
+
+ if isinstance(string, str):
+ if string.casefold() in ALWAYS_UPPER_CASE:
+ return string.upper()
+ elif string.casefold() == 'ssdeep': # special case
+ return 'SSDeep'
+ else:
+ return string
+ else:
+ return string
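+
+# Illustrative: make_upper('sha256') -> 'SHA256'; make_upper('ssdeep') -> 'SSDeep' (the special
+# case); strings outside ALWAYS_UPPER_CASE, e.g. 'filename', come back unchanged.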
+
+
+def make_capital(string):
+ """Capitalize first letter of a string, leaving the rest of the string as is
+
+ Parameters
+ ----------
+ string : str
+ The string to capitalize (e.g. 'foRUm').
+
+ Returns
+ -------
+ str
+ The capitalized string (e.g. 'FoRUm').
+ """
+
+ if isinstance(string, str) and string:
+ return string[:1].upper() + string[1:]
+ else:
+ return string
+
+
+def make_singular(word):
+ """Relatively naive/imperfect function to make a word singular
+
+ Parameters
+ ----------
+ word : str
+ The string to make singular (e.g. 'zebras').
+
+ Returns
+ -------
+ str
+        The string in singular form (e.g. 'zebra').
+ """
+
+ if not isinstance(word, str) or not word:
+ return word
+
+ word_as_lower = word.casefold()
+ # Not a plural
+ if not word_as_lower.endswith('s'):
+ return word
+ # Word ends in 's' and is therefore possibly plural
+ else:
+ es_endings = ('sses', 'shes', 'ches', 'xes', 'zes')
+ if word_as_lower.endswith(es_endings):
+ # Then the word was pluralized by adding 'es'
+ return word[:-2]
+ elif word_as_lower.endswith('ss'):
+ # Then it's probably not a plural, e.g. 'assess' or 'process'
+ return word
+ elif len(word) <= 2:
+ # Then it's probably not a plural, e.g. 'OS'
+ return word
+ elif word_as_lower.endswith('sis') or word_as_lower.endswith('us'):
+ # Then it's probably singular like 'analysis' and 'cactus' and 'focus'
+ return word
+ else:
+ # Assume regular noun pluralization of adding an 's'
+ return word[:-1]
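+
+# Illustrative expected behavior: make_singular('zebras') -> 'zebra' and
+# make_singular('boxes') -> 'box', while 'process', 'analysis' and 'OS' are left as-is.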
+
+
+def travel_object(obj, key_functions=(), val_functions=()):
+ """Recursively apply functions to the keys and values of a dictionary
+
+ Parameters
+ ----------
+ obj : dict/list
+ List or dict to recurse through.
+    key_functions : iterable
+        Functions to apply to the keys in 'obj'.
+    val_functions : iterable
+        Functions to apply to the values in 'obj'.
+
+ Returns
+ -------
+ list/dict
+ A list or dict in which all nested keys and values have been
+ altered by the key_functions and val_functions respectively.
+ """
+
+ def operate_on_dict(the_dict):
+ new_dict = {}
+ for key, val in the_dict.items():
+ new_key = key
+ for key_func in key_functions:
+ new_key = key_func(new_key)
+            if isinstance(val, (dict, list)):
+ new_val = travel_object(val, key_functions=key_functions, val_functions=val_functions)
+ else:
+ new_val = val
+ for val_func in val_functions:
+                new_val = val_func(new_val)  # chain each function over the running value
+ new_dict[new_key] = new_val
+ return new_dict
+
+ if isinstance(obj, list):
+ new_list = []
+ for item in obj:
+ new_item = operate_on_dict(item) if isinstance(item, dict) else item
+ new_list.append(new_item)
+ return new_list
+ elif isinstance(obj, dict):
+ altered_dict = operate_on_dict(obj)
+ return altered_dict
+ else:
+ err_msg = 'Invalid type: the passed "obj" argument was not of type "dict" or "list".'
+ raise TypeError(err_msg)
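+
+# Illustrative composition of the helpers above (hypothetical input): calling
+# travel_object({'threat_level': 'high'}, key_functions=[underscore_to_camel_case, make_capital])
+# returns {'ThreatLevel': 'high'}.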
+
+
+def generate_dbotscore(response):
+ """Creates DBotScore object based on the contents of 'response' argument
+
+ Parameters
+ ----------
+ response : dict
+ Object returned by ANYRUN API call in 'get_report' function.
+
+ Returns
+ -------
+ dict
+ A DBotScore object.
+ """
+
+ analysis = response.get('data', {}).get('analysis', {})
+ main_object = analysis.get('content', {}).get('mainObject', {})
+ submission_type = main_object.get('type')
+ submission_type = 'hash' if submission_type in {'file', 'download'} else submission_type
+ threat_text = analysis.get('scores', {}).get('verdict', {}).get('threatLevelText', '').casefold()
+ if submission_type == 'hash':
+ hashes = main_object.get('hashes', {})
+ indicator = hashes.get('sha256', hashes.get('sha1', hashes.get('md5')))
+ else:
+ indicator = main_object.get('url')
+ dbot_score = {
+ "DBotScore": {
+ "Indicator": indicator,
+ "Type": submission_type,
+ "Vendor": "ANYRUN",
+ "Score": THREAT_TEXT_TO_DBOTSCORE.get(threat_text, 0)
+ }
+ }
+ return dbot_score
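+
+# Illustrative: for a detonated file whose verdict text is 'malicious activity', this yields
+# {'DBotScore': {'Indicator': <its sha256>, 'Type': 'hash', 'Vendor': 'ANYRUN', 'Score': 3}}.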
+
+
+def add_malicious_key(entity, verdict):
+ """Return the entity with the additional 'Malicious' key if determined as such by ANYRUN
+
+ Parameters
+ ----------
+ entity : dict
+ File or URL object.
+ verdict : dict
+ Task analysis verdict for a detonated file or url.
+
+ Returns
+ -------
+ dict
+ The modified entity if it was malicious, otherwise the original entity.
+ """
+
+ threat_level_text = verdict.get('threatLevelText', '')
+
+ if threat_level_text.casefold() == 'malicious activity':
+ entity['Malicious'] = {
+ 'Vendor': 'ANYRUN',
+ 'Description': threat_level_text
+ }
+ return entity
+
+
+def ec_file(main_object):
+ """Return File entity in Demisto format for use in entry context
+
+ Parameters
+ ----------
+ main_object : dict
+ The main object from a report's contents.
+
+ Returns
+ -------
+ dict
+ File object populated by report contents.
+ """
+
+ name = main_object.get('filename')
+ hashes = main_object.get('hashes', {})
+ md5 = hashes.get('md5')
+ sha1 = hashes.get('sha1')
+ sha256 = hashes.get('sha256')
+ ssdeep = hashes.get('ssdeep')
+ ext = main_object.get('info', {}).get('ext')
+
+ file_ec = {
+ 'File': {
+ 'Name': name,
+ 'MD5': md5,
+ 'SHA1': sha1,
+ 'SHA256': sha256,
+ 'SSDeep': ssdeep,
+ 'Extension': ext
+ }
+ }
+ return file_ec
+
+
+def ec_url(main_object):
+ """Return URL entity in Demisto format for use in entry context
+
+ Parameters
+ ----------
+ main_object : dict
+ The main object from a report's contents.
+
+ Returns
+ -------
+ dict
+ URL object populated by report contents.
+ """
+
+ url = main_object.get('url')
+
+ url_ec = {
+ 'URL': {
+ 'Data': url
+ }
+ }
+ return url_ec
+
+
+def ec_entity(response):
+ """
+ Return URL or File entity in Demisto format for use in entry
+ context depending on data in 'response' (the report)
+
+ Parameters
+ ----------
+ response : dict
+ Object returned by ANYRUN API call in 'get_report' function.
+
+ Returns
+ -------
+ dict
+ File or URL object populated by report contents.
+ """
+
+ analysis = response.get('data', {}).get('analysis', {})
+ verdict = analysis.get('scores', {}).get('verdict', {})
+ main_object = analysis.get('content', {}).get('mainObject', {})
+ submission_type = main_object.get('type')
+ entity = None
+ if submission_type == 'url':
+ entity = ec_url(main_object)
+ entity['URL'] = add_malicious_key(entity.get('URL', {}), verdict)
+ else:
+ entity = ec_file(main_object)
+ entity['File'] = add_malicious_key(entity.get('File', {}), verdict)
+ return entity
+
+
+def taskid_from_url(anyrun_url):
+ """Extract task ID from ANYRUN url inside a 'task' result returned by the get_history command
+
+ Parameters
+ ----------
+ anyrun_url : str
+ URL that contains an ANYRUN task ID.
+
+ Returns
+ -------
+ str
+ An ANYRUN task ID.
+ """
+
+ pattern = r'tasks/(.*?)/'
+ match = re.search(pattern, anyrun_url)
+ if match:
+ task_id = match.groups()[0]
+ else:
+ task_id = None
+ return task_id
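+
+# Illustrative, assuming the usual task-URL shape:
+# taskid_from_url('https://app.any.run/tasks/<task-id>/') -> '<task-id>'.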
+
+
+def images_from_report(response):
+ """Retrieve images from an ANYRUN report
+
+ Parameters
+ ----------
+ response : dict
+ Object returned by ANYRUN API call in 'get_report' function.
+
+ Returns
+ -------
+ list
+ List of images from ANYRUN report.
+ """
+
+ data = response.get('data', {})
+ analysis = data.get('analysis', {})
+ content = analysis.get('content', {})
+ screenshots = content.get('screenshots', [])
+
+ screen_captures = []
+ for idx, shot in enumerate(screenshots):
+ screen_cap_url = shot.get('permanentUrl')
+ img_response = requests.request('GET', screen_cap_url, verify=USE_SSL)
+ stored_img = fileResult('screenshot{}.png'.format(idx), img_response.content)
+ img_entry = {
+ 'Type': entryTypes['image'],
+ 'ContentsFormat': formats['text'],
+ 'File': stored_img['File'],
+ 'FileID': stored_img['FileID'],
+ 'Contents': ''
+ }
+ screen_captures.append(img_entry)
+ return screen_captures
+
+
+def contents_from_report(response):
+ """Selectively retrieve content from an ANYRUN report
+
+ Parameters
+ ----------
+ response : dict
+ Object returned by ANYRUN API call in 'get_report' function.
+
+ Returns
+ -------
+ dict
+ Selected content from ANYRUN report.
+ """
+
+ data = response.get('data', {})
+ environment = data.get('environments', {})
+ analysis = data.get('analysis', {})
+ processes = data.get('processes', [])
+ incidents = data.get('incidents', [])
+ status = data.get('status')
+
+    # Retrieve environment info from response ('os_name' avoids shadowing the imported os module)
+    os_name = environment.get('os', {}).get('title')
+
+ # Retrieve threat score + info from response
+ score = analysis.get('scores', {})
+ verdict = score.get('verdict', {})
+ threat_level_text = verdict.get('threatLevelText')
+
+ # Retrieve analysis time stuff
+ start_text = analysis.get('creationText')
+
+ # Retrieve submitted file info from response
+ content = analysis.get('content', {})
+ main_object = content.get('mainObject', {})
+ info = main_object.get('info', {})
+ mime = info.get('mime')
+ file_info = info.get('file')
+ hashes = main_object.get('hashes')
+
+ # Retrieve network details
+ network = data.get('network', {})
+ threats = network.get('threats', [])
+ connections = network.get('connections', [])
+ http_reqs = network.get('httpRequests', [])
+ dns_requests = network.get('dnsRequests', [])
+
+ reformatted_threats = []
+ for threat in threats:
+ reformatted_threat = {
+ 'ProcessUUID': threat.get('process'),
+ 'Message': threat.get('msg'),
+ 'Class': threat.get('class'),
+ 'SrcPort': threat.get('srcport'),
+ 'DstPort': threat.get('dstport'),
+ 'SrcIP': threat.get('srcip'),
+ 'DstIP': threat.get('dstip')
+ }
+ reformatted_threats.append(reformatted_threat)
+ network['threats'] = reformatted_threats
+
+ reformatted_connections = []
+ for connection in connections:
+ reformatted_connection = {
+ 'Reputation': connection.get('reputation'),
+ 'ProcessUUID': connection.get('process'),
+ 'ASN': connection.get('asn'),
+ 'Country': connection.get('country'),
+ 'Protocol': connection.get('protocol'),
+ 'Port': connection.get('port'),
+ 'IP': connection.get('ip')
+ }
+ reformatted_connections.append(reformatted_connection)
+ network['connections'] = reformatted_connections
+
+ reformatted_http_reqs = []
+ for http_req in http_reqs:
+ reformatted_http_req = {
+ 'Reputation': http_req.get('reputation'),
+ 'Country': http_req.get('country'),
+ 'ProcessUUID': http_req.get('process'),
+ 'Body': http_req.get('body'),
+ 'HttpCode': http_req.get('httpCode'),
+ 'Status': http_req.get('status'),
+ 'ProxyDetected': http_req.get('proxyDetected'),
+ 'Port': http_req.get('port'),
+ 'IP': http_req.get('ip'),
+ 'URL': http_req.get('url'),
+ 'Host': http_req.get('host'),
+ 'Method': http_req.get('method')
+ }
+ reformatted_http_reqs.append(reformatted_http_req)
+ network['httpRequests'] = reformatted_http_reqs
+
+ reformatted_dns_requests = []
+ for dns_request in dns_requests:
+ reformatted_dns_request = {
+ 'Reputation': dns_request.get('reputation'),
+ 'IP': dns_request.get('ips'),
+ 'Domain': dns_request.get('domain')
+ }
+ reformatted_dns_requests.append(reformatted_dns_request)
+ network['dnsRequests'] = reformatted_dns_requests
+
+ # Retrieve process details
+ reformatted_processes = []
+ for process in processes:
+ context = process.get('context', {})
+ reformatted_process = {
+ 'FileName': process.get('fileName'),
+ 'PID': process.get('pid'),
+ 'PPID': process.get('ppid'),
+ 'ProcessUUID': process.get('uuid'),
+ 'CMD': process.get('commandLine'),
+ 'Path': process.get('image'),
+ 'User': context.get('userName'),
+ 'IntegrityLevel': context.get('integrityLevel'),
+ 'ExitCode': process.get('exitCode'),
+ 'MainProcess': process.get('mainProcess'),
+ 'Version': process.get('versionInfo', {})
+ }
+ reformatted_processes.append(reformatted_process)
+
+ # Retrieve incident details
+ reformatted_incidents = []
+ for incident in incidents:
+ reformatted_incident = {
+ 'ProcessUUID': incident.get('process'),
+ 'Category': incident.get('desc'),
+ 'Action': incident.get('title'),
+ 'ThreatLevel': incident.get('threatLevel')
+ }
+ reformatted_incidents.append(reformatted_incident)
+
+ contents = {
+        'OS': os_name,
+ 'AnalysisDate': start_text,
+ 'Verdict': threat_level_text,
+ 'MIME': mime,
+ 'FileInfo': file_info,
+ 'Process': reformatted_processes,
+ 'Behavior': reformatted_incidents,
+ 'Status': status
+ }
+ if hashes:
+ for key, val in hashes.items():
+ contents[key] = val
+ if network:
+ for key, val in network.items():
+ contents[key] = val
+
+ return contents
+
+
+def humanreadable_from_report_contents(contents):
+ """Make the selected contents pulled from a report suitable for war room output
+
+ Parameters
+ ----------
+ contents : dict
+ Contents selected from an ANYRUN report for Demisto output.
+
+ Returns
+ -------
+ dict
+ Contents formatted so that nested dicts/lists appear nicely in a war room
+ entry.
+ """
+
+ def dict_to_string(nested_dict):
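+        # Render a nested dict as plain "key: value, ..." text,
+        # e.g. {"Company": "Microsoft"} -> "Company: Microsoft"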
+ return json.dumps(nested_dict).lstrip('{').rstrip('}').replace('\'', '').replace('\"', '')
+
+ humanreadable_contents = {}
+ for key, val in contents.items():
+ if isinstance(val, dict):
+ humanreadable_contents[key] = dict_to_string(val)
+ elif isinstance(val, list):
+ humanreadable_vals = []
+ for item in val:
+ if isinstance(item, dict):
+ humanreadable_vals.append(dict_to_string(item))
+ else:
+ humanreadable_vals.append(item)
+ humanreadable_contents[key] = humanreadable_vals
+ else:
+ humanreadable_contents[key] = val
+ return humanreadable_contents
+
+
+def contents_from_history(filter, response):
+ """Return desired fields from filtered response
+
+ Parameters
+ ----------
+ filter : str
+ File name (for a file analysis), URL (for a URL analysis),
+ Task ID, or hash by which to filter task history.
+ response : dict
+ Object returned by ANYRUN API call in 'get_history' function.
+
+ Returns
+ -------
+ list
+ List of Task summaries matching the filter.
+ """
+
+ # Filter response
+    tasks = response.get('data', {}).get('tasks', [])
+ desired_fields = {'related', 'verdict', 'date'}
+ filtered_tasks = []
+ for task in tasks:
+ # First fetch fields that we can filter on
+ name = task.get('name')
+        hashes = task.get('hashes') or {}
+ file_url = task.get('file')
+ task_id = taskid_from_url(file_url)
+
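+        # Keep the task only if the filter matches its name, task ID, or one of its hashes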
+ if filter and filter not in {name, task_id, *hashes.values()}:
+ continue
+
+ # Reconstruct task dict with desired output fields if filter satisfied
+ filtered_task = {'name': name, 'id': task_id, 'file': file_url, 'hashes': hashes}
+ for field in task:
+ if field in desired_fields:
+ filtered_task[field] = task.get(field)
+ filtered_tasks.append(filtered_task)
+
+ return filtered_tasks
+
+
+def http_request(method, url_suffix, params=None, data=None, files=None):
+ """
+    A wrapper around the requests library that sends API requests and
+    handles error responses
+
+ Parameters
+ ----------
+ method : str
+ HTTP method, e.g. 'GET', 'POST' ... etc.
+ url_suffix : str
+ API endpoint.
+ params : dict
+ URL parameters.
+ data : dict
+ Data to be sent in a 'POST' request.
+ files : dict
+ File data to be sent in a 'POST' request.
+
+ Returns
+ -------
+ dict
+ Response JSON from having made the request.
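+
+    Examples
+    --------
+    >>> http_request('GET', 'analysis/', params={'limit': 1})  # doctest: +SKIP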
+ """
+ try:
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ files=files,
+ headers=HEADERS
+ )
+
+ # Handle error responses gracefully
+ if res.status_code not in {200, 201}:
+ err_msg = 'Error in ANYRUN Integration API call [{}] - {}'.format(res.status_code, res.reason)
+ try:
+ if res.json().get('error'):
+ err_msg += '\n{}'.format(res.json().get('message'))
+ return_error(err_msg)
+ except json.decoder.JSONDecodeError:
+ return_error(err_msg)
+
+ return res.json()
+
+ except requests.exceptions.ConnectionError:
+ err_msg = 'Connection Error - Check that the Server URL parameter is correct.'
+ return_error(err_msg)
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """Performs get_history API call to verify integration is operational
+
+ Returns
+ -------
+ str
+ 'ok' message.
+ """
+
+ get_history(args={'limit': 1})
+ demisto.results('ok')
+
+
+def get_history(args=None):
+ """Make API call to ANYRUN to get analysis history
+
+ Parameters
+ ----------
+ args : dict
+ URL parameters that determine which, and how many results are
+ returned in the response.
+
+ Returns
+ -------
+ dict
+ Response JSON from ANYRUN API call.
+ """
+
+ url_suffix = 'analysis/'
+    params = args or {}
+ response = http_request('GET', url_suffix=url_suffix, params=params)
+ return response
+
+
+def get_history_command():
+ """Return ANYRUN task analysis history to Demisto"""
+
+ args = demisto.args()
+ filter = args.pop('filter', None)
+ response = get_history(args)
+ contents = contents_from_history(filter, response)
+
+ formatting_funcs = [underscore_to_camel_case, make_capital, make_singular, make_upper]
+ formatted_contents = travel_object(contents, key_functions=formatting_funcs)
+ if contents:
+ entry_context: Optional[dict] = {
+ 'ANYRUN.Task(val.ID && val.ID === obj.ID)': formatted_contents
+ }
+ title = 'Task History - Filtered By "{}"'.format(filter) if filter else 'Task History'
+ # Make Related Clickable
+ for task in formatted_contents:
+ related = task.get('Related', '')
+ task['Related'] = '[{}]({})'.format(related, related)
+ human_readable = tableToMarkdown(title, formatted_contents, removeNull=True)
+ else:
+ human_readable = 'No results found.'
+ entry_context = None
+
+ return_outputs(readable_output=human_readable, outputs=entry_context, raw_response=response)
+
+
+def get_report(task_id):
+ """Make API call to ANYRUN to get task report
+
+ Parameters
+ ----------
+ task_id : str
+ The unique task ID of the analysis whose report to fetch.
+
+ Returns
+ -------
+ dict
+ Response JSON from ANYRUN API call.
+ """
+
+ url_suffix = 'analysis/' + task_id
+ response = http_request('GET', url_suffix=url_suffix)
+ return response
+
+
+def get_report_command():
+ """Return ANYRUN analysis report to Demisto"""
+ args = demisto.args()
+ task_id = args.get('task')
+ response = get_report(task_id)
+
+ images = images_from_report(response)
+ contents = contents_from_report(response)
+ formatting_funcs = [underscore_to_camel_case, make_capital, make_singular, make_upper]
+ formatted_contents = travel_object(contents, key_functions=formatting_funcs)
+
+ dbot_score = generate_dbotscore(response)
+ entity = ec_entity(response)
+
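+    # Merge the task contents, DBot score, and file/URL entity into a single context entry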
+ entry_context = {
+ 'ANYRUN.Task(val.ID && val.ID === obj.ID)': {
+ 'ID': task_id,
+ **formatted_contents
+ },
+ **dbot_score,
+ **entity
+ }
+
+ title = 'Report for Task {}'.format(task_id)
+ human_readable_content = humanreadable_from_report_contents(formatted_contents)
+ human_readable = tableToMarkdown(title, human_readable_content, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=entry_context, raw_response=response)
+ if images:
+ demisto.results(images)
+
+
+def run_analysis(args):
+ """Make API call to ANYRUN to submit file or url for analysis
+
+ Parameters
+ ----------
+ args : dict
+ The analysis specifications and data.
+
+ Returns
+ -------
+ dict
+ Response JSON from ANYRUN API call.
+ """
+
+ try:
+ entry_id = args.pop('file', None)
+ obj_url = args.get('obj_url')
+ obj_type = args.get('obj_type')
+ if obj_type == 'remote file':
+ obj_type = 'download'
+ args['obj_type'] = 'download'
+        # If only a URL was entered but the object type argument was left as the default 'file'
+ if not entry_id and obj_url and obj_type == 'file':
+ args['obj_type'] = obj_type = 'url'
+ files = None
+ if obj_type == 'file':
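+            # Resolve the war room entry ID to a local file path for the upload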
+ cmd_res = demisto.getFilePath(entry_id)
+ file_path = cmd_res.get('path')
+ name = cmd_res.get('name')
+ files = {
+ 'file': (name, open(file_path, 'rb'))
+ }
+
+ # Format command arguments to API's parameter expectations
+ env_bitness = int(args.get('env_bitness', 32))
+ args['env_bitness'] = env_bitness
+ env_version = args.get('env_version').lower()
+ if env_version == 'windows vista':
+ args['env_version'] = 'vista'
+ elif env_version == 'windows 8.1':
+ args['env_version'] = '8.1'
+ elif env_version == 'windows 10':
+ args['env_version'] = '10'
+ else:
+ args['env_version'] = '7'
+ url_suffix = 'analysis'
+ response = http_request('POST', url_suffix, data=args, files=files)
+ return response
+ except ValueError:
+ err_msg = 'Invalid entryID - File not found for the given entryID'
+ return_error(err_msg)
+
+
+def run_analysis_command():
+ """Submit file or URL to ANYRUN for analysis and return task ID to Demisto"""
+
+ args = demisto.args()
+ response = run_analysis(args)
+ task_id = response.get('data', {}).get('taskid')
+ title = 'Submission Successful'
+ human_readable = tableToMarkdown(title, {'Task': task_id}, removeNull=True)
+ entry_context = {'ANYRUN.Task(val.ID && val.ID === obj.ID)': {'ID': task_id}}
+ return_outputs(readable_output=human_readable, outputs=entry_context, raw_response=response)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+COMMANDS = {
+ 'test-module': test_module,
+ 'anyrun-get-history': get_history_command,
+ 'anyrun-get-report': get_report_command,
+ 'anyrun-run-analysis': run_analysis_command
+}
+
+''' EXECUTION '''
+
+
+def main():
+ """Main Execution block"""
+
+ try:
+ cmd_name = demisto.command()
+ LOG('Command being called is {}'.format(cmd_name))
+
+        if cmd_name in COMMANDS:
+ COMMANDS[cmd_name]()
+
+    except Exception as e:
+        return_error(str(e))
+
+
+# python2 uses __builtin__, python3 uses builtins
+if __name__ == '__builtin__' or __name__ == 'builtins':
+ main()
diff --git a/Integrations/ANYRUN/ANYRUN.yml b/Integrations/ANYRUN/ANYRUN.yml
new file mode 100644
index 000000000000..2a51a794f793
--- /dev/null
+++ b/Integrations/ANYRUN/ANYRUN.yml
@@ -0,0 +1,443 @@
+category: Forensics & Malware Analysis
+commonfields:
+ id: ANYRUN
+ version: -1
+configuration:
+- defaultvalue: https://api.any.run
+ display: Server URL
+ name: url
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- defaultvalue: 'true'
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+description: ANY.RUN is a cloud-based sandbox with interactive access.
+display: ANY.RUN
+name: ANYRUN
+script:
+ commands:
+ - arguments:
+ - auto: PREDEFINED
+ default: true
+ defaultValue: 'false'
+  description: If true, gets the team history. If false, gets the history of your
+    own submitted analyses.
+ isArray: false
+ name: team
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: true
+ defaultValue: '0'
+ description: The number of analyses to skip.
+ isArray: false
+ name: skip
+ predefined:
+ - ''
+ required: false
+ secret: false
+ - default: true
+ defaultValue: '25'
+ description: Limits the history retrieved/searched to the specified number of
+ executed analyses. The range is 1-100.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: File name, hash, or task ID by which to filter the task history.
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ deprecated: false
+ description: Get analysis history.
+ execution: false
+ name: anyrun-get-history
+ outputs:
+ - contextPath: ANYRUN.Task.Name
+ description: Task name.
+ type: String
+ - contextPath: ANYRUN.Task.Verdict
+ description: ANY.RUN verdict for the submitted file's status.
+ type: String
+ - contextPath: ANYRUN.Task.Related
+ description: ANY.RUN link to a related file.
+ type: String
+ - contextPath: ANYRUN.Task.File
+ description: ANY.RUN link to download the submitted file.
+ type: String
+ - contextPath: ANYRUN.Task.Date
+ description: The date that the file was submitted for analysis.
+ type: Date
+ - contextPath: ANYRUN.Task.Hash.MD5
+ description: MD5 hash of the submitted file.
+ type: String
+ - contextPath: ANYRUN.Task.Hash.SHA1
+ description: SHA1 hash of the submitted file.
+ type: String
+ - contextPath: ANYRUN.Task.Hash.SHA256
+ description: SHA256 hash of the submitted file.
+ type: String
+ - contextPath: ANYRUN.Task.Hash.HeadHash
+ description: Head hash of the submitted file.
+ type: String
+ - contextPath: ANYRUN.Task.Hash.SSDeep
+ description: SSDeep hash of the submitted file.
+ type: String
+ - arguments:
+ - default: false
+ description: Unique task ID. A task ID is returned when submitting a file or
+ URL for analysis using the `anyrun-run-analysis` command. Task IDs can also
+ be located in the `ID` field of the output of executing the `anyrun-get-history`
+ command.
+ isArray: false
+ name: task
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets the report of a task created for a submitted file or URL.
+ execution: false
+ name: anyrun-get-report
+ outputs:
+ - contextPath: ANYRUN.Task.AnalysisDate
+ description: Date and time the analysis was executed.
+ type: String
+ - contextPath: ANYRUN.Task.Behavior.Category
+ description: Category of a process behavior.
+ type: String
+ - contextPath: ANYRUN.Task.Behavior.Action
+ description: Actions performed by a process.
+ type: String
+ - contextPath: ANYRUN.Task.Behavior.ThreatLevel
+ description: Threat score associated with a process behavior.
+ type: Number
+ - contextPath: ANYRUN.Task.Behavior.ProcessUUID
+ description: Unique ID of the process whose behaviors are being profiled.
+ type: String
+ - contextPath: ANYRUN.Task.Connection.Reputation
+ description: Connection reputation.
+ type: String
+ - contextPath: ANYRUN.Task.Connection.ProcessUUID
+ description: ID of the process that created the connection.
+ type: String
+ - contextPath: ANYRUN.Task.Connection.ASN
+ description: Connection autonomous system network.
+ type: String
+ - contextPath: ANYRUN.Task.Connection.Country
+ description: Connection country.
+ type: String
+ - contextPath: ANYRUN.Task.Connection.Protocol
+ description: Connection protocol.
+ type: String
+ - contextPath: ANYRUN.Task.Connection.Port
+ description: Connection port number.
+ type: Number
+ - contextPath: ANYRUN.Task.Connection.IP
+    description: Connection IP address.
+ type: String
+ - contextPath: ANYRUN.Task.DnsRequest.Reputation
+ description: Reputation of the DNS request.
+ type: String
+ - contextPath: ANYRUN.Task.DnsRequest.IP
+ description: IP addresses associated with a DNS request.
+ type: Unknown
+ - contextPath: ANYRUN.Task.DnsRequest.Domain
+ description: Domain resolution of a DNS request.
+ type: String
+ - contextPath: ANYRUN.Task.Threat.ProcessUUID
+ description: Unique process ID from where the threat originated.
+ type: String
+  - contextPath: ANYRUN.Task.Threat.Message
+ description: Threat message.
+ type: String
+ - contextPath: ANYRUN.Task.Threat.Class
+ description: Class of the threat.
+ type: String
+ - contextPath: ANYRUN.Task.Threat.SrcPort
+ description: Port on which the threat originated.
+ type: Number
+ - contextPath: ANYRUN.Task.Threat.DstPort
+ description: Destination port of the threat.
+ type: Number
+ - contextPath: ANYRUN.Task.Threat.SrcIP
+ description: Source IP address where the threat originated.
+ type: String
+ - contextPath: ANYRUN.Task.Threat.DstIP
+ description: Destination IP address of the threat.
+ type: String
+ - contextPath: ANYRUN.Task.HttpRequest.Reputation
+ description: Reputation of the HTTP request.
+ type: String
+ - contextPath: ANYRUN.Task.HttpRequest.Country
+ description: HTTP request country.
+ type: String
+ - contextPath: ANYRUN.Task.HttpRequest.ProcessUUID
+ description: ID of the process making the HTTP request.
+ type: String
+ - contextPath: ANYRUN.Task.HttpRequest.Body
+ description: HTTP request body parameters and details.
+ type: Unknown
+ - contextPath: ANYRUN.Task.HttpRequest.HttpCode
+ description: HTTP request response code.
+ type: Number
+ - contextPath: ANYRUN.Task.HttpRequest.Status
+ description: Status of the HTTP request.
+ type: String
+ - contextPath: ANYRUN.Task.HttpRequest.ProxyDetected
+ description: Whether the HTTP request was made through a proxy.
+ type: Boolean
+ - contextPath: ANYRUN.Task.HttpRequest.Port
+ description: HTTP request port.
+ type: Number
+ - contextPath: ANYRUN.Task.HttpRequest.IP
+ description: HTTP request IP address.
+ type: String
+ - contextPath: ANYRUN.Task.HttpRequest.URL
+ description: HTTP request URL.
+ type: String
+ - contextPath: ANYRUN.Task.HttpRequest.Host
+ description: HTTP request host.
+ type: String
+ - contextPath: ANYRUN.Task.HttpRequest.Method
+ description: HTTP request method type.
+ type: String
+ - contextPath: ANYRUN.Task.FileInfo
+ description: Details of the submitted file.
+ type: String
+ - contextPath: ANYRUN.Task.OS
+ description: OS of the sandbox in which the file was analyzed.
+ type: String
+ - contextPath: ANYRUN.Task.ID
+ description: The unique ID of the task.
+ type: String
+ - contextPath: ANYRUN.Task.MIME
+ description: The MIME of the file submitted for analysis.
+ type: String
+ - contextPath: ANYRUN.Task.MD5
+ description: The MD5 hash of the file submitted for analysis.
+ type: String
+ - contextPath: ANYRUN.Task.SHA1
+ description: The SHA1 hash of the file submitted for analysis.
+ type: String
+ - contextPath: ANYRUN.Task.SHA256
+ description: The SHA256 hash of the file submitted for analysis.
+ type: String
+ - contextPath: ANYRUN.Task.SSDeep
+ description: SSDeep hash of the file submitted for analysis.
+ type: String
+ - contextPath: ANYRUN.Task.Verdict
+ description: ANY.RUN verdict for the maliciousness of the submitted file or
+ URL.
+ type: String
+ - contextPath: ANYRUN.Task.Process.FileName
+ description: File name of the process.
+ type: String
+ - contextPath: ANYRUN.Task.Process.PID
+ description: Process identification number.
+ type: Number
+ - contextPath: ANYRUN.Task.Process.PPID
+ description: Parent process identification number.
+ type: Number
+ - contextPath: ANYRUN.Task.Process.ProcessUUID
+ description: Unique process ID (used by ANY.RUN).
+ type: String
+ - contextPath: ANYRUN.Task.Process.CMD
+ description: Process command.
+ type: String
+ - contextPath: ANYRUN.Task.Process.Path
+ description: Path of the executed command.
+ type: String
+ - contextPath: ANYRUN.Task.Process.User
+ description: User who executed the command.
+ type: String
+ - contextPath: ANYRUN.Task.Process.IntegrityLevel
+ description: The process integrity level.
+ type: String
+ - contextPath: ANYRUN.Task.Process.ExitCode
+ description: Process exit code.
+ type: Number
+ - contextPath: ANYRUN.Task.Process.MainProcess
+ description: Whether the process is the main process.
+ type: Boolean
+ - contextPath: ANYRUN.Task.Process.Version.Company
+ description: Company responsible for the program executed.
+ type: String
+ - contextPath: ANYRUN.Task.Process.Version.Description
+ description: Description of the type of program.
+ type: String
+ - contextPath: ANYRUN.Task.Process.Version.Version
+ description: Version of the program executed.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: DBotScore.Type
+ description: Type of indicator.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: File.Extension
+ description: Extension of the file submitted for analysis.
+ type: String
+ - contextPath: File.Name
+ description: The name of the file submitted for analysis.
+ type: String
+ - contextPath: File.MD5
+ description: MD5 hash of the file submitted for analysis.
+ type: String
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file submitted for analysis.
+ type: String
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file submitted for analysis.
+ type: String
+ - contextPath: File.SSDeep
+ description: SSDeep hash of the file submitted for analysis.
+ type: String
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: String
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the reason that the vendor made the decision.
+ type: String
+ - contextPath: URL.Data
+ description: URL data.
+ type: String
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: String
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the reason that the vendor made the decision.
+ type: String
+ - contextPath: ANYRUN.Task.Status
+ description: Task analysis status.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: true
+ defaultValue: file
+ description: Type of new task.
+ isArray: false
+ name: obj_type
+ predefined:
+ - file
+ - url
+ - remote file
+ required: false
+ secret: false
+ - default: false
+ description: EntryID of the file to analyze.
+ isArray: false
+ name: file
+ required: false
+ secret: false
+ - default: false
+    description: The URL to analyze. Used only if the 'obj_type' argument is 'url'
+      or 'remote file'. Permitted length is 5-512 characters.
+ isArray: false
+ name: obj_url
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: true
+ defaultValue: '32'
+  description: Bitness of the OS.
+ isArray: false
+ name: env_bitness
+ predefined:
+ - '32'
+ - '64'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: true
+ defaultValue: Windows 7
+ description: Version of Windows OS.
+ isArray: false
+ name: env_version
+ predefined:
+ - Windows Vista
+ - Windows 7
+ - Windows 8.1
+ - Windows 10
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: true
+ defaultValue: complete
+ description: Environment preset type.
+ isArray: false
+ name: env_type
+ predefined:
+ - complete
+ - clean
+ - office
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: true
+ defaultValue: 'true'
+ description: Network connection state.
+ isArray: false
+ name: opt_network_connect
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: true
+ defaultValue: 'false'
+ description: Heavy evasion option.
+ isArray: false
+ name: opt_kernel_heavyevasion
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: true
+ defaultValue: owner
+ description: Privacy settings for generated task.
+ isArray: false
+ name: opt_privacy_type
+ predefined:
+ - owner
+ - bylink
+ - public
+ required: false
+ secret: false
+ deprecated: false
+  description: Submit a file or URL for analysis.
+ execution: false
+ name: anyrun-run-analysis
+ outputs:
+ - contextPath: ANYRUN.Task.ID
+ description: ID of the task created to analyze the submission.
+ type: String
+ dockerimage: demisto/python3:3.7.3.221
+ subtype: python3
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- ANYRUN-Test
\ No newline at end of file
diff --git a/Integrations/ANYRUN/ANYRUN_description.md b/Integrations/ANYRUN/ANYRUN_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/ANYRUN/ANYRUN_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/ANYRUN/ANYRUN_image.png b/Integrations/ANYRUN/ANYRUN_image.png
new file mode 100644
index 000000000000..aa3eebf8fef8
Binary files /dev/null and b/Integrations/ANYRUN/ANYRUN_image.png differ
diff --git a/Integrations/ANYRUN/ANYRUN_test.py b/Integrations/ANYRUN/ANYRUN_test.py
new file mode 100644
index 000000000000..30e2983e7892
--- /dev/null
+++ b/Integrations/ANYRUN/ANYRUN_test.py
@@ -0,0 +1,130 @@
+import pytest
+import json
+from ANYRUN import underscore_to_camel_case
+from ANYRUN import make_capital, make_singular, make_upper
+from ANYRUN import generate_dbotscore
+from ANYRUN import taskid_from_url
+
+
+@pytest.fixture(scope="module")
+def get_response():
+ ssdeep = "6144:u77HUUUUUUUUUUUUUUUUUUUT52V6JoGXPjm+iNQBA81RqHOF:u77HUUUUUUUUUUUUUUUUUUUTCyoUmQBj"
+ response = {
+ "data": {
+ "analysis": {
+ "content": {
+ "mainObject": {
+ "type": "file",
+ "hashes": {
+ "ssdeep": ssdeep,
+ "sha256": "22b6830432e47e54619e0448c93f699b096e0e73165e051598a82836ab8e38ab",
+ "sha1": "fd0d6e5e7ff1db4b3b12b8b6c8a35464b3bcd1e5",
+ "md5": "06b2ace5e7ff00d6cf6dcdc793020f45"
+ },
+ "url": "http://www.madeup.net/someuri?what=huh&for=derr" # disable-secrets-detection
+ }
+ },
+ "scores": {
+ "verdict": {
+ "threatLevelText": "Malicious activity"
+ }
+ }
+ }
+ }
+ }
+ response_as_string = json.dumps(response).replace('file', 'download')
+ response_as_string = response_as_string.replace('Malicious activity', 'Suspicious activity')
+ response2 = json.loads(response_as_string)
+ response_as_string = response_as_string.replace('download', 'url')
+ response_as_string = response_as_string.replace('Suspicious activity', 'No threats detected')
+ response3 = json.loads(response_as_string)
+ return response, response2, response3
+
+
+class TestUnderscoreToCamelCase(object):
+ def test_one(self):
+ assert underscore_to_camel_case({}) == {}
+
+ def test_two(self):
+ assert underscore_to_camel_case('cApItAl') == 'cApItAl'
+
+ def test_three(self):
+ assert underscore_to_camel_case('capital_cAsE') == 'capitalCase'
+
+
+class TestMakeCapital(object):
+ def test_make_capital_1(self):
+ assert make_capital('heLLo') == 'HeLLo'
+
+
+class TestMakeSingular(object):
+ def test_make_singular_1(self):
+ assert make_singular('assess') == 'assess'
+
+ def test_make_singular_2(self):
+ assert make_singular('bass') == 'bass'
+
+ def test_make_singular_3(self):
+ assert make_singular('assesses') == 'assess'
+
+ def test_make_singular_4(self):
+ assert make_singular('checks') == 'check'
+
+ def test_make_singular_5(self):
+ assert make_singular('analysis') == 'analysis'
+
+ def test_make_singular_6(self):
+ assert make_singular('status') == 'status'
+
+ def test_make_singular_7(self):
+ assert make_singular('os') == 'os'
+
+
+class TestMakeUpper(object):
+ def test_make_upper_1(self):
+ assert make_upper('id') == 'ID'
+
+ def test_make_upper_2(self):
+ assert make_upper('ssdeEp') == 'SSDeep'
+
+ def test_make_upper_3(self):
+ assert make_upper('hello') == 'hello'
+
+ def test_make_upper_4(self):
+ assert make_upper({}) == {}
+
+
+class TestGenerateDBotScore(object):
+ def test_generate_dbotscore_1(self, get_response):
+ response1, response2, response3 = get_response
+
+ dbot_score = generate_dbotscore(response1).get('DBotScore')
+ main_object = response1.get('data', {}).get('analysis', {}).get('content', {}).get('mainObject', {})
+ sha_256 = main_object.get('hashes', {}).get('sha256')
+ assert dbot_score.get('Indicator') == sha_256
+ assert dbot_score.get('Score') == 3
+ assert dbot_score.get('Type') == 'hash'
+ assert dbot_score.get('Vendor') == 'ANYRUN'
+
+ dbot_score = generate_dbotscore(response2).get('DBotScore')
+ main_object = response2.get('data', {}).get('analysis', {}).get('content', {}).get('mainObject', {})
+ sha_256 = main_object.get('hashes', {}).get('sha256')
+ assert dbot_score.get('Indicator') == sha_256
+ assert dbot_score.get('Score') == 2
+ assert dbot_score.get('Type') == 'hash'
+ assert dbot_score.get('Vendor') == 'ANYRUN'
+
+ dbot_score = generate_dbotscore(response3).get('DBotScore')
+ main_object = response3.get('data', {}).get('analysis', {}).get('content', {}).get('mainObject', {})
+ url = main_object.get('url')
+ assert dbot_score.get('Indicator') == url
+ assert dbot_score.get('Score') == 1
+ assert dbot_score.get('Type') == 'url'
+ assert dbot_score.get('Vendor') == 'ANYRUN'
+
+
+class TestTaskIDFromURL(object):
+ def test_taskid_from_url(self):
+ url = 'https://www.madeup.com/madeup/tasks/' # disable-secrets-detection
+ url += 'this-is-the-task-id/blah/&someotherstuff'
+ assert taskid_from_url(url) == 'this-is-the-task-id'
diff --git a/Integrations/AWS-ACM/AWS-ACM.py b/Integrations/AWS-ACM/AWS-ACM.py
new file mode 100644
index 000000000000..41cf24d510db
--- /dev/null
+++ b/Integrations/AWS-ACM/AWS-ACM.py
@@ -0,0 +1,355 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+'''IMPORTS'''
+import re
+import boto3
+import json
+from datetime import datetime, date
+from botocore.config import Config
+from botocore.parsers import ResponseParserError
+
+'''GLOBAL VARIABLES'''
+AWS_DEFAULT_REGION = demisto.params().get('defaultRegion')
+AWS_ROLE_ARN = demisto.params().get('roleArn')
+AWS_ROLE_SESSION_NAME = demisto.params().get('roleSessionName')
+AWS_ROLE_SESSION_DURATION = demisto.params().get('sessionDuration')
+AWS_ROLE_POLICY = None
+AWS_ACCESS_KEY_ID = demisto.params().get('access_key')
+AWS_SECRET_ACCESS_KEY = demisto.params().get('secret_key')
+VERIFY_CERTIFICATE = not demisto.params().get('insecure', True)
+proxies = handle_proxy(proxy_param_name='proxy', checkbox_default_value=False)
+config = Config(
+ connect_timeout=1,
+ retries=dict(
+ max_attempts=5
+ ),
+ proxies=proxies
+)
+
+
+def aws_session(service='acm', region=None, roleArn=None, roleSessionName=None, roleSessionDuration=None,
+ rolePolicy=None):
+ kwargs = {}
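+    # Build assume-role parameters, preferring per-command arguments over integration parameters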
+ if roleArn and roleSessionName is not None:
+ kwargs.update({
+ 'RoleArn': roleArn,
+ 'RoleSessionName': roleSessionName,
+ })
+ elif AWS_ROLE_ARN and AWS_ROLE_SESSION_NAME is not None:
+ kwargs.update({
+ 'RoleArn': AWS_ROLE_ARN,
+ 'RoleSessionName': AWS_ROLE_SESSION_NAME,
+ })
+
+ if roleSessionDuration is not None:
+ kwargs.update({'DurationSeconds': int(roleSessionDuration)})
+ elif AWS_ROLE_SESSION_DURATION is not None:
+ kwargs.update({'DurationSeconds': int(AWS_ROLE_SESSION_DURATION)})
+
+ if rolePolicy is not None:
+ kwargs.update({'Policy': rolePolicy})
+ elif AWS_ROLE_POLICY is not None:
+ kwargs.update({'Policy': AWS_ROLE_POLICY})
+    if kwargs and AWS_ACCESS_KEY_ID is None:
+        sts_client = boto3.client('sts', config=config, verify=VERIFY_CERTIFICATE)
+        sts_response = sts_client.assume_role(**kwargs)
+        if region is not None:
+            client = boto3.client(
+                service_name=service,
+                region_name=region,
+                aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+                aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+                aws_session_token=sts_response['Credentials']['SessionToken'],
+                verify=VERIFY_CERTIFICATE,
+                config=config
+            )
+        else:
+            client = boto3.client(
+                service_name=service,
+                region_name=AWS_DEFAULT_REGION,
+                aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+                aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+                aws_session_token=sts_response['Credentials']['SessionToken'],
+                verify=VERIFY_CERTIFICATE,
+                config=config
+            )
+ elif AWS_ACCESS_KEY_ID and AWS_ROLE_ARN:
+ sts_client = boto3.client(
+ service_name='sts',
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ kwargs.update({
+ 'RoleArn': AWS_ROLE_ARN,
+ 'RoleSessionName': AWS_ROLE_SESSION_NAME,
+ })
+ sts_response = sts_client.assume_role(**kwargs)
+ client = boto3.client(
+ service_name=service,
+ region_name=AWS_DEFAULT_REGION,
+ aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+ aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+ aws_session_token=sts_response['Credentials']['SessionToken'],
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ else:
+ if region is not None:
+ client = boto3.client(
+ service_name=service,
+ region_name=region,
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ else:
+ client = boto3.client(
+ service_name=service,
+ region_name=AWS_DEFAULT_REGION,
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+
+ return client
+
+
+def parse_tag_field(tags_str):
+ tags = []
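+    # Expected input format: "key=K1,value=V1;key=K2,value=V2"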
+ regex = re.compile(r"key=([\w\d_:.-]+),value=([ /\w\d@_,.*-]+)", flags=re.I)
+ for f in tags_str.split(';'):
+ match = regex.match(f)
+ if match is None:
+ demisto.log('could not parse field: %s' % (f,))
+ continue
+
+ tags.append({
+ 'Key': match.group(1),
+ 'Value': match.group(2)
+ })
+ return tags
+
+
+def parse_subnet_mappings(subnets_str):
+ subnets = []
+ regex = re.compile(r"subnetid=([\w\d_:.-]+),allocationid=([ /\w\d@_,.*-]+)", flags=re.I)
+ for f in subnets_str.split(';'):
+ match = regex.match(f)
+ if match is None:
+ demisto.log('could not parse field: %s' % (f,))
+ continue
+
+ subnets.append({
+ 'SubnetId': match.group(1),
+ 'AllocationId': match.group(2)
+ })
+ return subnets
+
+
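+# Serialize datetime/date values from boto3 responses into ISO-style strings for JSON output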
+class DatetimeEncoder(json.JSONEncoder):
+ # pylint: disable=method-hidden
+ def default(self, obj):
+ if isinstance(obj, datetime):
+ return obj.strftime('%Y-%m-%dT%H:%M:%S')
+ elif isinstance(obj, date):
+ return obj.strftime('%Y-%m-%d')
+ # Let the base class default method raise the TypeError
+ return json.JSONEncoder.default(self, obj)
+
+
+def parse_resource_ids(resource_id):
+ id_list = resource_id.replace(" ", "")
+ resource_ids = id_list.split(",")
+ return resource_ids
+
+
+'''MAIN FUNCTIONS'''
+
+
+def describe_certificate(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
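+    # Peek at the client's private config to report which region was actually used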
+ obj = vars(client._client_config)
+
+ response = client.describe_certificate(CertificateArn=args.get('certificateArn'))
+ cert = response['Certificate']
+ data = ({
+ 'CertificateArn': cert.get('CertificateArn'),
+ 'DomainName': cert.get('DomainName'),
+ 'Subject': cert.get('Subject'),
+ 'Issuer': cert.get('Issuer'),
+ 'Status': cert.get('Status'),
+ 'KeyAlgorithm': cert.get('KeyAlgorithm'),
+ 'SignatureAlgorithm': cert.get('SignatureAlgorithm'),
+ 'Type': cert.get('Type'),
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+
+ if 'Serial' in cert:
+ data.update({'Serial': cert['Serial']})
+
+ try:
+ raw = json.loads(json.dumps(response['Certificate'], cls=DatetimeEncoder))
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+
+ if raw:
+ raw.update({'Region': obj['_user_provided_options']['region_name']})
+ ec = {'AWS.ACM.Certificates(val.CertificateArn === obj.CertificateArn)': raw}
+ human_readable = tableToMarkdown('AWS ACM Certificates', data)
+ return_outputs(human_readable, ec)
+
+
+def list_certificates(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ kwargs = {}
+ data = []
+ includes = {}
+
+ if args.get('certificateStatuses') is not None:
+ kwargs.update({'CertificateStatuses': args.get('certificateStatuses')})
+ if args.get('extendedKeyUsage') is not None:
+ includes.update({'extendedKeyUsage': [args.get('extendedKeyUsage')]})
+ if args.get('keyUsage') is not None:
+ includes.update({'keyUsage': [args.get('keyUsage')]})
+ if args.get('keyTypes') is not None:
+ includes.update({'keyTypes': [args.get('keyTypes')]})
+ if includes:
+ kwargs.update({'Includes': includes})
+
+ response = client.list_certificates(**kwargs)
+ for cert in response['CertificateSummaryList']:
+ data.append({
+ 'CertificateArn': cert['CertificateArn'],
+ 'DomainName': cert['DomainName'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+
+ ec = {'AWS.ACM.Certificates(val.CertificateArn === obj.CertificateArn)': data}
+ human_readable = tableToMarkdown('AWS ACM Certificates', data)
+ return_outputs(human_readable, ec)
+
+
+def add_tags_to_certificate(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {
+ 'CertificateArn': args.get('certificateArn'),
+ 'Tags': parse_tag_field(args.get('tags'))
+ }
+ response = client.add_tags_to_certificate(**kwargs)
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Certificate was Tagged successfully")
+
+
+def remove_tags_from_certificate(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {
+ 'CertificateArn': args.get('certificateArn'),
+ 'Tags': parse_tag_field(args.get('tags'))
+ }
+ response = client.remove_tags_from_certificate(**kwargs)
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Certificate Tags were removed successfully")
+
+
+def list_tags_for_certificate(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ kwargs = {'CertificateArn': args.get('certificateArn')}
+ response = client.list_tags_for_certificate(**kwargs)
+
+ data = ({'CertificateArn': args.get('certificateArn')})
+ for tag in response['Tags']:
+ data.update({
+ tag['Key']: tag['Value']
+ })
+
+ ec = {'AWS.ACM.Certificates(val.CertificateArn === obj.CertificateArn).Tags': data}
+ human_readable = tableToMarkdown('AWS ACM Certificate Tags', data)
+ return_outputs(human_readable, ec)
+
+
+def get_certificate(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ kwargs = {'CertificateArn': args.get('certificateArn')}
+ response = client.get_certificate(**kwargs)
+
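+    # Return the PEM-encoded certificate and chain as downloadable war room files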
+ if 'Certificate' in response:
+ fileResult('Certificate.pem', response['Certificate'])
+ if 'CertificateChain' in response:
+ fileResult('CertificateChain.pem', response['CertificateChain'])
+
+ demisto.results('### Certificate files for ARN: {arn}'.format(arn=args.get('certificateArn')))
+
+
+def test_function():
+ client = aws_session()
+ response = client.list_certificates()
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results('ok')
+
+
+'''EXECUTION BLOCK'''
+try:
+ if demisto.command() == 'test-module':
+ test_function()
+ if demisto.command() == 'aws-acm-describe-certificate':
+ describe_certificate(demisto.args())
+ if demisto.command() == 'aws-acm-list-certificates':
+ list_certificates(demisto.args())
+ if demisto.command() == 'aws-acm-add-tags-to-certificate':
+ add_tags_to_certificate(demisto.args())
+ if demisto.command() == 'aws-acm-remove-tags-from-certificate':
+ remove_tags_from_certificate(demisto.args())
+ if demisto.command() == 'aws-acm-list-tags-for-certificate':
+ list_tags_for_certificate(demisto.args())
+ if demisto.command() == 'aws-acm-get-certificate':
+ get_certificate(demisto.args())
+except ResponseParserError as e:
+    LOG(str(e))
+    return_error('Could not connect to the AWS endpoint. Please check that the region is valid.\n {error}'.format(
+        error=type(e)))
+
+except Exception as e:
+ LOG(str(e))
+ return_error('Error has occurred in the AWS ACM Integration: {code}\n {message}'.format(
+ code=type(e), message=str(e)))
diff --git a/Integrations/AWS-ACM/AWS-ACM.yml b/Integrations/AWS-ACM/AWS-ACM.yml
new file mode 100644
index 000000000000..bc3dfb7fc468
--- /dev/null
+++ b/Integrations/AWS-ACM/AWS-ACM.yml
@@ -0,0 +1,517 @@
+commonfields:
+ id: AWS - ACM
+ version: -1
+name: AWS - ACM
+display: AWS - ACM
+category: IT Services
+description: Amazon Web Services Certificate Manager Service (ACM)
+configuration:
+- display: Role Arn
+ name: roleArn
+ required: false
+ type: 0
+- display: Role Session Name
+ name: roleSessionName
+ required: false
+ type: 0
+- display: AWS Default Region
+ name: defaultRegion
+ options:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ type: 15
+- display: Role Session Duration
+ name: sessionDuration
+ required: false
+ type: 0
+- display: Access Key
+ name: access_key
+ required: false
+ type: 0
+- display: Secret Key
+ name: secret_key
+ required: false
+ type: 4
+- display: Trust any cert (Not Secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+script:
+ script: ''
+ type: python
+ commands:
+ - name: aws-acm-describe-certificate
+ arguments:
+ - name: certificateArn
+ required: true
+ description: The Amazon Resource Name (ARN) of the ACM certificate.
+ - name: region
+      description: The AWS Region. If not specified, the default region is used.
+ - name: roleArn
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ - name: roleSessionName
+ description: An identifier for the assumed role session.
+ - name: roleSessionDuration
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ outputs:
+ - contextPath: AWS.ACM.Certificates.CertificateArn
+    description: The Amazon Resource Name (ARN) of the certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.DomainName
+ description: The fully qualified domain name for the certificate, such as www.example.com
+ or example.com.
+ type: string
+ - contextPath: AWS.ACM.Certificates.SubjectAlternativeNames
+ description: One or more domain names (subject alternative names) included in
+ the certificate. This list contains the domain names that are bound to the
+ public key that is contained in the certificate. The subject alternative names
+ include the canonical domain name (CN) of the certificate and additional domain
+ names that can be used to connect to the website.
+ type: string
+ - contextPath: AWS.ACM.Certificates.DomainValidationOptions.DomainName
+ description: A fully qualified domain name (FQDN) in the certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.DomainValidationOptions.ValidationEmails
+ description: A list of email addresses that ACM used to send domain validation
+ emails.
+ type: string
+ - contextPath: AWS.ACM.Certificates.DomainValidationOptions.ValidationDomain
+ description: The domain name that ACM used to send domain validation emails.
+ type: string
+  - contextPath: AWS.ACM.Certificates.DomainValidationOptions.ValidationStatus
+ description: The validation status of the domain name.
+ type: string
+ - contextPath: AWS.ACM.Certificates.DomainValidationOptions.ResourceRecord.Name
+ description: The name of the DNS record to create in your domain. This is supplied
+ by ACM.
+ type: string
+ - contextPath: AWS.ACM.Certificates.DomainValidationOptions.ResourceRecord.Type
+ description: The type of DNS record. Currently this can be CNAME.
+ type: string
+ - contextPath: AWS.ACM.Certificates.DomainValidationOptions.ResourceRecord.Value
+ description: The value of the CNAME record to add to your DNS database. This
+ is supplied by ACM.
+ type: string
+ - contextPath: AWS.ACM.Certificates.DomainValidationOptions.ValidationMethod
+ description: Specifies the domain validation method.
+ type: string
+ - contextPath: AWS.ACM.Certificates.Serial
+ description: The serial number of the certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.Subject
+ description: The name of the entity that is associated with the public key contained
+ in the certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.Issuer
+ description: The name of the certificate authority that issued and signed the
+ certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.CreatedAt
+ description: The time at which the certificate was requested. This value exists
+      only when the certificate type is AMAZON_ISSUED.
+ type: date
+ - contextPath: AWS.ACM.Certificates.IssuedAt
+ description: The time at which the certificate was issued. This value exists
+      only when the certificate type is AMAZON_ISSUED.
+ type: date
+ - contextPath: AWS.ACM.Certificates.ImportedAt
+ description: The date and time at which the certificate was imported. This value
+ exists only when the certificate type is IMPORTED.
+ type: date
+ - contextPath: AWS.ACM.Certificates.Status
+ description: The status of the certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.RevokedAt
+ description: The time at which the certificate was revoked. This value exists
+ only when the certificate status is REVOKED.
+ type: date
+ - contextPath: AWS.ACM.Certificates.RevocationReason
+ description: The reason the certificate was revoked. This value exists only
+ when the certificate status is REVOKED.
+ type: string
+ - contextPath: AWS.ACM.Certificates.NotBefore
+ description: The time before which the certificate is not valid.
+ type: date
+ - contextPath: AWS.ACM.Certificates.NotAfter
+ description: The time after which the certificate is not valid.
+ type: date
+ - contextPath: AWS.ACM.Certificates.KeyAlgorithm
+ description: The algorithm that was used to generate the public-private key
+ pair.
+ type: string
+ - contextPath: AWS.ACM.Certificates.SignatureAlgorithm
+ description: The algorithm that was used to sign the certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.InUseBy
+ description: A list of ARNs for the AWS resources that are using the certificate.
+ A certificate can be used by multiple AWS resources.
+ type: string
+ - contextPath: AWS.ACM.Certificates.FailureReason
+ description: The reason the certificate request failed.
+ type: string
+ - contextPath: AWS.ACM.Certificates.Type
+ description: The source of the certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.RenewalSummary.RenewalStatus
+ description: The status of ACM's managed renewal of the certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.RenewalSummary.DomainValidationOptions.DomainName
+ description: A fully qualified domain name (FQDN) in the certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.RenewalSummary.DomainValidationOptions.ValidationEmails
+ description: A list of email addresses that ACM used to send domain validation
+ emails.
+ type: string
+ - contextPath: AWS.ACM.Certificates.RenewalSummary.DomainValidationOptions.ValidationDomain
+ description: The domain name that ACM used to send domain validation emails.
+ type: string
+ - contextPath: AWS.ACM.Certificates.RenewalSummary.DomainValidationOptions.ValidationStatus
+ description: The validation status of the domain name.
+ type: string
+ - contextPath: AWS.ACM.Certificates.RenewalSummary.DomainValidationOptions.ResourceRecord.Name
+ description: The name of the DNS record to create in your domain. This is supplied
+ by ACM.
+ type: string
+ - contextPath: AWS.ACM.Certificates.RenewalSummary.DomainValidationOptions.ResourceRecord.Type
+ description: The type of DNS record.
+ type: string
+ - contextPath: AWS.ACM.Certificates.RenewalSummary.DomainValidationOptions.ResourceRecord.Value
+ description: The value of the CNAME record to add to your DNS database. This
+ is supplied by ACM.
+ type: string
+ - contextPath: AWS.ACM.Certificates.RenewalSummary.DomainValidationOptions.ValidationMethod
+ description: Specifies the domain validation method.
+ type: string
+ - contextPath: AWS.ACM.Certificates.KeyUsages.Name
+ description: A list of Key Usage X.509 v3 extension objects. Each object is
+ a string value that identifies the purpose of the public key contained in
+ the certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.ExtendedKeyUsages.Name
+ description: The name of an Extended Key Usage value.
+ type: string
+ - contextPath: AWS.ACM.Certificates.ExtendedKeyUsages.OID
+ description: An object identifier (OID) for the extension value. OIDs are strings
+      of numbers separated by periods.
+    type: string
+ - contextPath: AWS.ACM.Certificates.CertificateAuthorityArn
+ description: The Amazon Resource Name (ARN) of the ACM PCA private certificate
+ authority (CA) that issued the certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.RenewalEligibility
+ description: Specifies whether the certificate is eligible for renewal.
+ type: string
+ - contextPath: AWS.ACM.Certificates.Options.CertificateTransparencyLoggingPreference
+ description: You can opt out of certificate transparency logging by specifying
+ the DISABLED option. Opt in by specifying ENABLED.
+ type: string
+ description: Returns detailed metadata about the specified ACM certificate.
+ - name: aws-acm-list-certificates
+ arguments:
+ - name: certificateStatuses
+ auto: PREDEFINED
+ predefined:
+ - PENDING_VALIDATION
+ - ISSUED
+ - INACTIVE
+ - EXPIRED
+ - VALIDATION_TIMED_OUT
+ - REVOKED
+ - FAILED
+ description: Filter the certificate list by status value.
+ - name: extendedKeyUsage
+ auto: PREDEFINED
+ predefined:
+ - TLS_WEB_SERVER_AUTHENTICATION
+ - TLS_WEB_CLIENT_AUTHENTICATION
+ - CODE_SIGNING
+ - EMAIL_PROTECTION
+ - TIME_STAMPING
+ - OCSP_SIGNING
+ - IPSEC_END_SYSTEM
+ - IPSEC_TUNNEL
+ - IPSEC_USER
+ - ANY
+ - NONE
+ - CUSTOM
+ description: Specify one or more ExtendedKeyUsage extension values.
+ - name: keyUsage
+ auto: PREDEFINED
+ predefined:
+ - DIGITAL_SIGNATURE
+ - NON_REPUDIATION
+ - KEY_ENCIPHERMENT
+ - DATA_ENCIPHERMENT
+ - KEY_AGREEMENT
+ - CERTIFICATE_SIGNING
+ - CRL_SIGNING
+ - ENCIPHER_ONLY
+ - DECIPHER_ONLY
+ - ANY
+ - CUSTOM
+ description: Specify one or more KeyUsage extension values.
+ - name: keyTypes
+ auto: PREDEFINED
+ predefined:
+ - RSA_2048
+ - RSA_1024
+ - RSA_4096
+ - EC_prime256v1
+ - EC_secp384r1
+ - EC_secp521r1
+ description: Specify one or more algorithms that can be used to generate key
+ pairs.
+ - auto: PREDEFINED
+ default: false
+    description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ predefined:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ secret: false
+ - name: roleArn
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ - name: roleSessionName
+ description: An identifier for the assumed role session.
+ - name: roleSessionDuration
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ outputs:
+ - contextPath: AWS.ACM.Certificates.CertificateArn
+ description: Amazon Resource Name (ARN) of the certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.DomainName
+ description: Fully qualified domain name (FQDN), such as www.example.com or
+ example.com, for the certificate.
+ type: string
+ - contextPath: AWS.ACM.Certificates.Region
+    description: The AWS region where the certificate is located.
+    type: string
+ description: Retrieves a list of certificate ARNs and domain names. You can request
+ that only certificates that match a specific status be listed. You can also
+ filter by specific attributes of the certificate.
+ - name: aws-acm-add-tags-to-certificate
+ arguments:
+ - name: certificateArn
+ required: true
+ description: String that contains the ARN of the ACM certificate to which the
+ tag is to be applied.
+ - name: tags
+ required: true
+ description: The key-value pair that defines the tag. The tag value is optional.
+ - auto: PREDEFINED
+ default: false
+    description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ predefined:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ secret: false
+ - name: roleArn
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ - name: roleSessionName
+ description: An identifier for the assumed role session.
+ - name: roleSessionDuration
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ description: Adds one or more tags to an ACM certificate.
+ execution: true
+ - name: aws-acm-remove-tags-from-certificate
+ arguments:
+ - name: certificateArn
+ required: true
+ description: The ARN of the ACM Certificate with one or more tags that you want
+ to remove.
+ - name: tags
+ required: true
+ description: The key-value pair that defines the tag to remove.
+ - auto: PREDEFINED
+ default: false
+    description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ predefined:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ secret: false
+ - name: roleArn
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ - name: roleSessionName
+ description: An identifier for the assumed role session.
+ - name: roleSessionDuration
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ description: Remove one or more tags from an ACM certificate.
+ execution: true
+ - name: aws-acm-list-tags-for-certificate
+ arguments:
+ - name: certificateArn
+ required: true
+ description: The ARN of the ACM certificate for which you want to list the tags.
+ - auto: PREDEFINED
+ default: false
+    description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ predefined:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ secret: false
+ - name: roleArn
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ - name: roleSessionName
+ description: An identifier for the assumed role session.
+ - name: roleSessionDuration
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ outputs:
+ - contextPath: AWS.ACM.Certificates.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.ACM.Certificates.Tags.Value
+ description: The value of the tag.
+ type: string
+ description: Lists the tags that have been applied to the ACM certificate. Use
+ the certificate's Amazon Resource Name (ARN) to specify the certificate.
+ - name: aws-acm-get-certificate
+ arguments:
+ - name: certificateArn
+ required: true
+ description: The ARN of the certificate.
+ - auto: PREDEFINED
+ default: false
+    description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ predefined:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ secret: false
+ - name: roleArn
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ - name: roleSessionName
+ description: An identifier for the assumed role session.
+ - name: roleSessionDuration
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ outputs:
+ - contextPath: AWS.ACM.Certificates.Certificate
+ description: String that contains the ACM certificate represented by the ARN
+ specified at input.
+ type: string
+ - contextPath: AWS.ACM.Certificates.CertificateChain
+ description: The certificate chain that contains the root certificate issued
+ by the certificate authority (CA).
+ type: string
+  description: Retrieves a certificate specified by an ARN and its certificate chain.
+    The chain is an ordered list of certificates that contains the end entity
+ certificate, intermediate certificates of subordinate CAs, and the root certificate
+ in that order. The certificate and certificate chain are base64 encoded. If
+ you want to decode the certificate to see the individual fields, you can use
+ OpenSSL.
+ dockerimage: demisto/boto3py3:1.0.0.1030
+ runonce: false
+ subtype: python3
+tests:
+ - ACM-Test
diff --git a/Integrations/AWS-ACM/AWS-ACM_desc.md b/Integrations/AWS-ACM/AWS-ACM_desc.md
new file mode 100644
index 000000000000..881eae32951a
--- /dev/null
+++ b/Integrations/AWS-ACM/AWS-ACM_desc.md
@@ -0,0 +1,16 @@
+Before you can use the AWS ACM integration in Demisto, you need to perform several configuration steps in your AWS environment.
+
+### Prerequisites
+- Attach an instance profile with the required permissions to the Demisto server or engine that is running
+in your AWS environment.
+- The instance profile requires the sts:AssumeRole permission at a minimum.
+- The instance profile requires permission to assume the roles needed by the AWS integrations (a short verification sketch follows this list).
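+
+A minimal verification sketch, assuming boto3 is available and the instance profile is attached to the machine; the role ARN below is a placeholder:
+
+```python
+import boto3
+
+sts = boto3.client('sts')  # picks up the instance profile credentials automatically
+response = sts.assume_role(
+    RoleArn='arn:aws:iam::123456789012:role/ExampleDemistoRole',  # placeholder ARN
+    RoleSessionName='demisto-connectivity-check',
+)
+print(response['Credentials']['Expiration'])  # temporary credentials were issued
+```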
+
+### Configure AWS Settings
+1. Create an IAM Role for the Instance Profile.
+2. Attach a Role to the Instance Profile.
+3. Configure the Necessary IAM Roles that the AWS Integration Can Assume.
+
+For detailed instructions, [see the AWS Integrations Configuration Guide](https://support.demisto.com/hc/en-us/articles/360005686854-AWS-Integrations-Configuration-Guide).
+
+Command descriptions, input descriptions, and output descriptions are taken from the Amazon ACM documentation. For more information, see the [Amazon ACM documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/acm.html).
\ No newline at end of file
diff --git a/Integrations/AWS-ACM/CHANGELOG.md b/Integrations/AWS-ACM/CHANGELOG.md
new file mode 100644
index 000000000000..91a19ee7e514
--- /dev/null
+++ b/Integrations/AWS-ACM/CHANGELOG.md
@@ -0,0 +1,2 @@
+## [Unreleased]
+* Bugfix for Proxy/Insecure issues.
\ No newline at end of file
diff --git a/Integrations/AWS-ACM/awsacm.png b/Integrations/AWS-ACM/awsacm.png
new file mode 100644
index 000000000000..03de83c72b88
Binary files /dev/null and b/Integrations/AWS-ACM/awsacm.png differ
diff --git a/Integrations/AWS-EC2/AWS-EC2.py b/Integrations/AWS-EC2/AWS-EC2.py
new file mode 100644
index 000000000000..418e950774cb
--- /dev/null
+++ b/Integrations/AWS-EC2/AWS-EC2.py
@@ -0,0 +1,2840 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import boto3
+import json
+import re
+from datetime import datetime, date
+from botocore.config import Config
+from botocore.parsers import ResponseParserError
+
+import urllib3.util
+
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+"""PARAMETERS"""
+AWS_DEFAULT_REGION = demisto.params().get('defaultRegion')
+AWS_ROLE_ARN = demisto.params().get('roleArn')
+AWS_ROLE_SESSION_NAME = demisto.params().get('roleSessionName')
+AWS_ROLE_SESSION_DURATION = demisto.params().get('sessionDuration')
+AWS_ROLE_POLICY = None
+AWS_ACCESS_KEY_ID = demisto.params().get('access_key')
+AWS_SECRET_ACCESS_KEY = demisto.params().get('secret_key')
+VERIFY_CERTIFICATE = not demisto.params().get('insecure', True)
+proxies = handle_proxy(proxy_param_name='proxy', checkbox_default_value=False)
+config = Config(
+ connect_timeout=1,
+ retries=dict(
+ max_attempts=5
+ ),
+ proxies=proxies
+)
+
+
+"""HELPER FUNCTIONS"""
+
+
+def aws_session(service='ec2', region=None, roleArn=None, roleSessionName=None, roleSessionDuration=None,
+ rolePolicy=None):
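+    """
+    Build a boto3 client for the requested service.
+
+    Credential resolution, as implemented below: a role (from the command
+    arguments or the integration parameters) with no access keys configured is
+    assumed using the ambient (instance profile) credentials; access keys plus
+    a configured role ARN are used to assume that role; otherwise the access
+    keys (or the ambient credentials) are used directly.
+    """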
+ kwargs = {}
+ if roleArn and roleSessionName is not None:
+ kwargs.update({
+ 'RoleArn': roleArn,
+ 'RoleSessionName': roleSessionName,
+ })
+ elif AWS_ROLE_ARN and AWS_ROLE_SESSION_NAME is not None:
+ kwargs.update({
+ 'RoleArn': AWS_ROLE_ARN,
+ 'RoleSessionName': AWS_ROLE_SESSION_NAME,
+ })
+
+ if roleSessionDuration is not None:
+ kwargs.update({'DurationSeconds': int(roleSessionDuration)})
+ elif AWS_ROLE_SESSION_DURATION is not None:
+ kwargs.update({'DurationSeconds': int(AWS_ROLE_SESSION_DURATION)})
+
+ if rolePolicy is not None:
+ kwargs.update({'Policy': rolePolicy})
+ elif AWS_ROLE_POLICY is not None:
+ kwargs.update({'Policy': AWS_ROLE_POLICY})
+    if kwargs and AWS_ACCESS_KEY_ID is None:
+ sts_client = boto3.client('sts', config=config, verify=VERIFY_CERTIFICATE)
+ sts_response = sts_client.assume_role(**kwargs)
+ if region is not None:
+ client = boto3.client(
+ service_name=service,
+ region_name=region,
+ aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+ aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+ aws_session_token=sts_response['Credentials']['SessionToken'],
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ else:
+ client = boto3.client(
+ service_name=service,
+ region_name=AWS_DEFAULT_REGION,
+ aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+ aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+ aws_session_token=sts_response['Credentials']['SessionToken'],
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ elif AWS_ACCESS_KEY_ID and AWS_ROLE_ARN:
+ sts_client = boto3.client(
+ service_name='sts',
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ kwargs.update({
+ 'RoleArn': AWS_ROLE_ARN,
+ 'RoleSessionName': AWS_ROLE_SESSION_NAME,
+ })
+ sts_response = sts_client.assume_role(**kwargs)
+ client = boto3.client(
+ service_name=service,
+ region_name=AWS_DEFAULT_REGION,
+ aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+ aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+ aws_session_token=sts_response['Credentials']['SessionToken'],
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ else:
+ if region is not None:
+ client = boto3.client(
+ service_name=service,
+ region_name=region,
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ else:
+ client = boto3.client(
+ service_name=service,
+ region_name=AWS_DEFAULT_REGION,
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+
+ return client
+
+
+def parse_filter_field(filter_str):
+ filters = []
+ regex = re.compile(r'name=([\w\d_:.-]+),values=([ /\w\d@_,.*-]+)', flags=re.I)
+ for f in filter_str.split(';'):
+ match = regex.match(f)
+ if match is None:
+ demisto.log('could not parse filter: %s' % (f,))
+ continue
+
+ filters.append({
+ 'Name': match.group(1),
+ 'Values': match.group(2).split(',')
+ })
+
+ return filters
+
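+# Example (illustrative): parse_filter_field('name=tag:Env,values=prod,dev;name=state,values=running')
+# returns [{'Name': 'tag:Env', 'Values': ['prod', 'dev']},
+#          {'Name': 'state', 'Values': ['running']}]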
+
+def parse_tag_field(tags_str):
+ tags = []
+ regex = re.compile(r'key=([\w\d_:.-]+),value=([ /\w\d@_,.*-]+)', flags=re.I)
+ for f in tags_str.split(';'):
+ match = regex.match(f)
+ if match is None:
+ demisto.log('could not parse field: %s' % (f,))
+ continue
+
+ tags.append({
+ 'Key': match.group(1),
+ 'Value': match.group(2)
+ })
+
+ return tags
+
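+# Example (illustrative): parse_tag_field('key=Owner,value=soc;key=Env,value=prod')
+# returns [{'Key': 'Owner', 'Value': 'soc'}, {'Key': 'Env', 'Value': 'prod'}]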
+
+class DatetimeEncoder(json.JSONEncoder):
+ # pylint: disable=method-hidden
+ def default(self, obj):
+ if isinstance(obj, datetime):
+ return obj.strftime('%Y-%m-%dT%H:%M:%S')
+ elif isinstance(obj, date):
+ return obj.strftime('%Y-%m-%d')
+ # Let the base class default method raise the TypeError
+ return json.JSONEncoder.default(self, obj)
+
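+# Example (illustrative): json.dumps({'t': datetime(2019, 10, 7, 12, 0, 0)}, cls=DatetimeEncoder)
+# returns '{"t": "2019-10-07T12:00:00"}' instead of raising a TypeError.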
+
+def parse_resource_ids(resource_id):
+ id_list = resource_id.replace(" ", "")
+ resourceIds = id_list.split(",")
+ return resourceIds
+
+
+def multi_split(data):
+ data = data.replace(" ", "")
+ data = data.split(";")
+ return data
+
+
+def parse_date(dt):
+ try:
+ arr = dt.split("-")
+ parsed_date = (datetime(int(arr[0]), int(arr[1]), int(arr[2]))).isoformat()
+ except ValueError as e:
+ return_error("Date could not be parsed. Please check the date again.\n{error}".format(error=type(e)))
+ return parsed_date
+
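+# Example (illustrative): parse_date('2019-10-07') returns '2019-10-07T00:00:00'.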
+
+"""MAIN FUNCTIONS"""
+
+
+def describe_regions_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ data = []
+ kwargs = {}
+ if args.get('regionNames') is not None:
+ kwargs.update({'RegionNames': parse_resource_ids(args.get('regionNames'))})
+
+ response = client.describe_regions(**kwargs)
+ for region in response['Regions']:
+ data.append({
+ 'Endpoint': region['Endpoint'],
+ 'RegionName': region['RegionName']
+ })
+
+ ec = {'AWS.Regions(val.RegionName === obj.RegionName)': data}
+ human_readable = tableToMarkdown('AWS Regions', data)
+ return_outputs(human_readable, ec)
+
+
+def describe_instances_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ data = []
+ kwargs = {}
+ output = []
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('instanceIds') is not None:
+ kwargs.update({'InstanceIds': parse_resource_ids(args.get('instanceIds'))})
+
+ response = client.describe_instances(**kwargs)
+    for reservation in response['Reservations']:
+ for instance in reservation['Instances']:
+ try:
+ launch_date = datetime.strftime(instance['LaunchTime'], '%Y-%m-%dT%H:%M:%SZ')
+ except ValueError as e:
+ return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=type(e)))
+ data.append({
+ 'InstanceId': instance['InstanceId'],
+ 'ImageId': instance['ImageId'],
+ 'State': instance['State']['Name'],
+ 'PublicIPAddress': instance.get('PublicIpAddress'),
+ 'Region': obj['_user_provided_options']['region_name'],
+ 'Type': instance['InstanceType'],
+ 'LaunchDate': launch_date,
+ 'PublicDNSName': instance['PublicDnsName'],
+ 'Monitoring': instance['Monitoring']['State'],
+ })
+ if 'Tags' in instance:
+ for tag in instance['Tags']:
+                    data[-1].update({
+ tag['Key']: tag['Value']
+ })
+ if 'KeyName' in instance:
+                data[-1].update({'KeyName': instance['KeyName']})
+
+ instance.update({'Region': obj['_user_provided_options']['region_name']})
+ output.append(instance)
+
+ try:
+ raw = json.loads(json.dumps(output, cls=DatetimeEncoder))
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.Instances(val.InstanceId === obj.InstanceId)': raw}
+ human_readable = tableToMarkdown('AWS Instances', data)
+ return_outputs(human_readable, ec)
+
+
+def describe_images_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ kwargs = {}
+ data = []
+
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('imageIds') is not None:
+ kwargs.update({'ImageIds': parse_resource_ids(args.get('imageIds'))})
+ if args.get('owners') is not None:
+ kwargs.update({'Owners': parse_resource_ids(args.get('owners'))})
+ if args.get('executableUsers') is not None:
+ kwargs.update({'ExecutableUsers': parse_resource_ids(args.get('executableUsers'))})
+
+ response = client.describe_images(**kwargs)
+ for i, image in enumerate(response['Images']):
+ data.append({
+ 'CreationDate': image['CreationDate'],
+ 'ImageId': image['ImageId'],
+ 'Public': image['Public'],
+ 'State': image['State'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+ if 'Description' in image:
+ data[i].update({'Description': image['Description']})
+ if 'EnaSupport' in image:
+ data[i].update({'EnaSupport': image['EnaSupport']})
+ if 'Name' in image:
+ data[i].update({'Name': image['Name']})
+ if 'Tags' in image:
+ for tag in image['Tags']:
+ data[i].update({
+ tag['Key']: tag['Value']
+ })
+ try:
+ output = json.dumps(response['Images'], cls=DatetimeEncoder)
+ raw = json.loads(output)
+        if raw:
+            raw[0].update({'Region': obj['_user_provided_options']['region_name']})
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.Images(val.ImageId === obj.ImageId)': raw}
+ human_readable = tableToMarkdown('AWS EC2 Images', data)
+ return_outputs(human_readable, ec)
+
+
+def describe_addresses_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ obj = vars(client._client_config)
+ kwargs = {}
+ data = []
+
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('publicIps') is not None:
+ kwargs.update({'PublicIps': parse_resource_ids(args.get('publicIps'))})
+ if args.get('allocationIds') is not None:
+ kwargs.update({'AllocationIds': parse_resource_ids(args.get('allocationIds'))})
+
+ response = client.describe_addresses(**kwargs)
+
+ for i, address in enumerate(response['Addresses']):
+ data.append({
+ 'PublicIp': address['PublicIp'],
+ 'AllocationId': address['AllocationId'],
+ 'Domain': address['Domain'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+ if 'InstanceId' in address:
+ data[i].update({'InstanceId': address['InstanceId']})
+ if 'AssociationId' in address:
+ data[i].update({'AssociationId': address['AssociationId']})
+ if 'NetworkInterfaceId' in address:
+ data[i].update({'NetworkInterfaceId': address['NetworkInterfaceId']})
+ if 'PrivateIpAddress' in address:
+ data[i].update({'PrivateIpAddress': address['PrivateIpAddress']})
+ if 'Tags' in address:
+ for tag in address['Tags']:
+ data[i].update({
+ tag['Key']: tag['Value']
+ })
+
+ raw = response['Addresses']
+    if raw:
+        raw[0].update({'Region': obj['_user_provided_options']['region_name']})
+ ec = {'AWS.EC2.ElasticIPs(val.AllocationId === obj.AllocationId)': raw}
+ human_readable = tableToMarkdown('AWS EC2 ElasticIPs', data)
+ return_outputs(human_readable, ec)
+
+
+def describe_snapshots_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ obj = vars(client._client_config)
+ kwargs = {}
+ data = []
+
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('ownerIds') is not None:
+ kwargs.update({'OwnerIds': parse_resource_ids(args.get('ownerIds'))})
+ if args.get('snapshotIds') is not None:
+ kwargs.update({'SnapshotIds': parse_resource_ids(args.get('snapshotIds'))})
+ if args.get('restorableByUserIds') is not None:
+ kwargs.update({'RestorableByUserIds': parse_resource_ids(args.get('restorableByUserIds'))})
+
+ response = client.describe_snapshots(**kwargs)
+
+ for i, snapshot in enumerate(response['Snapshots']):
+ try:
+ start_time = datetime.strftime(snapshot['StartTime'], '%Y-%m-%dT%H:%M:%SZ')
+ except ValueError as e:
+ return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=type(e)))
+ data.append({
+ 'Description': snapshot['Description'],
+ 'Encrypted': snapshot['Encrypted'],
+ 'OwnerId': snapshot['OwnerId'],
+ 'Progress': snapshot['Progress'],
+ 'SnapshotId': snapshot['SnapshotId'],
+ 'StartTime': start_time,
+ 'State': snapshot['State'],
+ 'VolumeId': snapshot['VolumeId'],
+ 'VolumeSize': snapshot['VolumeSize'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+ if 'Tags' in snapshot:
+ for tag in snapshot['Tags']:
+ data[i].update({
+ tag['Key']: tag['Value']
+ })
+
+ try:
+ output = json.dumps(response['Snapshots'], cls=DatetimeEncoder)
+ raw = json.loads(output)
+        if raw:
+            raw[0].update({'Region': obj['_user_provided_options']['region_name']})
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.Snapshots(val.SnapshotId === obj.SnapshotId)': raw}
+ human_readable = tableToMarkdown('AWS EC2 Snapshots', data)
+ return_outputs(human_readable, ec)
+
+
+def describe_volumes_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ obj = vars(client._client_config)
+ kwargs = {}
+ data = []
+
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('volumeIds') is not None:
+ kwargs.update({'VolumeIds': parse_resource_ids(args.get('volumeIds'))})
+
+ response = client.describe_volumes(**kwargs)
+
+ for i, volume in enumerate(response['Volumes']):
+ try:
+ create_date = datetime.strftime(volume['CreateTime'], '%Y-%m-%dT%H:%M:%SZ')
+ except ValueError as e:
+ return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=type(e)))
+ data.append({
+ 'AvailabilityZone': volume['AvailabilityZone'],
+ 'Encrypted': volume['Encrypted'],
+ 'State': volume['State'],
+ 'VolumeId': volume['VolumeId'],
+ 'VolumeType': volume['VolumeType'],
+ 'CreateTime': create_date,
+ })
+ if 'Tags' in volume:
+ for tag in volume['Tags']:
+ data[i].update({
+ tag['Key']: tag['Value']
+ })
+ try:
+ output = json.dumps(response['Volumes'], cls=DatetimeEncoder)
+ raw = json.loads(output)
+        if raw:
+            raw[0].update({'Region': obj['_user_provided_options']['region_name']})
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.Volumes(val.VolumeId === obj.VolumeId)': raw}
+ human_readable = tableToMarkdown('AWS EC2 Volumes', data)
+ return_outputs(human_readable, ec)
+
+
+def describe_launch_templates_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ obj = vars(client._client_config)
+ kwargs = {}
+ data = []
+
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('launchTemplateIds') is not None:
+ kwargs.update({'LaunchTemplateIds': parse_resource_ids(args.get('launchTemplateIds'))})
+ if args.get('launchTemplateNames') is not None:
+        kwargs.update({'LaunchTemplateNames': parse_resource_ids(args.get('launchTemplateNames'))})
+
+ response = client.describe_launch_templates(**kwargs)
+
+ for i, template in enumerate(response['LaunchTemplates']):
+ try:
+ create_time = datetime.strftime(template['CreateTime'], '%Y-%m-%dT%H:%M:%SZ')
+ except ValueError as e:
+ return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=type(e)))
+ data.append({
+ 'LaunchTemplateId': template['LaunchTemplateId'],
+ 'LaunchTemplateName': template['LaunchTemplateName'],
+ 'CreatedBy': template['CreatedBy'],
+ 'DefaultVersionNumber': template['DefaultVersionNumber'],
+ 'LatestVersionNumber': template['LatestVersionNumber'],
+ 'CreateTime': create_time,
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+
+ if 'Tags' in template:
+ for tag in template['Tags']:
+ data[i].update({
+ tag['Key']: tag['Value']
+ })
+
+ try:
+ output = json.dumps(response['LaunchTemplates'], cls=DatetimeEncoder)
+ raw = json.loads(output)
+        if raw:
+            raw[0].update({'Region': obj['_user_provided_options']['region_name']})
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.LaunchTemplates(val.LaunchTemplateId === obj.LaunchTemplateId)': raw}
+ human_readable = tableToMarkdown('AWS EC2 LaunchTemplates', data)
+ return_outputs(human_readable, ec)
+
+
+def describe_key_pairs_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ obj = vars(client._client_config)
+ kwargs = {}
+ data = []
+
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('keyNames') is not None:
+ kwargs.update({'KeyNames': parse_resource_ids(args.get('keyNames'))})
+
+ response = client.describe_key_pairs(**kwargs)
+
+ for key in response['KeyPairs']:
+ data.append({
+ 'KeyFingerprint': key['KeyFingerprint'],
+ 'KeyName': key['KeyName'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+
+ ec = {'AWS.EC2.KeyPairs(val.KeyName === obj.KeyName)': data}
+ human_readable = tableToMarkdown('AWS EC2 Key Pairs', data)
+ return_outputs(human_readable, ec)
+
+
+def describe_vpcs_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ obj = vars(client._client_config)
+ kwargs = {}
+ data = []
+
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('vpcIds') is not None:
+ kwargs.update({'VpcIds': parse_resource_ids(args.get('vpcIds'))})
+
+ response = client.describe_vpcs(**kwargs)
+
+ for i, vpc in enumerate(response['Vpcs']):
+ data.append({
+ 'CidrBlock': vpc['CidrBlock'],
+ 'DhcpOptionsId': vpc['DhcpOptionsId'],
+ 'State': vpc['State'],
+ 'VpcId': vpc['VpcId'],
+ 'InstanceTenancy': vpc['InstanceTenancy'],
+ 'IsDefault': vpc['IsDefault'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+
+ if 'Tags' in vpc:
+ for tag in vpc['Tags']:
+ data[i].update({
+ tag['Key']: tag['Value']
+ })
+
+ try:
+ output = json.dumps(response['Vpcs'], cls=DatetimeEncoder)
+ raw = json.loads(output)
+        if raw:
+            raw[0].update({'Region': obj['_user_provided_options']['region_name']})
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.Vpcs(val.VpcId === obj.VpcId)': raw}
+ human_readable = tableToMarkdown('AWS EC2 Vpcs', data)
+ return_outputs(human_readable, ec)
+
+
+def describe_subnets_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ obj = vars(client._client_config)
+ kwargs = {}
+ data = []
+
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('subnetIds') is not None:
+ kwargs.update({'SubnetIds': parse_resource_ids(args.get('subnetIds'))})
+
+ response = client.describe_subnets(**kwargs)
+
+ for i, subnet in enumerate(response['Subnets']):
+ data.append({
+ 'AvailabilityZone': subnet['AvailabilityZone'],
+ 'AvailableIpAddressCount': subnet['AvailableIpAddressCount'],
+ 'CidrBlock': subnet['CidrBlock'],
+ 'DefaultForAz': subnet['DefaultForAz'],
+ 'State': subnet['State'],
+ 'SubnetId': subnet['SubnetId'],
+ 'VpcId': subnet['VpcId'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+
+ if 'Tags' in subnet:
+ for tag in subnet['Tags']:
+ data[i].update({
+ tag['Key']: tag['Value']
+ })
+
+ try:
+ output = json.dumps(response['Subnets'], cls=DatetimeEncoder)
+ raw = json.loads(output)
+        if raw:
+            raw[0].update({'Region': obj['_user_provided_options']['region_name']})
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.Subnets(val.SubnetId === obj.SubnetId)': raw}
+ human_readable = tableToMarkdown('AWS EC2 Subnets', data)
+ return_outputs(human_readable, ec)
+
+
+def describe_security_groups_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ obj = vars(client._client_config)
+ kwargs = {}
+ data = []
+
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('groupIds') is not None:
+ kwargs.update({'GroupIds': parse_resource_ids(args.get('groupIds'))})
+ if args.get('groupNames') is not None:
+ kwargs.update({'GroupNames': parse_resource_ids(args.get('groupNames'))})
+
+ response = client.describe_security_groups(**kwargs)
+
+ for i, sg in enumerate(response['SecurityGroups']):
+ data.append({
+ 'Description': sg['Description'],
+ 'GroupName': sg['GroupName'],
+ 'OwnerId': sg['OwnerId'],
+ 'GroupId': sg['GroupId'],
+ 'VpcId': sg['VpcId'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+
+ if 'Tags' in sg:
+ for tag in sg['Tags']:
+ data[i].update({
+ tag['Key']: tag['Value']
+ })
+
+ try:
+ output = json.dumps(response['SecurityGroups'], cls=DatetimeEncoder)
+ raw = json.loads(output)
+        if raw:
+            raw[0].update({'Region': obj['_user_provided_options']['region_name']})
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.SecurityGroups(val.GroupId === obj.GroupId)': raw}
+ human_readable = tableToMarkdown('AWS EC2 SecurityGroups', data)
+ return_outputs(human_readable, ec)
+
+
+def allocate_address_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ obj = vars(client._client_config)
+
+ response = client.allocate_address(Domain='vpc')
+ data = ({
+ 'PublicIp': response['PublicIp'],
+ 'AllocationId': response['AllocationId'],
+ 'Domain': response['Domain'],
+ 'Region': obj['_user_provided_options']['region_name']
+ })
+ ec = {'AWS.EC2.ElasticIPs': data}
+ human_readable = tableToMarkdown('AWS EC2 ElasticIP', data)
+ return_outputs(human_readable, ec)
+
+
+def associate_address_command(args):
+ client = aws_session(
+ service='ec2',
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ obj = vars(client._client_config)
+ kwargs = {'AllocationId': args.get('allocationId')}
+
+ if args.get('instanceId') is not None:
+ kwargs.update({'InstanceId': args.get('instanceId')})
+ if args.get('allowReassociation') is not None:
+ kwargs.update({'AllowReassociation': True if args.get('allowReassociation') == 'True' else False})
+ if args.get('networkInterfaceId') is not None:
+ kwargs.update({'NetworkInterfaceId': args.get('networkInterfaceId')})
+ if args.get('privateIpAddress') is not None:
+ kwargs.update({'PrivateIpAddress': args.get('privateIpAddress')})
+
+ response = client.associate_address(**kwargs)
+ data = ({
+ 'AllocationId': args.get('allocationId'),
+ 'AssociationId': response['AssociationId'],
+ 'Region': obj['_user_provided_options']['region_name']
+ })
+
+ ec = {"AWS.EC2.ElasticIPs(val.AllocationId === obj.AllocationId)": data}
+ human_readable = tableToMarkdown('AWS EC2 ElasticIP', data)
+ return_outputs(human_readable, ec)
+
+
+def create_snapshot_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ kwargs = {'VolumeId': args.get('volumeId')}
+
+ if args.get('description') is not None:
+ kwargs.update({'Description': args.get('description')})
+ if args.get('tags') is not None:
+ kwargs.update({
+ 'TagSpecifications': [{
+ 'ResourceType': 'snapshot',
+ 'Tags': parse_tag_field(args.get('tags'))}]
+ })
+
+ response = client.create_snapshot(**kwargs)
+
+ try:
+ start_time = datetime.strftime(response['StartTime'], '%Y-%m-%dT%H:%M:%SZ')
+ except ValueError as e:
+ return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=type(e)))
+
+ data = ({
+ 'Description': response['Description'],
+ 'Encrypted': response['Encrypted'],
+ 'Progress': response['Progress'],
+ 'SnapshotId': response['SnapshotId'],
+ 'State': response['State'],
+ 'VolumeId': response['VolumeId'],
+ 'VolumeSize': response['VolumeSize'],
+ 'StartTime': start_time,
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+
+ if 'Tags' in response:
+ for tag in response['Tags']:
+ data.update({
+ tag['Key']: tag['Value']
+ })
+
+ try:
+ output = json.dumps(response, cls=DatetimeEncoder)
+ raw = json.loads(output)
+ del raw['ResponseMetadata']
+ raw.update({'Region': obj['_user_provided_options']['region_name']})
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.Snapshots': raw}
+ human_readable = tableToMarkdown('AWS EC2 Snapshots', data)
+ return_outputs(human_readable, ec)
+
+
+def delete_snapshot_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ response = client.delete_snapshot(SnapshotId=args.get('snapshotId'))
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Snapshot with ID: {snapshot_id} was deleted".format(snapshot_id=args.get('snapshotId')))
+
+
+def create_image_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ kwargs = {
+ 'Name': args.get('name'),
+ 'InstanceId': args.get('instanceId')
+ }
+
+ if args.get('description') is not None:
+ kwargs.update({'Description': args.get('description')})
+ if args.get('noReboot') is not None:
+ kwargs.update({'NoReboot': True if args.get('noReboot') == 'True' else False})
+
+ response = client.create_image(**kwargs)
+
+ data = ({
+ 'ImageId': response['ImageId'],
+ 'Name': args.get('name'),
+ 'InstanceId': args.get('instanceId'),
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+
+ ec = {'AWS.EC2.Images': data}
+ human_readable = tableToMarkdown('AWS EC2 Images', data)
+ return_outputs(human_readable, ec)
+
+
+def deregister_image_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ response = client.deregister_image(ImageId=args.get('imageId'))
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The AMI with ID: {image_id} was deregistered".format(image_id=args.get('imageId')))
+
+
+def modify_volume_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ obj = vars(client._client_config)
+ kwargs = {'VolumeId': args.get('volumeId')}
+
+ if args.get('size') is not None:
+ kwargs.update({'Size': int(args.get('size'))})
+ if args.get('volumeType') is not None:
+ kwargs.update({'VolumeType': args.get('volumeType')})
+ if args.get('iops') is not None:
+ kwargs.update({'Iops': int(args.get('iops'))})
+
+ response = client.modify_volume(**kwargs)
+ volumeModification = response['VolumeModification']
+
+ try:
+ start_time = datetime.strftime(volumeModification['StartTime'], '%Y-%m-%dT%H:%M:%SZ')
+ except ValueError as e:
+ return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=type(e)))
+
+ data = ({
+ 'VolumeId': volumeModification['VolumeId'],
+ 'ModificationState': volumeModification['ModificationState'],
+ 'TargetSize': volumeModification['TargetSize'],
+ 'TargetIops': volumeModification['TargetIops'],
+ 'TargetVolumeType': volumeModification['TargetVolumeType'],
+ 'OriginalSize': volumeModification['OriginalSize'],
+ 'OriginalIops': volumeModification['OriginalIops'],
+ 'OriginalVolumeType': volumeModification['OriginalVolumeType'],
+ 'StartTime': start_time,
+ 'Progress': volumeModification['Progress'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+
+ output = json.dumps(response['VolumeModification'], cls=DatetimeEncoder)
+ raw = json.loads(output)
+ raw.update({'Region': obj['_user_provided_options']['region_name']})
+
+ ec = {'AWS.EC2.Volumes(val.VolumeId === obj.VolumeId).Modification': raw}
+ human_readable = tableToMarkdown('AWS EC2 Volume Modification', data)
+ return_outputs(human_readable, ec)
+
+
+def create_tags_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {
+ 'Resources': parse_resource_ids(args.get('resources')),
+ 'Tags': parse_tag_field(args.get('tags'))
+ }
+ response = client.create_tags(**kwargs)
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The recources where taged successfully")
+
+
+def disassociate_address_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ response = client.disassociate_address(AssociationId=args.get('associationId'))
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Elastic IP was disassociated")
+
+
+def release_address_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ response = client.release_address(AllocationId=args.get('allocationId'))
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Elastic IP was released")
+
+
+def start_instances_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ response = client.start_instances(InstanceIds=parse_resource_ids(args.get('instanceIds')))
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Instances were started")
+
+
+def stop_instances_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ response = client.stop_instances(InstanceIds=parse_resource_ids(args.get('instanceIds')))
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Instances were stopped")
+
+
+def terminate_instances_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ response = client.terminate_instances(InstanceIds=parse_resource_ids(args.get('instanceIds')))
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Instances were terminated")
+
+
+def create_volume_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ kwargs = {'AvailabilityZone': args.get('availabilityZone')}
+
+ if args.get('encrypted') is not None:
+ kwargs.update({'Encrypted': True if args.get('encrypted') == 'True' else False})
+ if args.get('iops') is not None:
+ kwargs.update({'Iops': int(args.get('iops'))})
+ if args.get('kmsKeyId') is not None:
+ kwargs.update({'KmsKeyId': args.get('kmsKeyId')})
+ if args.get('size') is not None:
+ kwargs.update({'Size': int(args.get('size'))})
+ if args.get('snapshotId') is not None:
+ kwargs.update({'SnapshotId': args.get('snapshotId')})
+ if args.get('volumeType') is not None:
+ kwargs.update({'VolumeType': args.get('volumeType')})
+ if args.get('tags') is not None:
+ kwargs.update({
+ 'TagSpecifications': [{
+ 'ResourceType': 'volume',
+ 'Tags': parse_tag_field(args.get('tags'))}]
+ })
+
+ response = client.create_volume(**kwargs)
+
+ try:
+ create_time = datetime.strftime(response['CreateTime'], '%Y-%m-%dT%H:%M:%SZ')
+ except ValueError as e:
+        return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=type(e)))
+
+ data = ({
+ 'AvailabilityZone': response['AvailabilityZone'],
+ 'CreateTime': create_time,
+ 'Encrypted': response['Encrypted'],
+ 'Size': response['Size'],
+ 'State': response['State'],
+ 'VolumeId': response['VolumeId'],
+ 'Iops': response['Iops'],
+ 'VolumeType': response['VolumeType'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+ if 'SnapshotId' in response:
+ data.update({'SnapshotId': response['SnapshotId']})
+ if 'KmsKeyId' in response:
+ data.update({'KmsKeyId': response['KmsKeyId']})
+ if 'Tags' in response:
+ for tag in response['Tags']:
+ data.update({
+ tag['Key']: tag['Value']
+ })
+
+ ec = {'AWS.EC2.Volumes': data}
+ human_readable = tableToMarkdown('AWS EC2 Volumes', data)
+ return_outputs(human_readable, ec)
+
+
+def attach_volume_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ kwargs = {
+ 'Device': args.get('device'),
+ 'InstanceId': args.get('instanceId'),
+ 'VolumeId': args.get('volumeId'),
+ }
+ response = client.attach_volume(**kwargs)
+ try:
+ attach_time = datetime.strftime(response['AttachTime'], '%Y-%m-%dT%H:%M:%SZ')
+ except ValueError as e:
+        return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=type(e)))
+ data = ({
+ 'AttachTime': attach_time,
+ 'Device': response['Device'],
+ 'InstanceId': response['InstanceId'],
+ 'State': response['State'],
+ 'VolumeId': response['VolumeId'],
+ })
+ if 'DeleteOnTermination' in response:
+ data.update({'DeleteOnTermination': response['DeleteOnTermination']})
+
+ ec = {'AWS.EC2.Volumes(val.VolumeId === obj.VolumeId).Attachments': data}
+ human_readable = tableToMarkdown('AWS EC2 Volume Attachments', data)
+ return_outputs(human_readable, ec)
+
+
+def detach_volume_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ kwargs = {'VolumeId': args.get('volumeId')}
+
+ if args.get('force') is not None:
+ kwargs.update({'Force': True if args.get('force') == 'True' else False})
+ if args.get('device') is not None:
+        kwargs.update({'Device': args.get('device')})
+ if args.get('instanceId') is not None:
+ kwargs.update({'InstanceId': args.get('instanceId')})
+
+ response = client.detach_volume(**kwargs)
+ try:
+ attach_time = datetime.strftime(response['AttachTime'], '%Y-%m-%dT%H:%M:%SZ')
+ except ValueError as e:
+        return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=type(e)))
+ data = ({
+ 'AttachTime': attach_time,
+ 'Device': response['Device'],
+ 'InstanceId': response['InstanceId'],
+ 'State': response['State'],
+ 'VolumeId': response['VolumeId'],
+ })
+ if 'DeleteOnTermination' in response:
+ data.update({'DeleteOnTermination': response['DeleteOnTermination']})
+
+ ec = {'AWS.EC2.Volumes(val.VolumeId === obj.VolumeId).Attachments': data}
+ human_readable = tableToMarkdown('AWS EC2 Volume Attachments', data)
+ return_outputs(human_readable, ec)
+
+
+def delete_volume_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ response = client.delete_volume(VolumeId=args.get('volumeId'))
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Volume was deleted")
+
+
+def run_instances_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ kwargs = {
+ 'MinCount': int(args.get('count')),
+ 'MaxCount': int(args.get('count'))
+ } # type: dict
+ BlockDeviceMappings = {} # type: dict
+ if args.get('imageId') is not None:
+ kwargs.update({'ImageId': (args.get('imageId'))})
+ if args.get('instanceType') is not None:
+ kwargs.update({'InstanceType': args.get('instanceType')})
+ if args.get('securityGroupIds') is not None:
+ kwargs.update({'SecurityGroupIds': parse_resource_ids(args.get('securityGroupIds'))})
+ if args.get('securityGroups') is not None:
+ kwargs.update({'SecurityGroups': parse_resource_ids(args.get('securityGroups'))})
+ if args.get('subnetId') is not None:
+ kwargs.update({'SubnetId': args.get('subnetId')})
+ if args.get('userData') is not None:
+ kwargs.update({'UserData': args.get('userData')})
+ if args.get('keyName') is not None:
+ kwargs.update({'KeyName': args.get('keyName')})
+ if args.get('ebsOptimized') is not None:
+        kwargs.update({'EbsOptimized': True if args.get('ebsOptimized') == 'True' else False})
+ if args.get('disableApiTermination') is not None:
+ kwargs.update({'DisableApiTermination': True if args.get('disableApiTermination') == 'True' else False})
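+    # The deviceName/ebs* arguments below assemble a single block-device mapping,
+    # e.g. {'DeviceName': '/dev/xvda', 'Ebs': {'VolumeSize': 50, 'VolumeType': 'gp2'}}
+    # (illustrative values).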
+ if args.get('deviceName') is not None:
+ BlockDeviceMappings = {'DeviceName': args.get('deviceName')}
+ BlockDeviceMappings.update({'Ebs': {}})
+ if args.get('ebsVolumeSize') is not None:
+ BlockDeviceMappings['Ebs'].update({'VolumeSize': int(args.get('ebsVolumeSize'))})
+ if args.get('ebsVolumeType') is not None:
+ BlockDeviceMappings['Ebs'].update({'VolumeType': args.get('ebsVolumeType')})
+ if args.get('ebsIops') is not None:
+ BlockDeviceMappings['Ebs'].update({'Iops': int(args.get('ebsIops'))})
+ if args.get('ebsDeleteOnTermination') is not None:
+ BlockDeviceMappings['Ebs'].update(
+ {'DeleteOnTermination': True if args.get('ebsDeleteOnTermination') == 'True' else False})
+ if args.get('ebsKmsKeyId') is not None:
+ BlockDeviceMappings['Ebs'].update({'KmsKeyId': args.get('ebsKmsKeyId')})
+ if args.get('ebsSnapshotId') is not None:
+ BlockDeviceMappings['Ebs'].update({'SnapshotId': args.get('ebsSnapshotId')})
+ if args.get('ebsEncrypted') is not None:
+ BlockDeviceMappings['Ebs'].update({'Encrypted': True if args.get('ebsEncrypted') == 'True' else False})
+ if BlockDeviceMappings:
+ kwargs.update({'BlockDeviceMappings': [BlockDeviceMappings]}) # type: ignore
+
+ if args.get('iamInstanceProfileArn') is not None:
+ kwargs.update({
+ 'IamInstanceProfile': {
+ 'Arn': args.get('iamInstanceProfileArn')}
+ })
+ if args.get('iamInstanceProfileName') is not None:
+ kwargs.update({ # type: ignore
+ 'IamInstanceProfile': {
+ 'Name': args.get('iamInstanceProfileName')}
+ })
+ if args.get('launchTemplateId') is not None:
+ kwargs.update({
+ 'LaunchTemplate': {
+ 'LaunchTemplateId': args.get('launchTemplateId')}
+ })
+ if args.get('launchTemplateName') is not None:
+ kwargs.update({
+ 'LaunchTemplate': {
+ 'LaunchTemplateName': args.get('launchTemplateName')}
+ })
+ if args.get('launchTemplateVersion') is not None:
+ kwargs['LaunchTemplate'].update({ # type: ignore
+ 'Version': args.get('launchTemplateVersion')
+ })
+ if args.get('tags') is not None:
+ kwargs.update({
+ 'TagSpecifications': [{
+ 'ResourceType': 'instance',
+ 'Tags': parse_tag_field(args.get('tags'))}]
+ })
+
+ response = client.run_instances(**kwargs)
+ data = []
+ for i, instance in enumerate(response['Instances']):
+ try:
+ launch_date = datetime.strftime(instance['LaunchTime'], '%Y-%m-%dT%H:%M:%SZ')
+ except ValueError as e:
+            return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=type(e)))
+ data.append({
+ 'InstanceId': instance['InstanceId'],
+ 'ImageId': instance['ImageId'],
+ 'State': instance['State']['Name'],
+ 'PublicIPAddress': instance.get('PublicIpAddress'),
+ 'Region': obj['_user_provided_options']['region_name'],
+ 'Type': instance['InstanceType'],
+ 'LaunchDate': launch_date,
+ 'PublicDNSName': instance['PublicDnsName'],
+ 'KeyName': instance['KeyName'],
+ 'Monitoring': instance['Monitoring']['State'],
+ })
+ if 'Tags' in instance:
+ for tag in instance['Tags']:
+ data[i].update({
+ tag['Key']: tag['Value']
+ })
+ try:
+ output = json.dumps(response['Instances'], cls=DatetimeEncoder)
+ raw = json.loads(output)
+ raw[0].update({'Region': obj['_user_provided_options']['region_name']})
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.Instances': raw}
+ human_readable = tableToMarkdown('AWS Instances', data)
+ return_outputs(human_readable, ec)
+
+
+def waiter_instance_running_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {}
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('instanceIds') is not None:
+ kwargs.update({'InstanceIds': parse_resource_ids(args.get('instanceIds'))})
+    # Merge into a single WaiterConfig dict; separate updates would overwrite each other.
+    if args.get('waiterDelay') is not None:
+        kwargs.setdefault('WaiterConfig', {}).update({'Delay': int(args.get('waiterDelay'))})
+    if args.get('waiterMaxAttempts') is not None:
+        kwargs.setdefault('WaiterConfig', {}).update({'MaxAttempts': int(args.get('waiterMaxAttempts'))})
+
+ waiter = client.get_waiter('instance_running')
+ waiter.wait(**kwargs)
+ demisto.results("success")
+
+
+def waiter_instance_status_ok_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {}
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('instanceIds') is not None:
+ kwargs.update({'InstanceIds': parse_resource_ids(args.get('instanceIds'))})
+    if args.get('waiterDelay') is not None:
+        kwargs.setdefault('WaiterConfig', {}).update({'Delay': int(args.get('waiterDelay'))})
+    if args.get('waiterMaxAttempts') is not None:
+        kwargs.setdefault('WaiterConfig', {}).update({'MaxAttempts': int(args.get('waiterMaxAttempts'))})
+
+ waiter = client.get_waiter('instance_status_ok')
+ waiter.wait(**kwargs)
+ demisto.results("success")
+
+
+def waiter_instance_stopped_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {}
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('instanceIds') is not None:
+ kwargs.update({'InstanceIds': parse_resource_ids(args.get('instanceIds'))})
+    if args.get('waiterDelay') is not None:
+        kwargs.setdefault('WaiterConfig', {}).update({'Delay': int(args.get('waiterDelay'))})
+    if args.get('waiterMaxAttempts') is not None:
+        kwargs.setdefault('WaiterConfig', {}).update({'MaxAttempts': int(args.get('waiterMaxAttempts'))})
+
+ waiter = client.get_waiter('instance_stopped')
+ waiter.wait(**kwargs)
+ demisto.results("success")
+
+
+def waiter_instance_terminated_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {}
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('instanceIds') is not None:
+ kwargs.update({'InstanceIds': parse_resource_ids(args.get('instanceIds'))})
+    if args.get('waiterDelay') is not None:
+        kwargs.setdefault('WaiterConfig', {}).update({'Delay': int(args.get('waiterDelay'))})
+    if args.get('waiterMaxAttempts') is not None:
+        kwargs.setdefault('WaiterConfig', {}).update({'MaxAttempts': int(args.get('waiterMaxAttempts'))})
+
+ waiter = client.get_waiter('instance_terminated')
+ waiter.wait(**kwargs)
+ demisto.results("success")
+
+
+def waiter_image_available_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {}
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('imageIds') is not None:
+ kwargs.update({'ImageIds': parse_resource_ids(args.get('imageIds'))})
+ if args.get('executableUsers') is not None:
+ kwargs.update({'ExecutableUsers': parse_resource_ids(args.get('executableUsers'))})
+ if args.get('owners') is not None:
+ kwargs.update({'Owners': parse_resource_ids(args.get('owners'))})
+    if args.get('waiterDelay') is not None:
+        kwargs.setdefault('WaiterConfig', {}).update({'Delay': int(args.get('waiterDelay'))})
+    if args.get('waiterMaxAttempts') is not None:
+        kwargs.setdefault('WaiterConfig', {}).update({'MaxAttempts': int(args.get('waiterMaxAttempts'))})
+
+ waiter = client.get_waiter('image_available')
+ waiter.wait(**kwargs)
+ demisto.results("success")
+
+
+def waiter_snapshot_completed_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {}
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('ownerIds') is not None:
+ kwargs.update({'OwnerIds': parse_resource_ids(args.get('ownerIds'))})
+ if args.get('restorableByUserIds') is not None:
+ kwargs.update({'RestorableByUserIds': parse_resource_ids(args.get('restorableByUserIds'))})
+ if args.get('snapshotIds') is not None:
+ kwargs.update({'SnapshotIds': parse_resource_ids(args.get('snapshotIds'))})
+    if args.get('waiterDelay') is not None:
+        kwargs.setdefault('WaiterConfig', {}).update({'Delay': int(args.get('waiterDelay'))})
+    if args.get('waiterMaxAttempts') is not None:
+        kwargs.setdefault('WaiterConfig', {}).update({'MaxAttempts': int(args.get('waiterMaxAttempts'))})
+
+ waiter = client.get_waiter('snapshot_completed')
+ waiter.wait(**kwargs)
+ demisto.results("Success")
+
+
+def get_latest_ami_command(args):
+ client = aws_session(
+ service='ec2',
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ kwargs = {}
+ data = {} # type: dict
+
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('imageIds') is not None:
+ kwargs.update({'ImageIds': parse_resource_ids(args.get('imageIds'))})
+ if args.get('owners') is not None:
+ kwargs.update({'Owners': parse_resource_ids(args.get('owners'))})
+ if args.get('executableUsers') is not None:
+ kwargs.update({'ExecutableUsers': parse_resource_ids(args.get('executableUsers'))})
+ response = client.describe_images(**kwargs)
+ amis = sorted(response['Images'],
+ key=lambda x: x['CreationDate'],
+ reverse=True)
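+    # CreationDate is an ISO-8601 string, so a lexicographic sort is also chronological.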
+    if not amis:
+        return_error('No images found')
+    image = amis[0]
+ data = ({
+ 'CreationDate': image['CreationDate'],
+ 'ImageId': image['ImageId'],
+ 'Public': image['Public'],
+ 'Name': image['Name'],
+ 'State': image['State'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+ if 'Description' in image:
+ data.update({'Description': image['Description']})
+ if 'Tags' in image:
+ for tag in image['Tags']:
+ data.update({
+ tag['Key']: tag['Value']
+ })
+
+ try:
+ raw = json.loads(json.dumps(image, cls=DatetimeEncoder))
+ raw.update({'Region': obj['_user_provided_options']['region_name']})
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+    ec = {'AWS.EC2.Images': raw}
+ human_readable = tableToMarkdown('AWS EC2 Images', data)
+ return_outputs(human_readable, ec)
+
+
+def create_security_group_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {
+ 'GroupName': args.get('groupName'),
+ 'Description': args.get('description'),
+ 'VpcId': args.get('vpcId'),
+ }
+ response = client.create_security_group(**kwargs)
+ data = ({
+ 'GroupName': args.get('groupName'),
+ 'Description': args.get('description'),
+ 'VpcId': args.get('vpcId'),
+ 'GroupId': response['GroupId']
+ })
+ ec = {'AWS.EC2.SecurityGroups': data}
+ human_readable = tableToMarkdown('AWS EC2 Security Groups', data)
+ return_outputs(human_readable, ec)
+
+
+def delete_security_group_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {}
+ if args.get('groupId') is not None:
+ kwargs.update({'GroupId': args.get('groupId')})
+ if args.get('groupName') is not None:
+ kwargs.update({'GroupName': args.get('groupName')})
+
+ response = client.delete_security_group(**kwargs)
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Security Group was Deleted")
+
+
+def authorize_security_group_ingress_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {'GroupId': args.get('groupId')}
+ IpPermissions = []
+ IpPermissions_dict = {}
+ UserIdGroupPairs = []
+ UserIdGroupPairs_dict = {}
+
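+    # The blocks below assemble a single IpPermissions entry, e.g. (illustrative values):
+    # {'IpProtocol': 'tcp', 'FromPort': 22, 'ToPort': 22,
+    #  'IpRanges': [{'CidrIp': '203.0.113.0/24', 'Description': 'office'}]}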
+ if args.get('IpPermissionsfromPort') is not None:
+ IpPermissions_dict.update({'FromPort': int(args.get('IpPermissionsfromPort'))})
+ if args.get('IpPermissionsIpProtocol') is not None:
+ IpPermissions_dict.update({'IpProtocol': str(args.get('IpPermissionsIpProtocol'))}) # type: ignore
+ if args.get('IpPermissionsToPort') is not None:
+ IpPermissions_dict.update({'ToPort': int(args.get('IpPermissionsToPort'))})
+
+ if args.get('IpRangesCidrIp') is not None:
+ IpRanges = [{
+ 'CidrIp': args.get('IpRangesCidrIp'),
+ 'Description': args.get('IpRangesDesc', None)
+ }]
+ IpPermissions_dict.update({'IpRanges': IpRanges}) # type: ignore
+ if args.get('Ipv6RangesCidrIp') is not None:
+ Ipv6Ranges = [{
+ 'CidrIp': args.get('Ipv6RangesCidrIp'),
+ 'Description': args.get('Ipv6RangesDesc', None)
+ }]
+ IpPermissions_dict.update({'Ipv6Ranges': Ipv6Ranges}) # type: ignore
+ if args.get('PrefixListId') is not None:
+ PrefixListIds = [{
+ 'PrefixListId': args.get('PrefixListId'),
+ 'Description': args.get('PrefixListIdDesc', None)
+ }]
+ IpPermissions_dict.update({'PrefixListIds': PrefixListIds}) # type: ignore
+
+ if args.get('UserIdGroupPairsDescription') is not None:
+ UserIdGroupPairs_dict.update({'Description': args.get('UserIdGroupPairsDescription')})
+ if args.get('UserIdGroupPairsGroupId') is not None:
+ UserIdGroupPairs_dict.update({'GroupId': args.get('UserIdGroupPairsGroupId')})
+ if args.get('UserIdGroupPairsGroupName') is not None:
+ UserIdGroupPairs_dict.update({'GroupName': args.get('UserIdGroupPairsGroupName')})
+ if args.get('UserIdGroupPairsPeeringStatus') is not None:
+ UserIdGroupPairs_dict.update({'PeeringStatus': args.get('UserIdGroupPairsPeeringStatus')})
+ if args.get('UserIdGroupPairsUserId') is not None:
+ UserIdGroupPairs_dict.update({'UserId': args.get('UserIdGroupPairsUserId')})
+ if args.get('UserIdGroupPairsVpcId') is not None:
+ UserIdGroupPairs_dict.update({'VpcId': args.get('UserIdGroupPairsVpcId')})
+ if args.get('UserIdGroupPairsVpcPeeringConnectionId') is not None:
+ UserIdGroupPairs_dict.update({'VpcPeeringConnectionId': args.get('UserIdGroupPairsVpcPeeringConnectionId')})
+
+ if args.get('fromPort') is not None:
+ kwargs.update({'FromPort': int(args.get('fromPort'))})
+ if args.get('cidrIp') is not None:
+ kwargs.update({'CidrIp': args.get('cidrIp')})
+ if args.get('toPort') is not None:
+ kwargs.update({'ToPort': int(args.get('toPort'))})
+ if args.get('ipProtocol') is not None:
+ kwargs.update({'IpProtocol': args.get('ipProtocol')})
+ if args.get('sourceSecurityGroupName') is not None:
+ kwargs.update({'SourceSecurityGroupName': args.get('sourceSecurityGroupName')})
+ if args.get('SourceSecurityGroupOwnerId') is not None:
+ kwargs.update({'SourceSecurityGroupOwnerId': args.get('SourceSecurityGroupOwnerId')})
+
+    if UserIdGroupPairs_dict:
+ UserIdGroupPairs.append(UserIdGroupPairs_dict)
+ IpPermissions_dict.update({'UserIdGroupPairs': UserIdGroupPairs}) # type: ignore
+
+    if IpPermissions_dict:
+ IpPermissions.append(IpPermissions_dict)
+ kwargs.update({'IpPermissions': IpPermissions})
+
+ response = client.authorize_security_group_ingress(**kwargs)
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Security Group ingress rule was created")
+
+
+def revoke_security_group_ingress_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {'GroupId': args.get('groupId')}
+
+ if args.get('fromPort') is not None:
+ kwargs.update({'FromPort': int(args.get('fromPort'))})
+ if args.get('cidrIp') is not None:
+ kwargs.update({'CidrIp': args.get('cidrIp')})
+ if args.get('toPort') is not None:
+ kwargs.update({'ToPort': int(args.get('toPort'))})
+ if args.get('ipProtocol') is not None:
+ kwargs.update({'IpProtocol': args.get('ipProtocol')})
+ if args.get('sourceSecurityGroupName') is not None:
+ kwargs.update({'SourceSecurityGroupName': args.get('sourceSecurityGroupName')})
+
+ response = client.revoke_security_group_ingress(**kwargs)
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Security Group ingress rule was revoked")
+
+
+def copy_image_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
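+    # The private client config is inspected only to recover the region this
+    # session was opened with, so it can be echoed in the command output below.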
+ kwargs = {
+ 'Name': args.get('name'),
+ 'SourceImageId': args.get('sourceImageId'),
+ 'SourceRegion': args.get('sourceRegion'),
+ }
+ if args.get('clientToken') is not None:
+ kwargs.update({'ClientToken': args.get('clientToken')})
+ if args.get('description') is not None:
+ kwargs.update({'Description': args.get('description')})
+    if args.get('encrypted') is not None:
+        kwargs.update({'Encrypted': True if args.get('encrypted') == 'True' else False})
+ if args.get('kmsKeyId') is not None:
+ kwargs.update({'KmsKeyId': args.get('kmsKeyId')})
+
+ response = client.copy_image(**kwargs)
+ data = ({
+ 'ImageId': response['ImageId'],
+ 'Region': obj['_user_provided_options']['region_name']
+ })
+
+ ec = {'AWS.EC2.Images(val.ImageId === obj.ImageId)': data}
+ human_readable = tableToMarkdown('AWS EC2 Images', data)
+ return_outputs(human_readable, ec)
+
+
+def copy_snapshot_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ kwargs = {
+ 'SourceSnapshotId': args.get('sourceSnapshotId'),
+ 'SourceRegion': args.get('sourceRegion'),
+ }
+ if args.get('description') is not None:
+ kwargs.update({'Description': args.get('description')})
+    if args.get('encrypted') is not None:
+        kwargs.update({'Encrypted': True if args.get('encrypted') == 'True' else False})
+ if args.get('kmsKeyId') is not None:
+ kwargs.update({'KmsKeyId': args.get('kmsKeyId')})
+
+ response = client.copy_snapshot(**kwargs)
+ data = ({
+ 'SnapshotId': response['SnapshotId'],
+ 'Region': obj['_user_provided_options']['region_name']
+ })
+
+ ec = {'AWS.EC2.Snapshots(val.SnapshotId === obj.SnapshotId)': data}
+ human_readable = tableToMarkdown('AWS EC2 Snapshots', data)
+ return_outputs(human_readable, ec)
+
+
+def describe_reserved_instances_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ kwargs = {}
+ data = []
+ output = []
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('offeringClass') is not None:
+ kwargs.update({'OfferingClass': args.get('offeringClass')})
+ if args.get('reservedInstancesIds') is not None:
+ kwargs.update({'ReservedInstancesIds': parse_resource_ids(args.get('reservedInstancesIds'))})
+
+ response = client.describe_reserved_instances(**kwargs)
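+    # boto3 returns Start/End as datetime objects; reformat them into the
+    # '%Y-%m-%dT%H:%M:%SZ' string form used throughout this integration.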
+ for i, reservation in enumerate(response['ReservedInstances']):
+ try:
+ start_time = datetime.strftime(reservation['Start'], '%Y-%m-%dT%H:%M:%SZ')
+ end_time = datetime.strftime(reservation['End'], '%Y-%m-%dT%H:%M:%SZ')
+ except ValueError as e:
+            return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=e))
+ data.append({
+ 'ReservedInstancesId': reservation['ReservedInstancesId'],
+ 'Start': start_time,
+ 'End': end_time,
+ 'Duration': reservation['Duration'],
+ 'InstanceType': reservation['InstanceType'],
+ 'InstanceCount': reservation['InstanceCount'],
+ 'OfferingClass': reservation['OfferingClass'],
+ 'Scope': reservation['Scope'],
+ 'State': reservation['State']
+ })
+ if 'Tags' in reservation:
+ for tag in reservation['Tags']:
+ data[i].update({
+ tag['Key']: tag['Value']
+ })
+ reservation.update({'Region': obj['_user_provided_options']['region_name']})
+ output.append(reservation)
+
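+    # Round-trip through json with DatetimeEncoder so any remaining datetime
+    # objects become plain strings that the entry context can hold.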
+ try:
+ raw = json.loads(json.dumps(output, cls=DatetimeEncoder))
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.ReservedInstances(val.ReservedInstancesId === obj.ReservedInstancesId)': raw}
+ human_readable = tableToMarkdown('AWS EC2 Reserved Instances', data)
+ return_outputs(human_readable, ec)
+
+
+def monitor_instances_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ data = []
+ response = client.monitor_instances(InstanceIds=parse_resource_ids(args.get('instancesIds')))
+
+ for instance in response['InstanceMonitorings']:
+ data.append({
+ 'InstanceId': instance['InstanceId'],
+ 'MonitoringState': instance['Monitoring']['State']
+ })
+
+    ec = {'AWS.EC2.Instances(val.InstanceId === obj.InstanceId)': response['InstanceMonitorings']}
+ human_readable = tableToMarkdown('AWS EC2 Instances', data)
+ return_outputs(human_readable, ec)
+
+
+def unmonitor_instances_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ data = []
+ response = client.unmonitor_instances(InstanceIds=parse_resource_ids(args.get('instancesIds')))
+
+ for instance in response['InstanceMonitorings']:
+ data.append({
+ 'InstanceId': instance['InstanceId'],
+ 'MonitoringState': instance['Monitoring']['State']
+ })
+
+    ec = {'AWS.EC2.Instances(val.InstanceId === obj.InstanceId)': response['InstanceMonitorings']}
+ human_readable = tableToMarkdown('AWS EC2 Instances', data)
+ return_outputs(human_readable, ec)
+
+
+def reboot_instances_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ response = client.reboot_instances(InstanceIds=parse_resource_ids(args.get('instanceIds')))
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Instances were rebooted")
+
+
+def get_password_data_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ response = client.get_password_data(InstanceId=args.get('instanceId'))
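+    # PasswordData holds the encrypted administrator password of a Windows
+    # instance; it is empty if the password is not yet available.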
+ try:
+ time_stamp = datetime.strftime(response['Timestamp'], '%Y-%m-%dT%H:%M:%SZ')
+ except ValueError as e:
+        return_error('Date could not be parsed. Please check the date again.\n{error}'.format(error=e))
+ data = {
+ 'InstanceId': response['InstanceId'],
+ 'PasswordData': response['PasswordData'],
+ 'Timestamp': time_stamp
+ }
+
+    ec = {'AWS.EC2.Instances(val.InstanceId === obj.InstanceId).PasswordData': data}
+ human_readable = tableToMarkdown('AWS EC2 Instances', data)
+ return_outputs(human_readable, ec)
+
+
+def modify_network_interface_attribute_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {'NetworkInterfaceId': args.get('networkInterfaceId')}
+
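+    # Most modifiable attributes are wrapped in a {'Value': ...} structure, as
+    # required by the ModifyNetworkInterfaceAttribute request shape.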
+ if args.get('sourceDestCheck') is not None:
+ kwargs.update({'SourceDestCheck': {'Value': True if args.get('sourceDestCheck') == 'True' else False}})
+ if args.get('attachmentId') is not None and args.get('deleteOnTermination') is not None:
+ kwargs.update({
+ 'Attachment': {
+ 'AttachmentId': args.get('attachmentId'),
+ 'DeleteOnTermination': True if args.get('deleteOnTermination') == 'True' else False
+ }})
+ if args.get('description') is not None:
+ kwargs.update({'Description': {'Value': args.get('description')}})
+ if args.get('groups') is not None:
+ kwargs.update({'Groups': parse_resource_ids(args.get('groups'))})
+
+ response = client.modify_network_interface_attribute(**kwargs)
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Network Interface Atttribute was successfully modified")
+
+
+def modify_instance_attribute_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {'InstanceId': args.get('instanceId')}
+
+ if args.get('sourceDestCheck') is not None:
+ kwargs.update({'SourceDestCheck': {'Value': True if args.get('sourceDestCheck') == 'True' else False}})
+ if args.get('disableApiTermination') is not None:
+ kwargs.update(
+ {'DisableApiTermination': {'Value': True if args.get('disableApiTermination') == 'True' else False}})
+ if args.get('ebsOptimized') is not None:
+ kwargs.update({'EbsOptimized': {'Value': True if args.get('ebsOptimized') == 'True' else False}})
+ if args.get('enaSupport') is not None:
+ kwargs.update({'EnaSupport': {'Value': True if args.get('enaSupport') == 'True' else False}})
+ if args.get('instanceType') is not None:
+ kwargs.update({'InstanceType': {'Value': args.get('instanceType')}})
+ if args.get('instanceInitiatedShutdownBehavior') is not None:
+ kwargs.update(
+ {'InstanceInitiatedShutdownBehavior': {'Value': args.get('instanceInitiatedShutdownBehavior')}})
+ if args.get('groups') is not None:
+ kwargs.update({'Groups': parse_resource_ids(args.get('groups'))})
+
+ response = client.modify_instance_attribute(**kwargs)
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Instance attribute was successfully modified")
+
+
+def create_network_acl_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {'VpcId': args.get('VpcId')}
+
+ if args.get('DryRun') is not None:
+ kwargs.update({'DryRun': True if args.get('DryRun') == 'True' else False})
+
+ response = client.create_network_acl(**kwargs)
+ network_acl = response['NetworkAcl']
+
+ data = {
+ 'Associations': network_acl['Associations'],
+ 'IsDefault': network_acl['IsDefault'],
+ 'NetworkAclId': network_acl['NetworkAclId'],
+ 'Tags': network_acl['Tags'],
+ 'VpcId': network_acl['VpcId']
+ }
+ entries = []
+ for entry in network_acl['Entries']:
+ entries.append(entry)
+ hr_entries = tableToMarkdown('AWS EC2 ACL Entries', entries, removeNull=True)
+ ec = {'AWS.EC2.VpcId(val.VpcId === obj.VpcId).NetworkAcl': network_acl}
+ hr_acl = tableToMarkdown('AWS EC2 Instance ACL', data, removeNull=True)
+ human_readable = hr_acl + hr_entries
+ return_outputs(human_readable, ec)
+
+
+def create_network_acl_entry_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {
+ 'Egress': True if args.get('Egress') == 'True' else False,
+ 'NetworkAclId': args.get('NetworkAclId'),
+ 'Protocol': args.get('Protocol'),
+ 'RuleAction': args.get('RuleAction'),
+ 'RuleNumber': int(args.get('RuleNumber'))
+ }
+
+ if args.get('CidrBlock') is not None:
+ kwargs.update({'CidrBlock': args.get('CidrBlock')})
+    # Build IcmpTypeCode and PortRange incrementally; assigning each of them
+    # twice (once per sub-field) would overwrite one value with the other.
+    IcmpTypeCode = {}  # type: dict
+    if args.get('Code') is not None:
+        IcmpTypeCode.update({'Code': int(args.get('Code'))})
+    if args.get('Type') is not None:
+        IcmpTypeCode.update({'Type': int(args.get('Type'))})
+    if IcmpTypeCode:
+        kwargs.update({'IcmpTypeCode': IcmpTypeCode})
+    if args.get('Ipv6CidrBlock') is not None:
+        kwargs.update({'Ipv6CidrBlock': args.get('Ipv6CidrBlock')})
+    PortRange = {}  # type: dict
+    if args.get('From') is not None:
+        PortRange.update({'From': int(args.get('From'))})
+    if args.get('To') is not None:
+        PortRange.update({'To': int(args.get('To'))})
+    if PortRange:
+        kwargs.update({'PortRange': PortRange})
+ if args.get('DryRun') is not None:
+ kwargs.update({'DryRun': True if args.get('DryRun') == 'True' else False})
+
+ response = client.create_network_acl_entry(**kwargs)
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results("The Instance ACL was successfully modified")
+
+
+def create_fleet_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {} # type: dict
+
+ if args.get('DryRun') is not None:
+ kwargs.update({'DryRun': True if args.get('DryRun') == 'True' else False})
+
+    if args.get('ClientToken') is not None:
+        kwargs.update({'ClientToken': args.get('ClientToken')})
+
+ SpotOptions = {}
+ if args.get('SpotAllocationStrategy') is not None:
+ SpotOptions.update({
+ 'AllocationStrategy': args.get('SpotAllocationStrategy')
+ })
+ if args.get('InstanceInterruptionBehavior') is not None:
+ SpotOptions.update({
+ 'InstanceInterruptionBehavior': args.get('InstanceInterruptionBehavior')
+ })
+    if args.get('InstancePoolsToUseCount') is not None:
+        SpotOptions.update({
+            'InstancePoolsToUseCount': int(args.get('InstancePoolsToUseCount'))
+        })
+ if args.get('SingleInstanceType') is not None:
+ SpotOptions.update({'SingleInstanceType': True if args.get('SingleInstanceType') == 'True' else False})
+ if args.get('SingleAvailabilityZone') is not None:
+ SpotOptions.update({
+ 'SingleAvailabilityZone': True if args.get('SingleAvailabilityZone') == 'True' else False
+ })
+ if args.get('MinTargetCapacity') is not None:
+ SpotOptions.update({
+ 'MinTargetCapacity': int(args.get('MinTargetCapacity'))
+ })
+
+ if SpotOptions:
+ kwargs.update({'SpotOptions': SpotOptions})
+
+ OnDemandOptions = {}
+ if args.get('OnDemandAllocationStrategy') is not None:
+ OnDemandOptions.update({
+ 'AllocationStrategy': args.get('OnDemandAllocationStrategy')
+ })
+    if args.get('OnDemandSingleInstanceType') is not None:
+        OnDemandOptions.update({
+            'SingleInstanceType': True if args.get('OnDemandSingleInstanceType') == 'True' else False
+        })
+    if args.get('OnDemandSingleAvailabilityZone') is not None:
+        OnDemandOptions.update({
+            'SingleAvailabilityZone': True if args.get('OnDemandSingleAvailabilityZone') == 'True' else False
+        })
+    if args.get('OnDemandMinTargetCapacity') is not None:
+        OnDemandOptions.update({
+            'MinTargetCapacity': int(args.get('OnDemandMinTargetCapacity'))
+        })
+
+ if OnDemandOptions:
+ kwargs.update({'OnDemandOptions': OnDemandOptions})
+
+    if args.get('ExcessCapacityTerminationPolicy') is not None:
+        kwargs.update({'ExcessCapacityTerminationPolicy': args.get('ExcessCapacityTerminationPolicy')})
+
+ LaunchTemplateConfigs = {} # type: dict
+ LaunchTemplateSpecification = {}
+ if args.get('LaunchTemplateId') is not None:
+ LaunchTemplateSpecification.update({
+ 'LaunchTemplateId': args.get('LaunchTemplateId')
+ })
+ if args.get('LaunchTemplateName') is not None:
+ LaunchTemplateSpecification.update({
+ 'LaunchTemplateName': args.get('LaunchTemplateName')
+ })
+ if args.get('LaunchTemplateVersion') is not None:
+ LaunchTemplateSpecification.update({
+ 'Version': str(args.get('LaunchTemplateVersion'))
+ })
+
+ if LaunchTemplateSpecification:
+ LaunchTemplateConfigs.update({'LaunchTemplateSpecification': LaunchTemplateSpecification})
+
+ Overrides = [] # type: list
+
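+    # Each Override* argument may carry several values (split via multi_split);
+    # values at the same position are merged into a single override entry, so
+    # the i-th InstanceType, MaxPrice, SubnetId, etc. describe one override.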
+ if args.get('OverrideInstanceType') is not None:
+ arr = multi_split(args.get('OverrideInstanceType'))
+ for i, item in enumerate(arr):
+ if len(Overrides) - 1 < i:
+ Overrides.append({})
+ Overrides[i].update({
+ 'InstanceType': item
+ })
+ if args.get('OverrideMaxPrice') is not None:
+ arr = multi_split(args.get('OverrideMaxPrice'))
+ for i, item in enumerate(arr):
+ if len(Overrides) - 1 < i:
+ Overrides.append({})
+ Overrides[i].update({
+ 'MaxPrice': item
+ })
+
+ if args.get('OverrideSubnetId') is not None:
+ arr = multi_split(args.get('OverrideSubnetId'))
+ for i, item in enumerate(arr):
+ if len(Overrides) - 1 < i:
+ Overrides.append({})
+ Overrides[i].update({
+ 'SubnetId': item
+ })
+
+ if args.get('OverrideAvailabilityZone') is not None:
+ arr = multi_split(args.get('OverrideAvailabilityZone'))
+ for i, item in enumerate(arr):
+ if len(Overrides) - 1 < i:
+ Overrides.append({})
+ Overrides[i].update({
+ 'AvailabilityZone': item
+ })
+
+ if args.get('OverrideWeightedCapacity') is not None:
+ arr = multi_split(args.get('OverrideWeightedCapacity'))
+ for i, item in enumerate(arr):
+ if len(Overrides) - 1 < i:
+ Overrides.append({})
+ Overrides[i].update({
+ 'WeightedCapacity': item
+ })
+
+ if args.get('OverridePriority') is not None:
+ arr = multi_split(args.get('OverridePriority'))
+ for i, item in enumerate(arr):
+ if len(Overrides) - 1 < i:
+ Overrides.append({})
+ Overrides[i].update({
+ 'Priority': item
+ })
+
+ if Overrides:
+ LaunchTemplateConfigs.update({'Overrides': Overrides})
+
+ if LaunchTemplateConfigs:
+ kwargs.update({'LaunchTemplateConfigs': [LaunchTemplateConfigs]})
+
+ TargetCapacitySpecification = {}
+ if args.get('TotalTargetCapacity') is not None:
+ TargetCapacitySpecification.update({
+ 'TotalTargetCapacity': int(args.get('TotalTargetCapacity'))
+ })
+ if args.get('OnDemandTargetCapacity') is not None:
+ TargetCapacitySpecification.update({
+ 'OnDemandTargetCapacity': int(args.get('OnDemandTargetCapacity'))
+ })
+ if args.get('SpotTargetCapacity') is not None:
+ TargetCapacitySpecification.update({
+ 'SpotTargetCapacity': int(args.get('SpotTargetCapacity'))
+ })
+ if args.get('DefaultTargetCapacityType') is not None:
+ TargetCapacitySpecification.update({
+ 'DefaultTargetCapacityType': args.get('DefaultTargetCapacityType')
+ })
+ if TargetCapacitySpecification:
+ kwargs.update({'TargetCapacitySpecification': TargetCapacitySpecification})
+
+ if args.get('TerminateInstancesWithExpiration') is not None:
+ kwargs.update({'TerminateInstancesWithExpiration': True if args.get(
+ 'TerminateInstancesWithExpiration') == 'True' else False})
+
+    if args.get('Type') is not None:
+        kwargs.update({'Type': args.get('Type')})
+
+    if args.get('ValidFrom') is not None:
+        kwargs.update({'ValidFrom': parse_date(args.get('ValidFrom'))})
+
+    if args.get('ValidUntil') is not None:
+        kwargs.update({'ValidUntil': parse_date(args.get('ValidUntil'))})
+
+    if args.get('ReplaceUnhealthyInstances') is not None:
+        kwargs.update(
+            {'ReplaceUnhealthyInstances': True if args.get('ReplaceUnhealthyInstances') == 'True' else False})
+
+ TagSpecifications = [] # type: List[dict]
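+    # Tags arrive as '#'-separated "<ResourceType>:<tags>" pairs; the tag part
+    # of each pair is parsed by parse_tag_field (defined elsewhere in this integration).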
+ if args.get('Tags') is not None:
+ arr = args.get('Tags').split('#')
+ for i, item in enumerate(arr):
+            if len(TagSpecifications) - 1 < i:
+ TagSpecifications.append({})
+ tg = item.split(':')
+ TagSpecifications[i].update({
+ 'ResourceType': tg[0],
+ 'Tags': parse_tag_field(tg[1])
+ })
+
+ if TagSpecifications:
+ kwargs.update({'TagSpecifications': TagSpecifications})
+ response = client.create_fleet(**kwargs)
+ data = [{
+ 'FleetId': response['FleetId'],
+ }]
+ output = json.dumps(response)
+ raw = json.loads(output)
+ ec = {'AWS.EC2.Fleet': raw}
+ human_readable = tableToMarkdown('AWS EC2 Fleet', data)
+ return_outputs(human_readable, ec)
+
+
+def delete_fleet_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ data = []
+ kwargs = {}
+ output = []
+ if args.get('DryRun') is not None:
+ kwargs.update({'DryRun': True if args.get('DryRun') == 'True' else False})
+ if args.get('FleetIds') is not None:
+ kwargs.update({'FleetIds': parse_resource_ids(args.get('FleetIds'))})
+    if args.get('TerminateInstances') is not None:
+        kwargs.update({'TerminateInstances': True if args.get('TerminateInstances') == 'True' else False})
+
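+    # delete_fleets reports per-fleet results in SuccessfulFleetDeletions and
+    # UnsuccessfulFleetDeletions; both are surfaced in the output below.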
+ response = client.delete_fleets(**kwargs)
+    for item in response['SuccessfulFleetDeletions']:
+ data.append({'SuccessfulFleetDeletions': {
+ 'CurrentFleetState': item['CurrentFleetState'],
+ 'PreviousFleetState': item['PreviousFleetState'],
+ 'FleetId': item['FleetId'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ }})
+ output.append(item)
+    for item in response['UnsuccessfulFleetDeletions']:
+ data.append({'UnsuccessfulFleetDeletions': {
+ 'Error-Code': item['Error']['Code'],
+ 'Error-Message': item['Error']['Message'],
+ 'FleetId': item['FleetId'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ }})
+ output.append(item)
+
+ try:
+ raw = json.loads(json.dumps(output, cls=DatetimeEncoder))
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.DeletedFleets': raw}
+ human_readable = tableToMarkdown('AWS Deleted Fleets', data)
+ return_outputs(human_readable, ec)
+
+
+def describe_fleets_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config) # noqa:F841
+ data = []
+ kwargs = {}
+ output = []
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('FleetIds') is not None:
+ kwargs.update({'FleetIds': parse_resource_ids(args.get('FleetIds'))})
+    if args.get('MaxResults') is not None:
+        kwargs.update({'MaxResults': int(args.get('MaxResults'))})
+ if args.get('NextToken') is not None:
+ kwargs.update({'NextToken': args.get('NextToken')})
+
+ response = client.describe_fleets(**kwargs)
+ for i, item in enumerate(response['Fleets']):
+        data.append({
+            'ActivityStatus': item.get('ActivityStatus', 'None'),
+ 'FleetId': item['FleetId'],
+ 'FleetState': item['FleetState'],
+ 'FulfilledCapacity': item['FulfilledCapacity'],
+ 'FulfilledOnDemandCapacity': item['FulfilledOnDemandCapacity'],
+ 'LaunchTemplateId': item['LaunchTemplateConfigs'][0]['LaunchTemplateSpecification'][
+ 'LaunchTemplateId'],
+ 'CreateTime': datetime.strftime(item['CreateTime'], '%Y-%m-%dT%H:%M:%SZ'),
+ 'TotalTargetCapacity': item['TargetCapacitySpecification']['TotalTargetCapacity'],
+ 'OnDemandTargetCapacity': item['TargetCapacitySpecification']['OnDemandTargetCapacity'],
+ 'SpotTargetCapacity': item['TargetCapacitySpecification']['SpotTargetCapacity'],
+ 'DefaultTargetCapacityType': item['TargetCapacitySpecification']['DefaultTargetCapacityType'],
+ 'TerminateInstancesWithExpiration': item['TerminateInstancesWithExpiration'],
+ 'Type': item['Type'],
+ 'InstanceInterruptionBehavior': item['SpotOptions']['InstanceInterruptionBehavior'],
+ })
+ if 'Tags' in item:
+ for tag in item['Tags']:
+ data[i].update({
+ tag['Key']: tag['Value']
+ })
+ output.append(item)
+
+ try:
+ raw = json.loads(json.dumps(output, cls=DatetimeEncoder))
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.Fleet(val.FleetId === obj.FleetId)': raw}
+ human_readable = tableToMarkdown('AWS EC2 Fleets', data)
+ return_outputs(human_readable, ec)
+
+
+def describe_fleet_instances_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ data = []
+ kwargs = {}
+ output = []
+ if args.get('filters') is not None:
+ kwargs.update({'Filters': parse_filter_field(args.get('filters'))})
+ if args.get('FleetId') is not None:
+ kwargs.update({'FleetId': args.get('FleetId')})
+ if args.get('MaxResults') is not None:
+ kwargs.update({'MaxResults': int(args.get('MaxResults'))})
+ if args.get('NextToken') is not None:
+ kwargs.update({'NextToken': args.get('NextToken')})
+
+ response = client.describe_fleet_instances(**kwargs)
+    for item in response['ActiveInstances']:
+ data.append({
+ 'InstanceId': item['InstanceId'],
+ 'InstanceType': item['InstanceType'],
+ 'SpotInstanceRequestId': item['SpotInstanceRequestId'],
+ 'FleetId': response['FleetId'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+        if 'InstanceHealth' in item:
+            data[-1].update({'InstanceHealth': item['InstanceHealth']})
+ output.append(item)
+
+ try:
+ raw = json.loads(json.dumps(output, cls=DatetimeEncoder))
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.Fleet(val.FleetId === obj.FleetId).ActiveInstances': raw}
+ human_readable = tableToMarkdown('AWS EC2 Fleets Instances', data)
+ return_outputs(human_readable, ec)
+
+
+def modify_fleet_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {}
+    if args.get('FleetId') is not None:
+        kwargs.update({'FleetId': args.get('FleetId')})
+ if args.get('ExcessCapacityTerminationPolicy') is not None:
+ kwargs.update({'ExcessCapacityTerminationPolicy': args.get('ExcessCapacityTerminationPolicy')})
+ TargetCapacitySpecification = {}
+ if args.get('TotalTargetCapacity') is not None:
+ TargetCapacitySpecification.update({
+ 'TotalTargetCapacity': int(args.get('TotalTargetCapacity'))
+ })
+ if args.get('OnDemandTargetCapacity') is not None:
+ TargetCapacitySpecification.update({
+ 'OnDemandTargetCapacity': int(args.get('OnDemandTargetCapacity'))
+ })
+ if args.get('SpotTargetCapacity') is not None:
+ TargetCapacitySpecification.update({
+ 'SpotTargetCapacity': int(args.get('SpotTargetCapacity'))
+ })
+ if args.get('DefaultTargetCapacityType') is not None:
+ TargetCapacitySpecification.update({
+ 'DefaultTargetCapacityType': args.get('DefaultTargetCapacityType')
+ })
+ if TargetCapacitySpecification:
+ kwargs.update({'TargetCapacitySpecification': TargetCapacitySpecification})
+
+ response = client.modify_fleet(**kwargs)
+
+    if response['Return']:
+        demisto.results("AWS EC2 Fleet was successfully modified")
+    else:
+        demisto.results("AWS EC2 Fleet was not successfully modified: " + str(response['Return']))
+
+
+def create_launch_template_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config) # noqa:F841
+ kwargs = {}
+
+ BlockDeviceMappings = {} # type: dict
+
+ LaunchTemplateData = {} # type: dict
+
+ if args.get('ClientToken') is not None:
+ kwargs.update({'ClientToken': args.get('ClientToken')})
+ if args.get('LaunchTemplateName') is not None:
+ kwargs.update({'LaunchTemplateName': args.get('LaunchTemplateName')})
+ if args.get('VersionDescription') is not None:
+ kwargs.update({'VersionDescription': args.get('VersionDescription')})
+
+ if args.get('KernelId') is not None:
+ LaunchTemplateData.update({'KernelId': args.get('KernelId')})
+    if args.get('EbsOptimized') is not None:
+        LaunchTemplateData.update({'EbsOptimized': True if args.get('EbsOptimized') == 'True' else False})
+
+ if args.get('iamInstanceProfileArn') is not None and args.get('iamInstanceProfileName') is not None:
+ LaunchTemplateData.update({
+ 'IamInstanceProfile': {
+ 'Arn': args.get('iamInstanceProfileArn'),
+ 'Name': args.get('iamInstanceProfileName')}
+ })
+
+    # Build the Ebs sub-dict separately so EBS arguments can be supplied even
+    # when no device name is given, and attach it only if it is non-empty.
+    Ebs = {}  # type: dict
+    if args.get('ebsVolumeSize') is not None:
+        Ebs.update({'VolumeSize': int(args.get('ebsVolumeSize'))})
+    if args.get('ebsVolumeType') is not None:
+        Ebs.update({'VolumeType': args.get('ebsVolumeType')})
+    if args.get('ebsIops') is not None:
+        Ebs.update({'Iops': int(args.get('ebsIops'))})
+    if args.get('ebsDeleteOnTermination') is not None:
+        Ebs.update({'DeleteOnTermination': True if args.get('ebsDeleteOnTermination') == 'True' else False})
+    if args.get('ebsKmsKeyId') is not None:
+        Ebs.update({'KmsKeyId': args.get('ebsKmsKeyId')})
+    if args.get('ebsSnapshotId') is not None:
+        Ebs.update({'SnapshotId': args.get('ebsSnapshotId')})
+    if args.get('ebsEncrypted') is not None:
+        Ebs.update({'Encrypted': True if args.get('ebsEncrypted') == 'True' else False})
+    if args.get('deviceName') is not None:
+        BlockDeviceMappings.update({'DeviceName': args.get('deviceName')})
+    if args.get('VirtualName') is not None:
+        BlockDeviceMappings.update({'VirtualName': args.get('VirtualName')})
+    if args.get('NoDevice') is not None:
+        BlockDeviceMappings.update({'NoDevice': args.get('NoDevice')})
+    if Ebs:
+        BlockDeviceMappings.update({'Ebs': Ebs})
+    if BlockDeviceMappings:
+        LaunchTemplateData.update({'BlockDeviceMappings': [BlockDeviceMappings]})
+
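+    # A single network-interface spec is built here; the API expects a list,
+    # so it is wrapped as [NetworkInterfaces] when attached below.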
+ NetworkInterfaces = {} # type: dict
+    if args.get('AssociatePublicIpAddress') is not None:
+        NetworkInterfaces.update(
+            {'AssociatePublicIpAddress': True if args.get('AssociatePublicIpAddress') == 'True' else False})
+    if args.get('NetworkInterfacesDeleteOnTermination') is not None:
+        NetworkInterfaces.update(
+            {'DeleteOnTermination': True if args.get('NetworkInterfacesDeleteOnTermination') == 'True' else False})
+    if args.get('NetworkInterfacesDescription') is not None:
+        NetworkInterfaces.update({'Description': args.get('NetworkInterfacesDescription')})
+    if args.get('NetworkInterfacesDeviceIndex') is not None:
+        NetworkInterfaces.update({'DeviceIndex': int(args.get('NetworkInterfacesDeviceIndex'))})
+ if args.get('NetworkInterfaceGroups') is not None:
+ NetworkInterfaces.update({'Groups': parse_resource_ids(args.get('NetworkInterfaceGroups'))})
+    if args.get('Ipv6AddressCount') is not None:
+        NetworkInterfaces.update({'Ipv6AddressCount': int(args.get('Ipv6AddressCount'))})
+ if args.get('Ipv6Addresses') is not None:
+ arr = args.get('Ipv6Addresses').split(',')
+ NetworkInterfaces.update({'Ipv6Addresses': []})
+ for a in arr:
+ NetworkInterfaces['Ipv6Addresses'].append({'Ipv6Address': a})
+ if args.get('NetworkInterfaceId') is not None:
+ NetworkInterfaces.update({'NetworkInterfaceId': args.get('NetworkInterfaceId')})
+ if args.get('PrivateIpAddress') is not None:
+ NetworkInterfaces.update({'PrivateIpAddress': args.get('PrivateIpAddress')})
+ if args.get('SubnetId') is not None:
+ NetworkInterfaces.update({'SubnetId': args.get('SubnetId')})
+ if NetworkInterfaces:
+ LaunchTemplateData.update({'NetworkInterfaces': [NetworkInterfaces]})
+ if args.get('ImageId') is not None:
+ LaunchTemplateData.update({'ImageId': args.get('ImageId')})
+ if args.get('InstanceType') is not None:
+ LaunchTemplateData.update({'InstanceType': args.get('InstanceType')})
+ if args.get('KeyName') is not None:
+ LaunchTemplateData.update({'KeyName': args.get('KeyName')})
+    if args.get('Monitoring') is not None:
+        LaunchTemplateData.update({'Monitoring': {'Enabled': True if args.get('Monitoring') == 'True' else False}})
+    # Collect all placement arguments into one Placement dict; calling
+    # update({'Placement': {...}}) once per argument would overwrite the
+    # previously set placement fields.
+    Placement = {}  # type: dict
+    if args.get('AvailabilityZone') is not None:
+        Placement.update({'AvailabilityZone': args.get('AvailabilityZone')})
+    if args.get('AvailabilityZoneGroupName') is not None:
+        Placement.update({'GroupName': args.get('AvailabilityZoneGroupName')})
+    if args.get('PlacementTenancy') is not None:
+        Placement.update({'Tenancy': args.get('PlacementTenancy')})
+    if args.get('PlacementAffinity') is not None:
+        Placement.update({'Affinity': args.get('PlacementAffinity')})
+    if args.get('PlacementHostId') is not None:
+        Placement.update({'HostId': args.get('PlacementHostId')})
+    if args.get('PlacementSpreadDomain') is not None:
+        Placement.update({'SpreadDomain': args.get('PlacementSpreadDomain')})
+    if Placement:
+        LaunchTemplateData.update({'Placement': Placement})
+ if args.get('RamDiskId') is not None:
+ LaunchTemplateData.update({'RamDiskId': args.get('RamDiskId')})
+    if args.get('DisableApiTermination') is not None:
+        LaunchTemplateData.update(
+            {'DisableApiTermination': True if args.get('DisableApiTermination') == 'True' else False})
+ if args.get('InstanceInitiatedShutdownBehavior') is not None:
+ LaunchTemplateData.update(
+ {'InstanceInitiatedShutdownBehavior': args.get('InstanceInitiatedShutdownBehavior')})
+ if args.get('UserData') is not None:
+ LaunchTemplateData.update({'UserData': args.get('UserData')})
+ TagSpecifications = [] # type: list
+ if args.get('Tags') is not None:
+ arr = args.get('Tags').split('#')
+ for i, item in enumerate(arr):
+            if len(TagSpecifications) - 1 < i:
+ TagSpecifications.append({})
+ tg = item.split(':')
+ TagSpecifications[i].update({
+ 'ResourceType': tg[0],
+ 'Tags': parse_tag_field(tg[1])
+ })
+
+ ElasticGpuSpecifications = [] # type: list
+ if args.get('ElasticGpuSpecificationsType') is not None:
+ arr = multi_split(args.get('ElasticGpuSpecificationsType'))
+ for i, item in enumerate(arr):
+ if len(ElasticGpuSpecifications) - 1 < i:
+ ElasticGpuSpecifications.append({})
+ ElasticGpuSpecifications[i].update({
+ 'Type': item
+ })
+
+ if ElasticGpuSpecifications:
+ LaunchTemplateData.update({'ElasticGpuSpecifications': ElasticGpuSpecifications})
+
+ ElasticInferenceAccelerators = [] # type: list
+ if args.get('ElasticInferenceAcceleratorsType') is not None:
+ arr = multi_split(args.get('ElasticInferenceAcceleratorsType'))
+ for i, item in enumerate(arr):
+ if len(ElasticInferenceAccelerators) - 1 < i:
+ ElasticInferenceAccelerators.append({})
+ ElasticInferenceAccelerators[i].update({
+ 'Type': item
+ })
+    if ElasticInferenceAccelerators:
+ LaunchTemplateData.update({'ElasticInferenceAccelerators': ElasticInferenceAccelerators})
+ if TagSpecifications:
+ LaunchTemplateData.update({'TagSpecifications': TagSpecifications})
+ if args.get('securityGroupIds') is not None:
+ LaunchTemplateData.update({'SecurityGroupIds': parse_resource_ids(args.get('securityGroupIds'))})
+ if args.get('securityGroups') is not None:
+ LaunchTemplateData.update({'SecurityGroups': parse_resource_ids(args.get('securityGroups'))})
+
+ InstanceMarketOptions = {} # type: dict
+ if args.get('MarketType') is not None:
+ InstanceMarketOptions.update({
+ 'MarketType': args.get('MarketType')
+ })
+
+ SpotOptions = {} # type: dict
+ if args.get('SpotInstanceType') is not None:
+ SpotOptions.update({
+ 'SpotInstanceType': args.get('SpotInstanceType')
+ })
+    if args.get('BlockDurationMinutes') is not None:
+        SpotOptions.update({
+            'BlockDurationMinutes': int(args.get('BlockDurationMinutes'))
+        })
+ if args.get('SpotValidUntil') is not None:
+ SpotOptions.update({
+ 'ValidUntil': parse_date(args.get('SpotValidUntil'))
+ })
+ if args.get('SpotInstanceInterruptionBehavior') is not None:
+ SpotOptions.update({
+ 'InstanceInterruptionBehavior': args.get('SpotInstanceInterruptionBehavior')
+ })
+ if args.get('SpotMaxPrice') is not None:
+ SpotOptions.update({
+ 'MaxPrice': args.get('SpotMaxPrice')
+ })
+
+ if SpotOptions:
+ InstanceMarketOptions.update({'SpotOptions': SpotOptions})
+
+ if InstanceMarketOptions:
+ LaunchTemplateData.update({'InstanceMarketOptions': InstanceMarketOptions})
+
+ if LaunchTemplateData:
+ kwargs.update({'LaunchTemplateData': LaunchTemplateData})
+
+ response = client.create_launch_template(**kwargs)
+
+ data = []
+ template = response['LaunchTemplate']
+ data.append({
+ 'LaunchTemplateId': response['LaunchTemplate']['LaunchTemplateId'],
+ 'LaunchTemplateName': response['LaunchTemplate']['LaunchTemplateName'],
+ 'CreateTime': response['LaunchTemplate']['CreateTime'],
+ 'CreatedBy': response['LaunchTemplate']['CreatedBy'],
+ 'DefaultVersionNumber': response['LaunchTemplate']['DefaultVersionNumber'],
+ 'LatestVersionNumber': response['LaunchTemplate']['LatestVersionNumber'],
+ })
+ try:
+ output = json.dumps(template, cls=DatetimeEncoder)
+ data_json = json.dumps(data, cls=DatetimeEncoder)
+ data_hr = json.loads(data_json) # type: ignore
+ raw = json.loads(output)
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.LaunchTemplates': raw}
+ human_readable = tableToMarkdown('AWS LaunchTemplates', data_hr)
+ return_outputs(human_readable, ec)
+
+
+def delete_launch_template_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config) # noqa:F841
+ data = []
+ kwargs = {}
+ output = []
+ if args.get('LaunchTemplateId') is not None:
+ kwargs.update({'LaunchTemplateId': args.get('LaunchTemplateId')})
+ if args.get('LaunchTemplateName') is not None:
+ kwargs.update({'LaunchTemplateName': args.get('LaunchTemplateName')})
+
+ response = client.delete_launch_template(**kwargs)
+ item = response['LaunchTemplate']
+ data.append({
+ 'LaunchTemplateId': item['LaunchTemplateId'],
+ 'LaunchTemplateName': item['LaunchTemplateName'],
+ 'CreateTime': datetime.strftime(item['CreateTime'], '%Y-%m-%dT%H:%M:%SZ'),
+ 'CreatedBy': item['CreatedBy'],
+ 'DefaultVersionNumber': item['DefaultVersionNumber'],
+ 'LatestVersionNumber': item['LatestVersionNumber'],
+ })
+ output.append(item)
+
+ try:
+ raw = json.loads(json.dumps(output, cls=DatetimeEncoder))
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.EC2.DeletedLaunchTemplates': raw}
+ human_readable = tableToMarkdown('AWS Deleted Launch Templates', data)
+ return_outputs(human_readable, ec)
+
+
+def modify_image_attribute_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config) # noqa:F841
+ kwargs = {}
+
+ if args.get('Attribute') is not None:
+ kwargs.update({'Attribute': args.get('Attribute')})
+ if args.get('Description') is not None:
+ kwargs.update({'Description': {'Value': args.get('Description')}})
+ if args.get('ImageId') is not None:
+ kwargs.update({'ImageId': args.get('ImageId')})
+
+ LaunchPermission = {"Add": [], "Remove": []} # type: dict
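+    # LaunchPermission carries separate Add/Remove lists of {'Group': ...} or
+    # {'UserId': ...} entries, mirroring the ModifyImageAttribute API shape.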
+ if args.get('LaunchPermission-Add-Group') is not None:
+ LaunchPermission["Add"].append({'Group': args.get('LaunchPermission-Add-Group')})
+ if args.get('LaunchPermission-Add-UserId') is not None:
+ LaunchPermission["Add"].append({'UserId': args.get('LaunchPermission-Add-UserId')})
+
+ if args.get('LaunchPermission-Remove-Group') is not None:
+ LaunchPermission["Remove"].append({'Group': args.get('LaunchPermission-Remove-Group')})
+ if args.get('LaunchPermission-Remove-UserId') is not None:
+ LaunchPermission["Remove"].append({'UserId': args.get('LaunchPermission-Remove-UserId')})
+
+    if LaunchPermission['Add'] or LaunchPermission['Remove']:
+        kwargs.update({'LaunchPermission': LaunchPermission})
+
+ if args.get('OperationType') is not None:
+ kwargs.update({'OperationType': args.get('OperationType')})
+ if args.get('ProductCodes') is not None:
+ kwargs.update({'ProductCodes': parse_resource_ids(args.get('ProductCodes'))})
+ if args.get('UserGroups') is not None:
+ kwargs.update({'UserGroups': parse_resource_ids(args.get('UserGroups'))})
+ if args.get('UserIds') is not None:
+ kwargs.update({'UserIds': parse_resource_ids(args.get('UserIds'))})
+ if args.get('Value') is not None:
+ kwargs.update({'Value': args.get('Value')})
+
+ response = client.modify_image_attribute(**kwargs)
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+        demisto.results('Image attribute successfully modified')
+
+
+"""COMMAND BLOCK"""
+try:
+ LOG('Command being called is {command}'.format(command=demisto.command()))
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ client = aws_session()
+ response = client.describe_regions()
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results('ok')
+
+ elif demisto.command() == 'aws-ec2-describe-regions':
+ describe_regions_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-describe-instances':
+ describe_instances_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-describe-images':
+ describe_images_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-describe-addresses':
+ describe_addresses_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-describe-snapshots':
+ describe_snapshots_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-describe-volumes':
+ describe_volumes_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-describe-launch-templates':
+ describe_launch_templates_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-describe-key-pairs':
+ describe_key_pairs_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-describe-vpcs':
+ describe_vpcs_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-describe-subnets':
+ describe_subnets_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-describe-security-groups':
+ describe_security_groups_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-allocate-address':
+ allocate_address_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-associate-address':
+ associate_address_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-create-snapshot':
+ create_snapshot_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-delete-snapshot':
+ delete_snapshot_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-create-image':
+ create_image_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-deregister-image':
+ deregister_image_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-modify-volume':
+ modify_volume_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-create-tags':
+ create_tags_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-disassociate-address':
+ disassociate_address_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-release-address':
+ release_address_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-start-instances':
+ start_instances_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-stop-instances':
+ stop_instances_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-terminate-instances':
+ terminate_instances_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-create-volume':
+ create_volume_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-attach-volume':
+ attach_volume_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-detach-volume':
+ detach_volume_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-delete-volume':
+ delete_volume_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-run-instances':
+ run_instances_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-waiter-instance-running':
+ waiter_instance_running_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-waiter-instance-status-ok':
+ waiter_instance_status_ok_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-waiter-instance-stopped':
+ waiter_instance_stopped_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-waiter-instance-terminated':
+ waiter_instance_terminated_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-waiter-image-available':
+ waiter_image_available_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-waiter-snapshot_completed':
+ waiter_snapshot_completed_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-get-latest-ami':
+ get_latest_ami_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-create-security-group':
+ create_security_group_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-delete-security-group':
+ delete_security_group_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-authorize-security-group-ingress-rule':
+ authorize_security_group_ingress_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-revoke-security-group-ingress-rule':
+ revoke_security_group_ingress_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-copy-image':
+ copy_image_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-copy-snapshot':
+ copy_snapshot_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-describe-reserved-instances':
+ describe_reserved_instances_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-monitor-instances':
+ monitor_instances_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-unmonitor-instances':
+ unmonitor_instances_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-reboot-instances':
+ reboot_instances_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-get-password-data':
+ get_password_data_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-modify-network-interface-attribute':
+ modify_network_interface_attribute_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-modify-instance-attribute':
+ modify_instance_attribute_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-create-network-acl':
+ create_network_acl_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-create-network-acl-entry':
+ create_network_acl_entry_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-create-fleet':
+ create_fleet_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-delete-fleet':
+ delete_fleet_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-describe-fleets':
+ describe_fleets_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-describe-fleet-instances':
+ describe_fleet_instances_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-modify-fleet':
+ modify_fleet_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-create-launch-template':
+ create_launch_template_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-delete-launch-template':
+ delete_launch_template_command(demisto.args())
+
+ elif demisto.command() == 'aws-ec2-modify-image-attribute':
+ modify_image_attribute_command(demisto.args())
+
+except ResponseParserError as e:
+    LOG(str(e))
+    return_error('Could not connect to the AWS endpoint. Please check that the region is valid.\n {error}'.format(
+        error=type(e)))
+
+except Exception as e:
+    LOG(str(e))
+    return_error('Error has occurred in the AWS EC2 Integration: {code}\n {message}'.format(
+        code=type(e), message=str(e)))
diff --git a/Integrations/AWS-EC2/AWS-EC2.yml b/Integrations/AWS-EC2/AWS-EC2.yml
new file mode 100644
index 000000000000..18084064f5fe
--- /dev/null
+++ b/Integrations/AWS-EC2/AWS-EC2.yml
@@ -0,0 +1,6152 @@
+category: IT Services
+commonfields:
+ id: AWS - EC2
+ version: -1
+configuration:
+- display: AWS Default Region
+ name: defaultRegion
+ options:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ type: 15
+- display: Role Arn
+ name: roleArn
+ required: false
+ type: 0
+- display: Role Session Name
+ name: roleSessionName
+ required: false
+ type: 0
+- display: Role Session Duration
+ name: sessionDuration
+ required: false
+ type: 0
+- display: Access Key
+ name: access_key
+ required: false
+ type: 0
+- display: Secret Key
+ name: secret_key
+ required: false
+ type: 4
+- display: Use System Proxy
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (Not Secure)
+ name: insecure
+ required: false
+ type: 8
+description: Amazon Web Services Elastic Compute Cloud (EC2)
+display: AWS - EC2
+name: AWS - EC2
+script:
+ commands:
+ - arguments:
+ - default: false
+    description: One or more filters. See documentation for details and filter options.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+    description: One or more instance IDs, separated by commas.
+ isArray: false
+ name: instanceIds
+ required: false
+ secret: false
+ - default: false
+    description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+    description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes one or more of your instances.
+ execution: false
+ name: aws-ec2-describe-instances
+ outputs:
+ - contextPath: AWS.EC2.Instances.AmiLaunchIndex
+ description: The AMI launch index, which can be used to find this instance in
+ the launch group.
+ type: number
+ - contextPath: AWS.EC2.Instances.ImageId
+ description: The ID of the AMI used to launch the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.InstanceId
+ description: The ID of the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.InstanceType
+ description: The instance type.
+ type: string
+ - contextPath: AWS.EC2.Instances.KernelId
+ description: The kernel associated with this instance, if applicable.
+ type: string
+ - contextPath: AWS.EC2.Instances.KeyName
+ description: The name of the key pair, if this instance was launched with an
+ associated key pair.
+ type: string
+ - contextPath: AWS.EC2.Instances.LaunchTime
+ description: The time the instance was launched.
+ type: date
+ - contextPath: AWS.EC2.Instances.Monitoring.State
+ description: Indicates whether detailed monitoring is enabled. Otherwise, basic
+ monitoring is enabled.
+ type: string
+ - contextPath: AWS.EC2.Instances.Placement.AvailabilityZone
+ description: The Availability Zone of the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.Placement.Affinity
+ description: The affinity setting for the instance on the Dedicated Host.
+ type: string
+ - contextPath: AWS.EC2.Instances.Placement.GroupName
+ description: The name of the placement group the instance is in (for cluster
+ compute instances).
+ type: string
+ - contextPath: AWS.EC2.Instances.Placement.HostId
+    description: The ID of the Dedicated Host on which the instance resides.
+ type: string
+ - contextPath: AWS.EC2.Instances.Placement.Tenancy
+ description: The tenancy of the instance (if the instance is running in a VPC).
+ type: string
+ - contextPath: AWS.EC2.Instances.Platform
+ description: The value is Windows for Windows instances; otherwise blank.
+ type: string
+ - contextPath: AWS.EC2.Instances.PrivateDnsName
+    description: (IPv4 only) The private DNS hostname assigned to the instance.
+ This DNS hostname can only be used inside the Amazon EC2 network. This name
+ is not available until the instance enters the running state.
+ type: string
+ - contextPath: AWS.EC2.Instances.PrivateIpAddress
+ description: The private IPv4 address assigned to the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.ProductCodes.ProductCodeId
+ description: The product code.
+ type: string
+ - contextPath: AWS.EC2.Instances.ProductCodes.ProductCodeType
+ description: The type of product code.
+ type: string
+ - contextPath: AWS.EC2.Instances.PublicDnsName
+ description: (IPv4 only) The public DNS name assigned to the instance. This
+ name is not available until the instance enters the running state.
+ type: string
+ - contextPath: AWS.EC2.Instances.PublicIpAddress
+ description: The public IPv4 address assigned to the instance, if applicable.
+ type: string
+ - contextPath: AWS.EC2.Instances.RamdiskId
+ description: The RAM disk associated with this instance, if applicable.
+ type: string
+ - contextPath: AWS.EC2.Instances.State.Code
+ description: The low byte represents the state.
+ type: string
+ - contextPath: AWS.EC2.Instances.State.Name
+ description: The current state of the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.StateTransitionReason
+ description: The reason for the most recent state transition. This might be
+ an empty string.
+ type: string
+ - contextPath: AWS.EC2.Instances.SubnetId
+ description: The ID of the subnet in which the instance is running.
+ type: string
+ - contextPath: AWS.EC2.Instances.VpcId
+ description: The ID of the VPC in which the instance is running.
+ type: string
+ - contextPath: AWS.EC2.Instances.Architecture
+ description: The architecture of the image.
+ type: string
+ - contextPath: AWS.EC2.Instances.BlockDeviceMappings.DeviceName
+ description: The device name (for example, /dev/sdh or xvdh).
+ type: string
+ - contextPath: AWS.EC2.Instances.BlockDeviceMappings.Ebs.AttachTime
+ description: The time stamp when the attachment initiated.
+ type: string
+ - contextPath: AWS.EC2.Instances.BlockDeviceMappings.Ebs.DeleteOnTermination
+ description: Indicates whether the volume is deleted on instance termination.
+ type: string
+ - contextPath: AWS.EC2.Instances.BlockDeviceMappings.Ebs.Status
+ description: The attachment state.
+ type: string
+ - contextPath: AWS.EC2.Instances.BlockDeviceMappings.Ebs.VolumeId
+ description: The ID of the EBS volume.
+ type: string
+ - contextPath: AWS.EC2.Instances.ClientToken
+ description: The idempotency token you provided when you launched the instance,
+ if applicable.
+ type: string
+ - contextPath: AWS.EC2.Instances.EbsOptimized
+ description: Indicates whether the instance is optimized for Amazon EBS I/O.
+ type: boolean
+ - contextPath: AWS.EC2.Instances.EnaSupport
+ description: Specifies whether enhanced networking with ENA is enabled.
+ type: boolean
+ - contextPath: AWS.EC2.Instances.Hypervisor
+ description: The hypervisor type of the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.IamInstanceProfile.Arn
+ description: The Amazon Resource Name (ARN) of the instance profile.
+ type: string
+ - contextPath: AWS.EC2.Instances.IamInstanceProfile.Id
+ description: The ID of the instance profile.
+ type: string
+ - contextPath: AWS.EC2.Instances.InstanceLifecycle
+ description: Indicates whether this is a Spot Instance or a Scheduled Instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.ElasticGpuAssociations.ElasticGpuId
+ description: The ID of the Elastic GPU.
+ type: string
+ - contextPath: AWS.EC2.Instances.ElasticGpuAssociations.ElasticGpuAssociationId
+ description: The ID of the association.
+ type: string
+ - contextPath: AWS.EC2.Instances.ElasticGpuAssociations.ElasticGpuAssociationState
+ description: The state of the association between the instance and the Elastic
+ GPU.
+ type: string
+ - contextPath: AWS.EC2.Instances.ElasticGpuAssociations.ElasticGpuAssociationTime
+ description: The time the Elastic GPU was associated with the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Association.IpOwnerId
+ description: The ID of the owner of the Elastic IP address.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Association.PublicDnsName
+ description: The public DNS name.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Association.PublicIp
+ description: The public IP address or Elastic IP address bound to the network
+ interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Attachment.AttachTime
+ description: The time stamp when the attachment initiated.
+ type: date
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Attachment.AttachmentId
+ description: The ID of the network interface attachment.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Attachment.DeleteOnTermination
+ description: Indicates whether the network interface is deleted when the instance
+ is terminated.
+ type: boolean
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Attachment.DeviceIndex
+ description: The index of the device on the instance for the network interface
+ attachment.
+ type: number
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Attachment.Status
+ description: The attachment state.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Description
+ description: The description.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Groups.GroupName
+ description: The name of the security group.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Groups.GroupId
+ description: The ID of the security group.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Ipv6Addresses.Ipv6Address
+ description: The IPv6 addresses associated with the network interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.MacAddress
+ description: The MAC address.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.NetworkInterfaceId
+ description: The ID of the network interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.OwnerId
+ description: The ID of the AWS account that created the network interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateDnsName
+ description: The private DNS name.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress
+ description: The IPv4 address of the network interface within the subnet.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddresses.Association.IpOwnerId
+ description: The ID of the owner of the Elastic IP address.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddresses.Association.PublicDnsName
+ description: The public DNS name.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddresses.Association.PublicIp
+ description: The public IP address or Elastic IP address bound to the network
+ interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddresses.Primary
+ description: Indicates whether this IPv4 address is the primary private IP address
+ of the network interface.
+ type: boolean
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddresses.PrivateDnsName
+ description: The private IPv4 DNS name.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddresses.PrivateIpAddress
+ description: The private IPv4 address of the network interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.SourceDestCheck
+ description: Indicates whether to validate network traffic to or from this network
+ interface.
+ type: boolean
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Status
+ description: The status of the network interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.SubnetId
+ description: The ID of the subnet.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.VpcId
+ description: The ID of the VPC.
+ type: string
+ - contextPath: AWS.EC2.Instances.RootDeviceName
+ description: The device name of the root device volume (for example, /dev/sda1).
+ type: string
+ - contextPath: AWS.EC2.Instances.RootDeviceType
+ description: The root device type used by the AMI. The AMI can use an EBS volume
+ or an instance store volume.
+ type: string
+ - contextPath: AWS.EC2.Instances.SecurityGroups.GroupName
+ description: The name of the security group.
+ type: string
+ - contextPath: AWS.EC2.Instances.SecurityGroups.GroupId
+ description: The ID of the security group.
+ type: string
+ - contextPath: AWS.EC2.Instances.SourceDestCheck
+ description: Specifies whether to enable an instance launched in a VPC to perform
+ NAT.
+ type: boolean
+ - contextPath: AWS.EC2.Instances.SpotInstanceRequestId
+ description: If the request is a Spot Instance request, the ID of the request.
+ type: string
+ - contextPath: AWS.EC2.Instances.SriovNetSupport
+ description: Specifies whether enhanced networking with the Intel 82599 Virtual
+ Function interface is enabled.
+ type: string
+ - contextPath: AWS.EC2.Instances.StateReason.Code
+ description: The reason code for the state change.
+ type: string
+ - contextPath: AWS.EC2.Instances.StateReason.Message
+ description: The message for the state change.
+ type: string
+ - contextPath: AWS.EC2.Instances.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.Instances.Tags.Value
+ description: The value of the tag.
+ type: string
+ - contextPath: AWS.EC2.Instances.VirtualizationType
+ description: The virtualization type of the instance.
+ type: string
+ - arguments:
+ - default: false
+ description: One or more filters.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: One or more image IDs, separated by commas.
+ isArray: false
+ name: imageIds
+ required: false
+ secret: false
+ - default: false
+ description: Filters the images by the owner. Specify an AWS account ID, self
+ (owner is the sender of the request), or an AWS owner alias (valid values
+ are amazon | aws-marketplace | microsoft). Omitting this option returns all
+ images for which you have launch permissions, regardless of ownership.
+ isArray: false
+ name: owners
+ required: false
+ secret: false
+ - default: false
+ description: Scopes the images by users with explicit launch permissions. Specify
+ an AWS account ID, self (the sender of the request), or all (public AMIs).
+ isArray: false
+ name: executableUsers
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes one or more of the images (AMIs, AKIs, and ARIs) available
+ to you. Images available to you include public images, private images that you
+ own, and private images owned by other AWS accounts but for which you have explicit
+ launch permissions.
+ execution: false
+ name: aws-ec2-describe-images
+ outputs:
+ - contextPath: AWS.EC2.Images.Architecture
+ description: The architecture of the image.
+ type: string
+ - contextPath: AWS.EC2.Images.CreationDate
+ description: The date and time the image was created.
+ type: date
+ - contextPath: AWS.EC2.Images.ImageId
+ description: The ID of the AMI.
+ type: string
+ - contextPath: AWS.EC2.Images.ImageLocation
+ description: The location of the AMI.
+ type: string
+ - contextPath: AWS.EC2.Images.ImageType
+ description: The type of image.
+ type: string
+ - contextPath: AWS.EC2.Images.Public
+ description: Indicates whether the image has public launch permissions. The
+ value is true if this image has public launch permissions or false if it has
+ only implicit and explicit launch permissions.
+ type: boolean
+ - contextPath: AWS.EC2.Images.KernelId
+ description: The kernel associated with the image, if any. Only applicable for
+ machine images.
+ type: string
+ - contextPath: AWS.EC2.Images.OwnerId
+ description: The AWS account ID of the image owner.
+ type: string
+ - contextPath: AWS.EC2.Images.Platform
+ description: The value is Windows for Windows AMIs; otherwise blank.
+ type: string
+ - contextPath: AWS.EC2.Images.ProductCodes.ProductCodeId
+ description: The product code.
+ type: string
+ - contextPath: AWS.EC2.Images.ProductCodes.ProductCodeType
+ description: The type of product code.
+ type: string
+ - contextPath: AWS.EC2.Images.RamdiskId
+ description: The RAM disk associated with the image, if any. Only applicable
+ for machine images.
+ type: string
+ - contextPath: AWS.EC2.Images.State
+ description: The current state of the AMI. If the state is available, the image
+ is successfully registered and can be used to launch an instance.
+ type: string
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.DeviceName
+ description: The device name (for example, /dev/sdh or xvdh).
+ type: string
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.VirtualName
+ description: The virtual device name (ephemeralN).
+ type: string
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.Encrypted
+ description: Indicates whether the EBS volume is encrypted.
+ type: boolean
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.DeleteOnTermination
+ description: Indicates whether the EBS volume is deleted on instance termination.
+ type: boolean
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.Iops
+ description: The number of I/O operations per second (IOPS) that the volume
+ supports.
+ type: number
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.KmsKeyId
+ description: Identifier (key ID, key alias, ID ARN, or alias ARN) for a user-managed
+ CMK under which the EBS volume is encrypted.
+ type: string
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.SnapshotId
+ description: The ID of the snapshot.
+ type: string
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.VolumeSize
+ description: The size of the volume, in GiB.
+ type: number
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.VolumeType
+ description: The volume type.
+ type: string
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.NoDevice
+ description: Suppresses the specified device included in the block device mapping
+ of the AMI.
+ type: string
+ - contextPath: AWS.EC2.Images.Description
+ description: The description of the AMI that was provided during image creation.
+ type: string
+ - contextPath: AWS.EC2.Images.EnaSupport
+ description: Specifies whether enhanced networking with ENA is enabled.
+ type: boolean
+ - contextPath: AWS.EC2.Images.Hypervisor
+ description: The hypervisor type of the image.
+ type: string
+ - contextPath: AWS.EC2.Images.ImageOwnerAlias
+ description: The AWS account alias (for example, amazon, self) or the AWS
+ account ID of the AMI owner.
+ type: string
+ - contextPath: AWS.EC2.Images.Name
+ description: The name of the AMI that was provided during image creation.
+ type: string
+ - contextPath: AWS.EC2.Images.RootDeviceName
+ description: The device name of the root device volume (for example, /dev/sda1).
+ type: string
+ - contextPath: AWS.EC2.Images.RootDeviceType
+ description: The type of root device used by the AMI. The AMI can use an EBS
+ volume or an instance store volume.
+ type: string
+ - contextPath: AWS.EC2.Images.SriovNetSupport
+ description: Specifies whether enhanced networking with the Intel 82599 Virtual
+ Function interface is enabled.
+ type: string
+ - contextPath: AWS.EC2.Images.StateReason.Code
+ description: The reason code for the state change.
+ type: string
+ - contextPath: AWS.EC2.Images.StateReason.Message
+ description: The message for the state change.
+ type: string
+ - contextPath: AWS.EC2.Images.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.Images.Tags.Value
+ description: The value of the tag.
+ type: string
+ - contextPath: AWS.EC2.Images.VirtualizationType
+ description: The type of virtualization of the AMI.
+ type: string
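+ # Illustrative War Room invocation of the command above (a sketch only; the image
+ # ID and owner value are hypothetical placeholders, not values from this file):
+ #   !aws-ec2-describe-images imageIds="ami-1a2b3c4d" owners="self"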
+ - arguments:
+ - default: false
+ description: The name of the region (for example, us-east-1).
+ isArray: false
+ name: regionNames
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes one or more regions that are currently available to you.
+ execution: false
+ name: aws-ec2-describe-regions
+ outputs:
+ - contextPath: AWS.Regions.Endpoint
+ description: The region service endpoint.
+ type: string
+ - contextPath: AWS.Regions.RegionName
+ description: The name of the region.
+ type: string
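+ # Illustrative War Room invocation (sketch; the region name is an example value):
+ #   !aws-ec2-describe-regions regionNames="us-east-1"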
+ - arguments:
+ - default: false
+ description: One or more filters. See documentation for filters list.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: One or more Elastic IP addresses.
+ isArray: false
+ name: publicIps
+ required: false
+ secret: false
+ - default: false
+ description: One or more allocation IDs.
+ isArray: false
+ name: allocationIds
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes one or more of your Elastic IP addresses.
+ execution: false
+ name: aws-ec2-describe-addresses
+ outputs:
+ - contextPath: AWS.EC2.ElasticIPs.InstanceId
+ description: The ID of the instance that the address is associated with (if
+ any).
+ type: string
+ - contextPath: AWS.EC2.ElasticIPs.PublicIp
+ description: The Elastic IP address.
+ type: string
+ - contextPath: AWS.EC2.ElasticIPs.AllocationId
+ description: The ID representing the allocation of the address for use with
+ EC2-VPC.
+ type: string
+ - contextPath: AWS.EC2.ElasticIPs.AssociationId
+ description: The ID representing the association of the address with an instance
+ in a VPC.
+ type: string
+ - contextPath: AWS.EC2.ElasticIPs.Domain
+ description: Indicates whether this Elastic IP address is for use with instances
+ in EC2-Classic (standard) or instances in a VPC.
+ type: string
+ - contextPath: AWS.EC2.ElasticIPs.NetworkInterfaceId
+ description: The ID of the network interface.
+ type: string
+ - contextPath: AWS.EC2.ElasticIPs.NetworkInterfaceOwnerId
+ description: The ID of the AWS account that owns the network interface.
+ type: string
+ - contextPath: AWS.EC2.ElasticIPs.PrivateIpAddress
+ description: The private IP address associated with the Elastic IP address.
+ type: string
+ - contextPath: AWS.EC2.ElasticIPs.Region
+ description: The AWS Region where the Elastic IP address is located.
+ type: string
+ - contextPath: AWS.EC2.ElasticIPs.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.ElasticIPs.Tags.Value
+ description: The value of the tag.
+ type: string
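+ # Illustrative War Room invocation (sketch; the allocation ID is a hypothetical placeholder):
+ #   !aws-ec2-describe-addresses allocationIds="eipalloc-0abc1234"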
+ - arguments:
+ - default: false
+ description: One or more filters. See documentation for filters list.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Returns the snapshots owned by the specified owner. Multiple owners
+ can be specified.
+ isArray: false
+ name: ownerIds
+ required: false
+ secret: false
+ - default: false
+ description: One or more snapshot IDs, separated by commas.
+ isArray: false
+ name: snapshotIds
+ required: false
+ secret: false
+ - default: false
+ description: One or more AWS account IDs that can create volumes from the snapshot.
+ isArray: false
+ name: restorableByUserIds
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes one or more of the EBS snapshots available to you.
+ execution: false
+ name: aws-ec2-describe-snapshots
+ outputs:
+ - contextPath: AWS.EC2.Snapshots.DataEncryptionKeyId
+ description: The data encryption key identifier for the snapshot.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.Description
+ description: The description for the snapshot.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.Encrypted
+ description: Indicates whether the snapshot is encrypted.
+ type: boolean
+ - contextPath: AWS.EC2.Snapshots.KmsKeyId
+ description: The full ARN of the AWS Key Management Service (AWS KMS) customer
+ master key (CMK) that was used to protect the volume encryption key for the
+ parent volume.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.OwnerId
+ description: The AWS account ID of the EBS snapshot owner.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.Progress
+ description: The progress of the snapshot, as a percentage.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.SnapshotId
+ description: The ID of the snapshot.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.StartTime
+ description: The time stamp when the snapshot was initiated.
+ type: date
+ - contextPath: AWS.EC2.Snapshots.State
+ description: The snapshot state.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.StateMessage
+ description: This field displays error state details to help you diagnose why
+ the error occurred.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.VolumeId
+ description: The ID of the volume that was used to create the snapshot.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.VolumeSize
+ description: The size of the volume, in GiB.
+ type: number
+ - contextPath: AWS.EC2.Snapshots.OwnerAlias
+ description: Value from an Amazon-maintained list of snapshot owners.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.Region
+ description: The AWS Region where the snapshot is located.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.Tags.Value
+ description: The value of the tag.
+ type: string
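+ # Illustrative War Room invocation (sketch; "self" follows the ownerIds argument above):
+ #   !aws-ec2-describe-snapshots ownerIds="self"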
+ - arguments:
+ - default: false
+ description: One or more filters. See documentation for filters list.
+ isArray: false
+ name: Filters
+ required: false
+ secret: false
+ - default: false
+ description: One or more launch template names, separated by commas.
+ isArray: false
+ name: LaunchTemplateNames
+ required: false
+ secret: false
+ - default: false
+ description: One or more launch template IDs, separated by commas.
+ isArray: false
+ name: LaunchTemplateIds
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes one or more launch templates.
+ execution: false
+ name: aws-ec2-describe-launch-templates
+ outputs:
+ - contextPath: AWS.EC2.LaunchTemplates.LaunchTemplateId
+ description: The ID of the launch template.
+ type: string
+ - contextPath: AWS.EC2.LaunchTemplates.LaunchTemplateName
+ description: The name of the launch template.
+ type: string
+ - contextPath: AWS.EC2.LaunchTemplates.CreateTime
+ description: The time the launch template was created.
+ type: date
+ - contextPath: AWS.EC2.LaunchTemplates.CreatedBy
+ description: The principal that created the launch template.
+ type: string
+ - contextPath: AWS.EC2.LaunchTemplates.DefaultVersionNumber
+ description: The version number of the default version of the launch template.
+ type: number
+ - contextPath: AWS.EC2.LaunchTemplates.LatestVersionNumber
+ description: The version number of the latest version of the launch template.
+ type: number
+ - contextPath: AWS.EC2.LaunchTemplates.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.LaunchTemplates.Tags.Value
+ description: The value of the tag.
+ type: string
+ - contextPath: AWS.EC2.LaunchTemplates.Region
+ description: The AWS Region where the launch template is located.
+ type: string
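+ # Illustrative War Room invocation (sketch; the launch template ID is a hypothetical placeholder):
+ #   !aws-ec2-describe-launch-templates LaunchTemplateIds="lt-0abcd1234"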
+ - arguments:
+ - default: false
+ description: One or more filters. See documentation for filters list.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: One or more key pair names, separated by commas.
+ isArray: false
+ name: keyNames
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes one or more of your key pairs.
+ execution: false
+ name: aws-ec2-describe-key-pairs
+ outputs:
+ - contextPath: AWS.EC2.KeyPairs.KeyFingerprint
+ description: If you used CreateKeyPair to create the key pair, this is the SHA-1
+ digest of the DER encoded private key. If you used ImportKeyPair to provide
+ AWS the public key, this is the MD5 public key fingerprint as specified in
+ section 4 of RFC4716.
+ type: Unknown
+ - contextPath: AWS.EC2.KeyPairs.KeyName
+ description: The name of the key pair.
+ type: Unknown
+ - contextPath: AWS.EC2.KeyPairs.Region
+ description: The AWS Region where the key pair is located.
+ type: Unknown
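+ # Illustrative War Room invocation (sketch; the key pair name is a hypothetical placeholder):
+ #   !aws-ec2-describe-key-pairs keyNames="my-key-pair"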
+ - arguments:
+ - default: false
+ description: One or more filters. See documentation for filters list.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: One or more volume IDs, separated by commas.
+ isArray: false
+ name: volumeIds
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes the specified EBS volumes.
+ execution: false
+ name: aws-ec2-describe-volumes
+ outputs:
+ - contextPath: AWS.EC2.Volumes.AvailabilityZone
+ description: The Availability Zone for the volume.
+ type: string
+ - contextPath: AWS.EC2.Volumes.CreateTime
+ description: The time stamp when volume creation was initiated.
+ type: date
+ - contextPath: AWS.EC2.Volumes.Encrypted
+ description: Indicates whether the volume is encrypted.
+ type: boolean
+ - contextPath: AWS.EC2.Volumes.KmsKeyId
+ description: The full ARN of the AWS Key Management Service customer master
+ key that was used to protect the volume encryption key for the volume.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Size
+ description: The size of the volume, in GiB.
+ type: number
+ - contextPath: AWS.EC2.Volumes.State
+ description: The volume state.
+ type: string
+ - contextPath: AWS.EC2.Volumes.VolumeId
+ description: The ID of the volume.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Iops
+ description: The number of I/O operations per second (IOPS) that the volume
+ supports.
+ type: number
+ - contextPath: AWS.EC2.Volumes.VolumeType
+ description: The volume type. This can be gp2 for General Purpose SSD, io1 for
+ Provisioned IOPS SSD, st1 for Throughput Optimized HDD, sc1 for Cold HDD,
+ or standard for Magnetic volumes.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Tags.Value
+ description: The value of the tag.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Attachments.AttachTime
+ description: The time stamp when the attachment was initiated.
+ type: date
+ - contextPath: AWS.EC2.Volumes.Attachments.Device
+ description: The device name.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Attachments.InstanceId
+ description: The ID of the instance.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Attachments.State
+ description: The attachment state of the volume.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Attachments.VolumeId
+ description: The ID of the volume.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Attachments.DeleteOnTermination
+ description: Indicates whether the EBS volume is deleted on instance termination.
+ type: boolean
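+ # Illustrative War Room invocation (sketch; the volume ID is a hypothetical placeholder):
+ #   !aws-ec2-describe-volumes volumeIds="vol-0abc1234"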
+ - arguments:
+ - default: false
+ description: One or more filters. See documentation for filters list.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: One or more VPC IDs, separated by commas.
+ isArray: false
+ name: vpcIds
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes one or more of your VPCs.
+ execution: false
+ name: aws-ec2-describe-vpcs
+ outputs:
+ - contextPath: AWS.EC2.Vpcs.CidrBlock
+ description: The primary IPv4 CIDR block for the VPC.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.DhcpOptionsId
+ description: The ID of the set of DHCP options you have associated with the
+ VPC.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.State
+ description: The current state of the VPC.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.VpcId
+ description: The ID of the VPC.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.InstanceTenancy
+ description: The allowed tenancy of instances launched into the VPC.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.IsDefault
+ description: Indicates whether the VPC is the default VPC.
+ type: boolean
+ - contextPath: AWS.EC2.Vpcs.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.Tags.Value
+ description: The value of the tag.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.Ipv6CidrBlockAssociationSet.AssociationId
+ description: The association ID for the IPv6 CIDR block.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.Ipv6CidrBlockAssociationSet.Ipv6CidrBlock
+ description: The IPv6 CIDR block.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.Ipv6CidrBlockAssociationSet.Ipv6CidrBlockState.State
+ description: The state of the CIDR block.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.Ipv6CidrBlockAssociationSet.Ipv6CidrBlockState.StatusMessage
+ description: A message about the status of the CIDR block, if applicable.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.CidrBlockAssociationSet.AssociationId
+ description: The association ID for the IPv4 CIDR block.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.CidrBlockAssociationSet.CidrBlock
+ description: The IPv4 CIDR block.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.CidrBlockAssociationSet.CidrBlockState.State
+ description: The state of the CIDR block.
+ type: string
+ - contextPath: AWS.EC2.Vpcs.CidrBlockAssociationSet.CidrBlockState.StatusMessage
+ description: A message about the status of the CIDR block, if applicable.
+ type: string
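+ # Illustrative War Room invocation (sketch; the VPC ID is a hypothetical placeholder):
+ #   !aws-ec2-describe-vpcs vpcIds="vpc-0abc1234"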
+ - arguments:
+ - default: false
+ description: One or more filters. See documentation for filters list.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: One or more subnet IDs, separated by commas.
+ isArray: false
+ name: subnetIds
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes one or more of your subnets.
+ execution: false
+ name: aws-ec2-describe-subnets
+ outputs:
+ - contextPath: AWS.EC2.Subnets.AvailabilityZone
+ description: The Availability Zone of the subnet.
+ type: string
+ - contextPath: AWS.EC2.Subnets.AvailableIpAddressCount
+ description: The number of unused private IPv4 addresses in the subnet. Note
+ that the IPv4 addresses for any stopped instances are considered unavailable.
+ type: number
+ - contextPath: AWS.EC2.Subnets.CidrBlock
+ description: The IPv4 CIDR block assigned to the subnet.
+ type: string
+ - contextPath: AWS.EC2.Subnets.DefaultForAz
+ description: Indicates whether this is the default subnet for the Availability
+ Zone.
+ type: boolean
+ - contextPath: AWS.EC2.Subnets.MapPublicIpOnLaunch
+ description: Indicates whether instances launched in this subnet receive a public
+ IPv4 address.
+ type: boolean
+ - contextPath: AWS.EC2.Subnets.State
+ description: The current state of the subnet.
+ type: string
+ - contextPath: AWS.EC2.Subnets.SubnetId
+ description: The ID of the subnet.
+ type: string
+ - contextPath: AWS.EC2.Subnets.VpcId
+ description: The ID of the VPC the subnet is in.
+ type: string
+ - contextPath: AWS.EC2.Subnets.AssignIpv6AddressOnCreation
+ description: Indicates whether a network interface created in this subnet (including
+ a network interface created by RunInstances) receives an IPv6 address.
+ type: boolean
+ - contextPath: AWS.EC2.Subnets.Ipv6CidrBlockAssociationSet.AssociationId
+ description: The association ID for the CIDR block.
+ type: string
+ - contextPath: AWS.EC2.Subnets.Ipv6CidrBlockAssociationSet.Ipv6CidrBlock
+ description: The IPv6 CIDR block.
+ type: string
+ - contextPath: AWS.EC2.Subnets.Ipv6CidrBlockAssociationSet.Ipv6CidrBlockState.State
+ description: The state of a CIDR block.
+ type: string
+ - contextPath: AWS.EC2.Subnets.Ipv6CidrBlockAssociationSet.Ipv6CidrBlockState.StatusMessage
+ description: A message about the status of the CIDR block, if applicable.
+ type: string
+ - contextPath: AWS.EC2.Subnets.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.Subnets.Tags.Value
+ description: The value of the tag.
+ type: string
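+ # Illustrative War Room invocation (sketch; the subnet ID is a hypothetical placeholder):
+ #   !aws-ec2-describe-subnets subnetIds="subnet-0abc1234"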
+ - arguments:
+ - default: false
+ description: One or more filters. See documentation for filters list.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: One or more security group IDs, separated by commas. Required for
+ security groups in a nondefault VPC.
+ isArray: false
+ name: groupIds
+ required: false
+ secret: false
+ - default: false
+ description: One or more security group names, separated by commas.
+ isArray: false
+ name: groupNames
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes one or more of your security groups.
+ execution: false
+ name: aws-ec2-describe-security-groups
+ outputs:
+ - contextPath: AWS.EC2.SecurityGroups.Description
+ description: A description of the security group.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.GroupName
+ description: The name of the security group.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.FromPort
+ description: The start of port range for the TCP and UDP protocols, or an ICMP/ICMPv6
+ type number. A value of -1 indicates all ICMP/ICMPv6 types.
+ type: number
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.IpProtocol
+ description: The IP protocol name (tcp, udp, icmp) or number.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.IpRanges.CidrIp
+ description: The IPv4 CIDR range.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.IpRanges.Description
+ description: A description for the security group rule that references this
+ IPv4 address range.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.Ipv6Ranges.CidrIpv6
+ description: The IPv6 CIDR range.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.Ipv6Ranges.Description
+ description: A description for the security group rule that references this
+ IPv6 address range.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.PrefixListIds.Description
+ description: A description for the security group rule that references this
+ prefix list ID.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.PrefixListIds.PrefixListId
+ description: The ID of the prefix.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.ToPort
+ description: The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6
+ code.
+ type: number
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.UserIdGroupPairs.Description
+ description: A description for the security group rule that references this
+ user ID group pair.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.UserIdGroupPairs.GroupId
+ description: The ID of the security group.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.UserIdGroupPairs.GroupName
+ description: The name of the security group.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.UserIdGroupPairs.PeeringStatus
+ description: The status of a VPC peering connection, if applicable.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.UserIdGroupPairs.UserId
+ description: The ID of an AWS account.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.UserIdGroupPairs.VpcId
+ description: The ID of the VPC for the referenced security group, if applicable.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissions.UserIdGroupPairs.VpcPeeringConnectionId
+ description: The ID of the VPC peering connection, if applicable.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.OwnerId
+ description: The AWS account ID of the owner of the security group.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.GroupId
+ description: The ID of the security group.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.FromPort
+ description: The start of port range for the TCP and UDP protocols, or an ICMP/ICMPv6
+ type number.
+ type: number
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.IpProtocol
+ description: The IP protocol name (tcp, udp, icmp) or number.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.IpRanges.CidrIp
+ description: The IPv4 CIDR range.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.IpRanges.Description
+ description: A description for the security group rule that references this
+ IPv4 address range.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.Ipv6Ranges.CidrIpv6
+ description: The IPv6 CIDR range.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.Ipv6Ranges.Description
+ description: A description for the security group rule that references this
+ IPv6 address range.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.PrefixListIds.Description
+ description: A description for the security group rule that references this
+ prefix list ID.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.PrefixListIds.PrefixListId
+ description: The ID of the prefix.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.ToPort
+ description: The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6
+ code.
+ type: number
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.UserIdGroupPairs.Description
+ description: A description for the security group rule that references this
+ user ID group pair.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.UserIdGroupPairs.GroupId
+ description: The ID of the security group.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.UserIdGroupPairs.GroupName
+ description: The name of the security group.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.UserIdGroupPairs.PeeringStatus
+ description: The status of a VPC peering connection, if applicable.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.UserIdGroupPairs.UserId
+ description: The ID of an AWS account.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.UserIdGroupPairs.VpcId
+ description: The ID of the VPC for the referenced security group, if applicable.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.IpPermissionsEgress.UserIdGroupPairs.VpcPeeringConnectionId
+ description: The ID of the VPC peering connection, if applicable.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.VpcId
+ description: The ID of the VPC for the security group.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.Tags.Value
+ description: The value of the tag.
+ type: string
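+ # Illustrative War Room invocation (sketch; the security group ID is a hypothetical placeholder):
+ #   !aws-ec2-describe-security-groups groupIds="sg-0abc1234"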
+ - arguments:
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Allocates an Elastic IP address.
+ execution: true
+ name: aws-ec2-allocate-address
+ outputs:
+ - contextPath: AWS.EC2.ElasticIPs.PublicIp
+ description: The Elastic IP address.
+ type: Unknown
+ - contextPath: AWS.EC2.ElasticIPs.AllocationId
+ description: The ID that AWS assigns to represent the allocation of the Elastic
+ IP address for use with instances in a VPC.
+ type: string
+ - contextPath: AWS.EC2.ElasticIPs.Domain
+ description: Indicates whether this Elastic IP address is for use with instances
+ in EC2-Classic (standard) or instances in a VPC (vpc).
+ type: string
+ - contextPath: AWS.EC2.ElasticIPs.Region
+ description: The AWS Region where the Elastic IP address is located.
+ type: Unknown
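+ # Illustrative War Room invocation (sketch; all arguments are optional, region shown as an example):
+ #   !aws-ec2-allocate-address region="us-east-1"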
+ - arguments:
+ - default: false
+ description: The allocation ID.
+ isArray: false
+ name: allocationId
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the instance. For EC2-VPC, you can specify either the
+ instance ID or the network interface ID, but not both. The operation fails
+ if you specify an instance ID unless exactly one network interface is attached.
+ isArray: false
+ name: instanceId
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: For a VPC in an EC2-Classic account, specify true to allow an Elastic
+ IP address that is already associated with an instance or network interface
+ to be reassociated with the specified instance or network interface. Otherwise,
+ the operation fails. In a VPC in an EC2-VPC-only account, reassociation is
+ automatic, therefore you can specify false to ensure the operation fails if
+ the Elastic IP address is already associated with another resource.
+ isArray: false
+ name: allowReassociation
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the network interface. If the instance has more than
+ one network interface, you must specify a network interface ID.
+ isArray: false
+ name: networkInterfaceId
+ required: false
+ secret: false
+ - default: false
+ description: The primary or secondary private IP address to associate with the
+ Elastic IP address. If no private IP address is specified, the Elastic IP
+ address is associated with the primary private IP address.
+ isArray: false
+ name: privateIpAddress
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Associates an Elastic IP address with an instance or a network interface.
+ execution: true
+ name: aws-ec2-associate-address
+ outputs:
+ - contextPath: AWS.EC2.ElasticIPs.AssociationId
+ description: The ID that represents the association of the Elastic IP address
+ with an instance.
+ type: string
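+ # Illustrative War Room invocation (sketch; both IDs are hypothetical placeholders):
+ #   !aws-ec2-associate-address allocationId="eipalloc-0abc1234" instanceId="i-0abcdef1234567890"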
+ - arguments:
+ - default: false
+ description: The ID of the EBS volume.
+ isArray: false
+ name: volumeId
+ required: true
+ secret: false
+ - default: false
+ description: A description for the snapshot.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The tags to apply to the snapshot during creation.
+ isArray: false
+ name: tags
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a snapshot of an EBS volume and stores it in Amazon S3. You
+ can use snapshots for backups, to make copies of EBS volumes, and to save data
+ before shutting down an instance.
+ execution: true
+ name: aws-ec2-create-snapshot
+ outputs:
+ - contextPath: AWS.EC2.Snapshots.DataEncryptionKeyId
+ description: The data encryption key identifier for the snapshot.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.Description
+ description: The description for the snapshot.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.Encrypted
+ description: Indicates whether the snapshot is encrypted.
+ type: boolean
+ - contextPath: AWS.EC2.Snapshots.KmsKeyId
+ description: The full ARN of the AWS Key Management Service (AWS KMS) customer
+ master key (CMK) that was used to protect the volume encryption key for the
+ parent volume.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.OwnerId
+ description: The AWS account ID of the EBS snapshot owner.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.Progress
+ description: The progress of the snapshot, as a percentage.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.SnapshotId
+ description: The ID of the snapshot.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.StartTime
+ description: The time stamp when the snapshot was initiated.
+ type: date
+ - contextPath: AWS.EC2.Snapshots.State
+ description: The snapshot state.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.StateMessage
+ description: This field displays error state details to help you diagnose why
+ the error occurred.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.VolumeId
+ description: The ID of the volume that was used to create the snapshot.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.VolumeSize
+ description: The size of the volume, in GiB.
+ type: number
+ - contextPath: AWS.EC2.Snapshots.OwnerAlias
+ description: Value from an Amazon-maintained list of snapshot owners.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.Tags.Value
+ description: The value of the tag.
+ type: string
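+ # Illustrative War Room invocation (sketch; the volume ID and description are hypothetical):
+ #   !aws-ec2-create-snapshot volumeId="vol-0abc1234" description="pre-remediation backup"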
+ - arguments:
+ - default: false
+ description: The ID of the EBS snapshot.
+ isArray: false
+ name: snapshotId
+ required: true
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes the specified snapshot.
+ execution: true
+ name: aws-ec2-delete-snapshot
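+ # Illustrative War Room invocation (sketch; the snapshot ID is a hypothetical placeholder):
+ #   !aws-ec2-delete-snapshot snapshotId="snap-0abc1234"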
+ - arguments:
+ - default: false
+ description: A name for the new image.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the instance.
+ isArray: false
+ name: instanceId
+ required: true
+ secret: false
+ - default: false
+ description: A description for the new image.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: By default, Amazon EC2 attempts to shut down and reboot the instance
+ before creating the image. If the noReboot option is set, Amazon EC2 won't
+ shut down the instance before creating the image. When this option is used,
+ file system integrity on the created image can't be guaranteed.
+ isArray: false
+ name: noReboot
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates an Amazon EBS-backed AMI from an Amazon EBS-backed instance
+ that is either running or stopped.
+ execution: true
+ name: aws-ec2-create-image
+ outputs:
+ - contextPath: AWS.EC2.Images.ImageId
+ description: The ID of the new AMI.
+ type: string
+ - contextPath: AWS.EC2.Images.Name
+ description: The name of the new AMI.
+ type: string
+ - contextPath: AWS.EC2.Images.InstanceId
+ description: The ID of the instance.
+ type: string
+ - contextPath: AWS.EC2.Images.Region
+ description: The AWS Region where the image is located.
+ type: string
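+ # Illustrative War Room invocation (sketch; the name and instance ID are hypothetical placeholders):
+ #   !aws-ec2-create-image name="forensic-image" instanceId="i-0abcdef1234567890" noReboot="True"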
+ - arguments:
+ - default: false
+ description: The ID of the AMI.
+ isArray: false
+ name: imageId
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Deregisters the specified AMI.
+ execution: true
+ name: aws-ec2-deregister-image
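+ # Illustrative War Room invocation (sketch; the AMI ID is a hypothetical placeholder):
+ #   !aws-ec2-deregister-image imageId="ami-1a2b3c4d"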
+ - arguments:
+ - default: false
+ description: The ID of the volume.
+ isArray: false
+ name: volumeId
+ required: true
+ secret: false
+ - default: false
+ description: Target size in GiB of the volume to be modified.
+ isArray: false
+ name: size
+ required: false
+ secret: false
+ - default: false
+ description: Target EBS volume type of the volume to be modified. The API does
+ not support modifications for volume type standard. You also cannot change
+ the type of a volume to standard.
+ isArray: false
+ name: volumeType
+ required: false
+ secret: false
+ - default: false
+ description: Target IOPS rate of the volume to be modified.
+ isArray: false
+ name: iops
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Modifies parameters of an existing EBS volume, including volume
+ size, volume type, and IOPS capacity.
+ execution: true
+ name: aws-ec2-modify-volume
+ outputs:
+ - contextPath: AWS.EC2.Volumes.Modification.VolumeId
+ description: ID of the volume being modified.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Modification.ModificationState
+ description: Current state of modification. Modification state is null for
+ unmodified volumes.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Modification.StatusMessage
+ description: Generic status message on modification progress or failure.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Modification.TargetSize
+ description: Target size of the volume being modified.
+ type: number
+ - contextPath: AWS.EC2.Volumes.Modification.TargetIops
+ description: Target IOPS rate of the volume being modified.
+ type: number
+ - contextPath: AWS.EC2.Volumes.Modification.TargetVolumeType
+ description: Target EBS volume type of the volume being modified.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Modification.OriginalSize
+ description: Original size of the volume being modified.
+ type: number
+ - contextPath: AWS.EC2.Volumes.Modification.OriginalIops
+ description: Original IOPS rate of the volume being modified.
+ type: number
+ - contextPath: AWS.EC2.Volumes.Modification.OriginalVolumeType
+ description: Original EBS volume type of the volume being modified.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Modification.Progress
+ description: Modification progress from 0 to 100%.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Modification.StartTime
+ description: Modification start time.
+ type: date
+ - contextPath: AWS.EC2.Volumes.Modification.EndTime
+ description: Modification completion or failure time.
+ type: date
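+ # Illustrative War Room invocation (sketch; the volume ID and target values are hypothetical):
+ #   !aws-ec2-modify-volume volumeId="vol-0abc1234" size="100" volumeType="gp2"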
+ - arguments:
+ - default: false
+ description: The IDs of one or more resources to tag. For example, ami-1a2b3c4d.
+ isArray: false
+ name: resources
+ required: false
+ secret: false
+ - default: false
+ description: One or more tags.
+ isArray: false
+ name: tags
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds or overwrites one or more tags for the specified Amazon EC2
+ resource or resources.
+ execution: true
+ name: aws-ec2-create-tags
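+  # A minimal war-room usage sketch (hypothetical resource ID; the tag format
+  # follows the example documented for the tags argument of aws-ec2-create-volume):
+  #   !aws-ec2-create-tags resources=ami-1a2b3c4d tags="key=Owner,value=Bob"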
+ - arguments:
+ - default: false
+ description: The association ID.
+ isArray: false
+ name: associationId
+ required: true
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: Disassociates an Elastic IP address from the instance or network
+      interface it is associated with.
+ execution: true
+ name: aws-ec2-disassociate-address
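+  # Usage sketch (hypothetical association ID):
+  #   !aws-ec2-disassociate-address associationId=eipassoc-0123456789abcdef0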
+ - arguments:
+ - default: false
+ description: The allocation ID.
+ isArray: false
+ name: allocationId
+ required: true
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Releases the specified Elastic IP address.
+ execution: true
+ name: aws-ec2-release-address
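+  # Usage sketch (hypothetical allocation ID):
+  #   !aws-ec2-release-address allocationId=eipalloc-0123456789abcdef0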
+ - arguments:
+ - default: false
+      description: One or more instance IDs, separated by commas.
+ isArray: false
+ name: instanceIds
+ required: true
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Starts an Amazon EBS-backed instance that you have previously stopped.
+ execution: true
+ name: aws-ec2-start-instances
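+  # Usage sketch (hypothetical instance IDs, comma-separated as documented):
+  #   !aws-ec2-start-instances instanceIds=i-0123456789abcdef0,i-0fedcba9876543210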
+ - arguments:
+ - default: false
+ description: One or more instance IDs.
+ isArray: false
+ name: instanceIds
+ required: true
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Stops an Amazon EBS-backed instance.
+ execution: true
+ name: aws-ec2-stop-instances
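+  # Usage sketch (hypothetical instance ID):
+  #   !aws-ec2-stop-instances instanceIds=i-0123456789abcdef0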
+ - arguments:
+ - default: false
+ description: One or more instance IDs.
+ isArray: false
+ name: instanceIds
+ required: true
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Shuts down one or more instances. This operation is idempotent; if
+ you terminate an instance more than once, each call succeeds.
+ execution: true
+ name: aws-ec2-terminate-instances
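+  # Usage sketch (hypothetical instance ID; repeating the call is safe, since
+  # the operation is idempotent):
+  #   !aws-ec2-terminate-instances instanceIds=i-0123456789abcdef0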
+ - arguments:
+ - default: false
+ description: The Availability Zone in which to create the volume. Use DescribeAvailabilityZones
+ to list the Availability Zones that are currently available to you.
+ isArray: false
+ name: availabilityZone
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Specifies whether the volume should be encrypted.
+ isArray: false
+ name: encrypted
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: The number of I/O operations per second (IOPS) to provision for
+ the volume, with a maximum ratio of 50 IOPS/GiB. Range is 100 to 32000 IOPS
+ for volumes in most regions.
+ isArray: false
+ name: iops
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the AWS Key Management Service (AWS KMS) customer
+ master key (CMK) to use when creating the encrypted volume. This parameter
+ is only required if you want to use a non-default CMK; if this parameter is
+ not specified, the default CMK for EBS is used. If a KmsKeyId is specified,
+ the Encrypted flag must also be set.
+ isArray: false
+ name: kmsKeyId
+ required: false
+ secret: false
+ - default: false
+ description: The size of the volume, in GiBs.
+ isArray: false
+ name: size
+ required: false
+ secret: false
+ - default: false
+ description: The snapshot from which to create the volume.
+ isArray: false
+ name: snapshotId
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The volume type.
+ isArray: false
+ name: volumeType
+ predefined:
+ - standard
+ - io1
+ - gp2
+ - sc1
+ - st1
+ required: false
+ secret: false
+ - default: false
+      description: One or more tags. For example, key=Name,value=test;key=Owner,value=Bob.
+ isArray: false
+ name: tags
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates an EBS volume that can be attached to an instance in the
+ same Availability Zone.
+ execution: true
+ name: aws-ec2-create-volume
+ outputs:
+ - contextPath: AWS.EC2.Volumes.AvailabilityZone
+ description: The Availability Zone for the volume.
+ type: string
+ - contextPath: AWS.EC2.Volumes.CreateTime
+ description: The time stamp when volume creation was initiated.
+ type: date
+ - contextPath: AWS.EC2.Volumes.Encrypted
+ description: Indicates whether the volume will be encrypted.
+ type: boolean
+ - contextPath: AWS.EC2.Volumes.KmsKeyId
+ description: The full ARN of the AWS Key Management Service (AWS KMS) customer
+ master key (CMK) that was used to protect the volume encryption key for the
+ volume.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Size
+ description: The size of the volume, in GiBs.
+ type: number
+ - contextPath: AWS.EC2.Volumes.SnapshotId
+ description: The snapshot from which the volume was created, if applicable.
+      description: Returns the snapshots owned by the specified owner. Multiple
+        owners can be specified, separated by commas.
+ description: The volume state.
+ type: string
+ - contextPath: AWS.EC2.Volumes.VolumeId
+ description: The ID of the volume.
+ type: string
+      description: One or more snapshot IDs, separated by commas.
+ description: The number of I/O operations per second (IOPS) that the volume
+ supports.
+ type: number
+ - contextPath: AWS.EC2.Volumes.VolumeType
+ description: The volume type. This can be gp2 for General Purpose SSD, io1 for
+ Provisioned IOPS SSD, st1 for Throughput Optimized HDD, sc1 for Cold HDD,
+ or standard for Magnetic volumes.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Tags.Value
+ description: The value of the tag.
+ type: string
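+  # Usage sketch (hypothetical Availability Zone and size; the tag format is
+  # the one documented for the tags argument above):
+  #   !aws-ec2-create-volume availabilityZone=us-east-1a size=50 volumeType=gp2 tags="key=Name,value=test"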
+ - arguments:
+ - default: false
+ description: The device name (for example, /dev/sdh or xvdh).
+ isArray: false
+ name: device
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the instance.
+ isArray: false
+ name: instanceId
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the EBS volume. The volume and instance must be within
+ the same Availability Zone.
+ isArray: false
+ name: volumeId
+ required: true
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Attaches an EBS volume to a running or stopped instance and exposes
+ it to the instance with the specified device name.
+ execution: true
+ name: aws-ec2-attach-volume
+ outputs:
+ - contextPath: AWS.EC2.Volumes.Attachments.AttachTime
+ description: The time stamp when the attachment initiated.
+ type: date
+ - contextPath: AWS.EC2.Volumes.Attachments.Device
+ description: The device name.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Attachments.InstanceId
+ description: The ID of the instance.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Attachments.State
+ description: The attachment state of the volume.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Attachments.VolumeId
+ description: The ID of the volume.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Attachments.DeleteOnTermination
+ description: Indicates whether the EBS volume is deleted on instance termination.
+ type: boolean
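+  # Usage sketch (hypothetical IDs; device name format as documented above):
+  #   !aws-ec2-attach-volume device=/dev/sdh instanceId=i-0123456789abcdef0 volumeId=vol-0123456789abcdef0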
+ - arguments:
+ - default: false
+ description: The ID of the volume.
+ isArray: false
+ name: volumeId
+ required: true
+ secret: false
+ - default: false
+ description: Forces detachment if the previous detachment attempt did not occur
+ cleanly. This option can lead to data loss or a corrupted file system. Use
+ this option only as a last resort to detach a volume from a failed instance.
+ isArray: false
+ name: force
+ required: false
+ secret: false
+ - default: false
+ description: The device name (for example, /dev/sdh or xvdh).
+ isArray: false
+ name: device
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the instance.
+ isArray: false
+ name: instanceId
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Detaches an EBS volume from an instance.
+ execution: true
+ name: aws-ec2-detach-volume
+ outputs:
+ - contextPath: AWS.EC2.Volumes.Attachments.AttachTime
+ description: The time stamp when the attachment initiated.
+ type: date
+ - contextPath: AWS.EC2.Volumes.Attachments.Device
+ description: The device name.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Attachments.InstanceId
+ description: The ID of the instance.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Attachments.State
+ description: The attachment state of the volume.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Attachments.VolumeId
+ description: The ID of the volume.
+ type: string
+ - contextPath: AWS.EC2.Volumes.Attachments.DeleteOnTermination
+ description: Indicates whether the EBS volume is deleted on instance termination.
+ type: boolean
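+  # Usage sketch (hypothetical volume ID; add force=true only as a last resort,
+  # as the force argument warns):
+  #   !aws-ec2-detach-volume volumeId=vol-0123456789abcdef0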
+ - arguments:
+ - default: false
+ description: The ID of the volume.
+ isArray: false
+ name: volumeId
+ required: true
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes the specified EBS volume. The volume must be in the available
+ state (not attached to an instance).
+ execution: true
+ name: aws-ec2-delete-volume
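+  # Usage sketch (hypothetical volume ID; the volume must be detached first):
+  #   !aws-ec2-delete-volume volumeId=vol-0123456789abcdef0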
+ - arguments:
+ - default: false
+ defaultValue: '1'
+      description: The number of instances to launch. Must be greater than 0.
+ isArray: false
+ name: count
+ required: true
+ secret: false
+ - default: false
+      description: The ID of the AMI, which you can get by calling DescribeImages.
+        An AMI is required to launch an instance and must be specified here or
+        in a launch template.
+ isArray: false
+ name: imageId
+ required: false
+ secret: false
+ - default: false
+      description: 'The instance type. For example: t2.large.'
+ isArray: false
+ name: instanceType
+ required: false
+ secret: false
+ - default: false
+      description: One or more security group IDs, separated by commas.
+ isArray: false
+ name: securityGroupIds
+ required: false
+ secret: false
+ - default: false
+ description: One or more security group names. For a nondefault VPC, you must
+ use security group IDs instead.
+ isArray: false
+ name: securityGroups
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the subnet to launch the instance into.
+ isArray: false
+ name: subnetId
+ required: false
+ secret: false
+ - default: false
+      description: The user data to make available to the instance. This value
+        will be base64-encoded automatically. Do not base64-encode this value
+        prior to performing the operation.
+ isArray: false
+ name: userData
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: If you set this parameter to true, you cannot terminate the
+        instance using the Amazon EC2 console, CLI, or API.
+ isArray: false
+ name: disableApiTermination
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+      description: The Amazon Resource Name (ARN) of the instance profile. Both
+        iamInstanceProfileArn and iamInstanceProfileName are required if you
+        would like to associate an instance profile.
+ isArray: false
+ name: iamInstanceProfileArn
+ required: false
+ secret: false
+ - default: false
+      description: The name of the instance profile. Both iamInstanceProfileArn
+        and iamInstanceProfileName are required if you would like to associate
+        an instance profile.
+ isArray: false
+ name: iamInstanceProfileName
+ required: false
+ secret: false
+      description: The name of the key pair. Warning - if you do not specify a
+        key pair, you cannot connect to the instance unless you choose an AMI
+        that is configured to allow users another way to log in.
+ isArray: false
+ name: keyName
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether the instance is optimized for Amazon EBS I/O.
+ isArray: false
+ name: ebsOptimized
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: The device name (for example, /dev/sdh or xvdh).
+ isArray: false
+ name: deviceName
+ required: false
+ secret: false
+ - default: false
+ description: The size of the volume, in GiB.
+ isArray: false
+ name: ebsVolumeSize
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The volume type.
+ isArray: false
+ name: ebsVolumeType
+ predefined:
+ - gp2
+ - io1
+ - st1
+ - sc1
+ - standard
+ required: false
+ secret: false
+ - default: false
+ description: The number of I/O operations per second (IOPS) that the volume
+ supports.
+ isArray: false
+ name: ebsIops
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether the EBS volume is deleted on instance termination.
+ isArray: false
+ name: ebsDeleteOnTermination
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: Identifier (key ID, key alias, ID ARN, or alias ARN) for a user-managed
+ CMK under which the EBS volume is encrypted.
+ isArray: false
+ name: ebsKmsKeyId
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the snapshot.
+ isArray: false
+ name: ebsSnapshotId
+ required: false
+ secret: false
+ - default: false
+ description: Indicates whether the EBS volume is encrypted.
+ isArray: false
+ name: ebsEncrypted
+ required: false
+ secret: false
+ - default: false
+      description: The ID of the launch template to use to launch the instances.
+        Any parameters that you specify in RunInstances override the same parameters
+        in the launch template. You can specify either the name or ID of a launch
+        template, but not both.
+ isArray: false
+ name: launchTemplateId
+ required: false
+ secret: false
+ - default: false
+      description: The name of the launch template to use to launch the instances.
+        Any parameters that you specify in RunInstances override the same parameters
+        in the launch template. You can specify either the name or ID of a launch
+        template, but not both.
+ isArray: false
+ name: launchTemplateName
+ required: false
+ secret: false
+ - default: false
+ description: The version number of the launch template.
+ isArray: false
+ name: launchTemplateVersion
+ required: false
+ secret: false
+ - default: false
+ description: The tags to apply to the instance.
+ isArray: false
+ name: tags
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: Launches the specified number of instances using an AMI for which
+      you have permissions. You can create a launch template, which is a resource
+      that contains the parameters to launch an instance. When you launch an instance
+      using RunInstances, you can specify the launch template instead of specifying
+      the launch parameters. An instance is ready for you to use when it is in
+      the running state. You can check the state of your instance using DescribeInstances.
+ execution: true
+ name: aws-ec2-run-instances
+ outputs:
+ - contextPath: AWS.EC2.Instances.AmiLaunchIndex
+ description: The AMI launch index, which can be used to find this instance in
+ the launch group.
+ type: number
+ - contextPath: AWS.EC2.Instances.ImageId
+ description: The ID of the AMI used to launch the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.InstanceId
+ description: The ID of the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.InstanceType
+ description: The instance type.
+ type: string
+ - contextPath: AWS.EC2.Instances.KernelId
+ description: The kernel associated with this instance, if applicable.
+ type: string
+ - contextPath: AWS.EC2.Instances.KeyName
+ description: The name of the key pair, if this instance was launched with an
+ associated key pair.
+ type: string
+ - contextPath: AWS.EC2.Instances.LaunchTime
+ description: The time the instance was launched.
+ type: date
+ - contextPath: AWS.EC2.Instances.Monitoring.State
+ description: Indicates whether detailed monitoring is enabled. Otherwise, basic
+ monitoring is enabled.
+ type: string
+ - contextPath: AWS.EC2.Instances.Placement.AvailabilityZone
+ description: The Availability Zone of the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.Placement.Affinity
+ description: The affinity setting for the instance on the Dedicated Host.
+ type: string
+ - contextPath: AWS.EC2.Instances.Placement.GroupName
+ description: The name of the placement group the instance is in (for cluster
+ compute instances).
+ type: string
+ - contextPath: AWS.EC2.Instances.Placement.HostId
+      description: The ID of the Dedicated Host on which the instance resides.
+ type: string
+ - contextPath: AWS.EC2.Instances.Placement.Tenancy
+ description: The tenancy of the instance (if the instance is running in a VPC).
+ type: string
+ - contextPath: AWS.EC2.Instances.Platform
+ description: The value is Windows for Windows instances; otherwise blank.
+ type: string
+ - contextPath: AWS.EC2.Instances.PrivateDnsName
+      description: (IPv4 only) The private DNS hostname assigned to the instance.
+        This DNS hostname can only be used inside the Amazon EC2 network. This
+        name is not available until the instance enters the running state.
+ type: string
+ - contextPath: AWS.EC2.Instances.PrivateIpAddress
+ description: The private IPv4 address assigned to the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.ProductCodes.ProductCodeId
+ description: The product code.
+ type: string
+ - contextPath: AWS.EC2.Instances.ProductCodes.ProductCodeType
+ description: The type of product code.
+ type: string
+ - contextPath: AWS.EC2.Instances.PublicDnsName
+ description: (IPv4 only) The public DNS name assigned to the instance. This
+ name is not available until the instance enters the running state.
+ type: string
+ - contextPath: AWS.EC2.Instances.PublicIpAddress
+ description: The public IPv4 address assigned to the instance, if applicable.
+ type: string
+ - contextPath: AWS.EC2.Instances.RamdiskId
+ description: The RAM disk associated with this instance, if applicable.
+ type: string
+ - contextPath: AWS.EC2.Instances.State.Code
+      description: The state of the instance as a 16-bit unsigned integer; the
+        low byte represents the state.
+ type: string
+ - contextPath: AWS.EC2.Instances.State.Name
+ description: The current state of the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.StateTransitionReason
+ description: The reason for the most recent state transition. This might be
+ an empty string.
+ type: string
+ - contextPath: AWS.EC2.Instances.SubnetId
+ description: The ID of the subnet in which the instance is running.
+ type: string
+ - contextPath: AWS.EC2.Instances.VpcId
+ description: The ID of the VPC in which the instance is running.
+ type: string
+ - contextPath: AWS.EC2.Instances.Architecture
+ description: The architecture of the image.
+ type: string
+ - contextPath: AWS.EC2.Instances.BlockDeviceMappings.DeviceName
+ description: The device name (for example, /dev/sdh or xvdh).
+ type: string
+ - contextPath: AWS.EC2.Instances.BlockDeviceMappings.Ebs.AttachTime
+ description: The time stamp when the attachment initiated.
+ type: string
+ - contextPath: AWS.EC2.Instances.BlockDeviceMappings.Ebs.DeleteOnTermination
+ description: Indicates whether the volume is deleted on instance termination.
+ type: string
+ - contextPath: AWS.EC2.Instances.BlockDeviceMappings.Ebs.Status
+ description: The attachment state.
+ type: string
+ - contextPath: AWS.EC2.Instances.BlockDeviceMappings.Ebs.VolumeId
+ description: The ID of the EBS volume.
+ type: string
+ - contextPath: AWS.EC2.Instances.ClientToken
+ description: The idempotency token you provided when you launched the instance,
+ if applicable.
+ type: string
+ - contextPath: AWS.EC2.Instances.EbsOptimized
+ description: Indicates whether the instance is optimized for Amazon EBS I/O.
+ type: boolean
+ - contextPath: AWS.EC2.Instances.EnaSupport
+ description: Specifies whether enhanced networking with ENA is enabled.
+ type: boolean
+ - contextPath: AWS.EC2.Instances.Hypervisor
+ description: The hypervisor type of the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.IamInstanceProfile.Arn
+ description: The Amazon Resource Name (ARN) of the instance profile.
+ type: string
+ - contextPath: AWS.EC2.Instances.IamInstanceProfile.Id
+ description: The ID of the instance profile.
+ type: string
+ - contextPath: AWS.EC2.Instances.InstanceLifecycle
+ description: Indicates whether this is a Spot Instance or a Scheduled Instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.ElasticGpuAssociations.ElasticGpuId
+ description: The ID of the Elastic GPU.
+ type: string
+ - contextPath: AWS.EC2.Instances.ElasticGpuAssociations.ElasticGpuAssociationId
+ description: The ID of the association.
+ type: string
+ - contextPath: AWS.EC2.Instances.ElasticGpuAssociations.ElasticGpuAssociationState
+ description: The state of the association between the instance and the Elastic
+ GPU.
+ type: string
+ - contextPath: AWS.EC2.Instances.ElasticGpuAssociations.ElasticGpuAssociationTime
+ description: The time the Elastic GPU was associated with the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Association.IpOwnerId
+ description: The ID of the owner of the Elastic IP address.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Association.PublicDnsName
+ description: The public DNS name.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Association.PublicIp
+ description: The public IP address or Elastic IP address bound to the network
+ interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Attachment.AttachTime
+ description: The time stamp when the attachment initiated.
+ type: date
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Attachment.AttachmentId
+ description: The ID of the network interface attachment.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Attachment.DeleteOnTermination
+ description: Indicates whether the network interface is deleted when the instance
+ is terminated.
+ type: boolean
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Attachment.DeviceIndex
+ description: The index of the device on the instance for the network interface
+ attachment.
+ type: number
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Attachment.Status
+ description: The attachment state.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Description
+ description: The description.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Groups.GroupName
+ description: The name of the security group.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Groups.GroupId
+ description: The ID of the security group.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Ipv6Addresses.Ipv6Address
+ description: The IPv6 addresses associated with the network interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.MacAddress
+ description: The MAC address.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.NetworkInterfaceId
+ description: The ID of the network interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.OwnerId
+ description: The ID of the AWS account that created the network interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateDnsName
+ description: The private DNS name.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddress
+ description: The IPv4 address of the network interface within the subnet.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddresses.Association.IpOwnerId
+ description: The ID of the owner of the Elastic IP address.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddresses.Association.PublicDnsName
+ description: The public DNS name.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddresses.Association.PublicIp
+ description: The public IP address or Elastic IP address bound to the network
+ interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddresses.Primary
+ description: Indicates whether this IPv4 address is the primary private IP address
+ of the network interface.
+ type: boolean
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddresses.PrivateDnsName
+ description: The private IPv4 DNS name.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.PrivateIpAddresses.PrivateIpAddress
+ description: The private IPv4 address of the network interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.SourceDestCheck
+ description: Indicates whether to validate network traffic to or from this network
+ interface.
+ type: boolean
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.Status
+ description: The status of the network interface.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.SubnetId
+ description: The ID of the subnet.
+ type: string
+ - contextPath: AWS.EC2.Instances.NetworkInterfaces.VpcId
+ description: The ID of the VPC.
+ type: string
+ - contextPath: AWS.EC2.Instances.RootDeviceName
+ description: The device name of the root device volume (for example, /dev/sda1).
+ type: string
+ - contextPath: AWS.EC2.Instances.RootDeviceType
+ description: The root device type used by the AMI. The AMI can use an EBS volume
+ or an instance store volume.
+ type: string
+ - contextPath: AWS.EC2.Instances.SecurityGroups.GroupName
+ description: The name of the security group.
+ type: string
+ - contextPath: AWS.EC2.Instances.SecurityGroups.GroupId
+ description: The ID of the security group.
+ type: string
+ - contextPath: AWS.EC2.Instances.SourceDestCheck
+ description: Specifies whether to enable an instance launched in a VPC to perform
+ NAT.
+ type: boolean
+ - contextPath: AWS.EC2.Instances.SpotInstanceRequestId
+ description: If the request is a Spot Instance request, the ID of the request.
+ type: string
+ - contextPath: AWS.EC2.Instances.SriovNetSupport
+ description: Specifies whether enhanced networking with the Intel 82599 Virtual
+ Function interface is enabled.
+ type: string
+ - contextPath: AWS.EC2.Instances.StateReason.Code
+ description: The reason code for the state change.
+ type: string
+ - contextPath: AWS.EC2.Instances.StateReason.Message
+ description: The message for the state change.
+ type: string
+ - contextPath: AWS.EC2.Instances.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.Instances.Tags.Value
+ description: The value of the tag.
+ type: string
+ - contextPath: AWS.EC2.Instances.VirtualizationType
+ description: The virtualization type of the instance.
+ type: string
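+  # Usage sketch (hypothetical AMI, subnet, and key pair names; instance type
+  # taken from the instanceType example above):
+  #   !aws-ec2-run-instances count=1 imageId=ami-1a2b3c4d instanceType=t2.large subnetId=subnet-0123456789abcdef0 keyName=my-key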
+ - arguments:
+ - default: false
+      description: One or more filters. See the documentation for details and
+        filter options.
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ - default: false
+      description: One or more instance IDs, separated by commas.
+ isArray: false
+ name: instanceIds
+ required: false
+ secret: false
+ - default: false
+      description: The amount of time in seconds to wait between attempts. Default
+        is 15.
+ isArray: false
+ name: waiterDelay
+ required: false
+ secret: false
+ - default: false
+      description: The maximum number of attempts to be made. Default is 40.
+ isArray: false
+ name: waiterMaxAttempts
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: A waiter function that polls every 15 seconds (by default) until
+      the instance is in the running state.
+ execution: false
+ name: aws-ec2-waiter-instance-running
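+  # Usage sketch, applicable to the other waiter commands below as well
+  # (hypothetical instance ID; waiterDelay/waiterMaxAttempts override the
+  # documented defaults of 15 and 40):
+  #   !aws-ec2-waiter-instance-running instanceIds=i-0123456789abcdef0 waiterDelay=10 waiterMaxAttempts=60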
+ - arguments:
+ - default: false
+      description: One or more filters. See the documentation for details and
+        filter options.
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ - default: false
+      description: One or more instance IDs, separated by commas.
+ isArray: false
+ name: instanceIds
+ required: false
+ secret: false
+ - default: false
+      description: The amount of time in seconds to wait between attempts. Default
+        is 15.
+ isArray: false
+ name: waiterDelay
+ required: false
+ secret: false
+ - default: false
+      description: The maximum number of attempts to be made. Default is 40.
+ isArray: false
+ name: waiterMaxAttempts
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: A waiter function that polls every 15 seconds (by default) until
+      the instance status is OK.
+ execution: false
+ name: aws-ec2-waiter-instance-status-ok
+ - arguments:
+ - default: false
+      description: One or more filters. See the documentation for details and
+        filter options.
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ - default: false
+      description: One or more instance IDs, separated by commas.
+ isArray: false
+ name: instanceIds
+ required: false
+ secret: false
+ - default: false
+      description: The amount of time in seconds to wait between attempts. Default
+        is 15.
+ isArray: false
+ name: waiterDelay
+ required: false
+ secret: false
+ - default: false
+      description: The maximum number of attempts to be made. Default is 40.
+ isArray: false
+ name: waiterMaxAttempts
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: A waiter function that polls every 15 seconds (by default) until
+      the instance is stopped.
+ execution: false
+ name: aws-ec2-waiter-instance-stopped
+ - arguments:
+ - default: false
+      description: One or more filters. See the documentation for details and
+        filter options.
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ - default: false
+      description: One or more instance IDs, separated by commas.
+ isArray: false
+ name: instanceIds
+ required: false
+ secret: false
+ - default: false
+      description: The amount of time in seconds to wait between attempts. Default
+        is 15.
+ isArray: false
+ name: waiterDelay
+ required: false
+ secret: false
+ - default: false
+      description: The maximum number of attempts to be made. Default is 40.
+ isArray: false
+ name: waiterMaxAttempts
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: A waiter function that polls every 15 seconds (by default) until
+      the instance is terminated.
+ execution: false
+ name: aws-ec2-waiter-instance-terminated
+ - arguments:
+ - default: false
+ description: One or more filters. See documentation for available filters.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+      description: One or more image IDs, separated by commas.
+ isArray: false
+ name: imageIds
+ required: false
+ secret: false
+ - default: false
+      description: Filters the images by the owner. Specify an AWS account ID,
+        self (owner is the sender of the request), or an AWS owner alias (valid
+        values are amazon | aws-marketplace | microsoft). Omitting this option
+        returns all images for which you have launch permissions, regardless of
+        ownership.
+ isArray: false
+ name: owners
+ required: false
+ secret: false
+ - default: false
+ description: Scopes the images by users with explicit launch permissions. Specify
+ an AWS account ID, self (the sender of the request), or all (public AMIs).
+ isArray: false
+ name: executableUsers
+ required: false
+ secret: false
+ - default: false
+      description: The amount of time in seconds to wait between attempts. Default
+        is 15.
+ isArray: false
+ name: waiterDelay
+ required: false
+ secret: false
+ - default: false
+      description: The maximum number of attempts to be made. Default is 40.
+ isArray: false
+ name: waiterMaxAttempts
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: A waiter function that waits until the image is available.
+ execution: false
+ name: aws-ec2-waiter-image-available
+ - arguments:
+ - default: false
+ description: One or more filters. See documentation for available filters.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Returns the snapshots owned by the specified owner. Multiple owners
+ can be specified. Sperated by comma.
+ isArray: false
+ name: ownerIds
+ required: false
+ secret: false
+ - default: false
+ description: One or more snapshot IDs. Sperated by comma.
+ isArray: false
+ name: snapshotIds
+ required: false
+ secret: false
+ - default: false
+ description: One or more AWS accounts IDs that can create volumes from the snapshot.
+ isArray: false
+ name: restorableByUserIds
+ required: false
+ secret: false
+ - default: false
+      description: The amount of time in seconds to wait between attempts. Default
+        is 15.
+ isArray: false
+ name: waiterDelay
+ required: false
+ secret: false
+ - default: false
+      description: The maximum number of attempts to be made. Default is 40.
+ isArray: false
+ name: waiterMaxAttempts
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: A waiter function that waits until the snapshot is complete.
+ execution: false
+ name: aws-ec2-waiter-snapshot_completed
+ - arguments:
+ - default: false
+ description: One or more filters. See documentation for available filters.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+      description: Filters the images by the owner. Specify an AWS account ID,
+        self (owner is the sender of the request), or an AWS owner alias (valid
+        values are amazon | aws-marketplace | microsoft). Omitting this option
+        returns all images for which you have launch permissions, regardless of
+        ownership.
+ isArray: false
+ name: owners
+ required: false
+ secret: false
+ - default: false
+ description: Scopes the images by users with explicit launch permissions. Specify
+ an AWS account ID, self (the sender of the request), or all (public AMIs).
+ isArray: false
+ name: executableUsers
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: Gets the latest AMI.
+ execution: false
+ name: aws-ec2-get-latest-ami
+ outputs:
+ - contextPath: AWS.EC2.Images.Architecture
+ description: The architecture of the image.
+ type: string
+ - contextPath: AWS.EC2.Images.CreationDate
+ description: The date and time the image was created.
+ type: date
+ - contextPath: AWS.EC2.Images.ImageId
+ description: The ID of the AMI.
+ type: string
+ - contextPath: AWS.EC2.Images.ImageLocation
+ description: The location of the AMI.
+ type: string
+ - contextPath: AWS.EC2.Images.ImageType
+ description: The type of image.
+ type: string
+ - contextPath: AWS.EC2.Images.Public
+ description: Indicates whether the image has public launch permissions. The
+ value is true if this image has public launch permissions or false if it has
+ only implicit and explicit launch permissions.
+ type: boolean
+ - contextPath: AWS.EC2.Images.KernelId
+ description: The kernel associated with the image, if any. Only applicable for
+ machine images.
+ type: string
+ - contextPath: AWS.EC2.Images.OwnerId
+ description: The AWS account ID of the image owner.
+ type: string
+ - contextPath: AWS.EC2.Images.Platform
+ description: The value is Windows for Windows AMIs; otherwise blank.
+ type: string
+ - contextPath: AWS.EC2.Images.ProductCodes.ProductCodeId
+ description: The product code.
+ type: string
+ - contextPath: AWS.EC2.Images.ProductCodes.ProductCodeType
+ description: The type of product code.
+ type: string
+ - contextPath: AWS.EC2.Images.RamdiskId
+ description: The RAM disk associated with the image, if any. Only applicable
+ for machine images.
+ type: string
+ - contextPath: AWS.EC2.Images.State
+      description: The current state of the AMI. If the state is available, the
+        image is successfully registered and can be used to launch an instance.
+ type: string
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.DeviceName
+      description: The device name (for example, /dev/sdh or xvdh).
+ type: string
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.VirtualName
+ description: The virtual device name (ephemeral N).
+ type: string
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.Encrypted
+ description: Indicates whether the EBS volume is encrypted.
+ type: boolean
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.DeleteOnTermination
+ description: Indicates whether the EBS volume is deleted on instance termination.
+ type: boolean
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.Iops
+ description: The number of I/O operations per second (IOPS) that the volume
+ supports.
+ type: number
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.KmsKeyId
+ description: Identifier (key ID, key alias, ID ARN, or alias ARN) for a user-managed
+ CMK under which the EBS volume is encrypted.
+ type: string
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.SnapshotId
+ description: The ID of the snapshot.
+ type: string
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.VolumeSize
+ description: The size of the volume, in GiB.
+ type: number
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.Ebs.VolumeType
+      description: The volume type.
+ type: string
+ - contextPath: AWS.EC2.Images.BlockDeviceMappings.NoDevice
+ description: Suppresses the specified device included in the block device mapping
+ of the AMI.
+ type: string
+ - contextPath: AWS.EC2.Images.Description
+ description: The description of the AMI that was provided during image creation.
+ type: string
+ - contextPath: AWS.EC2.Images.EnaSupport
+ description: Specifies whether enhanced networking with ENA is enabled.
+ type: boolean
+ - contextPath: AWS.EC2.Images.Hypervisor
+ description: The hypervisor type of the image.
+ type: string
+ - contextPath: AWS.EC2.Images.ImageOwnerAlias
+      description: The AWS account alias (for example, amazon or self) or the
+        AWS account ID of the AMI owner.
+ type: string
+ - contextPath: AWS.EC2.Images.Name
+ description: The name of the AMI that was provided during image creation.
+ type: string
+ - contextPath: AWS.EC2.Images.RootDeviceName
+ description: The device name of the root device volume (for example, /dev/sda1).
+ type: string
+ - contextPath: AWS.EC2.Images.RootDeviceType
+ description: The type of root device used by the AMI. The AMI can use an EBS
+ volume or an instance store volume.
+ type: string
+ - contextPath: AWS.EC2.Images.SriovNetSupport
+ description: Specifies whether enhanced networking with the Intel 82599 Virtual
+ Function interface is enabled.
+ type: string
+ - contextPath: AWS.EC2.Images.StateReason.Code
+ description: The reason code for the state change.
+ type: string
+ - contextPath: AWS.EC2.Images.StateReason.Message
+ description: The message for the state change.
+ type: string
+ - contextPath: AWS.EC2.Images.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.Images.Tags.Value
+ description: The value of the tag.
+ type: string
+ - contextPath: AWS.EC2.Images.VirtualizationType
+ description: The type of virtualization of the AMI.
+ type: string
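+  # Usage sketch (owner alias as documented for the owners argument):
+  #   !aws-ec2-get-latest-ami owners=amazon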
+ - arguments:
+ - default: false
+ description: The name of the security group.
+ isArray: false
+ name: groupName
+ required: true
+ secret: false
+ - default: false
+ description: A description for the security group.
+ isArray: false
+ name: description
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the VPC.
+ isArray: false
+ name: vpcId
+ required: true
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a security group.
+ execution: true
+ name: aws-ec2-create-security-group
+ outputs:
+ - contextPath: AWS.EC2.SecurityGroups.GroupName
+ description: The name of the security group.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.Description
+ description: A description for the security group.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.VpcId
+ description: The ID of the VPC.
+ type: string
+ - contextPath: AWS.EC2.SecurityGroups.GroupId
+ description: The ID of the security group.
+ type: string
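+  # Usage sketch (hypothetical VPC ID and group name):
+  #   !aws-ec2-create-security-group groupName=demo-sg description="Demo security group" vpcId=vpc-0123456789abcdef0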
+ - arguments:
+ - default: false
+ description: The ID of the security group. Required for a nondefault VPC.
+ isArray: false
+ name: groupId
+ required: false
+ secret: false
+ - default: false
+      description: Default VPC only. The name of the security group. You can specify
+        either the security group name or the security group ID.
+ isArray: false
+ name: groupName
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes a security group.
+ execution: true
+ name: aws-ec2-delete-security-group
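+  # Usage sketch (hypothetical group ID; for a nondefault VPC the group ID is
+  # required, as noted above):
+  #   !aws-ec2-delete-security-group groupId=sg-0123456789abcdef0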
+ - arguments:
+ - default: false
+ description: The ID of the security group. You must specify either the security
+ group ID or the security group name in the request. For security groups in
+ a nondefault VPC, you must specify the security group ID.
+ isArray: false
+ name: groupId
+ required: true
+ secret: false
+ - default: false
+ description: The start of port range for the TCP and UDP protocols.
+ isArray: false
+ name: fromPort
+ required: false
+ secret: false
+ - default: false
+ description: The end of port range for the TCP and UDP protocols.
+ isArray: false
+ name: toPort
+ required: false
+ secret: false
+ - default: false
+ description: The CIDR IPv4 address range.
+ isArray: false
+ name: cidrIp
+ required: false
+ secret: false
+ - default: false
+      description: The IP protocol name (tcp, udp, icmp) or number. Use -1 to
+        specify all protocols.
+ isArray: false
+ name: ipProtocol
+ required: false
+ secret: false
+ - default: false
+ description: The name of the source security group. The source security group
+ must be in the same VPC.
+ isArray: false
+ name: sourceSecurityGroupName
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region will be used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ - default: false
+ description: The start of port range for the TCP and UDP protocols, or an ICMP/ICMPv6
+ type number. A value of -1 indicates all ICMP/ICMPv6 types. If you specify
+ all ICMP/ICMPv6 types, you must specify all codes.
+ isArray: false
+ name: IpPermissionsfromPort
+ required: false
+ secret: false
+ - default: false
+ description: The IP protocol name (tcp, udp, icmp, icmpv6) or number.
+ isArray: false
+ name: IpPermissionsIpProtocol
+ required: false
+ secret: false
+ - default: false
+ description: The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6
+ code. A value of -1 indicates all ICMP/ICMPv6 codes. If you specify all ICMP/ICMPv6
+ types, you must specify all codes.
+ isArray: false
+ name: IpPermissionsToPort
+ required: false
+ secret: false
+ - default: false
+ description: The IPv4 CIDR range. You can either specify a CIDR range or a source
+ security group, not both. To specify a single IPv4 address, use the /32 prefix
+ length.
+ isArray: false
+ name: IpRangesCidrIp
+ required: false
+ secret: false
+ - default: false
+ description: |-
+ A description for the security group rule that references this IPv4 address range.
+
+ Constraints: Up to 255 characters in length. Allowed characters are a-z, A-Z, 0-9, spaces, and ._-:/()#,@[]+=;{}!$*
+ isArray: false
+ name: IpRangesDesc
+ required: false
+ secret: false
+ - default: false
+ description: The IPv6 CIDR range. You can either specify a CIDR range or a source
+ security group, not both. To specify a single IPv6 address, use the /128 prefix
+ length.
+ isArray: false
+ name: Ipv6RangesCidrIp
+ required: false
+ secret: false
+ - default: false
+ description: |-
+ A description for the security group rule that references this IPv6 address range.
+
+ Constraints: Up to 255 characters in length. Allowed characters are a-z, A-Z, 0-9, spaces, and ._-:/()#,@[]+=;{}!$*
+ isArray: false
+ name: Ipv6RangesDesc
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the prefix list.
+ isArray: false
+ name: PrefixListId
+ required: false
+ secret: false
+ - default: false
+ description: |-
+ A description for the security group rule that references this prefix list ID.
+
+ Constraints: Up to 255 characters in length. Allowed characters are a-z, A-Z, 0-9, spaces, and ._-:/()#,@[]+=;{}!$*
+ isArray: false
+ name: PrefixListIdDesc
+ required: false
+ secret: false
+ - default: false
+ description: |-
+ A description for the security group rule that references this user ID group pair.
+
+ Constraints: Up to 255 characters in length. Allowed characters are a-z, A-Z, 0-9, spaces, and ._-:/()#,@[]+=;{}!$*
+ isArray: false
+ name: UserIdGroupPairsDescription
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the security group.
+ isArray: false
+ name: UserIdGroupPairsGroupId
+ required: false
+ secret: false
+ - default: false
+ description: The name of the security group. In a request, use this parameter
+ for a security group in EC2-Classic or a default VPC only. For a security
+ group in a nondefault VPC, use the security group ID.
+ isArray: false
+ name: UserIdGroupPairsGroupName
+ required: false
+ secret: false
+ - default: false
+ description: The status of a VPC peering connection, if applicable.
+ isArray: false
+ name: UserIdGroupPairsPeeringStatus
+ required: false
+ secret: false
+ - default: false
+ description: The ID of an AWS account.
+ isArray: false
+ name: UserIdGroupPairsUserId
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the VPC for the referenced security group, if applicable.
+ isArray: false
+ name: UserIdGroupPairsVpcId
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the VPC peering connection, if applicable.
+ isArray: false
+ name: UserIdGroupPairsVpcPeeringConnectionId
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds an ingress rule to a security group.
+ execution: true
+ name: aws-ec2-authorize-security-group-ingress-rule
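+ # Usage sketch (hypothetical values): allow inbound TCP 443 from one CIDR block:
+ #   !aws-ec2-authorize-security-group-ingress-rule groupId=sg-0123456789abcdef0 ipProtocol=tcp fromPort=443 toPort=443 cidrIp=203.0.113.0/24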
+ - arguments:
+ - default: false
+ description: The ID of the security group.
+ isArray: false
+ name: groupId
+ required: true
+ secret: false
+ - default: false
+ description: The start of the port range for the TCP and UDP protocols.
+ isArray: false
+ name: fromPort
+ required: false
+ secret: false
+ - default: false
+ description: The end of the port range for the TCP and UDP protocols.
+ isArray: false
+ name: toPort
+ required: false
+ secret: false
+ - default: false
+ description: The CIDR IPv4 address range.
+ isArray: false
+ name: cidrIp
+ required: false
+ secret: false
+ - default: false
+ description: The IP protocol name (tcp, udp, icmp) or number. Use -1 to specify
+ all protocols.
+ isArray: false
+ name: ipProtocol
+ required: false
+ secret: false
+ - default: false
+ description: The name of the source security group. The source security group
+ must be in the same VPC.
+ isArray: false
+ name: sourceSecurityGroupName
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Removes an ingress rule from a security group. To remove a rule, the
+ values that you specify (for example, ports) must match the existing rule's
+ values exactly.
+ execution: true
+ name: aws-ec2-revoke-security-group-ingress-rule
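+ # Usage sketch (hypothetical values); the arguments must match the existing rule exactly:
+ #   !aws-ec2-revoke-security-group-ingress-rule groupId=sg-0123456789abcdef0 ipProtocol=tcp fromPort=443 toPort=443 cidrIp=203.0.113.0/24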
+ - arguments:
+ - default: false
+ description: The name of the new AMI in the destination region.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the AMI to copy.
+ isArray: false
+ name: sourceImageId
+ required: true
+ secret: false
+ - default: false
+ description: The name of the region that contains the AMI to copy.
+ isArray: false
+ name: sourceRegion
+ required: true
+ secret: false
+ - default: false
+ description: A description for the new AMI in the destination region.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Specifies whether the destination snapshots of the copied image
+ should be encrypted. The default CMK for EBS is used unless a non-default
+ AWS Key Management Service (AWS KMS) CMK is specified with KmsKeyId.
+ isArray: false
+ name: encrypted
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the AWS Key Management Service (AWS KMS) customer
+ master key (CMK) to use when creating the encrypted volume. This parameter
+ is only required if you want to use a non-default CMK; if this parameter is
+ not specified, the default CMK for EBS is used. If a KmsKeyId is specified,
+ the Encrypted flag must also be set.
+ isArray: false
+ name: kmsKeyId
+ required: false
+ secret: false
+ - default: false
+ description: Unique, case-sensitive identifier you provide to ensure idempotency
+ of the request.
+ isArray: false
+ name: clientToken
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Initiates the copy of an AMI from the specified source region to
+ the current region.
+ execution: true
+ name: aws-ec2-copy-image
+ outputs:
+ - contextPath: AWS.EC2.Images.ImageId
+ description: The ID of the new AMI.
+ type: string
+ - contextPath: AWS.EC2.Images.Region
+ description: The Region where the image is located.
+ type: string
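+ # Usage sketch (hypothetical AMI ID and regions); results populate AWS.EC2.Images:
+ #   !aws-ec2-copy-image name=ami-backup-copy sourceImageId=ami-0123456789abcdef0 sourceRegion=us-west-2 region=us-east-1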
+ - arguments:
+ - default: false
+ description: The ID of the EBS snapshot to copy.
+ isArray: false
+ name: sourceSnapshotId
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the region that contains the snapshot to be copied.
+ isArray: false
+ name: sourceRegion
+ required: true
+ secret: false
+ - default: false
+ description: A description for the EBS snapshot.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: Specifies whether the destination snapshot should be encrypted.
+ You can encrypt a copy of an unencrypted snapshot using this flag, but you
+ cannot use it to create an unencrypted copy from an encrypted snapshot. Your
+ default CMK for EBS is used unless a non-default AWS Key Management Service
+ (AWS KMS) CMK is specified with KmsKeyId.
+ isArray: false
+ name: encrypted
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the AWS Key Management Service (AWS KMS) customer
+ master key (CMK) to use when creating the encrypted volume. This parameter
+ is only required if you want to use a non-default CMK; if this parameter is
+ not specified, the default CMK for EBS is used. If a KmsKeyId is specified,
+ the Encrypted flag must also be set.
+ isArray: false
+ name: kmsKeyId
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Copies a point-in-time snapshot of an EBS volume and stores it in
+ Amazon S3. You can copy the snapshot within the same region or from one region
+ to another.
+ execution: true
+ name: aws-ec2-copy-snapshot
+ outputs:
+ - contextPath: AWS.EC2.Snapshots.SnapshotId
+ description: The ID of the new snapshot.
+ type: string
+ - contextPath: AWS.EC2.Snapshots.Region
+ description: The Region where the snapshot is located.
+ type: string
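+ # Usage sketch (hypothetical snapshot ID and regions); results populate AWS.EC2.Snapshots:
+ #   !aws-ec2-copy-snapshot sourceSnapshotId=snap-0123456789abcdef0 sourceRegion=us-west-2 region=us-east-1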
+ - arguments:
+ - default: false
+ description: One or more filters.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: One or more Reserved Instance IDs, separated by commas.
+ isArray: false
+ name: reservedInstancesIds
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Describes whether the Reserved Instance is Standard or Convertible.
+ isArray: false
+ name: offeringClass
+ predefined:
+ - standard
+ - convertible
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes one or more of the Reserved Instances that you purchased.
+ execution: false
+ name: aws-ec2-describe-reserved-instances
+ outputs:
+ - contextPath: AWS.EC2.ReservedInstances.AvailabilityZone
+ description: The Availability Zone in which the Reserved Instance can be used.
+ type: string
+ - contextPath: AWS.EC2.ReservedInstances.Duration
+ description: The duration of the Reserved Instance, in seconds.
+ type: number
+ - contextPath: AWS.EC2.ReservedInstances.End
+ description: The time when the Reserved Instance expires.
+ type: date
+ - contextPath: AWS.EC2.ReservedInstances.FixedPrice
+ description: The purchase price of the Reserved Instance.
+ type: number
+ - contextPath: AWS.EC2.ReservedInstances.InstanceCount
+ description: The number of reservations purchased.
+ type: number
+ - contextPath: AWS.EC2.ReservedInstances.InstanceType
+ description: The instance type on which the Reserved Instance can be used.
+ type: string
+ - contextPath: AWS.EC2.ReservedInstances.ProductDescription
+ description: The Reserved Instance product platform description.
+ type: string
+ - contextPath: AWS.EC2.ReservedInstances.ReservedInstancesId
+ description: The ID of the Reserved Instance.
+ type: string
+ - contextPath: AWS.EC2.ReservedInstances.Start
+ description: The date and time the Reserved Instance started.
+ type: date
+ - contextPath: AWS.EC2.ReservedInstances.State
+ description: The state of the Reserved Instance purchase.
+ type: string
+ - contextPath: AWS.EC2.ReservedInstances.UsagePrice
+ description: The usage price of the Reserved Instance, per hour.
+ type: number
+ - contextPath: AWS.EC2.ReservedInstances.CurrencyCode
+ description: The currency of the Reserved Instance. It's specified using ISO
+ 4217 standard currency codes. At this time, the only supported currency is
+ USD.
+ type: string
+ - contextPath: AWS.EC2.ReservedInstances.InstanceTenancy
+ description: The tenancy of the instance.
+ type: string
+ - contextPath: AWS.EC2.ReservedInstances.OfferingClass
+ description: The offering class of the Reserved Instance.
+ type: string
+ - contextPath: AWS.EC2.ReservedInstances.OfferingType
+ description: The Reserved Instance offering type.
+ type: string
+ - contextPath: AWS.EC2.ReservedInstances.RecurringCharges.Amount
+ description: The amount of the recurring charge.
+ type: number
+ - contextPath: AWS.EC2.ReservedInstances.RecurringCharges.Frequency
+ description: The frequency of the recurring charge.
+ type: string
+ - contextPath: AWS.EC2.ReservedInstances.Scope
+ description: The scope of the Reserved Instance.
+ type: string
+ - contextPath: AWS.EC2.ReservedInstances.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.ReservedInstances.Tags.Value
+ description: The value of the tag.
+ type: string
+ - contextPath: AWS.EC2.ReservedInstances.Region
+ description: The AWS region where the reserved instance is located.
+ type: string
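+ # Usage sketch (all arguments optional; values hypothetical):
+ #   !aws-ec2-describe-reserved-instances offeringClass=standard region=us-east-1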
+ - arguments:
+ - default: false
+ description: One or more instance IDs, separated by commas.
+ isArray: false
+ name: instancesIds
+ required: true
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Enables detailed monitoring for a running instance.
+ execution: true
+ name: aws-ec2-monitor-instances
+ outputs:
+ - contextPath: AWS.EC2.Instances.InstanceId
+ description: The ID of the instance.
+ type: string
+ - contextPath: AWS.EC2.Instances.Monitoring.State
+ description: Indicates whether detailed monitoring is enabled. Otherwise, basic
+ monitoring is enabled.
+ type: string
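+ # Usage sketch (hypothetical instance IDs; note the argument name is instancesIds):
+ #   !aws-ec2-monitor-instances instancesIds=i-0123456789abcdef0,i-0fedcba987654321f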
+ - arguments:
+ - default: false
+ description: One or more instance IDs, separated by commas.
+ isArray: false
+ name: instancesIds
+ required: true
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Disables detailed monitoring for a running instance.
+ execution: true
+ name: aws-ec2-unmonitor-instances
+ outputs:
+ - contextPath: AWS.EC2.Instances.InstanceId
+ description: The ID of the instance.
+ type: Unknown
+ - contextPath: AWS.EC2.Instances.Monitoring.State
+ description: Indicates whether detailed monitoring is enabled. Otherwise, basic
+ monitoring is enabled.
+ type: Unknown
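+ # Usage sketch (hypothetical instance IDs):
+ #   !aws-ec2-unmonitor-instances instancesIds=i-0123456789abcdef0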
+ - arguments:
+ - default: false
+ description: One or more instance IDs, separated by commas.
+ isArray: false
+ name: instanceIds
+ required: true
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Requests a reboot of one or more instances. This operation is asynchronous;
+ it only queues a request to reboot the specified instances. The operation succeeds
+ if the instances are valid and belong to you. Requests to reboot terminated
+ instances are ignored. If an instance does not cleanly shut down within four
+ minutes, Amazon EC2 performs a hard reboot.
+ execution: true
+ name: aws-ec2-reboot-instances
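+ # Usage sketch (hypothetical instance IDs; here the argument name is instanceIds):
+ #   !aws-ec2-reboot-instances instanceIds=i-0123456789abcdef0,i-0fedcba987654321f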
+ - arguments:
+ - default: false
+ description: The ID of the Windows instance.
+ isArray: false
+ name: instanceId
+ required: true
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves the encrypted administrator password for a running Windows
+ instance.
+ execution: true
+ name: aws-ec2-get-password-data
+ outputs:
+ - contextPath: AWS.EC2.Instances.PasswordData.PasswordData
+ description: The password of the instance. Returns an empty string if the password
+ is not available.
+ type: string
+ - contextPath: AWS.EC2.Instances.PasswordData.Timestamp
+ description: The time the data was last updated.
+ type: date
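+ # Usage sketch (hypothetical Windows instance ID):
+ #   !aws-ec2-get-password-data instanceId=i-0123456789abcdef0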
+ - arguments:
+ - default: false
+ description: The ID of the network interface.
+ isArray: false
+ name: networkInterfaceId
+ required: true
+ secret: false
+ - default: false
+ description: Changes the security groups for the network interface. The new
+ set of groups you specify replaces the current set. You must specify at least
+ one group, even if it's just the default security group in the VPC. You must
+ specify the ID of the security group, not the name.
+ isArray: false
+ name: groups
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether source/destination checking is enabled. A value
+ of true means checking is enabled, and false means checking is disabled. This
+ value must be false for a NAT instance to perform NAT.
+ isArray: false
+ name: sourceDestCheck
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: A description for the network interface.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the network interface attachment. If modifying the 'delete
+ on termination' attribute, you must specify the ID of the interface attachment.
+ isArray: false
+ name: attachmentId
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether the network interface is deleted when the instance
+ is terminated. If modifying the 'delete on termination' attribute, you must
+ specify the ID of the interface attachment.
+ isArray: false
+ name: deleteOnTermination
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Modifies the specified network interface attribute. You can specify
+ only one attribute at a time.
+ execution: true
+ name: aws-ec2-modify-network-interface-attribute
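+ # Usage sketch (hypothetical ENI ID); one attribute per call, e.g. disabling
+ # source/destination checking for a NAT instance:
+ #   !aws-ec2-modify-network-interface-attribute networkInterfaceId=eni-0123456789abcdef0 sourceDestCheck=False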
+ - arguments:
+ - default: false
+ description: The ID of the instance.
+ isArray: false
+ name: instanceId
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Specifies whether source/destination checking is enabled. A value
+ of true means that checking is enabled, and false means that checking is disabled.
+ This value must be false for a NAT instance to perform NAT.
+ isArray: false
+ name: sourceDestCheck
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: If the value is true, you can't terminate the instance using the
+ Amazon EC2 console, CLI, or API; otherwise, you can. You cannot use this parameter
+ for Spot Instances.
+ isArray: false
+ name: disableApiTermination
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Specifies whether the instance is optimized for Amazon EBS I/O.
+ This optimization provides dedicated throughput to Amazon EBS and an optimized
+ configuration stack to provide optimal EBS I/O performance. This optimization
+ isn't available with all instance types. Additional usage charges apply when
+ using an EBS Optimized instance.
+ isArray: false
+ name: ebsOptimized
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Set to true to enable enhanced networking with ENA for the instance. This
+ option is supported only for HVM instances. Specifying this option with a
+ PV instance can make it unreachable.
+ isArray: false
+ name: enaSupport
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: Changes the instance type to the specified value.
+ isArray: false
+ name: instanceType
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Specifies whether an instance stops or terminates when you initiate
+ shutdown from the instance (using the operating system command for system
+ shutdown).
+ isArray: false
+ name: instanceInitiatedShutdownBehavior
+ predefined:
+ - Stop
+ - Terminate
+ required: false
+ secret: false
+ - default: false
+ description: '[EC2-VPC] Changes the security groups of the instance. You must
+ specify at least one security group, even if it''s just the default security
+ group for the VPC. You must specify the security group ID, not the security
+ group name.'
+ isArray: false
+ name: groups
+ required: false
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Modifies the specified attribute of the specified instance. You can
+ specify only one attribute at a time. Using this action to change the security
+ groups associated with an elastic network interface (ENI) attached to an instance
+ in a VPC can result in an error if the instance has more than one ENI. To change
+ the security groups associated with an ENI attached to an instance that has
+ multiple ENIs, we recommend that you use the ModifyNetworkInterfaceAttribute
+ action.
+ execution: true
+ name: aws-ec2-modify-instance-attribute
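+ # Usage sketch (hypothetical instance ID); one attribute per call:
+ #   !aws-ec2-modify-instance-attribute instanceId=i-0123456789abcdef0 instanceType=t3.large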
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Checks whether you have the required permissions for the action,
+ without actually making the request, and provides an error response.
+ isArray: false
+ name: DryRun
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the VPC.
+ isArray: false
+ name: VpcId
+ required: true
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a network ACL in a VPC. Network ACLs provide an optional
+ layer of security (in addition to security groups) for the instances in your
+ VPC.
+ execution: false
+ name: aws-ec2-create-network-acl
+ outputs:
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Associations.NetworkAclAssociationId
+ description: The ID of the association between a network ACL and a subnet.
+ type: String
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Associations.NetworkAclId
+ description: The ID of the network ACL.
+ type: String
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Associations.SubnetId
+ description: The ID of the subnet.
+ type: String
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Entries.CidrBlock
+ description: The IPv4 network range to allow or deny, in CIDR notation.
+ type: String
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Entries.Egress
+ description: Indicates whether the rule is an egress rule (applied to traffic
+ leaving the subnet).
+ type: Boolean
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Entries.IcmpTypeCode.Code
+ description: The ICMP code. A value of -1 means all codes for the specified
+ ICMP type.
+ type: Number
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Entries.IcmpTypeCode.Type
+ description: The ICMP type. A value of -1 means all types.
+ type: Number
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Entries.Ipv6CidrBlock
+ description: The IPv6 network range to allow or deny, in CIDR notation.
+ type: String
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Entries.PortRange.From
+ description: The first port in the range.
+ type: Number
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Entries.PortRange.To
+ description: The last port in the range.
+ type: Number
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Entries.Protocol
+ description: The protocol number. A value of "-1" means all protocols.
+ type: String
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Entries.RuleAction
+ description: Indicates whether to allow or deny the traffic that matches the
+ rule.
+ type: String
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Entries.RuleNumber
+ description: The rule number for the entry. ACL entries are processed in ascending
+ order by rule number.
+ type: Number
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.NetworkAclId
+ description: The ID of the network ACL.
+ type: String
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Tags.Key
+ description: The key of the tag.
+ type: String
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.Tags.Value
+ description: The value of the tag.
+ type: String
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.VpcId
+ description: The ID of the VPC for the network ACL.
+ type: String
+ - contextPath: AWS.EC2.VpcId.NetworkAcl.OwnerId
+ description: The ID of the AWS account that owns the network ACL.
+ type: String
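+ # Usage sketch (hypothetical VPC ID); results populate AWS.EC2.VpcId.NetworkAcl:
+ #   !aws-ec2-create-network-acl VpcId=vpc-0123456789abcdef0 region=us-east-1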
+ - arguments:
+ - default: false
+ description: The IPv4 network range to allow or deny, in CIDR notation (for
+ example, 172.16.0.0/24).
+ isArray: false
+ name: CidrBlock
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Checks whether you have the required permissions for the action,
+ without actually making the request, and provides an error response.
+ isArray: false
+ name: DryRun
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether this is an egress rule (rule is applied to traffic
+ leaving the subnet).
+ isArray: false
+ name: Egress
+ predefined:
+ - 'True'
+ - 'False'
+ required: true
+ secret: false
+ - default: false
+ description: The ICMP code. A value of -1 means all codes for the specified
+ ICMP type.
+ isArray: false
+ name: Code
+ required: false
+ secret: false
+ - default: false
+ description: The ICMP type. A value of -1 means all types.
+ isArray: false
+ name: Type
+ required: false
+ secret: false
+ - default: false
+ description: The IPv6 network range to allow or deny, in CIDR notation (for
+ example, 2001:db8:1234:1a00::/64). # disable-secrets-detection
+ isArray: false
+ name: Ipv6CidrBlock
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the network ACL.
+ isArray: false
+ name: NetworkAclId
+ required: true
+ secret: false
+ - default: false
+ description: The first port in the range.
+ isArray: false
+ name: From
+ required: false
+ secret: false
+ - default: false
+ description: The last port in the range.
+ isArray: false
+ name: To
+ required: false
+ secret: false
+ - default: false
+ description: The protocol number. A value of "-1" means all protocols. If you
+ specify "-1" or a protocol number other than "6" (TCP), "17" (UDP), or "1"
+ (ICMP), traffic on all ports is allowed, regardless of any ports or ICMP types
+ or codes that you specify. If you specify protocol "58" (ICMPv6) and specify
+ an IPv4 CIDR block, traffic for all ICMP types and codes is allowed, regardless
+ of any that you specify. If you specify protocol "58" (ICMPv6) and specify
+ an IPv6 CIDR block, you must specify an ICMP type and code.
+ isArray: false
+ name: Protocol
+ required: true
+ secret: false
+ - default: false
+ description: Indicates whether to allow or deny the traffic that matches the
+ rule.
+ isArray: false
+ name: RuleAction
+ required: true
+ secret: false
+ - default: false
+ description: The rule number for the entry (for example, 100). ACL entries are
+ processed in ascending order by rule number.
+ isArray: false
+ name: RuleNumber
+ required: true
+ secret: false
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates an entry (a rule) in a network ACL with the specified rule
+ number.
+ execution: false
+ name: aws-ec2-create-network-acl-entry
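+ # Usage sketch (hypothetical ACL ID): rule 100 allowing inbound TCP 443 from anywhere:
+ #   !aws-ec2-create-network-acl-entry NetworkAclId=acl-0123456789abcdef0 Egress=False Protocol=6 RuleAction=allow RuleNumber=100 CidrBlock=0.0.0.0/0 From=443 To=443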
+ - arguments:
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Checks whether you have the required permissions for the action,
+ without actually making the request, and provides an error response.
+ isArray: false
+ name: DryRun
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: Unique, case-sensitive identifier you provide to ensure the idempotency
+ of the request.
+ isArray: false
+ name: ClientToken
+ required: false
+ secret: false
+ - default: false
+ description: Indicates how to allocate the target capacity across the Spot pools
+ specified by the Spot Fleet request.
+ isArray: false
+ name: SpotAllocationStrategy
+ required: false
+ secret: false
+ - default: false
+ description: The behavior when a Spot Instance is interrupted.
+ isArray: false
+ name: InstanceInterruptionBehavior
+ required: false
+ secret: false
+ - default: false
+ description: The number of Spot pools across which to allocate your target Spot
+ capacity.
+ isArray: false
+ name: InstancePoolsToUseCount
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates that the fleet uses a single instance type to launch
+ all Spot Instances in the fleet.
+ isArray: false
+ name: SpotSingleInstanceType
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates that the fleet launches all Spot Instances into a single
+ Availability Zone.
+ isArray: false
+ name: SpotSingleAvailabilityZone
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: The minimum target capacity for Spot Instances in the fleet. If
+ the minimum target capacity is not reached, the fleet launches no instances.
+ isArray: false
+ name: SpotMinTargetCapacity
+ required: false
+ secret: false
+ - default: false
+ description: The order of the launch template overrides to use in fulfilling
+ On-Demand capacity.
+ isArray: false
+ name: OnDemandAllocationStrategy
+ required: false
+ secret: false
+ - default: false
+ description: Indicates that the fleet uses a single instance type to launch
+ all On-Demand Instances in the fleet.
+ isArray: false
+ name: OnDemandSingleInstanceType
+ required: false
+ secret: false
+ - default: false
+ description: Indicates that the fleet launches all On-Demand Instances into
+ a single Availability Zone.
+ isArray: false
+ name: OnDemandSingleAvailabilityZone
+ required: false
+ secret: false
+ - default: false
+ description: The minimum target capacity for On-Demand Instances in the fleet.
+ If the minimum target capacity is not reached, the fleet launches no instances.
+ isArray: false
+ name: OnDemandMinTargetCapacity
+ required: false
+ secret: false
+ - default: false
+ description: Indicates whether running instances should be terminated if the
+ total target capacity of the EC2 Fleet is decreased below the current size
+ of the EC2 Fleet.
+ isArray: false
+ name: ExcessCapacityTerminationPolicy
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the launch template.
+ isArray: false
+ name: LaunchTemplateId
+ required: true
+ secret: false
+ - default: false
+ description: The name of the launch template.
+ isArray: false
+ name: LaunchTemplateName
+ required: true
+ secret: false
+ - default: false
+ description: The version number of the launch template.
+ isArray: false
+ name: Version
+ required: true
+ secret: false
+ - default: false
+ description: The instance type.
+ isArray: false
+ name: OverrideInstanceType
+ required: false
+ secret: false
+ - default: false
+ description: The maximum price per unit hour that you are willing to pay for
+ a Spot Instance.
+ isArray: false
+ name: OverrideMaxPrice
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the subnet in which to launch the instances.
+ isArray: false
+ name: OverrideSubnetId
+ required: false
+ secret: false
+ - default: false
+ description: The Availability Zone in which to launch the instances.
+ isArray: false
+ name: OverrideAvailabilityZone
+ required: false
+ secret: false
+ - default: false
+ description: The number of units provided by the specified instance type.
+ isArray: false
+ name: OverrideWeightedCapacity
+ required: false
+ secret: false
+ - default: false
+ description: The priority for the launch template override.
+ isArray: false
+ name: OverridePriority
+ required: false
+ secret: false
+ - default: false
+ description: The number of units to request, filled using DefaultTargetCapacityType.
+ isArray: false
+ name: TotalTargetCapacity
+ required: true
+ secret: false
+ - default: false
+ description: The number of On-Demand units to request.
+ isArray: false
+ name: OnDemandTargetCapacity
+ required: true
+ secret: false
+ - default: false
+ description: The number of Spot units to request.
+ isArray: false
+ name: SpotTargetCapacity
+ required: true
+ secret: false
+ - default: false
+ description: The default TotalTargetCapacity, which is either Spot or On-Demand.
+ isArray: false
+ name: DefaultTargetCapacityType
+ required: true
+ secret: false
+ - default: false
+ description: The type of the request.
+ isArray: false
+ name: Type
+ required: false
+ secret: false
+ - default: false
+ description: The start date and time of the request, in UTC format (for example,
+ YYYY-MM-DDTHH:MM:SSZ).
+ isArray: false
+ name: ValidFrom
+ required: false
+ secret: false
+ - default: false
+ description: The end date and time of the request, in UTC format (for example,
+ YYYY-MM-DDTHH:MM:SSZ).
+ isArray: false
+ name: ValidUntil
+ required: false
+ secret: false
+ - default: false
+ description: Indicates whether EC2 Fleet should replace unhealthy instances.
+ isArray: false
+ name: ReplaceUnhealthyInstances
+ required: false
+ secret: false
+ - default: false
+ description: The tags to apply to the resource.
+ isArray: false
+ name: Tags
+ required: false
+ secret: false
+ deprecated: false
+ description: Launches an EC2 Fleet.
+ execution: false
+ name: aws-ec2-create-fleet
+ outputs:
+ - contextPath: AWS.EC2.Fleet.FleetId
+ description: The ID of the EC2 Fleet.
+ type: String
+ - contextPath: AWS.EC2.Fleet.Errors
+ description: Information about the instances that could not be launched by the
+ fleet. Valid only when Type is set to instant.
+ type: String
+ - contextPath: AWS.EC2.Fleet.LaunchTemplateAndOverrides.LaunchTemplateSpecification.LaunchTemplateId
+ description: The ID of the launch template. You must specify either a template
+ ID or a template name.
+ type: String
+ - contextPath: AWS.EC2.Fleet.LaunchTemplateAndOverrides.LaunchTemplateSpecification.LaunchTemplateName
+ description: The name of the launch template. You must specify either a template
+ name or a template ID.
+ type: String
+ - contextPath: AWS.EC2.Fleet.LaunchTemplateAndOverrides.LaunchTemplateSpecification.Version
+ description: The version number of the launch template. You must specify a version
+ number.
+ type: String
+ - contextPath: AWS.EC2.Fleet.LaunchTemplateAndOverrides.Overrides.InstanceType
+ description: The instance type.
+ type: String
+ - contextPath: AWS.EC2.Fleet.LaunchTemplateAndOverrides.Overrides.MaxPrice
+ description: The maximum price per unit hour that you are willing to pay for
+ a Spot Instance.
+ type: String
+ - contextPath: AWS.EC2.Fleet.LaunchTemplateAndOverrides.Overrides.SubnetId
+ description: The ID of the subnet in which to launch the instances.
+ type: String
+ - contextPath: AWS.EC2.Fleet.LaunchTemplateAndOverrides.Overrides.AvailabilityZone
+ description: The Availability Zone in which to launch the instances.
+ type: String
+ - contextPath: AWS.EC2.Fleet.LaunchTemplateAndOverrides.Overrides.WeightedCapacity
+ description: The number of units provided by the specified instance type.
+ type: String
+ - contextPath: AWS.EC2.Fleet.LaunchTemplateAndOverrides.Overrides.Priority
+ description: The priority for the launch template override.
+ type: String
+ - contextPath: AWS.EC2.Fleet.LaunchTemplateAndOverrides.Overrides.Placement.GroupName
+ description: The name of the placement group the instance is in.
+ type: String
+ - contextPath: AWS.EC2.Fleet.LaunchTemplateAndOverrides.Lifecycle
+ description: Indicates if the instance that could not be launched was a Spot
+ Instance or On-Demand Instance.
+ type: String
+ - contextPath: AWS.EC2.Fleet.LaunchTemplateAndOverrides.ErrorCode
+ description: The error code that indicates why the instance could not be launched.
+ type: String
+ - contextPath: AWS.EC2.Fleet.LaunchTemplateAndOverrides.ErrorMessage
+ description: The error message that describes why the instance could not be
+ launched.
+ type: String
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.LaunchTemplateSpecification.LaunchTemplateId
+ description: The ID of the launch template. You must specify either a template
+ ID or a template name.
+ type: String
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.LaunchTemplateSpecification.LaunchTemplateName
+ description: The name of the launch template. You must specify either a template
+ name or a template ID.
+ type: String
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.LaunchTemplateSpecification.Version
+ description: The version number of the launch template. You must specify a version
+ number.
+ type: String
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.Overrides.InstanceType
+ description: The instance type.
+ type: String
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.Overrides.MaxPrice
+ description: The maximum price per unit hour that you are willing to pay for
+ a Spot Instance.
+ type: String
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.Overrides.SubnetId
+ description: The ID of the subnet in which to launch the instances.
+ type: String
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.Overrides.AvailabilityZone
+ description: The Availability Zone in which to launch the instances.
+ type: String
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.Overrides.WeightedCapacity
+ description: The number of units provided by the specified instance type.
+ type: Number
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.Overrides.Priority
+ description: The priority for the launch template override.
+ type: Number
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.Overrides.Placement.GroupName
+ description: The name of the placement group the instance is in.
+ type: String
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.Overrides.Lifecycle
+ description: Indicates if the instance that was launched is a Spot Instance
+ or On-Demand Instance.
+ type: String
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.Overrides.InstanceIds
+ description: The IDs of the instances.
+ type: String
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.Overrides.InstanceType
+ description: The instance type.
+ type: String
+ - contextPath: AWS.EC2.Fleet.Instances.LaunchTemplateAndOverrides.Overrides.Platform
+ description: The value is Windows for Windows instances; otherwise blank.
+ type: String
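+ # Usage sketch (hypothetical template ID). The AWS API accepts either a launch
+ # template ID or name, so only one is shown here:
+ #   !aws-ec2-create-fleet LaunchTemplateId=lt-0123456789abcdef0 Version=1 TotalTargetCapacity=4 OnDemandTargetCapacity=2 SpotTargetCapacity=2 DefaultTargetCapacityType=spot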
+ - arguments:
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ - default: false
+ description: Checks whether you have the required permissions for the action,
+ without actually making the request, and provides an error response.
+ isArray: false
+ name: DryRun
+ required: false
+ secret: false
+ - default: false
+ description: The IDs of the EC2 Fleets.
+ isArray: false
+ name: FleetIds
+ required: true
+ secret: false
+ - default: false
+ description: Indicates whether to terminate instances for an EC2 Fleet if it
+ is deleted successfully.
+ isArray: false
+ name: TerminateInstances
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the specified EC2 Fleet.
+ execution: false
+ name: aws-ec2-delete-fleet
+ outputs:
+ - contextPath: AWS.EC2.DeletedFleets.SuccessfulFleetDeletions.CurrentFleetState
+ description: The current state of the EC2 Fleet.
+ type: String
+ - contextPath: AWS.EC2.DeletedFleets.SuccessfulFleetDeletions.PreviousFleetState
+ description: The previous state of the EC2 Fleet.
+ type: String
+ - contextPath: AWS.EC2.DeletedFleets.SuccessfulFleetDeletions.FleetId
+ description: The ID of the EC2 Fleet.
+ type: String
+ - contextPath: AWS.EC2.DeletedFleets.UnsuccessfulFleetDeletions.Error.Code
+ description: The error code.
+ type: String
+ - contextPath: AWS.EC2.DeletedFleets.UnsuccessfulFleetDeletions.Error.Message
+ description: The description for the error code.
+ type: String
+ - contextPath: AWS.EC2.DeletedFleets.UnsuccessfulFleetDeletions.FleetId
+ description: The ID of the EC2 Fleet.
+ type: String
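+ # Usage sketch (hypothetical fleet ID); TerminateInstances is required:
+ #   !aws-ec2-delete-fleet FleetIds=fleet-12345678-90ab-cdef-1234-567890abcdef TerminateInstances=True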
+ - arguments:
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ - default: false
+ description: One or more filters.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: The IDs of the EC2 Fleets.
+ isArray: false
+ name: FleetIds
+ required: false
+ secret: false
+ - default: false
+ description: The maximum number of results to return in a single call. Specify
+ a value between 1 and 1000.
+ isArray: false
+ name: MaxResults
+ required: false
+ secret: false
+ - default: false
+ description: The token for the next set of results.
+ isArray: false
+ name: NextToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes one or more of your EC2 Fleets.
+ execution: false
+ name: aws-ec2-describe-fleets
+ outputs:
+ - contextPath: AWS.EC2.Fleet.NextToken
+ description: The token for the next set of results.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.ActivityStatus
+ description: The progress of the EC2 Fleet. If there is an error, the status
+ is error.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.CreateTime
+ description: The creation date and time of the EC2 Fleet.
+ type: date
+ - contextPath: AWS.EC2.Fleet.Fleets.FleetId
+ description: The ID of the EC2 Fleet.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.FleetState
+ description: The state of the EC2 Fleet.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.ClientToken
+ description: Unique, case-sensitive identifier you provide to ensure the idempotency
+ of the request.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.ExcessCapacityTerminationPolicy
+ description: Indicates whether running instances should be terminated if the
+ target capacity of the EC2 Fleet is decreased below the current size of the
+ EC2 Fleet.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.FulfilledCapacity
+ description: The number of units fulfilled by this request compared to the set
+ target capacity.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.FulfilledOnDemandCapacity
+ description: The number of units fulfilled by this request compared to the set
+ target On-Demand capacity.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.LaunchTemplateConfigs.LaunchTemplateSpecification.LaunchTemplateId
+ description: The ID of the launch template. You must specify either a template
+ ID or a template name.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.LaunchTemplateConfigs.LaunchTemplateSpecification.LaunchTemplateName
+ description: The name of the launch template. You must specify either a template
+ name or a template ID.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.LaunchTemplateConfigs.LaunchTemplateSpecification.Version
+ description: The version number of the launch template. You must specify a version
+ number.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.LaunchTemplateConfigs.LaunchTemplateSpecification.Overrides.InstanceType
+ description: The instance type.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.LaunchTemplateConfigs.LaunchTemplateSpecification.Overrides.MaxPrice
+ description: The maximum price per unit hour that you are willing to pay for
+ a Spot Instance.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.LaunchTemplateConfigs.LaunchTemplateSpecification.Overrides.SubnetId
+ description: The ID of the subnet in which to launch the instances.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.LaunchTemplateConfigs.LaunchTemplateSpecification.Overrides.AvailabilityZone
+ description: The Availability Zone in which to launch the instances.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.LaunchTemplateConfigs.LaunchTemplateSpecification.Overrides.WeightedCapacity
+ description: The number of units provided by the specified instance type.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.LaunchTemplateConfigs.LaunchTemplateSpecification.Overrides.Priority
+ description: The priority for the launch template override.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.LaunchTemplateConfigs.LaunchTemplateSpecification.Overrides.Placement.GroupName
+ description: The name of the placement group the instance is in.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.TargetCapacitySpecification.TotalTargetCapacity
+ description: The number of units to request, filled using DefaultTargetCapacityType.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.TargetCapacitySpecification.OnDemandTargetCapacity
+ description: The number of On-Demand units to request.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.TargetCapacitySpecification.SpotTargetCapacity
+ description: The maximum number of Spot units to launch.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.TargetCapacitySpecification.DefaultTargetCapacityType
+ description: The default TotalTargetCapacity, which is either Spot or On-Demand.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.TerminateInstancesWithExpiration
+ description: Indicates whether running instances should be terminated when the
+ EC2 Fleet expires.
+ type: boolean
+ - contextPath: AWS.EC2.Fleet.Fleets.Type
+ description: The type of request. Indicates whether the EC2 Fleet only requests
+ the target capacity, or also attempts to maintain it.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.ValidFrom
+ description: The start date and time of the request, in UTC format (for example,
+ YYYY-MM-DDTHH:MM:SSZ).
+ type: date
+ - contextPath: AWS.EC2.Fleet.Fleets.ValidUntil
+ description: The end date and time of the request, in UTC format (for example,
+ YYYY-MM-DDTHH:MM:SSZ).
+ type: date
+ - contextPath: AWS.EC2.Fleet.Fleets.ReplaceUnhealthyInstances
+ description: Indicates whether EC2 Fleet should replace unhealthy instances.
+ type: boolean
+ - contextPath: AWS.EC2.Fleet.Fleets.SpotOptions.AllocationStrategy
+ description: Indicates how to allocate the target capacity across the Spot pools
+ specified by the Spot Fleet request.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.SpotOptions.InstanceInterruptionBehavior
+ description: The behavior when a Spot Instance is interrupted. The default is
+ terminate.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.SpotOptions.InstancePoolsToUseCount
+ description: The number of Spot pools across which to allocate your target Spot
+ capacity.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.SpotOptions.SingleInstanceType
+ description: Indicates that the fleet uses a single instance type to launch
+ all Spot Instances in the fleet.
+ type: boolean
+ - contextPath: AWS.EC2.Fleet.Fleets.SpotOptions.SingleAvailabilityZone
+ description: Indicates that the fleet launches all Spot Instances into a single
+ Availability Zone.
+ type: boolean
+ - contextPath: AWS.EC2.Fleet.Fleets.SpotOptions.MinTargetCapacity
+ description: The minimum target capacity for Spot Instances in the fleet.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.OnDemandOptions.AllocationStrategy
+ description: The order of the launch template overrides to use in fulfilling
+ On-Demand capacity.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.OnDemandOptions.SingleInstanceType
+ description: Indicates that the fleet uses a single instance type to launch
+ all On-Demand Instances in the fleet.
+ type: boolean
+ - contextPath: AWS.EC2.Fleet.Fleets.OnDemandOptions.SingleAvailabilityZone
+ description: Indicates that the fleet launches all On-Demand Instances into
+ a single Availability Zone.
+ type: boolean
+ - contextPath: AWS.EC2.Fleet.Fleets.OnDemandOptions.MinTargetCapacity
+ description: The minimum target capacity for On-Demand Instances in the fleet.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.Tags.Key
+ description: The key of the tag.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Tags.Value
+ description: The value of the tag.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Errors.LaunchTemplateAndOverrides.LaunchTemplateSpecification.LaunchTemplateId
+ description: The ID of the launch template. You must specify either a template
+ ID or a template name.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Errors.LaunchTemplateAndOverrides.LaunchTemplateSpecification.LaunchTemplateName
+ description: The name of the launch template. You must specify either a template
+ name or a template ID.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Errors.LaunchTemplateAndOverrides.LaunchTemplateSpecification.Version
+ description: The version number of the launch template. You must specify a version
+ number.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Errors.Overrides.InstanceType
+ description: The instance type.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Errors.Overrides.MaxPrice
+ description: The maximum price per unit hour that you are willing to pay for
+ a Spot Instance.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Errors.Overrides.SubnetId
+ description: The ID of the subnet in which to launch the instances.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Errors.Overrides.AvailabilityZone
+ description: The Availability Zone in which to launch the instances.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Errors.Overrides.WeightedCapacity
+ description: The number of units provided by the specified instance type.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.Errors.Overrides.Priority
+ description: The priority for the launch template override.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.Errors.Overrides.Placement.GroupName
+ description: The name of the placement group the instance is in.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Errors.Lifecycle
+ description: Indicates if the instance that could not be launched was a Spot
+ Instance or On-Demand Instance.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Errors.ErrorCode
+ description: The error code that indicates why the instance could not be launched.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Errors.ErrorMessage
+ description: The error message that describes why the instance could not be
+ launched.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.LaunchTemplateAndOverrides.LaunchTemplateSpecification.LaunchTemplateId
+ description: The ID of the launch template. You must specify either a template
+ ID or a template name.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.LaunchTemplateAndOverrides.LaunchTemplateSpecification.LaunchTemplateName
+ description: The name of the launch template. You must specify either a template
+ name or a template ID.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.LaunchTemplateAndOverrides.LaunchTemplateSpecification.Version
+ description: The version number of the launch template. You must specify a version
+ number.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.LaunchTemplateAndOverrides.Overrides.InstanceType
+ description: The instance type.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.LaunchTemplateAndOverrides.Overrides.MaxPrice
+ description: The maximum price per unit hour that you are willing to pay for
+ a Spot Instance.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.LaunchTemplateAndOverrides.Overrides.SubnetId
+ description: The ID of the subnet in which to launch the instances.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.LaunchTemplateAndOverrides.Overrides.AvailabilityZone
+ description: The Availability Zone in which to launch the instances.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.LaunchTemplateAndOverrides.Overrides.WeightedCapacity
+ description: The number of units provided by the specified instance type.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.LaunchTemplateAndOverrides.Overrides.Priority
+ description: The priority for the launch template override.
+ type: number
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.LaunchTemplateAndOverrides.Overrides.Placement.GroupName
+ description: The name of the placement group the instance is in.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.Lifecycle
+ description: Indicates if the instance that was launched is a Spot Instance
+ or On-Demand Instance.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.InstanceIds
+ description: The IDs of the instances.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.InstanceType
+ description: The instance type.
+ type: string
+ - contextPath: AWS.EC2.Fleet.Fleets.Instances.Platform
+ description: The value is Windows for Windows instances; otherwise blank.
+ type: string
+ - arguments:
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ - default: false
+ description: A filter name and value pair that is used to return a more specific
+ list of results from a describe operation.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the EC2 Fleet.
+ isArray: false
+ name: FleetId
+ required: true
+ secret: false
+ - default: false
+ description: The maximum number of results to return in a single call. Specify
+ a value between 1 and 1000.
+ isArray: false
+ name: MaxResults
+ required: false
+ secret: false
+ - default: false
+ description: The token for the next set of results.
+ isArray: false
+ name: NextToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Describes the running instances for the specified EC2 Fleet.
+ execution: false
+ name: aws-ec2-describe-fleet-instances
+ outputs:
+ - contextPath: AWS.EC2.Fleet.ActiveInstances.InstanceId
+ description: The ID of the instance.
+ type: String
+ - contextPath: AWS.EC2.Fleet.ActiveInstances.InstanceType
+ description: The instance type.
+ type: String
+ - contextPath: AWS.EC2.Fleet.ActiveInstances.SpotInstanceRequestId
+ description: The ID of the Spot Instance request.
+ type: String
+ - contextPath: AWS.EC2.Fleet.ActiveInstances.InstanceHealth
+ description: The health status of the instance.
+ type: String
+ - contextPath: AWS.EC2.Fleet.NextToken
+ description: The token for the next set of results.
+ type: String
+ - contextPath: AWS.EC2.Fleet.FleetId
+ description: The ID of the EC2 Fleet.
+ type: String
+ - arguments:
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the EC2 Fleet.
+ isArray: false
+ name: FleetId
+ required: true
+ secret: false
+ - default: false
+ description: The number of units to request, filled using DefaultTargetCapacityType.
+ isArray: false
+ name: TotalTargetCapacity
+ required: true
+ secret: false
+ - default: false
+ description: The number of On-Demand units to request.
+ isArray: false
+ name: OnDemandTargetCapacity
+ required: false
+ secret: false
+ - default: false
+ description: The number of Spot units to request.
+ isArray: false
+ name: SpotTargetCapacity
+ required: false
+ secret: false
+ - default: false
+ description: The default TotalTargetCapacity, which is either Spot or On-Demand.
+ isArray: false
+ name: DefaultTargetCapacityType
+ required: false
+ secret: false
+ deprecated: false
+ description: Modifies the specified EC2 Fleet.
+ execution: false
+ name: aws-ec2-modify-fleet
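+ # Hypothetical usage sketch: !aws-ec2-modify-fleet FleetId=<fleet-id> TotalTargetCapacity=5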
+ - arguments:
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ - default: false
+ description: Unique, case-sensitive identifier you provide to ensure the idempotency
+ of the request.
+ isArray: false
+ name: ClientToken
+ required: false
+ secret: false
+ - default: false
+ description: A name for the launch template.
+ isArray: false
+ name: LaunchTemplateName
+ required: true
+ secret: false
+ - default: false
+ description: A description for the first version of the launch template.
+ isArray: false
+ name: VersionDescription
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the kernel.
+ isArray: false
+ name: KernelId
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether the instance is optimized for Amazon EBS I/O.
+ isArray: false
+ name: EbsOptimized
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the instance profile.
+ isArray: false
+ name: iamInstanceProfileArn
+ required: false
+ secret: false
+ - default: false
+ description: The name of the instance profile.
+ isArray: false
+ name: iamInstanceProfileName
+ required: false
+ secret: false
+ - default: false
+ description: The device name (for example, /dev/sdh or xvdh).
+ isArray: false
+ name: deviceName
+ required: false
+ secret: false
+ - default: false
+ description: The virtual device name (ephemeralN). Instance store volumes are
+ numbered starting from 0.
+ isArray: false
+ name: VirtualName
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether the EBS volume is encrypted.
+ isArray: false
+ name: ebsEncrypted
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether the EBS volume is deleted on instance termination.
+ isArray: false
+ name: ebsDeleteOnTermination
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: The number of I/O operations per second (IOPS) that the volume
+ supports.
+ isArray: false
+ name: ebsIops
+ required: false
+ secret: false
+ - default: false
+ description: The ARN of the AWS Key Management Service (AWS KMS) CMK used for
+ encryption.
+ isArray: false
+ name: ebsKmsKeyId
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the snapshot.
+ isArray: false
+ name: ebsSnapshotId
+ required: false
+ secret: false
+ - default: false
+ description: The size of the volume, in GiB.
+ isArray: false
+ name: ebsVolumeSize
+ required: false
+ secret: false
+ - default: false
+ description: The volume type.
+ isArray: false
+ name: ebsVolumeType
+ required: false
+ secret: false
+ - default: false
+ description: Suppresses the specified device included in the block device mapping
+ of the AMI.
+ isArray: false
+ name: NoDevice
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Associates a public IPv4 address with eth0 for a new network interface.
+ isArray: false
+ name: AssociatePublicIpAddress
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether the network interface is deleted when the instance
+ is terminated.
+ isArray: false
+ name: NetworkInterfacesDeleteOnTermination
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: A description for the network interface.
+ isArray: false
+ name: NetworkInterfacesDescription
+ required: false
+ secret: false
+ - default: false
+ description: The device index for the network interface attachment.
+ isArray: false
+ name: NetworkInterfacesDeviceIndex
+ required: false
+ secret: false
+ - default: false
+ description: The IDs of one or more security groups.
+ isArray: false
+ name: NetworkInterfaceGroups
+ required: false
+ secret: false
+ - default: false
+ description: The number of IPv6 addresses to assign to a network interface.
+ isArray: false
+ name: Ipv6AddressCount
+ required: false
+ secret: false
+ - default: false
+ description: One or more specific IPv6 addresses from the IPv6 CIDR block range
+ of your subnet.
+ isArray: false
+ name: Ipv6Addresses
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the network interface.
+ isArray: false
+ name: NetworkInterfaceId
+ required: false
+ secret: false
+ - default: false
+ description: The primary private IPv4 address of the network interface.
+ isArray: false
+ name: PrivateIpAddress
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the subnet for the network interface.
+ isArray: false
+ name: SubnetId
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the AMI, which you can get by using DescribeImages.
+ isArray: false
+ name: ImageId
+ required: false
+ secret: false
+ - default: false
+ description: The instance type.
+ isArray: false
+ name: InstanceType
+ required: false
+ secret: false
+ - default: false
+ description: The name of the key pair.
+ isArray: false
+ name: KeyName
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Specify true to enable detailed monitoring. Otherwise, basic monitoring
+ is enabled.
+ isArray: false
+ name: Monitoring
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: The Availability Zone for the instance.
+ isArray: false
+ name: AvailabilityZone
+ required: false
+ secret: false
+ - default: false
+ description: The affinity setting for an instance on a Dedicated Host.
+ isArray: false
+ name: PlacementAffinity
+ required: false
+ secret: false
+ - default: false
+ description: The name of the placement group for the instance.
+ isArray: false
+ name: AvailabilityZoneGroupName
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the Dedicated Host for the instance.
+ isArray: false
+ name: PlacementHostId
+ required: false
+ secret: false
+ - default: false
+ description: The tenancy of the instance (if the instance is running in a VPC).
+ isArray: false
+ name: PlacementTenancy
+ required: false
+ secret: false
+ - default: false
+ description: Reserved for future use.
+ isArray: false
+ name: PlacementSpreadDomain
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the RAM disk.
+ isArray: false
+ name: RamDiskId
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: If set to true, you can't terminate the instance using the Amazon
+ EC2 console, CLI, or API.
+ isArray: false
+ name: DisableApiTermination
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: Indicates whether an instance stops or terminates when you initiate
+ shutdown from the instance (using the operating system command for system
+ shutdown).
+ isArray: false
+ name: InstanceInitiatedShutdownBehavior
+ required: false
+ secret: false
+ - default: false
+ description: The Base64-encoded user data to make available to the instance.
+ isArray: false
+ name: UserData
+ required: false
+ secret: false
+ - default: false
+ description: The tags to apply to the resource.
+ isArray: false
+ name: Tags
+ required: false
+ secret: false
+ - default: false
+ description: The type of Elastic Graphics accelerator.
+ isArray: false
+ name: ElasticGpuSpecificationsType
+ required: false
+ secret: false
+ - default: false
+ description: The type of elastic inference accelerator. The possible values
+ are eia1.medium, eia1.large, and eia1.xlarge.
+ isArray: false
+ name: ElasticInferenceAcceleratorsType
+ required: false
+ secret: false
+ - default: false
+ description: One or more security group IDs.
+ isArray: false
+ name: securityGroupIds
+ required: false
+ secret: false
+ - default: false
+ description: One or more security group names.
+ isArray: false
+ name: securityGroups
+ required: false
+ secret: false
+ - default: false
+ description: The market type.
+ isArray: false
+ name: MarketType
+ required: false
+ secret: false
+ - default: false
+ description: The Spot Instance request type.
+ isArray: false
+ name: SpotInstanceType
+ required: false
+ secret: false
+ - default: false
+ description: The required duration for the Spot Instances (also known as Spot
+ blocks), in minutes. This value must be a multiple of 60 (60, 120, 180, 240,
+ 300, or 360).
+ isArray: false
+ name: BlockDurationMinutes
+ required: false
+ secret: false
+ - default: false
+ description: The end date of the request.
+ isArray: false
+ name: SpotValidUntil
+ required: false
+ secret: false
+ - default: false
+ description: The behavior when a Spot Instance is interrupted. The default is
+ terminate.
+ isArray: false
+ name: SpotInstanceInterruptionBehavior
+ required: false
+ secret: false
+ - default: false
+ description: The maximum hourly price you're willing to pay for the Spot Instances.
+ isArray: false
+ name: SpotMaxPrice
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a launch template. A launch template contains the parameters
+ to launch an instance.
+ execution: false
+ name: aws-ec2-create-launch-template
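+ # Hypothetical usage sketch: !aws-ec2-create-launch-template LaunchTemplateName=<name> ImageId=<ami-id> InstanceType=t2.micro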
+ outputs:
+ - contextPath: AWS.EC2.LaunchTemplates.LaunchTemplate.LaunchTemplateId
+ description: The ID of the launch template.
+ type: String
+ - contextPath: AWS.EC2.LaunchTemplates.LaunchTemplate.LaunchTemplateName
+ description: The name of the launch template.
+ type: String
+ - contextPath: AWS.EC2.LaunchTemplates.LaunchTemplate.CreateTime
+ description: The time the launch template was created.
+ type: Date
+ - contextPath: AWS.EC2.LaunchTemplates.LaunchTemplate.CreatedBy
+ description: The principal that created the launch template.
+ type: String
+ - contextPath: AWS.EC2.LaunchTemplates.LaunchTemplate.DefaultVersionNumber
+ description: The version number of the default version of the launch template.
+ type: Number
+ - contextPath: AWS.EC2.LaunchTemplates.LaunchTemplate.LatestVersionNumber
+ description: The version number of the latest version of the launch template.
+ type: Number
+ - contextPath: AWS.EC2.LaunchTemplates.LaunchTemplate.Tags.Key
+ description: The key of the tag.
+ type: String
+ - contextPath: AWS.EC2.LaunchTemplates.LaunchTemplate.Tags.Value
+ description: The value of the tag.
+ type: String
+ - arguments:
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the launch template.
+ isArray: false
+ name: LaunchTemplateId
+ required: false
+ secret: false
+ - default: false
+ description: The name of the launch template.
+ isArray: false
+ name: LaunchTemplateName
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes a launch template. Deleting a launch template deletes all
+ of its versions.
+ execution: false
+ name: aws-ec2-delete-launch-template
+ outputs:
+ - contextPath: AWS.EC2.DeletedLaunchTemplates.LaunchTemplate.LaunchTemplateId
+ description: The ID of the launch template.
+ type: String
+ - contextPath: AWS.EC2.DeletedLaunchTemplates.LaunchTemplate.LaunchTemplateName
+ description: The name of the launch template.
+ type: String
+ - contextPath: AWS.EC2.DeletedLaunchTemplates.LaunchTemplate.CreateTime
+ description: The time the launch template was created.
+ type: Date
+ - contextPath: AWS.EC2.DeletedLaunchTemplates.LaunchTemplate.CreatedBy
+ description: The principal that created the launch template.
+ type: String
+ - contextPath: AWS.EC2.DeletedLaunchTemplates.LaunchTemplate.DefaultVersionNumber
+ description: The version number of the default version of the launch template.
+ type: Number
+ - contextPath: AWS.EC2.DeletedLaunchTemplates.LaunchTemplate.LatestVersionNumber
+ description: The version number of the latest version of the launch template.
+ type: Number
+ - arguments:
+ - default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ - default: false
+ description: The name of the attribute to modify. The valid values are description,
+ launchPermission, and productCodes.
+ isArray: false
+ name: Attribute
+ required: false
+ secret: false
+ - default: false
+ description: A new description for the AMI.
+ isArray: false
+ name: Description
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the AMI.
+ isArray: false
+ name: ImageId
+ required: true
+ secret: false
+ - default: false
+ description: The name of the group.
+ isArray: false
+ name: LaunchPermission-Add-Group
+ required: false
+ secret: false
+ - default: false
+ description: The AWS account ID.
+ isArray: false
+ name: LaunchPermission-Add-UserId
+ required: false
+ secret: false
+ - default: false
+ description: The name of the group.
+ isArray: false
+ name: LaunchPermission-Remove-Group
+ required: false
+ secret: false
+ - default: false
+ description: The AWS account ID.
+ isArray: false
+ name: LaunchPermission-Remove-UserId
+ required: false
+ secret: false
+ - default: false
+ description: The operation type.
+ isArray: false
+ name: OperationType
+ required: false
+ secret: false
+ - default: false
+ description: One or more DevPay product codes. After you add a product code
+ to an AMI, it can't be removed.
+ isArray: false
+ name: ProductCodes
+ required: false
+ secret: false
+ - default: false
+ description: One or more user groups. This parameter can be used only when the
+ Attribute parameter is launchPermission.
+ isArray: false
+ name: UserGroups
+ required: false
+ secret: false
+ - default: false
+ description: One or more AWS account IDs. This parameter can be used only when
+ the Attribute parameter is launchPermission.
+ isArray: false
+ name: UserIds
+ required: false
+ secret: false
+ - default: false
+ description: The value of the attribute being modified. This parameter can be
+ used only when the Attribute parameter is description or productCodes.
+ isArray: false
+ name: Value
+ required: false
+ secret: false
+ deprecated: false
+ description: Modifies the specified attribute of the specified AMI.
+ execution: false
+ name: aws-ec2-modify-image-attribute
+ dockerimage: demisto/boto3:1.9.55
+ isfetch: false
+ runonce: false
+ script: ''
+ type: python
+ subtype: python2
+tests:
+- 2142f8de-29d5-4288-8426-0db39abe988b
diff --git a/Integrations/AWS-EC2/AWS-EC2_description.md b/Integrations/AWS-EC2/AWS-EC2_description.md
new file mode 100644
index 000000000000..0da90c2304de
--- /dev/null
+++ b/Integrations/AWS-EC2/AWS-EC2_description.md
@@ -0,0 +1,14 @@
+Before you can use AWS EC2, you need to perform several configuration steps in your AWS environment.
+
+### Prerequisites
+- Attach an instance profile with the required permissions to the Demisto server or engine running
+in your AWS environment.
+- The instance profile requires, at minimum, the sts:AssumeRole permission.
+- The instance profile requires permission to assume the roles needed by the AWS integrations.
+
+### Configure AWS Settings
+- Create an IAM Role for the Instance Profile.
+- Attach a Role to the Instance Profile.
+- Configure the Necessary IAM Roles that the AWS Integration Can Assume.
+
+For detailed instructions, [see the AWS Integrations Configuration Guide](https://support.demisto.com/hc/en-us/articles/360005686854-AWS-Integrations-Configuration-Guide).
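+
+As a minimal sketch of the assume-role flow these prerequisites enable (illustrative only, not part of the integration code; the role ARN below is hypothetical):
+
+```python
+import boto3
+
+# The instance profile supplies the base credentials; sts:AssumeRole is the
+# minimum permission it needs.
+sts = boto3.client('sts')
+response = sts.assume_role(
+    RoleArn='arn:aws:iam::123456789012:role/DemistoIntegrationRole',  # hypothetical
+    RoleSessionName='demisto-session',
+)
+credentials = response['Credentials']  # temporary AccessKeyId / SecretAccessKey / SessionToken
+```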
diff --git a/Integrations/AWS-EC2/AWS-EC2_image.png b/Integrations/AWS-EC2/AWS-EC2_image.png
new file mode 100644
index 000000000000..7f17860ed854
Binary files /dev/null and b/Integrations/AWS-EC2/AWS-EC2_image.png differ
diff --git a/Integrations/AWS-EC2/CHANGELOG.md b/Integrations/AWS-EC2/CHANGELOG.md
new file mode 100644
index 000000000000..08a686f8b200
--- /dev/null
+++ b/Integrations/AWS-EC2/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+* Added several arguments to the ***authorize_security_group_ingress*** command.
+* Fixed an issue with the proxy and insecure (certificate verification) parameters.
diff --git a/Integrations/AWS-Lambda/AWS-Lambda.py b/Integrations/AWS-Lambda/AWS-Lambda.py
new file mode 100644
index 000000000000..4a5eb49036a4
--- /dev/null
+++ b/Integrations/AWS-Lambda/AWS-Lambda.py
@@ -0,0 +1,399 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+"""IMPORTS"""
+import boto3
+import base64
+from datetime import datetime, date
+from botocore.config import Config
+from botocore.parsers import ResponseParserError
+import urllib3.util
+
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+"""GLOBAL VARIABLES"""
+AWS_DEFAULT_REGION = demisto.params().get('defaultRegion')
+AWS_ROLE_ARN = demisto.params().get('roleArn')
+AWS_ROLE_SESSION_NAME = demisto.params().get('roleSessionName')
+AWS_ROLE_SESSION_DURATION = demisto.params().get('sessionDuration')
+AWS_ROLE_POLICY = None
+AWS_ACCESS_KEY_ID = demisto.params().get('access_key')
+AWS_SECRET_ACCESS_KEY = demisto.params().get('secret_key')
+VERIFY_CERTIFICATE = not demisto.params().get('insecure', True)
+proxies = handle_proxy(proxy_param_name='proxy', checkbox_default_value=False)
+config = Config(
+ connect_timeout=1,
+ retries=dict(
+ max_attempts=5
+ ),
+ proxies=proxies
+)
+
+"""HELPER FUNCTIONS"""
+
+
+def aws_session(service='lambda', region=None, roleArn=None, roleSessionName=None, roleSessionDuration=None,
+ rolePolicy=None):
+ kwargs = {}
+ if roleArn is not None and roleSessionName is not None:
+ kwargs.update({
+ 'RoleArn': roleArn,
+ 'RoleSessionName': roleSessionName,
+ })
+ elif AWS_ROLE_ARN is not None and AWS_ROLE_SESSION_NAME is not None:
+ kwargs.update({
+ 'RoleArn': AWS_ROLE_ARN,
+ 'RoleSessionName': AWS_ROLE_SESSION_NAME,
+ })
+
+ if roleSessionDuration is not None:
+ kwargs.update({'DurationSeconds': int(roleSessionDuration)})
+ elif AWS_ROLE_SESSION_DURATION is not None:
+ kwargs.update({'DurationSeconds': int(AWS_ROLE_SESSION_DURATION)})
+
+ if rolePolicy is not None:
+ kwargs.update({'Policy': rolePolicy})
+ elif AWS_ROLE_POLICY is not None:
+ kwargs.update({'Policy': AWS_ROLE_POLICY})
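+ # Three credential paths follow: assume-role using the ambient (instance profile)
+ # credentials when no access key is configured, assume-role using explicit access
+ # keys, or a plain client built from the configured credentials.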
+ if kwargs and AWS_ACCESS_KEY_ID is None:
+ sts_client = boto3.client('sts', config=config, verify=VERIFY_CERTIFICATE)
+ sts_response = sts_client.assume_role(**kwargs)
+ if region is not None:
+ client = boto3.client(
+ service_name=service,
+ region_name=region,
+ aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+ aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+ aws_session_token=sts_response['Credentials']['SessionToken'],
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ else:
+ client = boto3.client(
+ service_name=service,
+ region_name=AWS_DEFAULT_REGION,
+ aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+ aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+ aws_session_token=sts_response['Credentials']['SessionToken'],
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ elif AWS_ACCESS_KEY_ID and AWS_ROLE_ARN:
+ sts_client = boto3.client(
+ service_name='sts',
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ kwargs.update({
+ 'RoleArn': AWS_ROLE_ARN,
+ 'RoleSessionName': AWS_ROLE_SESSION_NAME,
+ })
+ sts_response = sts_client.assume_role(**kwargs)
+ client = boto3.client(
+ service_name=service,
+ region_name=AWS_DEFAULT_REGION,
+ aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+ aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+ aws_session_token=sts_response['Credentials']['SessionToken'],
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ else:
+ if region is not None:
+ client = boto3.client(
+ service_name=service,
+ region_name=region,
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ else:
+ client = boto3.client(
+ service_name=service,
+ region_name=AWS_DEFAULT_REGION,
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+
+ return client
+
+
+def parse_tag_field(tags_str):
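+ # Parses a semicolon-separated tag string such as "key=Env,value=Prod;key=Team,value=SecOps"
+ # into the [{'Key': ..., 'Value': ...}] list structure that AWS APIs expect.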
+ tags = []
+ regex = re.compile(r'key=([\w\d_:.-]+),value=([ /\w\d@_,.\*-]+)', flags=re.I)
+ for f in tags_str.split(';'):
+ match = regex.match(f)
+ if match is None:
+ demisto.log('could not parse field: %s' % (f,))
+ continue
+
+ tags.append({
+ 'Key': match.group(1),
+ 'Value': match.group(2)
+ })
+ return tags
+
+
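+# boto3 responses contain datetime/date objects that the default json encoder
+# rejects; this encoder renders them as ISO-style strings for context storage.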
+class DatetimeEncoder(json.JSONEncoder):
+ # pylint: disable=method-hidden
+ def default(self, obj):
+ if isinstance(obj, datetime):
+ return obj.strftime('%Y-%m-%dT%H:%M:%S')
+ elif isinstance(obj, date):
+ return obj.strftime('%Y-%m-%d')
+ # Let the base class default method raise the TypeError
+ return json.JSONEncoder.default(self, obj)
+
+
+def parse_resource_ids(resource_id):
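+ # Splits a comma-separated ID string, ignoring spaces: "i-1, i-2" -> ['i-1', 'i-2'].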
+ id_list = resource_id.replace(" ", "")
+ resource_ids = id_list.split(",")
+ return resource_ids
+
+
+def create_entry(title, data, ec):
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': data,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, data) if data else 'No results were found',
+ 'EntryContext': ec
+ }
+
+
+"""MAIN FUNCTIONS"""
+
+
+def get_function(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ kwargs = {'FunctionName': args.get('functionName')}
+ if args.get('qualifier') is not None:
+ kwargs.update({'Qualifier': args.get('qualifier')})
+
+ response = client.get_function(**kwargs)
+ func = response['Configuration']
+ data = ({
+ 'FunctionName': func['FunctionName'],
+ 'FunctionArn': func['FunctionArn'],
+ 'Runtime': func['Runtime'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+
+ raw = json.loads(json.dumps(response, cls=DatetimeEncoder))
+ if raw:
+ raw.update({'Region': obj['_user_provided_options']['region_name']})
+
+ ec = {'AWS.Lambda.Functions(val.FunctionArn === obj.FunctionArn)': raw}
+ human_readable = tableToMarkdown('AWS Lambda Functions', data)
+ return_outputs(human_readable, ec)
+
+
+def list_functions(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ data = []
+ output = []
+
+ paginator = client.get_paginator('list_functions')
+ for response in paginator.paginate():
+ for function in response['Functions']:
+ data.append({
+ 'FunctionName': function['FunctionName'],
+ 'FunctionArn': function['FunctionArn'],
+ 'Runtime': function['Runtime'],
+ 'LastModified': function['LastModified'],
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+ output.append(function)
+
+ raw = json.loads(json.dumps(response, cls=DatetimeEncoder))
+ if raw:
+ raw.update({'Region': obj['_user_provided_options']['region_name']})
+
+ ec = {'AWS.Lambda.Functions(val.FunctionArn === obj.FunctionArn)': raw}
+ human_readable = tableToMarkdown('AWS Lambda Functions', data)
+ return_outputs(human_readable, ec)
+
+
+def list_aliases(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ data = []
+ output = []
+ kwargs = {'FunctionName': args.get('functionName')}
+ if args.get('functionVersion') is not None:
+ kwargs.update({'FunctionVersion': args.get('functionVersion')})
+
+ paginator = client.get_paginator('list_aliases')
+ for response in paginator.paginate(**kwargs):
+ for alias in response['Aliases']:
+ data.append({
+ 'AliasArn': alias['AliasArn'],
+ 'Name': alias['Name'],
+ 'FunctionVersion': alias['FunctionVersion'],
+ })
+ output.append(alias)
+ try:
+ raw = json.loads(json.dumps(output, cls=DatetimeEncoder))
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ ec = {'AWS.Lambda.Aliases(val.AliasArn === obj.AliasArn)': raw}
+ human_readable = tableToMarkdown('AWS Lambda Aliases', data)
+ return_outputs(human_readable, ec)
+
+
+def invoke(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ kwargs = {'FunctionName': args.get('functionName')}
+ if args.get('invocationType') is not None:
+ kwargs.update({'InvocationType': args.get('invocationType')})
+ if args.get('logType') is not None:
+ kwargs.update({'LogType': args.get('logType')})
+ if args.get('clientContext') is not None:
+ kwargs.update({'ClientContext': args.get('clientContext')})
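+ # The payload argument is serialized with json.dumps before being passed to
+ # Lambda's Payload parameter.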
+ if args.get('payload') is not None:
+ kwargs.update({'Payload': json.dumps(args.get('payload'))})
+ if args.get('qualifier') is not None:
+ kwargs.update({'Qualifier': args.get('qualifier')})
+ response = client.invoke(**kwargs)
+ data = ({
+ 'FunctionName': args.get('functionName'),
+ 'Region': obj['_user_provided_options']['region_name'],
+ })
+ if 'LogResult' in response:
+ data.update({'LogResult': base64.b64decode(response['LogResult'])}) # type:ignore
+ if 'Payload' in response:
+ data.update({'Payload': response['Payload'].read().decode("utf-8")}) # type:ignore
+ if 'ExecutedVersion' in response:
+ data.update({'ExecutedVersion': response['ExecutedVersion']}) # type:ignore
+ if 'FunctionError' in response:
+ data.update({'FunctionError': response['FunctionError']})
+
+ ec = {'AWS.Lambda.InvokedFunctions(val.FunctionName === obj.FunctionName)': data}
+ human_readable = tableToMarkdown('AWS Lambda Invoked Functions', data)
+ return_outputs(human_readable, ec)
+
+
+def remove_permission(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {
+ 'FunctionName': args.get('functionName'),
+ 'StatementId': args.get('StatementId')
+ }
+ if args.get('Qualifier') is not None:
+ kwargs.update({'Qualifier': args.get('Qualifier')})
+ if args.get('RevisionId') is not None:
+ kwargs.update({'RevisionId': args.get('RevisionId')})
+
+ response = client.remove_permission(**kwargs)
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results('Permissions have been removed')
+
+
+def get_account_settings(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ obj = vars(client._client_config)
+ response = client.get_account_settings()
+ account_limit = response['AccountLimit']
+ account_usage = response['AccountUsage']
+ data = {
+ 'AccountLimit': {
+ 'TotalCodeSize': str(account_limit['TotalCodeSize']),
+ 'CodeSizeUnzipped': str(account_limit['CodeSizeUnzipped']),
+ 'CodeSizeZipped': str(account_limit['CodeSizeZipped']),
+ 'ConcurrentExecutions': str(account_limit['ConcurrentExecutions']),
+ 'UnreservedConcurrentExecutions': str(account_limit['UnreservedConcurrentExecutions'])
+ },
+ 'AccountUsage': {
+ 'TotalCodeSize': str(account_usage['TotalCodeSize']),
+ 'FunctionCount': str(account_usage['FunctionCount'])
+ }
+ }
+ try:
+ raw = json.loads(json.dumps(response, cls=DatetimeEncoder))
+ except ValueError as e:
+ return_error('Could not decode/encode the raw response - {err_msg}'.format(err_msg=e))
+ if raw:
+ raw.update({'Region': obj['_user_provided_options']['region_name']})
+
+ ec = {'AWS.Lambda.Functions(val.Region === obj.Region)': raw}
+ human_readable = tableToMarkdown('AWS Lambda Functions', data)
+ return_outputs(human_readable, ec)
+
+
+"""TEST FUNCTION"""
+
+
+def test_function():
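+ # test-module: validates credentials and connectivity by listing functions
+ # with the configured session.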
+ client = aws_session()
+ response = client.list_functions()
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results('ok')
+
+
+"""EXECUTION BLOCK"""
+try:
+ if demisto.command() == 'test-module':
+ test_function()
+ elif demisto.command() == 'aws-lambda-get-function':
+ get_function(demisto.args())
+ elif demisto.command() == 'aws-lambda-list-functions':
+ list_functions(demisto.args())
+ elif demisto.command() == 'aws-lambda-list-aliases':
+ list_aliases(demisto.args())
+ elif demisto.command() == 'aws-lambda-invoke':
+ invoke(demisto.args())
+ elif demisto.command() == 'aws-lambda-remove-permission':
+ remove_permission(demisto.args())
+ elif demisto.command() == 'aws-lambda-get-account-settings':
+ get_account_settings(demisto.args())
+except ResponseParserError as e:
+ LOG(str(e))
+ return_error('Could not connect to the AWS endpoint. Please check that the region is valid.\n {error}'.format(
+ error=type(e)))
+
+except Exception as e:
+ return_error('An error occurred in the AWS Lambda integration: {error}\n {message}'.format(
+ error=type(e), message=str(e)))
diff --git a/Integrations/AWS-Lambda/AWS-Lambda.yml b/Integrations/AWS-Lambda/AWS-Lambda.yml
new file mode 100644
index 000000000000..154b1367969d
--- /dev/null
+++ b/Integrations/AWS-Lambda/AWS-Lambda.yml
@@ -0,0 +1,665 @@
+category: IT Services
+commonfields:
+ id: AWS - Lambda
+ version: -1
+configuration:
+- display: AWS Default Region
+ name: defaultRegion
+ options:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ type: 15
+- display: Role Arn
+ name: roleArn
+ required: false
+ type: 0
+- display: Role Session Name
+ name: roleSessionName
+ required: false
+ type: 0
+- display: Role Session Duration
+ name: sessionDuration
+ required: false
+ type: 0
+- display: Access Key
+ name: access_key
+ required: false
+ type: 0
+- display: Secret Key
+ name: secret_key
+ required: false
+ type: 4
+- display: Trust any cert (Not Secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+description: Amazon Web Services serverless compute service (AWS Lambda).
+display: AWS - Lambda
+name: AWS - Lambda
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The name of the Lambda function.
+ isArray: false
+ name: functionName
+ required: true
+ secret: false
+ - default: false
+ description: Specify a version or alias to get details about a published version
+ of the function.
+ isArray: false
+ name: qualifier
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ predefined:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns the configuration information of the Lambda function, and
+ a presigned URL you can use to download the .zip file that you uploaded with
+ CreateFunction. Note that the URL is valid for up to 10 minutes. The configuration
+ information is the same information you provided as parameters when uploading
+ the function. Use the Qualifier parameter to retrieve a published version of
+ the function. Otherwise, the unpublished version ($LATEST) is returned.
+ execution: false
+ name: aws-lambda-get-function
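+ # Hypothetical usage sketch: !aws-lambda-get-function functionName=<function-name> qualifier=<alias-or-version>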
+ outputs:
+ - contextPath: AWS.Lambda.Functions.Configuration.FunctionName
+ description: The name of the function.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.FunctionArn
+ description: The function's Amazon Resource Name.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.Runtime
+ description: The runtime environment for the Lambda function.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.Role
+ description: The function's execution role.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.Handler
+ description: The function Lambda calls to begin executing your function.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.CodeSize
+ description: The size of the function's deployment package in bytes.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.Description
+ description: The function's description.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.Timeout
+ description: The amount of time that Lambda allows a function to run before
+ terminating it.
+ type: number
+ - contextPath: AWS.Lambda.Functions.Configuration.MemorySize
+ description: The memory allocated to the function.
+ type: number
+ - contextPath: AWS.Lambda.Functions.Configuration.LastModified
+ description: The date and time that the function was last updated, in ISO-8601
+ format (YYYY-MM-DDThh:mm:ss.sTZD).
+ type: date
+ - contextPath: AWS.Lambda.Functions.Configuration.CodeSha256
+ description: The SHA256 hash of the function's deployment package.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.Version
+ description: The version of the Lambda function.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.VpcConfig.SubnetIds
+ description: A list of VPC subnet IDs.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.VpcConfig.SecurityGroupIds
+ description: A list of VPC security groups IDs.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.VpcConfig.VpcId
+ description: The ID of the VPC.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.DeadLetterConfig.TargetArn
+ description: The Amazon Resource Name (ARN) of an Amazon SQS queue or Amazon
+ SNS topic.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.Environment.Variables
+ description: Environment variable key-value pairs.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.Environment.Error.ErrorCode
+ description: Error messages for environment variables that could not be applied.
+ The error code.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.Environment.Error.Message
+ description: Error messages for environment variables that could not be applied.
+ The error message.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.KMSKeyArn
+ description: The KMS key used to encrypt the function's environment variables.
+ Only returned if you've configured a customer managed CMK.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.TracingConfig.Mode
+ description: The function's AWS X-Ray tracing configuration. The tracing mode.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.MasterArn
+ description: The ARN of the master function.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.RevisionId
+ description: Represents the latest updated revision of the function or alias.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.Layers.Arn
+ description: The Amazon Resource Name (ARN) of the function layer.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Configuration.Layers.CodeSize
+ description: The size of the layer archive in bytes.
+ type: number
+ - contextPath: AWS.Lambda.Functions.Code.RepositoryType
+ description: The repository from which you can download the function.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Code.Location
+ description: The presigned URL you can use to download the function's .zip file
+ that you previously uploaded. The URL is valid for up to 10 minutes.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Tags
+ description: The list of tags associated with the function.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Concurrency.ReservedConcurrentExecutions
+ description: The number of concurrent executions reserved for this function.
+ type: string
+ - arguments:
+ - default: false
+ auto: PREDEFINED
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ predefined:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of your Lambda functions. For each function, the response
+ includes the function configuration information. You must use GetFunction to
+ retrieve the code for your function.
+ execution: false
+ name: aws-lambda-list-functions
+ outputs:
+ - contextPath: AWS.Lambda.Functions.FunctionName
+ description: The name of the function.
+ type: string
+ - contextPath: AWS.Lambda.Functions.FunctionArn
+ description: The function's Amazon Resource Name.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Runtime
+ description: The runtime environment for the Lambda function.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Role
+ description: The function's execution role.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Handler
+ description: The function Lambda calls to begin executing your function.
+ type: string
+ - contextPath: AWS.Lambda.Functions.CodeSize
+ description: The size of the function's deployment package in bytes.
+ type: number
+ - contextPath: AWS.Lambda.Functions.Description
+ description: The function's description.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Timeout
+ description: The amount of time that Lambda allows a function to run before
+ terminating it.
+ type: number
+ - contextPath: AWS.Lambda.Functions.MemorySize
+ description: The memory allocated to the function.
+ type: number
+ - contextPath: AWS.Lambda.Functions.LastModified
+ description: The date and time that the function was last updated, in ISO-8601
+ format (YYYY-MM-DDThh:mm:ss.sTZD).
+ type: date
+ - contextPath: AWS.Lambda.Functions.CodeSha256
+ description: The SHA256 hash of the function's deployment package.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Version
+ description: The version of the Lambda function.
+ type: string
+ - contextPath: AWS.Lambda.Functions.VpcConfig.SubnetIds
+ description: A list of VPC subnet IDs.
+ type: string
+ - contextPath: AWS.Lambda.Functions.VpcConfig.SecurityGroupIds
+ description: A list of VPC security groups IDs.
+ type: string
+ - contextPath: AWS.Lambda.Functions.VpcConfig.VpcId
+ description: The ID of the VPC.
+ type: string
+ - contextPath: AWS.Lambda.Functions.DeadLetterConfig.TargetArn
+ description: The Amazon Resource Name (ARN) of an Amazon SQS queue or Amazon
+ SNS topic.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Environment.Variables
+ description: Environment variable key-value pairs.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Environment.Error.ErrorCode
+ description: Error messages for environment variables that could not be applied.
+ The error code.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Environment.Error.Message
+ description: Error messages for environment variables that could not be applied.
+ The error message.
+ type: string
+ - contextPath: AWS.Lambda.Functions.KMSKeyArn
+ description: The KMS key used to encrypt the function's environment variables.
+ Only returned if you've configured a customer managed CMK.
+ type: string
+ - contextPath: AWS.Lambda.Functions.TracingConfig.Mode
+ description: The function's AWS X-Ray tracing configuration. The tracing mode.
+ type: string
+ - contextPath: AWS.Lambda.Functions.MasterArn
+ description: The ARN of the master function.
+ type: string
+ - contextPath: AWS.Lambda.Functions.RevisionId
+ description: Represents the latest updated revision of the function or alias.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Layers.Arn
+ description: The Amazon Resource Name (ARN) of the function layer.
+ type: string
+ - contextPath: AWS.Lambda.Functions.Layers.CodeSize
+ description: The size of the layer archive in bytes.
+ type: string
+ - arguments:
+ - default: false
+ description: The name of the lambda function.
+ isArray: false
+ name: functionName
+ required: true
+ secret: false
+ - default: false
+ description: If you specify this optional parameter, the API returns only the
+ aliases that point to the specified Lambda function version. Otherwise, the
+ API returns all of the aliases created for the Lambda function.
+ isArray: false
+ name: functionVersion
+ required: false
+ secret: false
+ - default: false
+ auto: PREDEFINED
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ predefined:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of aliases created for a Lambda function. For each alias,
+ the response includes information such as the alias ARN, description, alias
+ name, and the function version to which it points.
+ execution: false
+ name: aws-lambda-list-aliases
+ outputs:
+ - contextPath: AWS.Lambda.Aliases.AliasArn
+ description: The Lambda function ARN that is qualified using the alias name
+ as the suffix.
+ type: string
+ - contextPath: AWS.Lambda.Aliases.Name
+ description: Alias name.
+ type: string
+ - contextPath: AWS.Lambda.Aliases.FunctionVersion
+ description: Function version to which the alias points.
+ type: string
+ - contextPath: AWS.Lambda.Aliases.Description
+ description: Alias description.
+ type: string
+ - contextPath: AWS.Lambda.Aliases.RoutingConfig.AdditionalVersionWeights
+ description: The name of the second alias, and the percentage of traffic that
+ is routed to it.
+ type: string
+ - contextPath: AWS.Lambda.Aliases.RevisionId
+ description: Represents the latest updated revision of the function or alias.
+ type: string
+ - arguments:
+ - default: false
+ description: The name of the Lambda function.
+ isArray: false
+ name: functionName
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Choose from the following options. RequestResponse (default) -
+ Invoke the function synchronously. Keep the connection open until the function
+ returns a response or times out. Event - Invoke the function asynchronously.
+ Send events that fail multiple times to the function's dead-letter queue (if
+ configured). DryRun - Validate parameter values and verify that the user or
+ role has permission to invoke the function.
+ isArray: false
+ name: invocationType
+ predefined:
+ - Event
+ - RequestResponse
+ - DryRun
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: You can set this optional parameter to Tail in the request only
+ if you specify the InvocationType parameter with value RequestResponse. In
+ this case, AWS Lambda returns the base64-encoded last 4 KB of log data produced
+ by your Lambda function in the x-amz-log-result header.
+ isArray: false
+ name: logType
+ predefined:
+ - None
+ - Tail
+ required: false
+ secret: false
+ - default: false
+ description: Using the ClientContext parameter, you can pass client-specific
+ information to the Lambda function you are invoking.
+ isArray: false
+ name: clientContext
+ required: false
+ secret: false
+ - default: false
+ description: JSON that you want to provide to your Lambda function as input.
+ isArray: false
+ name: payload
+ required: false
+ secret: false
+ - default: false
+ description: Specify a version or alias to invoke a published version of the
+ function.
+ isArray: false
+ name: qualifier
+ required: false
+ secret: false
+ - default: false
+ auto: PREDEFINED
+ description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ predefined:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ secret: false
+ - default: false
+      description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: Invokes a Lambda function. Specify just a function name to invoke
+      the latest version of the function. To invoke a published version, use the Qualifier
+      parameter to specify a version or alias. If you use the RequestResponse (synchronous)
+      invocation option, the function is invoked only once. If you use the Event
+      (asynchronous) invocation option, the function is invoked at least once in response
+      to an event, so the function must be idempotent. For functions with a long timeout,
+      your client may be disconnected during synchronous invocation while it waits
+      for a response. Configure your Demisto server to allow for long connections
+      with timeout or keep-alive settings.
+ execution: true
+ name: aws-lambda-invoke
+ outputs:
+ - contextPath: AWS.Lambda.InvokedFunctions.FunctionName
+ description: The name of the Lambda function.
+ type: string
+ - contextPath: AWS.Lambda.InvokedFunctions.FunctionError
+      description: Indicates whether an error occurred while executing the Lambda
+        function. If an error occurred, this field has one of two values, Handled or
+        Unhandled. Handled errors are reported by the function, while Unhandled errors
+        are detected and reported by AWS Lambda. Unhandled errors include out-of-memory
+        errors and function timeouts.
+ type: string
+ - contextPath: AWS.Lambda.InvokedFunctions.LogResult
+      description: The logs for the Lambda function invocation. Present only if
+        the invocation type is RequestResponse and the logs were requested.
+ type: string
+ - contextPath: AWS.Lambda.InvokedFunctions.Payload
+      description: The JSON representation of the object returned by the Lambda
+        function. Present only if the invocation type is RequestResponse.
+ type: string
+ - contextPath: AWS.Lambda.InvokedFunctions.ExecutedVersion
+ description: The function version that has been executed. This value is returned
+ only if the invocation type is RequestResponse.
+ type: string
+ - contextPath: AWS.Lambda.InvokedFunctions.Region
+ description: The AWS Region.
+ type: string
+ - arguments:
+ - default: false
+      description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ auto: PREDEFINED
+ name: region
+ predefined:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ secret: false
+ - default: false
+      description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves details about your account's limits and usage in an AWS
+ Region.
+ execution: false
+ name: aws-lambda-get-account-settings
+ outputs:
+ - contextPath: AWS.Lambda.AccountLimit.TotalCodeSize
+ description: The amount of storage space that you can use for all deployment
+ packages and layer archives.
+ type: number
+ - contextPath: AWS.Lambda.AccountLimit.CodeSizeUnzipped
+ description: The maximum size of your function's code and layers when they're
+ extracted.
+ type: number
+ - contextPath: AWS.Lambda.AccountLimit.CodeSizeZipped
+ description: The maximum size of a deployment package when it's uploaded directly
+ to AWS Lambda. Use Amazon S3 for larger files.
+ type: number
+ - contextPath: AWS.Lambda.AccountLimit.ConcurrentExecutions
+ description: The maximum number of simultaneous function executions.
+ type: number
+ - contextPath: AWS.Lambda.AccountLimit.UnreservedConcurrentExecutions
+      description: The maximum number of simultaneous function executions, minus
+        the capacity that's reserved for individual functions with PutFunctionConcurrency.
+ type: number
+ - contextPath: AWS.Lambda.AccountUsage.TotalCodeSize
+ description: The amount of storage space, in bytes, that's being used by deployment
+ packages and layer archives.
+ type: number
+    - contextPath: AWS.Lambda.AccountUsage.FunctionCount
+ description: The number of Lambda functions.
+ type: number
+ dockerimage: demisto/boto3py3:1.0.0.1030
+ isfetch: false
+ runonce: false
+ subtype: python3
+ script: ''
+ type: python
+tests:
+- AWS-Lambda-Test (Read-Only)
diff --git a/Integrations/AWS-Lambda/AWS-Lambda_description.md b/Integrations/AWS-Lambda/AWS-Lambda_description.md
new file mode 100644
index 000000000000..e8e0b3291c1e
--- /dev/null
+++ b/Integrations/AWS-Lambda/AWS-Lambda_description.md
@@ -0,0 +1,14 @@
+Before you can use the AWS Lambda integration in Demisto, you need to perform several configuration steps in your AWS environment.
+
+### Prerequisites
+- Attach an instance profile with the required permissions to the Demisto server or engine that is running
+in your AWS environment.
+- The instance profile requires the minimum permission sts:AssumeRole.
+- The instance profile requires permission to assume the roles needed by the AWS integrations.
+
+### Configure AWS Settings
+1. Create an IAM Role for the Instance Profile.
+2. Attach a Role to the Instance Profile.
+3. Configure the Necessary IAM Roles that the AWS Integration Can Assume.
+
+For detailed instructions, [see the AWS Integrations Configuration Guide](https://support.demisto.com/hc/en-us/articles/360005686854-AWS-Integrations-Configuration-Guide).
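+
+As a rough sketch of step 3 (the account ID and role names below are hypothetical
+placeholders, not values from this guide), an assumable role can be created with boto3:
+
+```python
+import json
+
+import boto3
+
+iam = boto3.client('iam')
+
+# Trust policy that allows the instance-profile role to assume this role.
+trust_policy = {
+    "Version": "2012-10-17",
+    "Statement": [{
+        "Effect": "Allow",
+        "Principal": {"AWS": "arn:aws:iam::123456789012:role/DemistoInstanceProfileRole"},
+        "Action": "sts:AssumeRole",
+    }],
+}
+
+iam.create_role(
+    RoleName='DemistoLambdaIntegrationRole',
+    AssumeRolePolicyDocument=json.dumps(trust_policy),
+)
+```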
diff --git a/Integrations/AWS-Lambda/AWS-Lambda_image.png b/Integrations/AWS-Lambda/AWS-Lambda_image.png
new file mode 100644
index 000000000000..de729ebfc32c
Binary files /dev/null and b/Integrations/AWS-Lambda/AWS-Lambda_image.png differ
diff --git a/Integrations/AWS-Lambda/CHANGELOG.md b/Integrations/AWS-Lambda/CHANGELOG.md
new file mode 100644
index 000000000000..6a336285fd1d
--- /dev/null
+++ b/Integrations/AWS-Lambda/CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+* Bugfix for Proxy/Insecure issues.
+
+## [19.8.0] - 2019-08-06
+#### New Integration
+Amazon Web Services Serverless Compute service (Lambda).
\ No newline at end of file
diff --git a/Integrations/AWS-S3/AWS-S3.py b/Integrations/AWS-S3/AWS-S3.py
new file mode 100644
index 000000000000..705e151a9b74
--- /dev/null
+++ b/Integrations/AWS-S3/AWS-S3.py
@@ -0,0 +1,379 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+import boto3
+import io
+import math
+import json
+from datetime import datetime, date
+from botocore.config import Config
+from botocore.parsers import ResponseParserError
+import urllib3.util
+
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+"""PARAMETERS"""
+AWS_DEFAULT_REGION = demisto.params().get('defaultRegion')
+AWS_ROLE_ARN = demisto.params().get('roleArn')
+AWS_ROLE_SESSION_NAME = demisto.params().get('roleSessionName')
+AWS_ROLE_SESSION_DURATION = demisto.params().get('sessionDuration')
+AWS_ROLE_POLICY = None
+AWS_ACCESS_KEY_ID = demisto.params().get('access_key')
+AWS_SECRET_ACCESS_KEY = demisto.params().get('secret_key')
+VERIFY_CERTIFICATE = not demisto.params().get('insecure', True)
+proxies = handle_proxy(proxy_param_name='proxy', checkbox_default_value=False)
+config = Config(
+ connect_timeout=1,
+ retries=dict(
+ max_attempts=5
+ ),
+ proxies=proxies
+)
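+# The shared botocore Config above applies a 1-second connect timeout, up to 5
+# retry attempts, and the proxy settings resolved by handle_proxy().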
+
+
+"""HELPER FUNCTIONS"""
+
+
+def aws_session(service='s3', region=None, roleArn=None, roleSessionName=None, roleSessionDuration=None,
+                rolePolicy=None):
+    # Build a boto3 client for the given service. Per-command role arguments take
+    # precedence over the integration-wide defaults; when a role is configured and
+    # no static access key is set, temporary credentials are fetched via STS.
+    kwargs = {}
+    if roleArn is not None and roleSessionName is not None:
+ kwargs.update({
+ 'RoleArn': roleArn,
+ 'RoleSessionName': roleSessionName,
+ })
+    elif AWS_ROLE_ARN is not None and AWS_ROLE_SESSION_NAME is not None:
+ kwargs.update({
+ 'RoleArn': AWS_ROLE_ARN,
+ 'RoleSessionName': AWS_ROLE_SESSION_NAME,
+ })
+
+ if roleSessionDuration is not None:
+ kwargs.update({'DurationSeconds': int(roleSessionDuration)})
+ elif AWS_ROLE_SESSION_DURATION is not None:
+ kwargs.update({'DurationSeconds': int(AWS_ROLE_SESSION_DURATION)})
+
+ if rolePolicy is not None:
+ kwargs.update({'Policy': rolePolicy})
+ elif AWS_ROLE_POLICY is not None:
+ kwargs.update({'Policy': AWS_ROLE_POLICY})
+    if kwargs and AWS_ACCESS_KEY_ID is None:
+        # No static credentials configured; assume the role via STS.
+        sts_client = boto3.client('sts', config=config, verify=VERIFY_CERTIFICATE)
+        sts_response = sts_client.assume_role(**kwargs)
+        if region is not None:
+            client = boto3.client(
+                service_name=service,
+                region_name=region,
+                aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+                aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+                aws_session_token=sts_response['Credentials']['SessionToken'],
+                verify=VERIFY_CERTIFICATE,
+                config=config
+            )
+        else:
+            client = boto3.client(
+                service_name=service,
+                region_name=AWS_DEFAULT_REGION,
+                aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+                aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+                aws_session_token=sts_response['Credentials']['SessionToken'],
+                verify=VERIFY_CERTIFICATE,
+                config=config
+            )
+ elif AWS_ACCESS_KEY_ID and AWS_ROLE_ARN:
+ sts_client = boto3.client(
+ service_name='sts',
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ kwargs.update({
+ 'RoleArn': AWS_ROLE_ARN,
+ 'RoleSessionName': AWS_ROLE_SESSION_NAME,
+ })
+ sts_response = sts_client.assume_role(**kwargs)
+ client = boto3.client(
+ service_name=service,
+ region_name=AWS_DEFAULT_REGION,
+ aws_access_key_id=sts_response['Credentials']['AccessKeyId'],
+ aws_secret_access_key=sts_response['Credentials']['SecretAccessKey'],
+ aws_session_token=sts_response['Credentials']['SessionToken'],
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ else:
+ if region is not None:
+ client = boto3.client(
+ service_name=service,
+ region_name=region,
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+ else:
+ client = boto3.client(
+ service_name=service,
+ region_name=AWS_DEFAULT_REGION,
+ aws_access_key_id=AWS_ACCESS_KEY_ID,
+ aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+ verify=VERIFY_CERTIFICATE,
+ config=config
+ )
+
+ return client
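+
+# Example call (the role ARN below is a hypothetical placeholder):
+#   client = aws_session(region='us-east-1',
+#                        roleArn='arn:aws:iam::123456789012:role/DemistoS3',
+#                        roleSessionName='demisto-s3-session')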
+
+
+def convert_size(size_bytes):
+ if size_bytes == 0:
+ return "0B"
+ size_name = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
+ i = int(math.floor(math.log(size_bytes, 1024)))
+ p = math.pow(1024, i)
+ s = round(size_bytes / p, 2)
+ return "{} {}".format(s, size_name[i])
+
+
+class DatetimeEncoder(json.JSONEncoder):
+ # pylint: disable=method-hidden
+ def default(self, obj):
+ if isinstance(obj, datetime):
+ return obj.strftime('%Y-%m-%dT%H:%M:%S')
+ elif isinstance(obj, date):
+ return obj.strftime('%Y-%m-%d')
+ # Let the base class default method raise the TypeError
+ return json.JSONEncoder.default(self, obj)
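+# Typical use: json.dumps(response, cls=DatetimeEncoder), so boto3 responses that
+# contain datetime/date objects serialize cleanly.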
+
+
+def create_bucket_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ data = []
+ kwargs = {'Bucket': args.get('bucket').lower()}
+ if args.get('acl') is not None:
+ kwargs.update({'ACL': args.get('acl')})
+ if args.get('locationConstraint') is not None:
+ kwargs.update({'CreateBucketConfiguration': {'LocationConstraint': args.get('locationConstraint')}})
+ if args.get('grantFullControl') is not None:
+ kwargs.update({'GrantFullControl': args.get('grantFullControl')})
+ if args.get('grantRead') is not None:
+ kwargs.update({'GrantRead': args.get('grantRead')})
+ if args.get('grantReadACP') is not None:
+ kwargs.update({'GrantReadACP': args.get('grantReadACP')})
+ if args.get('grantWrite') is not None:
+ kwargs.update({'GrantWrite': args.get('grantWrite')})
+ if args.get('grantWriteACP') is not None:
+ kwargs.update({'GrantWriteACP': args.get('grantWriteACP')})
+
+ response = client.create_bucket(**kwargs)
+
+ data.append({
+ 'BucketName': args.get('bucket'),
+ 'Location': response['Location']
+ })
+ ec = {'AWS.S3.Buckets': data}
+ human_readable = tableToMarkdown('AWS S3 Buckets', data)
+ return_outputs(human_readable, ec)
+
+
+def delete_bucket_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+
+ response = client.delete_bucket(Bucket=args.get('bucket').lower())
+ if response['ResponseMetadata']['HTTPStatusCode'] == 204:
+ demisto.results("the Bucket {bucket} was Deleted ".format(bucket=args.get('bucket')))
+
+
+def list_buckets_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ data = []
+ response = client.list_buckets()
+ for bucket in response['Buckets']:
+ data.append({
+ 'BucketName': bucket['Name'],
+ 'CreationDate': datetime.strftime(bucket['CreationDate'], '%Y-%m-%dT%H:%M:%S')
+ })
+ ec = {'AWS.S3.Buckets(val.BucketName === obj.BucketName)': data}
+ human_readable = tableToMarkdown('AWS S3 Buckets', data)
+ return_outputs(human_readable, ec)
+
+
+def get_bucket_policy_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ data = []
+ response = client.get_bucket_policy(Bucket=args.get('bucket').lower())
+ policy = json.loads(response['Policy'])
+ statements = policy['Statement']
+ for statement in statements:
+ data.append({
+ 'BucketName': args.get('bucket'),
+ 'PolicyId': policy['Id'],
+ 'PolicyVersion': policy['Version'],
+ 'Sid': statement['Sid'],
+ 'Action': statement['Action'],
+ 'Principal': statement['Principal'],
+ 'Resource': statement['Resource'],
+ 'Effect': statement['Effect'],
+ 'Json': response['Policy']
+ })
+ ec = {'AWS.S3.Buckets(val.BucketName === obj.BucketName).Policy': data}
+ human_readable = tableToMarkdown('AWS S3 Bucket Policy', data)
+ return_outputs(human_readable, ec)
+
+
+def put_bucket_policy_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ kwargs = {
+ 'Bucket': args.get('bucket').lower(),
+ 'Policy': args.get('policy')
+ }
+ if args.get('confirmRemoveSelfBucketAccess') is not None:
+        kwargs.update({'ConfirmRemoveSelfBucketAccess': args.get('confirmRemoveSelfBucketAccess') == 'True'})
+
+ response = client.put_bucket_policy(**kwargs)
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+        demisto.results('Successfully applied bucket policy to the {bucket} bucket'.format(bucket=args.get('bucket')))
+
+
+def delete_bucket_policy_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ client.delete_bucket_policy(Bucket=args.get('bucket').lower())
+ demisto.results('Policy deleted from {}'.format(args.get('bucket')))
+
+
+def download_file_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ data = io.BytesIO()
+ client.download_fileobj(args.get('bucket').lower(), args.get('key'), data)
+
+ demisto.results(fileResult(args.get('key'), data.getvalue()))
+
+
+def list_objects_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ data = []
+ response = client.list_objects(Bucket=args.get('bucket'))
+ for key in response['Contents']:
+ data.append({
+ 'Key': key['Key'],
+ 'Size': convert_size(key['Size']),
+ 'LastModified': datetime.strftime(key['LastModified'], '%Y-%m-%dT%H:%M:%S')
+ })
+
+    # Link the listed objects to the bucket they belong to in the context.
+    ec = {'AWS.S3.Buckets(val.BucketName === "{}").Objects'.format(args.get('bucket')): data}
+ human_readable = tableToMarkdown('AWS S3 Bucket Objects', data)
+ return_outputs(human_readable, ec)
+
+
+def get_file_path(file_id):
+ filepath_result = demisto.getFilePath(file_id)
+ return filepath_result
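+# demisto.getFilePath returns a dict that includes the file's 'path' and 'name';
+# upload_file_command below reads the upload source from the 'path' field.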
+
+
+def upload_file_command(args):
+ client = aws_session(
+ region=args.get('region'),
+ roleArn=args.get('roleArn'),
+ roleSessionName=args.get('roleSessionName'),
+ roleSessionDuration=args.get('roleSessionDuration'),
+ )
+ path = get_file_path(args.get('entryID'))
+
+ try:
+ with open(path['path'], 'rb') as data:
+ client.upload_fileobj(data, args.get('bucket'), args.get('key'))
+ demisto.results('File {file} was uploaded successfully to {bucket}'.format(
+ file=args.get('key'), bucket=args.get('bucket')))
+ except (OSError, IOError) as e:
+ return_error("Could not read file: {path}\n {msg}".format(path=path, msg=e.message))
+
+
+"""COMMAND BLOCK"""
+try:
+ LOG('Command being called is {command}'.format(command=demisto.command()))
+ if demisto.command() == 'test-module':
+ client = aws_session()
+ response = client.list_buckets()
+ if response['ResponseMetadata']['HTTPStatusCode'] == 200:
+ demisto.results('ok')
+
+ elif demisto.command() == 'aws-s3-create-bucket':
+ create_bucket_command(demisto.args())
+
+ elif demisto.command() == 'aws-s3-delete-bucket':
+ delete_bucket_command(demisto.args())
+
+ elif demisto.command() == 'aws-s3-list-buckets':
+ list_buckets_command(demisto.args())
+
+ elif demisto.command() == 'aws-s3-get-bucket-policy':
+ get_bucket_policy_command(demisto.args())
+
+ elif demisto.command() == 'aws-s3-put-bucket-policy':
+ put_bucket_policy_command(demisto.args())
+
+ elif demisto.command() == 'aws-s3-delete-bucket-policy':
+ delete_bucket_policy_command(demisto.args())
+
+ elif demisto.command() == 'aws-s3-download-file':
+ download_file_command(demisto.args())
+
+ elif demisto.command() == 'aws-s3-list-bucket-objects':
+ list_objects_command(demisto.args())
+
+ elif demisto.command() == 'aws-s3-upload-file':
+ upload_file_command(demisto.args())
+
+except ResponseParserError as e:
+    LOG(e.message)  # log first; return_error exits the script
+    return_error('Could not connect to the AWS endpoint. Please check that the region is valid.\n {error}'.format(
+        error=type(e)))
+
+except Exception as e:
+ return_error('Error has occurred in the AWS S3 Integration: {error}\n {message}'.format(
+ error=type(e), message=e.message))
diff --git a/Integrations/AWS-S3/AWS-S3.yml b/Integrations/AWS-S3/AWS-S3.yml
new file mode 100644
index 000000000000..8556d84ee358
--- /dev/null
+++ b/Integrations/AWS-S3/AWS-S3.yml
@@ -0,0 +1,533 @@
+category: IT Services
+commonfields:
+ id: AWS - S3
+ version: -1
+configuration:
+- display: Role Arn
+ name: roleArn
+ required: false
+ type: 0
+- display: Role Session Name
+ name: roleSessionName
+ required: false
+ type: 0
+- display: AWS Default Region
+ name: defaultRegion
+ options:
+ - us-east-1
+ - us-east-2
+ - us-west-1
+ - us-west-2
+ - ca-central-1
+ - eu-west-1
+ - eu-central-1
+ - eu-west-2
+ - ap-northeast-1
+ - ap-northeast-2
+ - ap-southeast-1
+ - ap-southeast-2
+ - ap-south-1
+ - sa-east-1
+ - eu-north-1
+ - eu-west-3
+ required: false
+ type: 15
+- display: Role Session Duration
+ name: sessionDuration
+ required: false
+ type: 0
+- display: Access Key
+ name: access_key
+ required: false
+ type: 0
+- display: Secret Key
+ name: secret_key
+ required: false
+ type: 4
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Amazon Web Services Simple Storage Service (S3)
+display: AWS - S3
+name: AWS - S3
+script:
+ commands:
+ - arguments:
+ - default: false
+      description: The name of the S3 bucket to create (in lowercase).
+ isArray: false
+ name: bucket
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: ACL for S3 bucket.
+ isArray: false
+ name: acl
+ predefined:
+ - private
+ - public-read
+ - public-read-write
+ - authenticated-read
+ required: false
+ secret: false
+ - default: false
+ description: Specifies the region where the bucket will be created. If you don't
+ specify a region, the bucket will be created in US Standard.
+ isArray: false
+ name: locationConstraint
+ required: false
+ secret: false
+ - default: false
+ description: Allows grantee the read, write, read ACP, and write ACP permissions
+ on the bucket.
+ isArray: false
+ name: grantFullControl
+ required: false
+ secret: false
+ - default: false
+ description: Allows grantee to list the objects in the bucket.
+ isArray: false
+ name: grantRead
+ required: false
+ secret: false
+ - default: false
+ description: Allows grantee to read the bucket ACL.
+ isArray: false
+ name: grantReadACP
+ required: false
+ secret: false
+ - default: false
+ description: Allows grantee to create, overwrite, and delete any object in the
+ bucket.
+ isArray: false
+ name: grantWrite
+ required: false
+ secret: false
+ - default: false
+ description: Allows grantee to write the ACL for the applicable bucket.
+ isArray: false
+ name: grantWriteACP
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: Creates an AWS S3 bucket.
+ execution: false
+ name: aws-s3-create-bucket
+ outputs:
+ - contextPath: AWS.S3.Buckets.BucketName
+ description: The name of the bucket that was created.
+ type: string
+ - contextPath: AWS.S3.Buckets.Location
+      description: The AWS Region in which the bucket was created.
+ type: string
+ - arguments:
+ - default: false
+      description: The name of the S3 bucket to delete.
+ isArray: false
+ name: bucket
+ required: true
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: Deletes an AWS S3 bucket.
+ execution: false
+ name: aws-s3-delete-bucket
+ - arguments:
+ - default: false
+      description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: Lists all S3 buckets in the AWS account.
+ execution: false
+ name: aws-s3-list-buckets
+ outputs:
+ - contextPath: AWS.S3.Buckets.BucketName
+ description: The name of the bucket.
+ type: string
+ - contextPath: AWS.S3.Buckets.CreationDate
+ description: Date the bucket was created.
+ type: date
+ - arguments:
+ - default: false
+      description: The name of the bucket.
+ isArray: false
+ name: bucket
+ required: true
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: Gets the policy of an AWS S3 bucket.
+ execution: false
+ name: aws-s3-get-bucket-policy
+ outputs:
+ - contextPath: AWS.S3.Buckets.Policy.Version
+ description: S3 Bucket Policy Version.
+ type: string
+ - contextPath: AWS.S3.Buckets.Policy.PolicyId
+ description: S3 Bucket Policy ID.
+ type: string
+    - contextPath: AWS.S3.Buckets.Policy.Sid
+      description: S3 Bucket Policy Statement ID.
+      type: string
+    - contextPath: AWS.S3.Buckets.Policy.Action
+      description: S3 Bucket Policy Statement Action.
+      type: string
+    - contextPath: AWS.S3.Buckets.Policy.Principal
+      description: S3 Bucket Policy Statement Principal.
+      type: string
+    - contextPath: AWS.S3.Buckets.Policy.Resource
+      description: S3 Bucket Policy Statement Resource.
+      type: string
+    - contextPath: AWS.S3.Buckets.Policy.Effect
+      description: S3 Bucket Policy Statement Effect.
+      type: string
+ - contextPath: AWS.S3.Buckets.Policy.Json
+ description: AWS S3 Policy Json output.
+ type: string
+ - contextPath: AWS.S3.Buckets.Policy.BucketName
+ description: S3 Bucket Name.
+ type: string
+ - arguments:
+ - default: false
+      description: The name of the S3 bucket.
+ isArray: false
+ name: bucket
+ required: true
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes the policy from the bucket.
+ execution: false
+ name: aws-s3-delete-bucket-policy
+ - arguments:
+ - default: false
+      description: The name of the S3 bucket.
+ isArray: false
+ name: bucket
+ required: false
+ secret: false
+ - default: false
+ description: The S3 object key to download.
+ isArray: false
+ name: key
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: Downloads a file from an S3 bucket to the War Room.
+ execution: false
+ name: aws-s3-download-file
+ - arguments:
+ - default: false
+      description: The name of the S3 bucket.
+ isArray: false
+ name: bucket
+ required: true
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: Lists the objects in an S3 bucket.
+ execution: false
+ name: aws-s3-list-bucket-objects
+ outputs:
+ - contextPath: AWS.S3.Buckets.Objects.Key
+ description: The name of S3 object.
+ type: Unknown
+ - contextPath: AWS.S3.Buckets.Objects.Size
+ description: Object size.
+ type: Unknown
+ - contextPath: AWS.S3.Buckets.Objects.LastModified
+ description: Last date object was modified.
+ type: Unknown
+ - arguments:
+ - default: false
+      description: The name of the S3 bucket.
+ isArray: false
+ name: bucket
+ required: true
+ secret: false
+ - default: false
+      description: The bucket policy to apply, in JSON format.
+ isArray: false
+ name: policy
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Set this parameter to true to confirm that you want to remove your
+ permissions to change this bucket policy in the future.
+ isArray: false
+ name: confirmRemoveSelfBucketAccess
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+ description: Replaces a policy on a bucket. If the bucket already has a policy,
+ the one in this request completely replaces it.
+ execution: false
+ name: aws-s3-put-bucket-policy
+ - arguments:
+ - default: false
+      description: The entry ID of the file to upload.
+ isArray: false
+ name: entryID
+ required: true
+ secret: false
+ - default: false
+      description: The name of the bucket to upload to.
+ isArray: false
+ name: bucket
+ required: true
+ secret: false
+ - default: false
+      description: The name of the key to upload to.
+ isArray: false
+ name: key
+ required: true
+ secret: false
+ - default: false
+      description: The AWS Region. If not specified, the default region is used.
+ isArray: false
+ name: region
+ required: false
+ secret: false
+ - default: false
+ description: The Amazon Resource Name (ARN) of the role to assume.
+ isArray: false
+ name: roleArn
+ required: false
+ secret: false
+ - default: false
+ description: An identifier for the assumed role session.
+ isArray: false
+ name: roleSessionName
+ required: false
+ secret: false
+ - default: false
+ description: The duration, in seconds, of the role session. The value can range
+ from 900 seconds (15 minutes) up to the maximum session duration setting for
+ the role.
+ isArray: false
+ name: roleSessionDuration
+ required: false
+ secret: false
+ deprecated: false
+    description: Uploads a file to an S3 bucket.
+ execution: false
+ name: aws-s3-upload-file
+ dockerimage: demisto/boto3
+ isfetch: false
+ runonce: false
+ script: ''
+ type: python
+ subtype: python2
+tests:
+- 97393cfc-2fc4-4dfe-8b6e-af64067fc436
diff --git a/Integrations/AWS-S3/AWS-S3_description.md b/Integrations/AWS-S3/AWS-S3_description.md
new file mode 100644
index 000000000000..2eae109ea4b6
--- /dev/null
+++ b/Integrations/AWS-S3/AWS-S3_description.md
@@ -0,0 +1,14 @@
+Before you can use AWS S3, you need to perform several configuration steps in your AWS environment.
+
+### Prerequisites
+- Attach an instance profile with the required permissions to the Demisto server or engine that is running
+in your AWS environment.
+- The instance profile requires the minimum permission sts:AssumeRole.
+- The instance profile requires permission to assume the roles needed by the AWS integrations.
+
+### Configure AWS Settings
+1. Create an IAM Role for the Instance Profile.
+2. Attach a Role to the Instance Profile.
+3. Configure the Necessary IAM Roles that the AWS Integration Can Assume.
+
+For detailed instructions, see the [AWS Integrations Configuration Guide](https://support.demisto.com/hc/en-us/articles/360005686854-AWS-Integrations-Configuration-Guide).
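+
+A minimal boto3 sketch for steps 1 and 2 (the instance-profile and role names are
+hypothetical placeholders, not values from this guide):
+
+```python
+import boto3
+
+iam = boto3.client('iam')
+
+# Create the instance profile and attach an existing role to it.
+iam.create_instance_profile(InstanceProfileName='DemistoInstanceProfile')
+iam.add_role_to_instance_profile(
+    InstanceProfileName='DemistoInstanceProfile',
+    RoleName='DemistoInstanceProfileRole',
+)
+```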
diff --git a/Integrations/AWS-S3/AWS-S3_image.png b/Integrations/AWS-S3/AWS-S3_image.png
new file mode 100644
index 000000000000..862a38298912
Binary files /dev/null and b/Integrations/AWS-S3/AWS-S3_image.png differ
diff --git a/Integrations/AWS-S3/CHANGELOG.md b/Integrations/AWS-S3/CHANGELOG.md
new file mode 100644
index 000000000000..625b62d0440a
--- /dev/null
+++ b/Integrations/AWS-S3/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+Bugfix for Proxy/Insecure issues.
\ No newline at end of file
diff --git a/Integrations/AbuseDB/AbuseDB.py b/Integrations/AbuseDB/AbuseDB.py
new file mode 100644
index 000000000000..e7e64066d210
--- /dev/null
+++ b/Integrations/AbuseDB/AbuseDB.py
@@ -0,0 +1,288 @@
+import demistomock as demisto
+from CommonServerPython import *
+''' IMPORTS '''
+import requests
+import os
+import csv
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS '''
+VERBOSE = True
+SERVER = demisto.params().get('server')
+if not SERVER.endswith('/'):
+ SERVER += '/'
+API_KEY = demisto.params().get('apikey')
+MAX_AGE = demisto.params().get('days')
+THRESHOLD = demisto.params().get('threshold')
+INSECURE = demisto.params().get('insecure')
+TEST_IP = "127.0.0.2"
+BLACKLIST_SCORE = 3
+CHECK_CMD = "check"
+CHECK_BLOCK_CMD = "check-block"
+REPORT_CMD = "report"
+BLACKLIST_CMD = 'blacklist'
+ANALYSIS_TITLE = "AbuseIPDB Analysis"
+BLACKLIST_TITLE = "AbuseIPDB Blacklist"
+REPORT_SUCCESS = "IP address reported successfully."
+
+HEADERS = {
+ 'Key': API_KEY,
+ 'Accept': 'application/json'
+}
+
+PROXY = demisto.params().get('proxy')
+if not PROXY:
+    # pop() with a default avoids a KeyError when a variable is not set
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+CATEGORIES_NAME = {
+    3: 'Fraud_Orders',
+ 4: 'DDoS_Attack',
+ 5: 'FTP_Brute-Force',
+ 6: 'Ping of Death',
+ 7: 'Phishing',
+ 8: 'Fraud VoIP',
+ 9: 'Open_Proxy',
+ 10: 'Web_Spam',
+ 11: 'Email_Spam',
+ 12: 'Blog_Spam',
+ 13: 'VPN IP',
+ 14: 'Port_Scan',
+ 15: 'Hacking',
+ 16: 'SQL Injection',
+ 17: 'Spoofing',
+ 18: 'Brute_Force',
+ 19: 'Bad_Web_Bot',
+ 20: 'Exploited_Host',
+ 21: 'Web_App_Attack',
+ 22: 'SSH',
+ 23: 'IoT_Targeted'
+}
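+# CATEGORIES_NAME maps AbuseIPDB numeric category IDs to readable names;
+# CATEGORIES_ID below is the reverse lookup used when reporting an IP address.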
+
+CATEGORIES_ID = {
+ "Frad_Orders": "3",
+ "DDoS_Attack": "4",
+ "FTP_Brute": "5",
+ "Ping of Death": "6",
+ "Phishing": "7",
+ "Fraud VoIP": "8",
+ "Open_Proxy": "9",
+ "Web_Spam": "10",
+ "Email_Spam": "11",
+ "Blog_Spam": "12",
+ "VPN IP": "13",
+ "Port_Scan": "14",
+ "Hacking": "15",
+ "SQL Injection": "16",
+ "Spoofing": "17",
+ "Brute_Force": "18",
+ "Bad_Web_Bot": "19",
+ "Exploited_Host": "20",
+ "Web_App_Attack": "21",
+ "SSH": "22",
+ "IoT_Targeted": "23"
+}
+
+session = requests.session()
+
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, params=None, headers=HEADERS, threshold=THRESHOLD):
+ LOG('running request with url=%s' % (SERVER + url_suffix))
+ try:
+ analysis = session.request(method, SERVER + url_suffix, headers=headers, params=params, verify=not INSECURE)
+
+ if analysis.status_code not in {200, 204, 429}:
+            return_error('Bad connection attempt. Status code: ' + str(analysis.status_code))
+ if analysis.status_code == 429:
+ if demisto.params().get('disregard_quota'):
+ return 'Too many requests (possibly bad API key). Status code: ' + str(analysis.status_code)
+ else:
+ return_error('Too many requests (possibly bad API key). Status code: ' + str(analysis.status_code))
+
+ return REPORT_SUCCESS if url_suffix == REPORT_CMD else analysis.json()
+ except Exception as e:
+ LOG(e)
+ return_error(e.message)
+
+
+def analysis_to_entry(info, threshold=THRESHOLD, verbose=VERBOSE):
+ if not isinstance(info, list):
+ info = [info]
+
+ context_ip_generic, context_ip, human_readable, dbot_scores = [], [], [], []
+ for analysis in info:
+ ip_ec = {
+ "Address": analysis.get("ipAddress"),
+ "Geo": {"Country": analysis.get("countryName") or analysis.get("countryCode")}
+ }
+ abuse_ec = {
+ "IP": {
+ "Address": analysis.get("ipAddress"),
+ "Geo": {"Country": analysis.get("countryName") or analysis.get("countryCode")},
+ 'AbuseConfidenceScore': analysis.get('abuseConfidenceScore'),
+ "TotalReports": analysis.get("totalReports") or analysis.get("numReports") or "0"
+ }
+ }
+
+ if verbose:
+ reports = sum([report_dict.get("categories") for report_dict in analysis.get("reports")], []) # type: list
+ categories = set(reports)
+ abuse_ec["IP"]["Reports"] = {CATEGORIES_NAME[c]: reports.count(c) for c in categories}
+
+ human_readable.append(abuse_ec['IP'])
+
+ dbot_score = getDBotScore(analysis, threshold)
+ if dbot_score == 3:
+ ip_ec["Malicious"] = abuse_ec["IP"]["Malicious"] = {
+ 'Vendor': "AbuseIPDB",
+ 'Detections': 'The address was reported as Malicious by AbuseIPDB.',
+ 'Description': 'The address was reported as Malicious by AbuseIPDB.'
+
+ }
+ dbot_scores.append({
+ "Score": dbot_score,
+ "Vendor": "AbuseIPDB",
+ "Indicator": analysis.get("ipAddress"),
+ "Type": "ip"
+ })
+ context_ip.append(abuse_ec)
+ context_ip_generic.append(ip_ec)
+
+ return createEntry(context_ip, context_ip_generic, human_readable, dbot_scores, title=ANALYSIS_TITLE)
+
+
+def blacklist_to_entry(data, saveToContext):
+ if not isinstance(data, list):
+ data = [data]
+
+ ips = [d.get("ipAddress") for d in data]
+ context = {"Blacklist": ips}
+ temp = demisto.uniqueFile()
+ with open(demisto.investigation()['id'] + '_' + temp, 'wb') as f:
+ wr = csv.writer(f, quoting=csv.QUOTE_ALL)
+ for ip in ips:
+ wr.writerow([ip])
+ entry = {
+ 'HumanReadable': '',
+ 'Contents': ips,
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['file'],
+ 'File': "Blacklist.csv",
+ 'FileID': temp,
+ 'EntryContext': {'AbuseIPDB': createContext(context if saveToContext else None, removeNull=True)}
+ }
+ return entry
+
+
+def getDBotScore(analysis, threshold=THRESHOLD):
+ total_reports = analysis.get("totalReports") or analysis.get("numReports") or 0
+ abuse_score = int(analysis.get("abuseConfidenceScore"))
+ dbot_score = 0 if total_reports == 0 else 1 if abuse_score < 20 else 2 if abuse_score < int(threshold) else 3
+ return dbot_score
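+
+# Worked example with the default threshold of 80: no reports -> 0 (unknown);
+# one or more reports with abuseConfidenceScore 10 -> 1 (good), 55 -> 2 (suspicious),
+# 85 -> 3 (malicious).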
+
+
+def createEntry(context_ip, context_ip_generic, human_readable, dbot_scores, title):
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': context_ip,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, human_readable, removeNull=True),
+ 'EntryContext': {
+ 'IP(val.Address && val.Address == obj.Address)': createContext(context_ip_generic, removeNull=True),
+ 'AbuseIPDB(val.IP.Address && val.IP.Address == obj.IP.Address)': createContext(context_ip, removeNull=True),
+ 'DBotScore': createContext(dbot_scores, removeNull=True)
+ }
+ }
+ return entry
+
+
+''' FUNCTIONS '''
+
+
+def check_ip_command(ip, days=MAX_AGE, verbose=VERBOSE, threshold=THRESHOLD):
+ params = {
+ "ipAddress": ip,
+ "maxAgeInDays": days
+ }
+ if verbose:
+ params['verbose'] = "verbose"
+ analysis = http_request("GET", url_suffix=CHECK_CMD, params=params).get("data")
+ return analysis_to_entry(analysis, verbose=verbose, threshold=threshold)
+
+
+def check_block_command(network, limit, days=MAX_AGE, threshold=THRESHOLD):
+ params = {
+ "network": network,
+ "maxAgeInDays": days
+ }
+ analysis = http_request("GET", url_suffix=CHECK_BLOCK_CMD, params=params).get("data").get("reportedAddress")
+ return analysis_to_entry(analysis[:int(limit) if limit.isdigit() else 40], verbose=False, threshold=threshold)
+
+
+def report_ip_command(ip, categories):
+ params = {
+ "ip": ip,
+ "categories": ",".join([CATEGORIES_ID[c] if c in CATEGORIES_ID else c for c in categories.split()])
+ }
+ analysis = http_request("POST", url_suffix=REPORT_CMD, params=params)
+ return analysis
+
+
+def get_blacklist_command(limit, days, saveToContext):
+ params = {
+ 'maxAgeInDays': days,
+ "limit": limit
+ }
+ analysis = http_request("GET", url_suffix=BLACKLIST_CMD, params=params)
+ return analysis if type(analysis) is str else blacklist_to_entry(analysis.get("data"), saveToContext)
+
+
+def test_module():
+ try:
+ check_ip_command(ip=TEST_IP, verbose=False)
+ except Exception as e:
+ LOG(e)
+ return_error(e.message)
+ demisto.results('ok')
+
+
+def get_categories_command():
+ categories = {str(key): value for key, value in CATEGORIES_NAME.items()}
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': categories,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown("AbuseIPDB report categories", categories, removeNull=True),
+ 'EntryContext': {'AbuseIPDB.Categories(val && val == obj)': createContext(categories, removeNull=True),
+ }
+ }
+ return entry
+
+
+try:
+ if demisto.command() == 'test-module':
+        # Tests connectivity and credentials on login
+ test_module()
+ elif demisto.command() == 'ip':
+ demisto.results(check_ip_command(**demisto.args()))
+ elif demisto.command() == 'abuseipdb-check-cidr-block':
+ demisto.results(check_block_command(**demisto.args()))
+ elif demisto.command() == 'abuseipdb-report-ip':
+ demisto.results(report_ip_command(**demisto.args()))
+ elif demisto.command() == 'abuseipdb-get-blacklist':
+ demisto.results(get_blacklist_command(**demisto.args()))
+ elif demisto.command() == 'abuseipdb-get-categories':
+        demisto.results(get_categories_command())  # takes no arguments
+
+except Exception as e:
+ LOG.print_log()
+ return_error(e.message)
diff --git a/Integrations/AbuseDB/AbuseDB.yml b/Integrations/AbuseDB/AbuseDB.yml
new file mode 100644
index 000000000000..9842978ae7f5
--- /dev/null
+++ b/Integrations/AbuseDB/AbuseDB.yml
@@ -0,0 +1,282 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: AbuseIPDB
+ version: -1
+configuration:
+- defaultvalue: https://api.abuseipdb.com/api/v2/
+ display: AbuseIP server URL
+ name: server
+ required: true
+ type: 0
+- display: API Key (v2)
+ name: apikey
+ required: true
+ type: 4
+- defaultvalue: '80'
+ display: Minimum score threshold
+ name: threshold
+ required: false
+ type: 0
+- defaultvalue: '30'
+ display: Maximum reports age (in days)
+ name: days
+ required: false
+ type: 0
+- display: Disregard quota errors
+ name: disregard_quota
+ required: false
+ type: 8
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Central repository to report and identify IP addresses that have been
+ associated with malicious activity online. Check the Detailed Information section
+ for more information on how to configure the integration.
+display: AbuseIPDB
+name: AbuseIPDB
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: The IP address to check.
+ isArray: false
+ name: ip
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '30'
+ description: The time range to return reports (in days). Default is 30.
+ isArray: false
+ name: days
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: The length of the report. "true" returns the full report, "false"
+ does not return reported categories. Default is "true".
+ isArray: false
+ name: verbose
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '80'
+ description: The minimum score from AbuseIPDB to consider whether the IP address
+ is malicious (must be greater than 20). Default is 80.
+ isArray: false
+ name: threshold
+ required: false
+ secret: false
+ deprecated: false
+ description: Checks the specified IP address against the AbuseIP database.
+ execution: false
+ name: ip
+ outputs:
+ - contextPath: IP.Address
+ description: The address of the IP.
+ type: String
+ - contextPath: IP.Geo.Country
+ description: The country in which the IP address is located.
+ type: String
+ - contextPath: IP.Malicious.Vendor
+ description: The vendor reporting the IP address as malicious.
+ type: String
+ - contextPath: IP.Malicious.Detections
+ description: The Detections that led to the verdict.
+ type: String
+ - contextPath: AbuseIPDB.IP.Address
+ description: The IP address fetched from AbuseIPDB.
+ type: String
+ - contextPath: AbuseIPDB.IP.AbuseConfidenceScore
+ description: The confidence score fetched from AbuseIPDB.
+ type: String
+ - contextPath: AbuseIPDB.IP.TotalReports
+ description: The number of times the address has been reported.
+ type: Number
+ - contextPath: AbuseIPDB.IP.Geo.Country
+ description: The country associated with the IP Address.
+ type: String
+ - contextPath: AbuseIPDB.IP.Reports
+ description: The reports summary (for "verbose" reports).
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: AbuseIPDB.IP.Malicious.Vendor
+ description: The vendor that determined this IP address to be malicious.
+ type: String
+ - contextPath: AbuseIPDB.IP.Malicious.Detections
+ description: The Detections that led to the verdict.
+ type: String
+ - contextPath: IP.Malicious.Description
+ description: A description explaining why the IP address was reported as malicious.
+ type: String
+ - contextPath: AbuseIPDB.IP.Malicious.Description
+ description: A description explaining why the IP address was reported as malicious.
+ type: String
+ - arguments:
+ - default: false
+ description: IPv4 Address Block in CIDR notation.
+ isArray: false
+ name: network
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '30'
+ description: The time range to return reports (in days). Default is 30.
+ isArray: false
+ name: days
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '40'
+ description: The maximum number of IPs to check. Default is 40.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '80'
+ description: The minimum score from AbuseIPDB to consider whether the IP address
+ is malicious (must be greater than 20). Default is 80.
+ isArray: false
+ name: threshold
+ required: false
+ secret: false
+ deprecated: false
+ description: Queries a block of IP addresses to check against the database.
+ execution: false
+ name: abuseipdb-check-cidr-block
+ outputs:
+ - contextPath: IP.Address
+ description: The IP address.
+ type: String
+ - contextPath: IP.Geo.Country
+ description: The country in which the IP address is located.
+ type: String
+ - contextPath: IP.Malicious.Vendor
+ description: The vendor reporting the IP address as malicious.
+ type: String
+ - contextPath: IP.Malicious.Detections
+ description: The Detections that led to the verdict.
+ type: String
+ - contextPath: AbuseIPDB.IP.Address
+ description: The IP address fetched from AbuseIPDB.
+ type: String
+ - contextPath: AbuseIPDB.IP.AbuseConfidenceScore
+ description: The confidence score fetched from AbuseIPDB.
+ type: Unknown
+ - contextPath: AbuseIPDB.IP.TotalReports
+ description: The number of times this address has been reported.
+ type: Unknown
+ - contextPath: AbuseIPDB.IP.Geo.Country
+ description: The country associated with this IP Address.
+ type: Unknown
+ - contextPath: AbuseIPDB.IP.Reports
+ description: Reports summary (for "verbose" reports).
+ type: Unknown
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: AbuseIPDB.IP.Malicious.Vendor
+ description: The vendor used to calculate the score.
+ type: String
+ - contextPath: AbuseIPDB.IP.Malicious.Detections
+ description: The Detections that led to the verdict.
+ type: String
+ - contextPath: IP.Malicious.Description
+ description: A description explaining why the IP address was reported as malicious.
+ type: String
+ - arguments:
+ - default: false
+ description: The IP address to report.
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ - default: false
+ description: A CSV list of category IDs. For more information, see https://www.abuseipdb.com/categories.
+ isArray: false
+ name: categories
+ required: true
+ secret: false
+ deprecated: false
+ description: Reports an IP address to AbuseIPDB.
+ execution: false
+ name: abuseipdb-report-ip
+ - arguments:
+ - default: false
+ defaultValue: '30'
+ description: The time range to return reports (in days). Default is 30.
+ isArray: false
+ name: days
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '50'
+      description: The maximum number of IPs to retrieve. Default is 50.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Whether to save a list of blacklisted IPs in the Context Data in
+ Demisto. Default is false.
+ isArray: false
+ name: saveToContext
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of the most reported IP addresses.
+ execution: false
+ name: abuseipdb-get-blacklist
+ outputs:
+ - contextPath: AbuseIPDB.Blacklist
+ description: A list of blacklisted IP addresses.
+ type: Unknown
+ - deprecated: false
+ description: Returns a list of report categories from AbuseIPDB.
+ execution: false
+ name: abuseipdb-get-categories
+ outputs:
+ - contextPath: AbuseIPDB.Categories
+ description: The list of AbuseIPDB categories.
+ type: string
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- AbuseIPDB Test
diff --git a/Integrations/AbuseDB/AbuseDB_description.md b/Integrations/AbuseDB/AbuseDB_description.md
new file mode 100644
index 000000000000..c749d98a0974
--- /dev/null
+++ b/Integrations/AbuseDB/AbuseDB_description.md
@@ -0,0 +1,10 @@
+- Minimum score threshold: the minimum score from AbuseIPDB for an IP address to be
+considered malicious (must be above 20). For example, with a threshold of 80, an IP
+with an abuse confidence score of 85 is marked malicious, while a score of 55 is only
+marked suspicious.
+- Searching an entire subnet far back in time (e.g. days=365) will likely cause a
+server timeout, and you will receive a 500 Internal Server Error response from the
+AbuseIPDB server.
+- [Report Categories](https://www.abuseipdb.com/categories)
\ No newline at end of file
diff --git a/Integrations/AbuseDB/AbuseDB_image.png b/Integrations/AbuseDB/AbuseDB_image.png
new file mode 100644
index 000000000000..ae615eaea6a5
Binary files /dev/null and b/Integrations/AbuseDB/AbuseDB_image.png differ
diff --git a/Integrations/AbuseDB/CHANGELOG.md b/Integrations/AbuseDB/CHANGELOG.md
new file mode 100644
index 000000000000..b88b78e02607
--- /dev/null
+++ b/Integrations/AbuseDB/CHANGELOG.md
@@ -0,0 +1,11 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+-
+
+## [19.9.1] - 2019-09-18
+-
+
+## [19.9.0] - 2019-09-04
+-
diff --git a/Integrations/Active_Directory_Query/Active_Directory_Query.py b/Integrations/Active_Directory_Query/Active_Directory_Query.py
new file mode 100644
index 000000000000..4b3d7e9b3b72
--- /dev/null
+++ b/Integrations/Active_Directory_Query/Active_Directory_Query.py
@@ -0,0 +1,1047 @@
+import demistomock as demisto
+from CommonServerPython import *
+from typing import *
+from ldap3 import Server, Connection, NTLM, SUBTREE, ALL_ATTRIBUTES, Tls, Entry
+from ldap3.extend import microsoft
+import ssl
+from datetime import datetime
+import traceback
+import os
+from ldap3.utils.log import (set_library_log_detail_level, get_library_log_detail_level,
+ set_library_log_hide_sensitive_data, EXTENDED)
+
+# global connection
+conn: Optional[Connection] = None
+
+''' GLOBAL VARS '''
+
+# userAccountControl is a bitmask used to store a number of settings.
+# find more at:
+# https://support.microsoft.com/en-gb/help/305144/how-to-use-the-useraccountcontrol-flags-to-manipulate-user-account-pro
+
+COOMON_ACCOUNT_CONTROL_FLAGS = {
+ 512: "Enabled Account",
+ 514: "Disabled account",
+ 544: "Account Enabled - Require user to change password at first logon",
+ 4096: "Workstation/server",
+ 66048: "Enabled, password never expires",
+ 66050: "Disabled, password never expires",
+ 66080: "Enables, password never expires, password not required.",
+ 532480: "Domain controller"
+}
+NORMAL_ACCOUNT = 512
+DISABLED_ACCOUNT = 514
+
+# common attributes for specific AD objects
+DEFAULT_PERSON_ATTRIBUTES = [
+ 'name',
+ 'displayName',
+ 'memberOf',
+ 'mail',
+ 'samAccountName',
+ 'manager',
+ 'userAccountControl'
+]
+DEFAULT_COMPUTER_ATTRIBUTES = [
+ 'name',
+ 'memberOf'
+]
+
+''' HELPER FUNCTIONS '''
+
+
+def initialize_server(host, port, secure_connection, unsecure):
+ """
+ uses the instance configuration to initialize the LDAP server
+
+ :param host: host or ip
+ :type host: string
+ :param port: port or None
+ :type port: number
+ :param secure_connection: SSL or None
+ :type secure_connection: string
+ :param unsecure: trust any cert
+ :type unsecure: boolean
+ :return: ldap3 Server
+ :rtype: Server
+ """
+
+ if secure_connection == "SSL":
+        # initialize server with ssl
+        # port is configured by default as 389, or as 636 for LDAPS, if not specified in configuration
+        demisto.debug("initializing server with ssl (unsecure: {}). port: {}".format(unsecure, port or 'default(636)'))
+ if not unsecure:
+ demisto.debug("will require server certificate.")
+ tls = Tls(validate=ssl.CERT_REQUIRED, ca_certs_file=os.environ.get('SSL_CERT_FILE'))
+ if port:
+ return Server(host, port=port, use_ssl=True, tls=tls)
+ return Server(host, use_ssl=True, tls=tls)
+ if port:
+ return Server(host, port=port, use_ssl=True)
+ return Server(host, use_ssl=True)
+ demisto.debug("initializing server without secure connection. port: {}". format(port or 'default(389)'))
+ if port:
+ return Server(host, port=port)
+ return Server(host)
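+# Example (illustrative, placeholder host): initialize_server('dc.example.com', 636, 'SSL', False)
+# returns an ldap3 Server that validates the server certificate over LDAPS.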
+
+
+def account_entry(person_object, custom_attributes):
+    # create an account entry from a person object
+ account = {
+ 'Type': 'AD',
+ 'ID': person_object.get('dn'),
+ 'Email': person_object.get('mail'),
+ 'Username': person_object.get('samAccountName'),
+ 'DisplayName': person_object.get('displayName'),
+        'Managr': person_object.get('manager'),  # (sic) kept alongside 'Manager', presumably for backward compatibility
+ 'Manager': person_object.get('manager'),
+ 'Groups': person_object.get('memberOf')
+ }
+
+    for attr in custom_attributes:
+ account[attr] = person_object[attr]
+
+ return account
+
+
+def endpoint_entry(computer_object, custom_attributes):
+ # create an endpoint entry from a computer object
+ endpoint = {
+ 'Type': 'AD',
+ 'ID': computer_object.get('dn'),
+ 'Hostname': computer_object.get('name'),
+ 'Groups': computer_object.get('memberOf')
+ }
+
+    for attr in custom_attributes:
+ endpoint[attr] = computer_object[attr]
+
+ return endpoint
+
+
+def base_dn_verified(base_dn):
+    # search AD with a simple query to verify the base DN is configured correctly
+ try:
+ search(
+ "(objectClass=user)",
+ base_dn,
+ size_limit=1
+ )
+ except Exception as e:
+ demisto.info(str(e))
+ return False
+ return True
+
+
+''' COMMANDS '''
+
+''' SEARCH '''
+
+
+def search(search_filter, search_base, attributes=None, size_limit=0, time_limit=0):
+ """
+ find entries in the DIT
+
+ Args:
+ search_base: the location in the DIT where the search will start
+        search_filter: LDAP query string
+ attributes: the attributes to specify for each entry found in the DIT
+
+ """
+ assert conn is not None
+ success = conn.search(
+ search_base=search_base,
+ search_filter=search_filter,
+ attributes=attributes,
+ size_limit=size_limit,
+ time_limit=time_limit
+ )
+
+ if not success:
+ raise Exception("Search failed")
+ return conn.entries
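+# Example (illustrative, placeholder values): search('(sAMAccountName=jdoe)', 'DC=example,DC=com', size_limit=1)
+# returns at most one matching ldap3 Entry.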
+
+
+def search_with_paging(search_filter, search_base, attributes=None, page_size=100, size_limit=0, time_limit=0):
+ """
+ find entries in the DIT
+
+ Args:
+ search_base: the location in the DIT where the search will start
+        search_filter: LDAP query string
+        attributes: the attributes to specify for each entry found in the DIT
+
+ """
+ assert conn is not None
+ total_entries = 0
+ cookie = None
+ start = datetime.now()
+
+ entries: List[Entry] = []
+ entries_left_to_fetch = size_limit
+ while True:
+ if 0 < entries_left_to_fetch < page_size:
+ page_size = entries_left_to_fetch
+
+ conn.search(
+ search_base,
+ search_filter,
+ search_scope=SUBTREE,
+ attributes=attributes,
+ paged_size=page_size,
+ paged_cookie=cookie
+ )
+
+ entries_left_to_fetch -= len(conn.entries)
+ total_entries += len(conn.entries)
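+        # 1.2.840.113556.1.4.319 is the OID of the LDAP paged-results control (RFC 2696);
+        # the cookie returned by the server is passed back to fetch the next page (empty when done)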
+ cookie = conn.result['controls']['1.2.840.113556.1.4.319']['value']['cookie']
+        time_diff = (datetime.now() - start).seconds  # elapsed seconds since the search started
+
+ entries.extend(conn.entries)
+
+        # stop when: 1. size limit reached, 2. time limit reached, 3. no cookie
+ if (size_limit and size_limit <= total_entries) or (time_limit and time_diff >= time_limit) or (not cookie):
+ break
+
+    # keep the raw entry for raw content (backward compatibility)
+    raw = []
+    # flatten the entries
+ flat = []
+
+ for entry in entries:
+ entry = json.loads(entry.entry_to_json())
+
+ flat_entry = {
+ 'dn': entry['dn']
+ }
+
+ for attr in entry.get('attributes', {}):
+ flat_entry[attr] = entry['attributes'][attr]
+
+ raw.append(entry)
+ flat.append(flat_entry)
+
+ return {
+ "raw": raw,
+ "flat": flat
+ }
+
+
+def user_dn(sam_account_name, search_base):
+ search_filter = '(&(objectClass=user)(sAMAccountName={}))'.format(sam_account_name)
+ entries = search(
+ search_filter,
+ search_base
+ )
+ if not entries:
+ raise Exception("Could not get full DN for user with sAMAccountName '{}'".format(sam_account_name))
+ entry = json.loads(entries[0].entry_to_json())
+ return entry['dn']
+
+
+def computer_dn(computer_name, search_base):
+    search_filter = '(&(objectClass=user)(objectCategory=computer)(name={}))'.format(computer_name)
+ entries = search(
+ search_filter,
+ search_base
+ )
+ if not entries:
+ raise Exception("Could not get full DN for computer with name '{}'".format(compuer_name))
+ entry = json.loads(entries[0].entry_to_json())
+ return entry['dn']
+
+
+def group_dn(group_name, search_base):
+ search_filter = '(&(objectClass=group)(cn={}))'.format(group_name)
+ entries = search(
+ search_filter,
+ search_base
+ )
+ if not entries:
+ raise Exception("Could not get full DN for group with name '{}'".format(group_name))
+ entry = json.loads(entries[0].entry_to_json())
+ return entry['dn']
+
+
+def convert_special_chars_to_unicode(search_filter):
+ # We allow users to use special chars without explicitly typing their unicode values
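+    # e.g. (illustrative): the filter (cn=\*admin\*) becomes (cn=\2aadmin\2a)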
+ chars_to_replace = {
+ '\\(': '\\28',
+ '\\)': '\\29',
+ '\\*': '\\2a',
+ '\\/': '\\2f',
+ '\\\\': '\\5c'
+ }
+ for i, j in chars_to_replace.items():
+ search_filter = search_filter.replace(i, j)
+
+ return search_filter
+
+
+def free_search(default_base_dn, page_size):
+
+ args = demisto.args()
+
+ search_filter = args.get('filter')
+ size_limit = int(args.get('size-limit', '0'))
+ time_limit = int(args.get('time-limit', '0'))
+ search_base = args.get('base-dn') or default_base_dn
+ attributes = args.get('attributes')
+ context_output = args.get('context-output')
+
+ search_filter = convert_special_chars_to_unicode(search_filter)
+
+ # if ALL was specified - get all the object's attributes, else expect a string of comma separated values
+ if attributes:
+ attributes = ALL_ATTRIBUTES if attributes == 'ALL' else attributes.split(',')
+
+ entries = search_with_paging(
+ search_filter,
+ search_base,
+ attributes=attributes,
+ size_limit=size_limit,
+ time_limit=time_limit,
+ page_size=page_size
+ )
+
+ ec = {} if context_output == 'no' else {'ActiveDirectory.Search(obj.dn == val.dn)': entries['flat']}
+ demisto_entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': entries['raw'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown("Active Directory Search", entries['flat']),
+ 'EntryContext': ec
+ }
+ demisto.results(demisto_entry)
+
+
+def search_users(default_base_dn, page_size):
+    # this command is equivalent to the ADGetUser script
+    # performs a custom search to find users by a single attribute specified by the user
+
+ args = demisto.args()
+
+ attributes: List[str] = []
+ custom_attributes: List[str] = []
+
+    # zero means no limit
+ limit = int(args.get('limit', '0'))
+
+ # default query - list all users
+ query = "(&(objectClass=User)(objectCategory=person))"
+
+ # query by user DN
+ if args.get('dn'):
+ query = "(&(objectClass=User)(objectCategory=person)(distinguishedName={}))".format(args['dn'])
+
+ # query by name
+ if args.get('name'):
+ query = "(&(objectClass=User)(objectCategory=person)(cn={}))".format(args['name'])
+
+ # query by email
+ if args.get('email'):
+ query = "(&(objectClass=User)(objectCategory=person)(mail={}))".format(args['email'])
+
+ # query by sAMAccountName
+ if args.get('username'):
+ query = "(&(objectClass=User)(objectCategory=person)(sAMAccountName={}))".format(args['username'])
+
+ # query by custom object attribute
+ if args.get('custom-field-type'):
+ if not args.get('custom-field-data'):
+ raise Exception('Please specify "custom-field-data" as well when quering by "custom-field-type"')
+ query = "(&(objectClass=User)(objectCategory=person)({}={}))".format(
+ args['custom-field-type'], args['custom-field-data'])
+
+ if args.get('attributes'):
+ custom_attributes = args['attributes'].split(",")
+
+ attributes = list(set(custom_attributes + DEFAULT_PERSON_ATTRIBUTES))
+
+ entries = search_with_paging(
+ query,
+ default_base_dn,
+ attributes=attributes,
+ size_limit=limit,
+ page_size=page_size
+ )
+
+ accounts = [account_entry(entry, custom_attributes) for entry in entries['flat']]
+
+ if args.get('user-account-control-out', '') == 'true':
+ # display a literal translation of the numeric account control flag
+ for i, user in enumerate(entries['flat']):
+ flag_no = user.get('userAccountControl')[0]
+            entries['flat'][i]['userAccountControl'] = COMMON_ACCOUNT_CONTROL_FLAGS.get(flag_no) or flag_no
+
+ demisto_entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': entries['raw'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown("Active Directory - Get Users", entries['flat']),
+ 'EntryContext': {
+ 'ActiveDirectory.Users(obj.dn == val.dn)': entries['flat'],
+            # backward compatibility with the ADGetUser script
+ 'Account(obj.ID == val.ID)': accounts
+ }
+ }
+ demisto.results(demisto_entry)
+
+
+def search_computers(default_base_dn, page_size):
+ # this command is equivalent to ADGetComputer script
+
+ args = demisto.args()
+
+ attributes: List[str] = []
+    custom_attributes: List[str] = []
+
+    # default query - list all computers (objectClass=user, objectCategory=computer)
+ query = "(&(objectClass=user)(objectCategory=computer))"
+
+ # query by user DN
+ if args.get('dn'):
+ query = "(&(objectClass=user)(objectCategory=computer)(dn={}))".format(args['dn'])
+
+ # query by name
+ if args.get('name'):
+ query = "(&(objectClass=user)(objectCategory=computer)(name={}))".format(args['name'])
+
+ # query by custom object attribute
+ if args.get('custom-field-type'):
+ if not args.get('custom-field-data'):
+ raise Exception('Please specify "custom-field-data" as well when quering by "custom-field-type"')
+ query = "(&(objectClass=user)(objectCategory=computer)({}={}))".format(
+ args['custom-field-type'], args['ustom-field-data'])
+
+ if args.get('attributes'):
+ custome_attributes = args['attributes'].split(",")
+
+    attributes = list(set(custom_attributes + DEFAULT_COMPUTER_ATTRIBUTES))
+
+ entries = search_with_paging(
+ query,
+ default_base_dn,
+ attributes=attributes,
+ page_size=page_size
+ )
+
+    endpoints = [endpoint_entry(entry, custom_attributes) for entry in entries['flat']]
+
+ demisto_entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': entries['raw'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown("Active Directory - Get Computers", entries['flat']),
+ 'EntryContext': {
+ 'ActiveDirectory.Computers(obj.dn == val.dn)': entries['flat'],
+            # backward compatibility with the ADGetComputer script
+ 'Endpoint(obj.ID == val.ID)': endpoints
+ }
+ }
+ demisto.results(demisto_entry)
+
+
+def search_group_members(default_base_dn, page_size):
+ # this command is equivalent to ADGetGroupMembers script
+
+ args = demisto.args()
+ member_type = args.get('member-type')
+ group_dn = args.get('group-dn')
+
+    custom_attributes: List[str] = []
+ default_attributes = DEFAULT_PERSON_ATTRIBUTES if member_type == 'person' else DEFAULT_COMPUTER_ATTRIBUTES
+
+ if args.get('attributes'):
+ custome_attributes = args['attributes'].split(",")
+
+    attributes = list(set(custom_attributes + default_attributes))
+
+    # nested search
+ query = "(&(objectCategory={})(objectClass=user)(memberOf:1.2.840.113556.1.4.1941:={}))".format(member_type,
+ group_dn)
+
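+    # 1.2.840.113556.1.4.1941 in the filter above is LDAP_MATCHING_RULE_IN_CHAIN,
+    # which matches group membership transitively (i.e. through nested groups)
+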
+ entries = search_with_paging(
+ query,
+ default_base_dn,
+ attributes=attributes,
+ page_size=page_size
+ )
+
+ members = [{'dn': entry['dn'], 'category': member_type} for entry in entries['flat']]
+
+ demisto_entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': entries['raw'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown("Active Directory - Get Group Members", entries['flat']),
+ 'EntryContext': {
+ 'ActiveDirectory.Groups(obj.dn ==' + group_dn + ')': {
+ 'dn': group_dn,
+ 'members': members
+ }
+ }
+ }
+
+ if member_type == 'person':
+ demisto_entry['EntryContext']['ActiveDirectory.Users(obj.dn == val.dn)'] = entries['flat']
+ demisto_entry['EntryContext']['Account'] = [account_entry(
+            entry, custom_attributes) for entry in entries['flat']]
+ else:
+ demisto_entry['EntryContext']['ActiveDirectory.Computers(obj.dn == val.dn)'] = entries['flat']
+ demisto_entry['EntryContext']['Endpoint'] = [endpoint_entry(
+            entry, custom_attributes) for entry in entries['flat']]
+
+ demisto.results(demisto_entry)
+
+
+''' DATABASE OPERATIONS '''
+
+''' CREATE OBJECT'''
+
+
+def create_user():
+ assert conn is not None
+ args = demisto.args()
+
+ object_classes = ["top", "person", "organizationalPerson", "user"]
+ user_dn = args.get('user-dn')
+ username = args.get("username")
+ password = args.get("password")
+    custom_attributes = args.get('custom-attributes')
+ attributes = {
+ "samAccountName": username
+ }
+
+ # set common user attributes
+ if args.get('display-name'):
+ attributes['displayName'] = args['display-name']
+ if args.get('description'):
+ attributes['description'] = args['description']
+ if args.get('email'):
+ attributes['mail'] = args['email']
+ if args.get('telephone-number'):
+ attributes['telephoneNumber'] = args['telephone-number']
+ if args.get('title'):
+ attributes['title'] = args['title']
+
+    # set user custom attributes
+    if custom_attributes:
+        try:
+            custom_attributes = json.loads(custom_attributes)
+ except Exception as e:
+ demisto.info(str(e))
+ raise Exception(
+ "Failed to parse custom attributes argument. Please see an example of this argument in the description."
+ )
+        for attribute_name, attribute_value in custom_attributes.items():
+            # may override a default attribute set above
+ attributes[attribute_name] = attribute_value
+
+ # add user
+ success = conn.add(user_dn, object_classes, attributes)
+ if not success:
+ raise Exception("Failed to create user")
+
+ # set user password
+ success = conn.extend.microsoft.modify_password(user_dn, password)
+ if not success:
+ raise Exception("Failed to reset user password")
+
+ # enable user and expire password
+ modification = {
+ # enable user
+ 'userAccountControl': [('MODIFY_REPLACE', NORMAL_ACCOUNT)],
+ # set to 0, to force password change on next login
+ "pwdLastSet": [('MODIFY_REPLACE', "0")]
+ }
+ modify_object(user_dn, modification)
+
+ demisto_entry = {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['note'],
+ 'Contents': "Created user with DN: {}".format(user_dn)
+ }
+ demisto.results(demisto_entry)
+
+
+def create_contact():
+ assert conn is not None
+ args = demisto.args()
+
+ object_classes = ["top", "person", "organizationalPerson", "contact"]
+ contact_dn = args.get('contact-dn')
+
+ # set contact attributes
+ attributes: Dict = {}
+ if args.get('custom-attributes'):
+ try:
+ attributes = json.loads(args['custom-attributes'])
+ except Exception as e:
+ demisto.info(str(e))
+ raise Exception(
+                'Failed to parse custom attributes argument. Please see an example of this argument in the argument description.'
+ )
+
+    # set common contact attributes
+ if args.get('display-name'):
+ attributes['displayName'] = args['display-name']
+ if args.get('description'):
+ attributes['description'] = args['description']
+ if args.get('email'):
+ attributes['mail'] = args['email']
+ if args.get('telephone-number'):
+ attributes['telephoneNumber'] = args['telephone-number']
+ if args.get('title'):
+ attributes['title'] = args['title']
+
+    # add contact
+    success = conn.add(contact_dn, object_classes, attributes)
+ if not success:
+ raise Exception("Failed to create contact")
+
+ demisto_entry = {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['note'],
+ 'Contents': "Created contact with DN: {}".format(contact_dn)
+ }
+ demisto.results(demisto_entry)
+
+
+''' UPDATE OBJECT '''
+
+
+def modify_object(dn, modification):
+ """
+    modifies an object in the DIT
+ """
+ assert conn is not None
+ success = conn.modify(dn, modification)
+ if not success:
+ raise Exception("Failed to update object {} with the following modofication: {}".format(
+ dn, json.dumps(modification)))
+
+
+def update_user(default_base_dn):
+ args = demisto.args()
+
+ # get user DN
+ sam_account_name = args.get('username')
+ attribute_name = args.get('attribute-name')
+ attribute_value = args.get('attribute-value')
+ search_base = args.get('base-dn') or default_base_dn
+ dn = user_dn(sam_account_name, search_base)
+
+ modification = {}
+ modification[attribute_name] = [('MODIFY_REPLACE', attribute_value)]
+
+ # modify user
+ modify_object(dn, modification)
+
+ demisto_entry = {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['note'],
+ 'Contents': "Updated user's {} to {} ".format(attribute_name, attribute_value)
+ }
+ demisto.results(demisto_entry)
+
+
+def update_contact():
+ args = demisto.args()
+
+ contact_dn = args.get('contact-dn')
+ modification = {}
+ modification[args.get('attribute-name')] = [('MODIFY_REPLACE', args.get('attribute-value'))]
+
+ # modify
+ modify_object(contact_dn, modification)
+
+ demisto_entry = {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['note'],
+ 'Contents': "Updated contact's {} to: {} ".format(args.get('attribute-name'), args.get('attribute-value'))
+ }
+ demisto.results(demisto_entry)
+
+
+def modify_computer_ou(default_base_dn):
+ assert conn is not None
+ args = demisto.args()
+
+ computer_name = args.get('computer-name')
+ dn = computer_dn(computer_name, args.get('base-dn') or default_base_dn)
+
+ success = conn.modify_dn(dn, "CN={}".format(computer_name), new_superior=args.get('full-superior-dn'))
+ if not success:
+ raise Exception("Failed to modify computer OU")
+
+ demisto_entry = {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['note'],
+ 'Contents': "Moved computer {} to {}".format(computer_name, args.get('full-superior-dn'))
+ }
+ demisto.results(demisto_entry)
+
+
+def expire_user_password(default_base_dn):
+ args = demisto.args()
+
+ # get user DN
+ sam_account_name = args.get('username')
+ search_base = args.get('base-dn') or default_base_dn
+ dn = user_dn(sam_account_name, search_base)
+
+ modification = {
+ # set to 0, to force password change on next login
+ "pwdLastSet": [('MODIFY_REPLACE', "0")]
+ }
+
+ # modify user
+ modify_object(dn, modification)
+
+ demisto_entry = {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['note'],
+ 'Contents': "Expired password successfully"
+ }
+ demisto.results(demisto_entry)
+
+
+def set_user_password(default_base_dn):
+ assert conn is not None
+ args = demisto.args()
+
+ # get user DN
+ sam_account_name = args.get('username')
+ password = args.get('password')
+ search_base = args.get('base-dn') or default_base_dn
+ dn = user_dn(sam_account_name, search_base)
+
+ # set user password
+ success = conn.extend.microsoft.modify_password(dn, password)
+ if not success:
+ raise Exception("Failed to reset user password")
+
+ demisto_entry = {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['note'],
+ 'Contents': "User password successfully set"
+ }
+ demisto.results(demisto_entry)
+
+
+def enable_user(default_base_dn):
+ args = demisto.args()
+
+ # get user DN
+ sam_account_name = args.get('username')
+ search_base = args.get('base-dn') or default_base_dn
+ dn = user_dn(sam_account_name, search_base)
+
+ # modify user
+ modification = {
+ 'userAccountControl': [('MODIFY_REPLACE', NORMAL_ACCOUNT)]
+ }
+ modify_object(dn, modification)
+
+ demisto_entry = {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['note'],
+ 'Contents': "User {} was enabled".format(sam_account_name)
+ }
+ demisto.results(demisto_entry)
+
+
+def disable_user(default_base_dn):
+ args = demisto.args()
+
+ # get user DN
+ sam_account_name = args.get('username')
+ search_base = args.get('base-dn') or default_base_dn
+ dn = user_dn(sam_account_name, search_base)
+
+ # modify user
+ modification = {
+ 'userAccountControl': [('MODIFY_REPLACE', DISABLED_ACCOUNT)]
+ }
+ modify_object(dn, modification)
+
+ demisto_entry = {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['note'],
+ 'Contents': "User {} was disabled".format(sam_account_name)
+ }
+ demisto.results(demisto_entry)
+
+
+def add_member_to_group(default_base_dn):
+
+ args = demisto.args()
+
+ search_base = args.get('base-dn') or default_base_dn
+
+ # get the dn of the member - either user or computer
+ args_err = "Pleade provide either username or computer-name"
+ member_dn = ''
+
+ if args.get('username') and args.get('computer-name'):
+ # both arguments passed
+ raise Exception(args_err)
+ if args.get('username'):
+ member_dn = user_dn(args['username'], search_base)
+ elif args.get('computer-name'):
+ member_dn = computer_dn(args['computer-name'], search_base)
+ else:
+ # none of the arguments passed
+ raise Exception(args_err)
+
+ grp_dn = group_dn(args.get('group-cn'), search_base)
+
+ success = microsoft.addMembersToGroups.ad_add_members_to_groups(conn, [member_dn], [grp_dn])
+ if not success:
+ raise Exception("Failed to add {} to group {]}".format(
+ args.get('username') or args.get('computer-name'),
+ args.get('group_name')
+ ))
+
+ demisto_entry = {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['note'],
+ 'Contents': "Object with dn {} was added to group {}".format(member_dn, args.get('group-cn'))
+ }
+ demisto.results(demisto_entry)
+
+
+def remove_member_from_group(default_base_dn):
+
+ args = demisto.args()
+
+ search_base = args.get('base-dn') or default_base_dn
+
+ # get the dn of the member - either user or computer
+ args_err = "Pleade provide either username or computer-name"
+ member_dn = ''
+
+ if args.get('username') and args.get('computer-name'):
+ # both arguments passed
+ raise Exception(args_err)
+ if args.get('username'):
+ member_dn = user_dn(args['username'], search_base)
+ elif args.get('computer-name'):
+ member_dn = computer_dn(args['computer-name'], search_base)
+ else:
+ # none of the arguments passed
+ raise Exception(args_err)
+
+ grp_dn = group_dn(args.get('group-cn'), search_base)
+
+ success = microsoft.removeMembersFromGroups.ad_remove_members_from_groups(conn, [member_dn], [grp_dn], True)
+ if not success:
+ raise Exception("Failed to remove {member} from group {group_name}".format({
+ "member": args.get('username') or args.get('computer-name'),
+ "group_name": args.get('group_name')
+ }))
+
+ demisto_entry = {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['note'],
+ 'Contents': "Object with dn {} removed from group {}".format(member_dn, args.get('group-cn'))
+ }
+ demisto.results(demisto_entry)
+
+
+def unlock_account(default_base_dn):
+ args = demisto.args()
+
+ # get user DN
+ sam_account_name = args.get('username')
+ search_base = args.get('base-dn') or default_base_dn
+ dn = user_dn(sam_account_name, search_base)
+
+ success = microsoft.unlockAccount.ad_unlock_account(conn, dn)
+ if not success:
+ raise Exception("Failed to unlock user {}".format(sam_account_name))
+
+ demisto_entry = {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['note'],
+ 'Contents': "Unlocked user {}".format(sam_account_name)
+ }
+ demisto.results(demisto_entry)
+
+
+''' DELETE OBJECT '''
+
+
+def delete_user():
+    # can actually delete any object, not only users...
+ assert conn is not None
+ success = conn.delete(demisto.args().get('user-dn'))
+ if not success:
+ raise Exception('Failed to delete user')
+
+ demisto_entry = {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['note'],
+ 'Contents': "Deleted object with dn {}".format(demisto.args().get('user-dn'))
+ }
+ demisto.results(demisto_entry)
+
+
+'''
+ TEST CONFIGURATION
+    authenticate user credentials while initializing the connection with the AD server
+ verify base DN is configured correctly
+'''
+
+
+def main():
+ ''' INSTANCE CONFIGURATION '''
+ SERVER_IP = demisto.params().get('server_ip')
+ USERNAME = demisto.params().get('credentials')['identifier']
+ PASSWORD = demisto.params().get('credentials')['password']
+ DEFAULT_BASE_DN = demisto.params().get('base_dn')
+ SECURE_CONNECTION = demisto.params().get('secure_connection')
+ DEFAULT_PAGE_SIZE = int(demisto.params().get('page_size'))
+ NTLM_AUTH = demisto.params().get('ntlm')
+ UNSECURE = demisto.params().get('unsecure', False)
+ PORT = demisto.params().get('port')
+
+ if PORT:
+ # port was configured, cast to int
+ PORT = int(PORT)
+ last_log_detail_level = None
+ try:
+ try:
+ set_library_log_hide_sensitive_data(True)
+ if is_debug_mode():
+ demisto.info('debug-mode: setting library log detail to EXTENDED')
+ last_log_detail_level = get_library_log_detail_level()
+ set_library_log_detail_level(EXTENDED)
+ server = initialize_server(SERVER_IP, PORT, SECURE_CONNECTION, UNSECURE)
+ except Exception as e:
+ return_error(str(e))
+ return
+ global conn
+ if NTLM_AUTH:
+            # initialize connection to the LDAP server with NTLM authentication
+ # user example: domain\user
+ domain_user = SERVER_IP + '\\' + USERNAME if '\\' not in USERNAME else USERNAME
+ conn = Connection(server, user=domain_user, password=PASSWORD, authentication=NTLM)
+ else:
+ # here username should be the user dn
+ conn = Connection(server, user=USERNAME, password=PASSWORD)
+
+        # bind operation is the "authenticate" operation.
+ try:
+ # open socket and bind to server
+ if not conn.bind():
+ message = "Failed to bind to server. Please validate the credentials configured correctly.\n{}".format(
+ json.dumps(conn.result))
+ return_error(message)
+ return
+ except Exception as e:
+ exc_msg = str(e)
+ demisto.info("Failed bind to: {}:{}. {}: {}".format(SERVER_IP, PORT, type(e), exc_msg
+ + "\nTrace:\n{}".format(traceback.format_exc())))
+ message = "Failed to access LDAP server. Please validate the server host and port are configured correctly"
+ if 'ssl wrapping error' in exc_msg:
+ message = "Failed to access LDAP server. SSL error."
+ if not UNSECURE:
+ message += ' Try using: "Trust any certificate" option.'
+ return_error(message)
+ return
+
+ demisto.info('Established connection with AD LDAP server')
+
+ if not base_dn_verified(DEFAULT_BASE_DN):
+ message = "Failed to verify the base DN configured for the instance.\n" \
+ "Last connection result: {}\n" \
+ "Last error from LDAP server: {}".format(json.dumps(conn.result), json.dumps(conn.last_error))
+ return_error(message)
+ return
+
+        demisto.info('Verified base DN "{}"'.format(DEFAULT_BASE_DN))
+
+ ''' COMMAND EXECUTION '''
+
+ if demisto.command() == 'test-module':
+ if conn.user == '':
+ # Empty response means you have no authentication status on the server, so you are an anonymous user.
+ raise Exception("Failed to authenticate user")
+ demisto.results('ok')
+
+ if demisto.command() == 'ad-search':
+ free_search(DEFAULT_BASE_DN, DEFAULT_PAGE_SIZE)
+
+ if demisto.command() == 'ad-expire-password':
+ expire_user_password(DEFAULT_BASE_DN)
+
+ if demisto.command() == 'ad-set-new-password':
+ set_user_password(DEFAULT_BASE_DN)
+
+ if demisto.command() == 'ad-unlock-account':
+ unlock_account(DEFAULT_BASE_DN)
+
+ if demisto.command() == 'ad-disable-account':
+ disable_user(DEFAULT_BASE_DN)
+
+ if demisto.command() == 'ad-enable-account':
+ enable_user(DEFAULT_BASE_DN)
+
+ if demisto.command() == 'ad-remove-from-group':
+ remove_member_from_group(DEFAULT_BASE_DN)
+
+ if demisto.command() == 'ad-add-to-group':
+ add_member_to_group(DEFAULT_BASE_DN)
+
+ if demisto.command() == 'ad-create-user':
+ create_user()
+
+ if demisto.command() == 'ad-delete-user':
+ delete_user()
+
+ if demisto.command() == 'ad-update-user':
+ update_user(DEFAULT_BASE_DN)
+
+ if demisto.command() == 'ad-modify-computer-ou':
+ modify_computer_ou(DEFAULT_BASE_DN)
+
+ if demisto.command() == 'ad-create-contact':
+ create_contact()
+
+ if demisto.command() == 'ad-update-contact':
+ update_contact()
+
+ if demisto.command() == 'ad-get-user':
+ search_users(DEFAULT_BASE_DN, DEFAULT_PAGE_SIZE)
+
+ if demisto.command() == 'ad-get-computer':
+ search_computers(DEFAULT_BASE_DN, DEFAULT_PAGE_SIZE)
+
+ if demisto.command() == 'ad-get-group-members':
+ search_group_members(DEFAULT_BASE_DN, DEFAULT_PAGE_SIZE)
+
+ except Exception as e:
+ message = str(e)
+ if conn:
+ message += "\nLast connection result: {}\nLast error from LDAP server: {}".format(
+ json.dumps(conn.result), conn.last_error)
+ return_error(message)
+ return
+ finally:
+ # disconnect and close the connection
+ if conn:
+ conn.unbind()
+ if last_log_detail_level:
+ set_library_log_detail_level(last_log_detail_level)
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/Active_Directory_Query/Active_Directory_Query.yml b/Integrations/Active_Directory_Query/Active_Directory_Query.yml
new file mode 100644
index 000000000000..04a1c0f36ce0
--- /dev/null
+++ b/Integrations/Active_Directory_Query/Active_Directory_Query.yml
@@ -0,0 +1,397 @@
+commonfields:
+ id: Active Directory Query v2
+ version: -1
+name: Active Directory Query v2
+display: Active Directory Query v2
+category: Data Enrichment & Threat Intelligence
+description: Active Directory Query integration enables you to access and manage Active
+ Directory objects (users, contacts, and computers).
+configuration:
+- display: Server IP address (e.g., 192.168.0.1)
+ name: server_ip
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Port. If not specified, the default port is 389, or 636 for LDAPS.
+ name: port
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Credentials
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: true
+- display: NTLM authentication
+ name: ntlm
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Base DN (for example "dc=company,dc=com")
+ name: base_dn
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Page size
+ name: page_size
+ defaultvalue: "500"
+ type: 0
+ required: true
+- display: Secure Connection
+ name: secure_connection
+ defaultvalue: SSL
+ type: 15
+ required: true
+ options:
+ - None
+ - SSL
+- display: Trust any certificate (not secure)
+ name: unsecure
+ defaultvalue: ""
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ subtype: python3
+ commands:
+ - name: ad-expire-password
+ arguments:
+ - name: username
+ required: true
+ description: The username (samAccountName) of the user to modify
+ - name: base-dn
+ description: Root (e.g., DC=domain,DC=com)
+ description: Expires the password of an Active Directory user.
+ - name: ad-create-user
+ arguments:
+ - name: username
+ required: true
+      description: The username (samAccountName) of the user to create
+ - name: password
+ required: true
+ description: 'The initial password to set for the user. The user will be asked
+ to change the password after login.'
+ - name: user-dn
+ required: true
+ description: The user's DN
+ - name: display-name
+ description: The user's display name
+ - name: description
+ description: Short description of the user
+ - name: email
+ description: User email.
+ - name: telephone-number
+ description: The user's telephone number
+ - name: title
+ description: The user's job title
+ - name: custom-attributes
+ description: Sets basic or custom attributes of the user object. For example,
+ custom-attributes="{\"notes\":\"a note about the contact\",\"company\":\"company
+ name\"}"
+    description: Creates an Active Directory user. This command requires a secure
+      connection (SSL/TLS).
+ - name: ad-search
+ arguments:
+ - name: filter
+ required: true
+      description: 'Defines the search criteria, using Active Directory search filter syntax. For example, the following query searches for all user objects,
+        except Andy: "(&(objectCategory=person)(objectClass=user)(!(cn=andy)))". NOTE: if the filter contains special characters
+        such as "*", "(", or "\" the character must be preceded by two backslashes "\\". For example, to use "*",
+        type "\\*". For more information about search filter syntax, see:
+        https://docs.microsoft.com/en-us/windows/win32/adsi/search-filter-syntax'
+ - name: base-dn
+ description: Root (e.g. DC=domain,DC=com). By default, the Base DN configured for the instance
+ will be used.
+ - name: attributes
+ description: CSV list of the object attributes to return, e.g.,
+ "dn,memberOf". To get all objects atributes, specify 'ALL'.
+ - name: size-limit
+ description: Maximum number of records to return
+ defaultValue: "50"
+ - name: time-limit
+ description: Maximum time to pull records (in seconds)
+ - name: context-output
+ auto: PREDEFINED
+ predefined:
+ - "yes"
+ - "no"
+ defaultValue: "yes"
+      description: If set to 'no', the results of the search will not be output to
+        the context.
+ outputs:
+ - contextPath: 'ActiveDirectory.Search.dn'
+ description: The distinguished names that match the query.
+ type: string
+ - contextPath: 'ActiveDirectory.Search'
+ description: Result of the search.
+ type: unknown
+ description: Runs Active Directory queries.
+ - name: ad-add-to-group
+ arguments:
+ - name: username
+ description: "The username of the user to add to the group. If this argument is not specified,
+ the computer name argument must be specified.\t"
+ - name: computer-name
+ description: The name of the computer to add to the group. If this argument is not specified,
+ the username argument must be specified.
+ - name: group-cn
+ required: true
+ description: The name of the group to add the user to
+ - name: base-dn
+ description: Root (e.g., DC=domain,DC=com). By default, the Base DN configured for the instance
+ will be used.
+ description: Adds an Active Directory user or computer to a group.
+ - name: ad-remove-from-group
+ arguments:
+ - name: username
+ description: "The name of the user to remove from the group. If this argument is not specified,
+ the computer name argument must be specified.\t"
+ - name: computer-name
+ description: The name of the computer to remove from the group. If this argument is not specified,
+ the username argument must be specified.
+ - name: group-cn
+ required: true
+ description: "The name of the group to remove the user from\t"
+ - name: base-dn
+ description: Root (e.g., DC=domain,DC=com). By default, the Base DN configured for the instance
+ will be used.
+ description: Removes an Active Directory user or computer from a group.
+ - name: ad-update-user
+ arguments:
+ - name: username
+ required: true
+ description: "The username of the account to update (sAMAccountName)\t"
+ - name: attribute-name
+ required: true
+ description: The name of the attribute to modify (e.g., sn, displayName, mail,
+ etc.)
+ - name: attribute-value
+ required: true
+ description: "The value the attribute should be changed to\t"
+ - name: base-dn
+ description: Root (e.g. DC=domain,DC=com). By default, the Base DN configured for the instance
+ will be used.
+ description: Updates attributes of an existing Active Directory user.
+ - name: ad-delete-user
+ arguments:
+ - name: user-dn
+ required: true
+ description: The DN of the user to delete
+ description: Deletes an Active Directory user.
+ - name: ad-create-contact
+ arguments:
+ - name: contact-dn
+ required: true
+ description: "The contact's DN\t"
+ - name: display-name
+ description: "The contact's display name\t"
+ - name: description
+ description: Short description of the contact
+ - name: email
+ description: The contact's email address
+ - name: telephone-number
+ description: The contact's telephone number
+ - name: custom-attributes
+ description: Sets basic or custom attributes of the contact object. For example,
+ custom-attributes="{\"notes\":\"some note about the contact\",\"company\":\"some
+ company\"}"
+ - name: title
+ description: The contact's job title
+ description: Creates an Active Directory contact.
+ - name: ad-update-contact
+ arguments:
+ - name: contact-dn
+ required: true
+ description: "The contact's DN\t"
+ - name: attribute-name
+ required: true
+ description: "The attribute name to update\t"
+ - name: attribute-value
+ required: true
+ description: "The attribute value to be updated\t"
+ description: Updates attributes of an existing Active Directory contact.
+ - name: ad-disable-account
+ arguments:
+ - name: username
+ required: true
+ description: "The username of the account to disable (sAMAccountName)\t"
+ - name: base-dn
+ description: Root (e.g., DC=domain,DC=com). By default, the Base DN configured for the instance
+ will be used.
+ description: Disables an Active Directory user account.
+ - name: ad-enable-account
+ arguments:
+ - name: username
+ required: true
+ description: "The username of the account to enable (sAMAccountName)\t"
+ - name: base-dn
+ description: Root (e.g., DC=domain,DC=com). By default, the Base DN configured for the instance
+ will be used.
+ description: Enables a previously disabled Active Directory account.
+ - name: ad-unlock-account
+ arguments:
+ - name: username
+ required: true
+ description: "The username of the account to unlock (sAMAccountName)\t"
+ - name: base-dn
+ description: Root (e.g., DC=domain,DC=com). By default, the Base DN configured for the instance
+ will be used.
+ description: Unlocks a previously locked Active Directory user account.
+ - name: ad-set-new-password
+ arguments:
+ - name: username
+ required: true
+ description: "The username of the account to be disabled (sAMAccountName)\t"
+ - name: password
+ required: true
+ description: "The password to set for the user\t"
+ - name: base-dn
+      description: Root (e.g., DC=domain,DC=com). By default, the Base DN configured
+        for the instance will be used.
+    description: Sets a new password for an Active Directory user. This command requires a secure connection (SSL/TLS).
+ - name: ad-modify-computer-ou
+ arguments:
+ - name: computer-name
+ required: true
+ description: "The computer name\t"
+ - name: full-superior-dn
+ description: Superior DN, e.g., OU=computers,DC=domain,DC=com (The specified domain
+ must be the same as the current computer domain)
+ description: Modifies the computer organizational unit within a domain.
+ - name: ad-get-user
+ arguments:
+ - name: dn
+ default: true
+ description: Query by the user's Active Directory Distinguished Name
+ - name: name
+ description: Query by the user's name
+ - name: attributes
+ description: Include these AD attributes of the resulting objects in addition
+ to the default attributes
+ - name: custom-field-type
+ description: Query users by this custom field type
+ - name: custom-field-data
+ description: Query users by this custom field data (relevant only if the `custom-field-type` argument
+ is provided)
+ - name: username
+ description: Query users by the samAccountName attribute
+ - name: limit
+ description: Maximum number of objects to return (default is 20)
+ - name: email
+ description: Query by the user's email address
+ - name: user-account-control-out
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Include verbose translation for UserAccountControl flags
+ defaultValue: "false"
+ outputs:
+ - contextPath: 'ActiveDirectory.Users.dn'
+ description: The user's distinguished name
+ type: string
+ - contextPath: 'ActiveDirectory.Users.displayName'
+ description: The user's display name
+ type: string
+ - contextPath: 'ActiveDirectory.Users.name'
+ description: The user's common name
+ type: string
+ - contextPath: 'ActiveDirectory.Users.sAMAccountName'
+ description: The user's sAMAccountName
+ type: string
+ - contextPath: 'ActiveDirectory.Users.userAccountControl'
+ description: The user's account control flag
+ type: number
+ - contextPath: 'ActiveDirectory.Users.mail'
+ description: The user's email address
+ type: string
+ - contextPath: 'ActiveDirectory.Users.manager'
+ description: The user's manager
+ type: string
+ - contextPath: 'ActiveDirectory.Users.memberOf'
+ description: Groups the user is member of
+ type: string
+ - contextPath: 'Account.DisplayName'
+ description: The user's display name
+ type: string
+ - contextPath: 'Account.Groups'
+ description: Groups the user is member of
+ type: string
+ - contextPath: 'Account.Manager'
+ description: "The user's manager"
+ type: string
+ - contextPath: 'Account.ID'
+ description: The user's distinguished name
+ type: string
+ - contextPath: 'Account.Username'
+ description: The user's samAccountName
+ type: string
+ - contextPath: 'Account.Email'
+ description: The user's email address
+ type: string
+ description: Retrieves detailed information about a user account. The user can
+ be specified by name, email address, or as an Active Directory Distinguished Name (DN).
+ If no filter is specified, all users are returned.
+ - name: ad-get-computer
+ arguments:
+ - name: dn
+ description: The computer's DN
+ - name: name
+ description: Name of the computer to get information for
+ - name: attributes
+ description: Include these AD attributes of the resulting objects in addition
+ to the default attributes
+ - name: custom-field-data
+      description: Search computers by this custom field data (relevant only if the `custom-field-type` argument
+ is provided)
+ - name: custom-field-type
+      description: Search computers by this custom field type
+ outputs:
+ - contextPath: 'ActiveDirectory.Computers.dn'
+ description: The computer distinguished name
+ - contextPath: 'ActiveDirectory.Computers.memberOf'
+      description: Groups in which the computer is listed as a member
+ - contextPath: 'ActiveDirectory.Computers.name'
+ description: The computer name
+ - contextPath: 'Endpoint.ID'
+ description: The computer DN
+ - contextPath: 'Endpoint.Hostname'
+ description: The computer name
+ - contextPath: 'Endpoint.Groups'
+ description: Groups the computer is listed as a member of
+    description: Retrieves detailed information about a computer account. The computer
+      can be specified by name or as an Active Directory Distinguished Name (DN).
+      If no filters are provided, all computers are returned.
+ - name: ad-get-group-members
+ arguments:
+ - name: group-dn
+ required: true
+ description: "Group's Active Directory Distinguished Name"
+ - name: member-type
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - person
+ - computer
+      description: The type of members to query
+ defaultValue: person
+ - name: attributes
+ description: CSV list of attributes to include in the results, in addition to the
+ default attributes
+ outputs:
+ - contextPath: 'ActiveDirectory.Groups.dn'
+ description: The group DN
+ type: string
+ - contextPath: 'ActiveDirectory.Groups.members.dn'
+ description: The group member DN
+ type: string
+ - contextPath: 'ActiveDirectory.Groups.members.category'
+      description: The member category ("person" or "computer")
+ type: string
+ description: Retrieves the list of users or computers that are members of the
+ specified group
+ runonce: false
+ dockerimage: demisto/ldap:1.0.0.75
+tests:
+ - Active Directory Test
diff --git a/Integrations/Active_Directory_Query/Active_Directory_Query_description.md b/Integrations/Active_Directory_Query/Active_Directory_Query_description.md
new file mode 100644
index 000000000000..dafcf7191c77
--- /dev/null
+++ b/Integrations/Active_Directory_Query/Active_Directory_Query_description.md
@@ -0,0 +1,2 @@
+Active Directory search uses paging. You set the page size by specifying the 'page size' parameter in the instance settings.
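+For example, with a page size of 500, a query matching 1,200 objects is fetched in three consecutive pages.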
+
diff --git a/Integrations/Active_Directory_Query/Active_Directory_Query_image.png b/Integrations/Active_Directory_Query/Active_Directory_Query_image.png
new file mode 100644
index 000000000000..851f408149a8
Binary files /dev/null and b/Integrations/Active_Directory_Query/Active_Directory_Query_image.png differ
diff --git a/Integrations/Active_Directory_Query/CHANGELOG.md b/Integrations/Active_Directory_Query/CHANGELOG.md
new file mode 100644
index 000000000000..1702c966a286
--- /dev/null
+++ b/Integrations/Active_Directory_Query/CHANGELOG.md
@@ -0,0 +1,17 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+Added support for **debug-mode**, which logs extended information when enabled.
+
+## [19.9.1] - 2019-09-18
+ - Fixed an issue in the ***custom-field-data*** argument.
+ - Fixed an issue in the ***ad-create-contact*** command.
+ - Improved description of the ***filter*** argument in the ***ad-search*** command.
+ - Fixed the example value description for the ***custom-attribute*** argument in the ***ad-create-user*** and ***ad-create-contact*** commands.
+
+
+## [19.8.0] - 2019-08-06
+ - The default query now works as expected.
+ - The *dn* argument now works as expected.
+ - Added support for custom SSL certificates, by using the Docker environment variable: SSL_CERT_FILE.
diff --git a/Integrations/Active_Directory_Query/Pipfile b/Integrations/Active_Directory_Query/Pipfile
new file mode 100644
index 000000000000..4bdeb1f3fdfd
--- /dev/null
+++ b/Integrations/Active_Directory_Query/Pipfile
@@ -0,0 +1,20 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+flake8 = "*"
+astroid = "*"
+pytest-mock = "*"
+
+[packages]
+ldap3 = "*"
+
+[requires]
+python_version = "3.7"
+
+[pipenv]
+allow_prereleases = true
diff --git a/Integrations/Active_Directory_Query/Pipfile.lock b/Integrations/Active_Directory_Query/Pipfile.lock
new file mode 100644
index 000000000000..df3f7101ec40
--- /dev/null
+++ b/Integrations/Active_Directory_Query/Pipfile.lock
@@ -0,0 +1,220 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "9e3ae2508cf1327b0f1d9095d324f4b7095fe58df156a8c8e2a9cc7edc3f0ed6"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "ldap3": {
+ "hashes": [
+ "sha256:3f67c83185b1f0df8fdf6b52fa42c55bc9e9b7120c8b7fec60f0d6003c536d18",
+ "sha256:dd9be8ea27773c4ffc18ede0b95c3ca1eb12513a184590b9f8ae423db3f71eb9"
+ ],
+ "index": "pypi",
+ "version": "==2.5.2"
+ },
+ "pyasn1": {
+ "hashes": [
+ "sha256:da2420fe13a9452d8ae97a0e478adde1dee153b11ba832a95b223a2ba01c10f7",
+ "sha256:da6b43a8c9ae93bc80e2739efb38cc776ba74a886e3e9318d65fe81a8b8a2c6e"
+ ],
+ "version": "==0.4.5"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:1d5d0e6e408701ae657342645465d08be6fb66cf0ede16a31cc6435bd2e61718",
+ "sha256:8fc40235cd184bff5d7b8e1284a647005cbd36bbc87d0c39f6f6389ae26e17ad"
+ ],
+ "index": "pypi",
+ "version": "==2.2.0"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69",
+ "sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb"
+ ],
+ "version": "==18.2.0"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661",
+ "sha256:a796a115208f5c03b18f332f7c11729812c8c3ded6c46319c59b53efd3819da8"
+ ],
+ "index": "pypi",
+ "version": "==3.7.7"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:ee5fddfd792e6e1d664ee28f3fbe00dfc26d8d3c6f059ee78f4da4c19718007c",
+ "sha256:f19b23b22fb5a919a081bc31aabcc0991614c244d9215267e11abf2ca7b684ce"
+ ],
+ "version": "==4.3.9"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33",
+ "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39",
+ "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019",
+ "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088",
+ "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b",
+ "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e",
+ "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6",
+ "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b",
+ "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5",
+ "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff",
+ "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd",
+ "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7",
+ "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff",
+ "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d",
+ "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2",
+ "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35",
+ "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4",
+ "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514",
+ "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252",
+ "sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109",
+ "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f",
+ "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c",
+ "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92",
+ "sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577",
+ "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d",
+ "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d",
+ "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f",
+ "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a",
+ "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"
+ ],
+ "version": "==1.3.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:0125e8f60e9e031347105eb1682cef932f5e97d7b9a1a28d9bf00c22a5daef40",
+ "sha256:590044e3942351a1bdb1de960b739ff4ce277960f2425ad4509446dbace8d9d1"
+ ],
+ "markers": "python_version > '2.7'",
+ "version": "==6.0.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f",
+ "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746"
+ ],
+ "version": "==0.9.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:2bf4bd58d6d5d87174fbc9d1d134a9aeee852d4dc29cbd422a7015772770bc63",
+ "sha256:ee80c7af4f127b2a480d83010c9f0e97beb8eaa652b78c2837d3ed30b12e1182"
+ ],
+ "index": "pypi",
+ "version": "==2.3.0"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:067a1d4bf827ffdd56ad21bd46674703fce77c5957f6c1eef731f6146bfcef1c",
+ "sha256:9687049d53695ad45cf5fdc7bbd51f0c49f1ea3ecfc4b7f3fde7501b541f17f4"
+ ],
+ "index": "pypi",
+ "version": "==4.3.0"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:4d0d06d173eecf172703219a71dbd4ade0e13904e6bbce1ce660e2e0dc78b5c4",
+ "sha256:bfdf02789e3d197bd682a758cae0a4a18706566395fbe2803badcd1335e0173e"
+ ],
+ "index": "pypi",
+ "version": "==1.10.1"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:035a54ede6ce1380599b2ce57844c6554666522e376bd111eb940fbc7c3dad23",
+ "sha256:037c35f2741ce3a9ac0d55abfcd119133cbd821fffa4461397718287092d9d15",
+ "sha256:049feae7e9f180b64efacbdc36b3af64a00393a47be22fa9cb6794e68d4e73d3",
+ "sha256:19228f7940beafc1ba21a6e8e070e0b0bfd1457902a3a81709762b8b9039b88d",
+ "sha256:2ea681e91e3550a30c2265d2916f40a5f5d89b59469a20f3bad7d07adee0f7a6",
+ "sha256:3a6b0a78af298d82323660df5497bcea0f0a4a25a0b003afd0ce5af049bd1f60",
+ "sha256:5385da8f3b801014504df0852bf83524599df890387a3c2b17b7caa3d78b1773",
+ "sha256:606d8afa07eef77280c2bf84335e24390055b478392e1975f96286d99d0cb424",
+ "sha256:69245b5b23bbf7fb242c9f8f08493e9ecd7711f063259aefffaeb90595d62287",
+ "sha256:6f6d839ab09830d59b7fa8fb6917023d8cb5498ee1f1dbd82d37db78eb76bc99",
+ "sha256:730888475f5ac0e37c1de4bd05eeb799fdb742697867f524dc8a4cd74bcecc23",
+ "sha256:9819b5162ffc121b9e334923c685b0d0826154e41dfe70b2ede2ce29034c71d8",
+ "sha256:9e60ef9426efab601dd9aa120e4ff560f4461cf8442e9c0a2b92548d52800699",
+ "sha256:af5fbdde0690c7da68e841d7fc2632345d570768ea7406a9434446d7b33b0ee1",
+ "sha256:b64efdbdf3bbb1377562c179f167f3bf301251411eb5ac77dec6b7d32bcda463",
+ "sha256:bac5f444c118aeb456fac1b0b5d14c6a71ea2a42069b09c176f75e9bd4c186f6",
+ "sha256:bda9068aafb73859491e13b99b682bd299c1b5fd50644d697533775828a28ee0",
+ "sha256:d659517ca116e6750101a1326107d3479028c5191f0ecee3c7203c50f5b915b0",
+ "sha256:eddd3fb1f3e0f82e5915a899285a39ee34ce18fd25d89582bc89fc9fb16cd2c6"
+ ],
+ "markers": "python_version >= '3.7' and implementation_name == 'cpython'",
+ "version": "==1.3.1"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/Active_Directory_Query/cert.pem b/Integrations/Active_Directory_Query/cert.pem
new file mode 100644
index 000000000000..9ce06ef12e43
--- /dev/null
+++ b/Integrations/Active_Directory_Query/cert.pem
@@ -0,0 +1,18 @@
+-----BEGIN CERTIFICATE-----
+MIIC4jCCAcoCCQCTkmARc+wn5zANBgkqhkiG9w0BAQsFADAzMQswCQYDVQQGEwJV
+UzEQMA4GA1UECgwHVGVzdGluZzESMBAGA1UEAwwJMTI3LjAuMC4xMB4XDTE5MDcy
+NDE5MTI0OFoXDTI3MTAxMDE5MTI0OFowMzELMAkGA1UEBhMCVVMxEDAOBgNVBAoM
+B1Rlc3RpbmcxEjAQBgNVBAMMCTEyNy4wLjAuMTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAKBTz5R9uNJVFd7kz/O36Jv/Fe1ZK1HM5RSM93TFJ0feS0wN
+wctDNqEGsVxCKOsfAskl7QPE/rMI1Bb3n7lJBPoyoh32KTICXL6RirMD4pz3vS1N
+pK3Kl5NWYzK5vs3c/6hH+fCts+z6Esm294OrowKW3EZOkk+BhAO7K2Xtiz06c/4A
+Rs5F2VX3gzxYj/NAnrFu1KwYxtHbTwXjzVvnxxNhXr/0LV2kvOQtIHNONEZ+zfJX
+Ga8MUbGGjzs3qx8SH7gM6rkg/JskbUINqfpXg6zd6ozT3+0dpKmxl6rNX41oCOsW
+XikKq0YO+QlsP6iLwrX0e9hrLkPp2DMmNVD/jf8CAwEAATANBgkqhkiG9w0BAQsF
+AAOCAQEAFVMI3ffmlQxyKUI/l1Uklli0saAGHj2x2uD4eI4dAVXnpKE2ei2IOq8d
+cch1Osjj2hHoVCi+qwgztxbFmZ1zRyzDgd3VjkG5GHbl5nA94I+9jnC7gyv0qDHp
+jKBALlOSCQtvQjhrw0gbD7Ht4bCQTCZjgotzhDD82Jf5rCZZu/sL/VACn4GN5/vQ
+7d1aAfsNcF2rkNIF+eJelNJMIab8CrU0MLtSU9HooUe++Hzc+ph7WnYF+Yv/euQX
+BAu4sZ4WGFVvVbYw2Zwl9R30xZcKVtx9nsegFeda0vue00bwiH+WkiOO+FaHyOAl
+F3plkx21/deAutzcihoqGo+LpCjVzw==
+-----END CERTIFICATE-----
diff --git a/Integrations/Active_Directory_Query/connection_test.py b/Integrations/Active_Directory_Query/connection_test.py
new file mode 100644
index 000000000000..cedb97fd1180
--- /dev/null
+++ b/Integrations/Active_Directory_Query/connection_test.py
@@ -0,0 +1,139 @@
+import demistomock as demisto
+from Active_Directory_Query import main
+import socket
+import ssl
+from threading import Thread
+import time
+import os
+import pytest
+
+BASE_TEST_PARAMS = {
+ 'server_ip': '127.0.0.1',
+ 'secure_connection': 'None',
+ 'page_size': '500',
+ 'credentials': {'identifier': 'bad', 'password': 'bad'}
+}
+
+RETURN_ERROR_TARGET = 'Active_Directory_Query.return_error'
+
+
+def test_bad_host_no_ssl(mocker):
+ mocker.patch.object(demisto, 'params',
+ return_value=BASE_TEST_PARAMS)
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET)
+ # validate our mock of params
+ assert demisto.params().get('server_ip') == '127.0.0.1'
+ main()
+ assert return_error_mock.call_count == 1
+    # call_args holds the last call as a tuple of (args list, kwargs)
+ err_msg = return_error_mock.call_args[0][0]
+ assert len(err_msg) < 100
+ assert 'Failed to access' in err_msg
+
+
+def test_bad_ssl(mocker):
+ params = BASE_TEST_PARAMS.copy()
+ params['server_ip'] = '185.199.108.153' # disable-secrets-detection
+ params['secure_connection'] = 'SSL'
+ params['port'] = 443
+ mocker.patch.object(demisto, 'params',
+ return_value=params)
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET)
+ demisto_info_mock = mocker.patch.object(demisto, "info")
+ # validate our mock of params
+ assert demisto.params().get('secure_connection') == 'SSL'
+ main()
+ assert return_error_mock.call_count == 1
+    # call_args holds the last call as a tuple of (args list, kwargs)
+ err_msg = return_error_mock.call_args[0][0]
+ assert len(err_msg) < 100
+ assert 'Failed to access' in err_msg
+ assert 'SSL error' in err_msg
+ # call_args_list holds all calls (we need the first) with a tuple of args list and kwargs
+ info_msg = demisto_info_mock.call_args_list[0][0][0]
+    # the IP is not in the certificate, so hostname matching should fail
+ assert "doesn't match any name" in info_msg
+
+
+def ssl_bad_socket_server(port):
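+    # Minimal TLS server that speaks no real protocol: after a successful
+    # handshake it sends junk bytes so the client's error handling is exercised.
+    # A client refusing the self-signed cert (unknown CA) is also acceptable.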
+ context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+ # cert and keyfile generated with
+ # openssl req -x509 -nodes -days 3000 -newkey rsa:2048 -keyout key.pem -out cert.pem
+ try:
+ context.load_cert_chain('cert.pem', 'key.pem')
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) as sock:
+ sock.bind(('127.0.0.1', port))
+ sock.listen(5)
+ with context.wrap_socket(sock, server_side=True) as ssock:
+ try:
+ conn, addr = ssock.accept()
+ except ssl.SSLError as err:
+ if 'TLSV1_ALERT_UNKNOWN_CA' in str(err):
+                        # all is ok: the client refused our self-signed cert
+ return
+ raise
+ conn.recv(32)
+ msg = b'THIS IS A TEST SERVER WHICH IGNORES PROTOCOL\n\n'
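+                # double the ~46-byte banner 10 times (~46 KB) so the client
+                # receives a large junk payload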
+ for x in range(10):
+ msg += msg
+ conn.send(msg)
+ conn.shutdown(socket.SHUT_RDWR)
+ conn.close()
+ except Exception as ex:
+ pytest.fail("Failed starting ssl_bad_socket_server: {}".format(ex))
+ raise
+
+
+def test_faulty_server(mocker):
+ port = 9638
+ t = Thread(target=ssl_bad_socket_server, args=(port,))
+ t.start()
+ time.sleep(1) # wait for socket server to startup
+ params = BASE_TEST_PARAMS.copy()
+ params['server_ip'] = '127.0.0.1' # disable-secrets-detection
+ params['secure_connection'] = 'SSL'
+ params['unsecure'] = True
+ params['port'] = port
+ mocker.patch.object(demisto, 'params',
+ return_value=params)
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET)
+ # validate our mock of params
+ assert demisto.params().get('secure_connection') == 'SSL'
+ main()
+ t.join(5)
+ assert return_error_mock.call_count == 1
+    # call_args holds the last call as an (args, kwargs) tuple
+ err_msg = return_error_mock.call_args[0][0]
+ assert len(err_msg) < 100
+ assert 'Failed to access' in err_msg
+
+
+def test_ssl_custom_cert(mocker, request):
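+    # SSL_CERT_FILE points Python's ssl module at the self-signed test cert, so
+    # the handshake succeeds and the failure is a protocol error, not an SSL one.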
+ ENV_KEY = 'SSL_CERT_FILE'
+ os.environ[ENV_KEY] = 'cert.pem'
+
+ def cleanup():
+ os.environ.pop(ENV_KEY)
+
+ request.addfinalizer(cleanup)
+ port = 9637
+ t = Thread(target=ssl_bad_socket_server, args=(port,))
+ t.start()
+ time.sleep(1) # wait for socket server to startup
+ params = BASE_TEST_PARAMS.copy()
+ params['server_ip'] = '127.0.0.1' # disable-secrets-detection
+ params['secure_connection'] = 'SSL'
+ params['port'] = port
+ mocker.patch.object(demisto, 'params',
+ return_value=params)
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET)
+ # validate our mock of params
+ assert demisto.params().get('secure_connection') == 'SSL'
+ main()
+ t.join(5)
+ assert return_error_mock.call_count == 1
+    # call_args holds the last call as an (args, kwargs) tuple
+ err_msg = return_error_mock.call_args[0][0]
+ assert len(err_msg) < 100
+ assert 'Failed to access' in err_msg
+ assert 'SSL error' not in err_msg
diff --git a/Integrations/Active_Directory_Query/key.pem b/Integrations/Active_Directory_Query/key.pem
new file mode 100644
index 000000000000..eca6af3eed29
--- /dev/null
+++ b/Integrations/Active_Directory_Query/key.pem
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCgU8+UfbjSVRXe
+5M/zt+ib/xXtWStRzOUUjPd0xSdH3ktMDcHLQzahBrFcQijrHwLJJe0DxP6zCNQW
+95+5SQT6MqId9ikyAly+kYqzA+Kc970tTaStypeTVmMyub7N3P+oR/nwrbPs+hLJ
+tveDq6MCltxGTpJPgYQDuytl7Ys9OnP+AEbORdlV94M8WI/zQJ6xbtSsGMbR208F
+481b58cTYV6/9C1dpLzkLSBzTjRGfs3yVxmvDFGxho87N6sfEh+4DOq5IPybJG1C
+Dan6V4Os3eqM09/tHaSpsZeqzV+NaAjrFl4pCqtGDvkJbD+oi8K19HvYay5D6dgz
+JjVQ/43/AgMBAAECggEAVgRBNWCyPR5rKnfGXfLIeJFF2V5i0HNGYF01ElAIQBWy
+fi5PT9dSwtfNZjsUqvirTPHtFH2nTLSq3VujNQxJAFcUBVEhTvRXf+Kj8GFgPmjn
+9RtYA+nSorjRZdCj3tz3yPg8z3TSOIz+cXyZlOoL/gKkUZAsDT3mbv4HTZtBfeo6
+EK6OcM+56bZieqq7WG6Blw3A0lcs9h69yo+zo88//W1q+8LwHrMSarapYdReV7ov
+B0lM2Q45v7BNF12hlea4Do3jMPTsMJ5dV6LbYZDnRi2AVhg1FHreTUgISqAlYO/1
+nBr7WMjIz7d1HPN8QtwYEBNbQ3fsdLQCZ4BMNG6WyQKBgQDN2Wo+dNLigsc+w2YL
+0eJnh68P0ZwezAAT5KCFhvqvF1aD6+gx92YARE26twqhA8VyeH1uZjUf9qViInv4
+K48vb42/ktDUi3FLSuZ98SYp4ysUbi3pt+0/r+cH3oMQQx7nseDWnJNxjsr+UAD/
+FqIk1xwAL4OfmM72bb3pbfmE5QKBgQDHYz9RRWscNWlcsCB+h7yw3z4C9TyRjzmr
+uadNfDXhmW+F+gRHocdaUEGv+zrnDfpn3U2zvgKH7Y218ud6kniI30DW61Z9Xc+z
+6o5IUZdGlnyzMH1TjMFqNaTsUUrp/A7wDGuuRQhsB6EFmymzohSjs4ABHjYYhLPk
+8RvhHa3dEwKBgBRzaDHCS31DfuYGZaoA9RxSB5truLX6kHtpeGUrrtYcXHUIx+3G
+5XRbVNma+8xZYpFn1RZVwytkxrOEN4rFURlmXM/Olz62h8/yJgoXHjKFJi+GbrtM
+kZ6FLE/E1IKWGIjuTCcmMSPUCujPK/or1noQgPCnx8zgUpd6+aYqTeLdAoGAXgcF
+wxPTkTf2+gjtKf5lE5wTqF1yNMqwTbV9mmLrHfq3xxzCi+1VD9gtQgpeGrxSzExm
+6k6+7s8uNxHX68vLH/D4qOHOpcxpuNTySFSB3H33TKGfWJlWVPn7Tp5/j5yYhse0
+uDhB4oAuhXBI2DelOBXcVaqjpChglaTGmwDeDxUCgYEAzYbviJZzfCMvq7VI65NQ
+EvfsWRaGcN33vhWBm/bavaSH81jGueBnQi53Ii2upy87DleHFM1529njU/5Xfy8v
+ngFK/KkB72W9zmN1nftatkx8zdoFueH1P/UvVBe/f65FElD+i5yoAUjTHJRViXB8
+cQNjoZ3cthMluzD+IxZBOq4=
+-----END PRIVATE KEY-----
diff --git a/Integrations/Alexa/Alexa.py b/Integrations/Alexa/Alexa.py
new file mode 100644
index 000000000000..00342483545a
--- /dev/null
+++ b/Integrations/Alexa/Alexa.py
@@ -0,0 +1,85 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+import xml.etree.ElementTree as ET # type: ignore
+import requests
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+
+"""GLOBAL VARIABLES/CONSTANTS"""
+THRESHOLD = int(demisto.params().get('threshold'))
+USE_SSL = not demisto.params().get('insecure', False)
+
+
+"""COMMAND FUNCTIONS"""
+
+
+def alexa_domain_command():
+ domain = demisto.args().get('domain')
+ resp = requests.request('GET', 'https://data.alexa.com/data?cli=10&dat=s&url={}'.format(domain), verify=USE_SSL)
+ root = ET.fromstring(str(resp.content))
+ try:
+        # ElementTree XPath positions are 1-based, so "SD[0]" is invalid; take
+        # the first POPULARITY under the first SD element instead
+        rank = root.find("SD/POPULARITY").attrib['TEXT']  # type: ignore
+ if int(rank) > THRESHOLD:
+ dbot_score = 2
+ dbot_score_text = 'suspicious'
+ else:
+ dbot_score = 0
+ dbot_score_text = 'unknown'
+ except AttributeError:
+ rank = 'Unknown'
+ dbot_score = 2
+ dbot_score_text = 'suspicious'
+ dom_ec = {'Name': domain}
+ dbot_ec = {
+ 'Score': dbot_score,
+ 'Vendor': 'Alexa Rank Indicator',
+ 'Domain': domain,
+ 'Type': 'domain'
+ }
+ ec = {
+ 'Domain(val.Name && val.Name == obj.Name)': dom_ec,
+ 'DBotScore': dbot_ec,
+ 'Alexa.Domain(val.Name && val.Name == obj.Domain.Name)': {
+ 'Name': domain,
+ 'Rank': rank
+ }
+ }
+ hr_string = ('The Alexa rank of {} is {} and has been marked as {}'
+ ' while the threshold is {}'.format(domain, rank, dbot_score_text, THRESHOLD))
+ demisto.results({
+ 'Type': entryTypes['note'],
+        'ContentsFormat': formats['json'],
+ 'Contents': xml2json(resp.content),
+ 'HumanReadable': hr_string,
+ 'EntryContext': ec
+ })
+
+
+def test_module_command():
+ domain = 'google.com'
+ resp = requests.request('GET', 'https://data.alexa.com/data?cli=10&dat=s&url={}'.format(domain), verify=USE_SSL)
+ root = ET.fromstring(str(resp.content))
+    # ElementTree XPath positions are 1-based, so "SD[0]" is invalid
+    rank = root.find("SD/POPULARITY").attrib['TEXT']  # type: ignore
+ if rank == '1':
+ result = 'ok'
+ else:
+ result = 'An error has occurred'
+ return result
+
+
+"""EXECUTION BLOCK"""
+try:
+ handle_proxy()
+ if demisto.command() == 'test-module':
+ test_result = test_module_command()
+ demisto.results(test_result)
+    elif demisto.command() == 'domain':
+ alexa_domain_command()
+except Exception as e:
+ LOG(e)
+ LOG.print_log(False)
+ return_error(e.message)
diff --git a/Integrations/Alexa/Alexa.yml b/Integrations/Alexa/Alexa.yml
new file mode 100644
index 000000000000..a75652ec35fb
--- /dev/null
+++ b/Integrations/Alexa/Alexa.yml
@@ -0,0 +1,63 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: Alexa Rank Indicator
+ version: -1
+configuration:
+- defaultvalue: '2000000'
+  display: Sensitivity threshold for determining which domains are suspicious versus
+    unknown.
+ name: threshold
+ required: true
+ type: 0
+- display: Use System Proxy
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (Not Secure)
+ name: insecure
+ required: false
+ type: 8
+description: Alexa provides website ranking information that can be useful in determining
+ if the domain in question has a strong web presence.
+display: Alexa Rank Indicator
+name: Alexa Rank Indicator
+script:
+ commands:
+ - arguments:
+ - default: false
+      description: The domain to search.
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Provides an Alexa ranking of the Domain in question.
+ execution: false
+ name: domain
+ outputs:
+ - contextPath: Domain.Name
+ description: The Domain being checked
+ type: string
+ - contextPath: DBotScore.Score
+ description: DBot score returned
+ type: number
+ - contextPath: DBotScore.Vendor
+ description: Vendor reporting the score
+ type: string
+ - contextPath: DBotScore.Domain
+ description: Domain being reported
+ type: string
+    - contextPath: Alexa.Domain.Name
+ description: The Domain being checked
+ type: string
+ - contextPath: Alexa.Domain.Rank
+ description: Alexa rank as determined by Amazon
+ type: string
+ isfetch: false
+ runonce: false
+ script: ''
+ type: python
+image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHgAAAAyCAYAAACXpx/YAAAAAXNSR0IArs4c6QAAEchJREFUeAHtWQl4VdW13me8881AQkKIjALG+Ck+gsKzCpQhwBOLtsS+r9ZXbLVaeFQs2mdbAb9XwVCLAxVqFat8VWtAgY+CIJP4ECiChiEQIARoNCQMme545vevk3vCJWJraWmtnv3dfc8+a6+91tpr7bX22vsw5hZXA64GXA24GnA14GrA1YCrAVcDrgZcDbgacDXgasDVgKsBVwOuBlwNuBpwNeBq4F9KA723HynrvafR6r2t9sNJFRXCP1t4/p8twBeOv8GYaTFmoX4eimvgS2AFi5F1Px8WFi/B/L7UJOHA8GByYfNzoQfXwH9nM6iGwSzDYjzq56GcF6Ity+JycnK6eUKh/r7s7MLZs2ef1/9pAmf06JGFkpHeT7S6d+/epbCw0JcO79wO5uV19YTD/TC+x6BBg6TO/RfzXlBQ4EfN6dWrl/ezjCdZCZ/G/SX84Zs3i7mbj+Xnbz/WK//dI7md8VW4sArjKp9i30G7dkkFW2ou677jo0KilT6ekjIM49JhndvU3zl5u3zHkXDB1qM97Lpq13lz6CDmCwbLdF2fAgJXWaYZ4DgugVotiuLTiWj09w4jr98/2zDNr6KvzRMI/EiJx39smmYpCBm8ICx7+MEHZ5TPnz9YV9W5oHUl4M2AP6/EYk+BRkfcCgQCYzTDeACAfwO/MOipwK0B7m9A8zcOri8j43ZdUaaD1icKCQ/82aC9ljoDubn5eiz2E9Mwxlocl8lZ1lme51fJolgeiUTOEk4mSlxRXoZRswSe3yx6vRXgtyAla6vNPxZ7EqgdstK44JbDRZzBvoMEarxlmX2wxXo5nm9mAr/TY2g/bxpZtM3GW19dZvoDr/OJWOW4pr0lS8vKKGoj6bK48Mbqu01emmKZxhUwo4XxB3jd+FVk9BUvEk7W+ur7NH/gTi4ePdij2by3qqxYJbhTQusP/sIKhr4ixqOv5yjCogav8TVo4FugfR1qDmQyOVE4wZlGRUBX5jaWXhOzV5AnGJyiadqvgOTQIoE8qEMAv94XCimJSGQ5dcKYQ1FvhEGSaiLxvKHrNxKcRgI+vXzePEk3zQlo90zB87An/dIbCDQmY7FXCIbF9HVF014D/Q6Ptfkxdq1lGIs8fj8PpS+0xxtGEWhdT+3OxZbWsnoTPNC1a54aiayBca+18TAX9Ofi/QpY6ivhcPjmtra2JkEQAqA3Gvx8MLAPc7gF7wNpDPBJ1icg33Es6jdsOqk/Q9UnG+HsB7mWpuM8MzfBkTTDNAabntA4TdWGBNYfHBYbXbTPMDSmI0zzhk7rr6ME3j7wmO4PPczi0bjArDUwsKQb/Fjm8S72ravqmigtflwxzBVGLDrDCoaH1BrNhzC43CEQWHvgTsXrn8FF2uq8jK+ICPEMTRMXWzzz8abxPphtteermsNYMPRTPWp0gxG/x1OINDRtLibMyCslSfq2x+cbDEW8TsQB52DEyQ4jINmrCnAv4EPgIet5jjvu9GMPmoq+bMA3gWmzDQdtKHEitSkMIlLMA44Efpooy1PBb5AoCM9SP+CEO9nZHrDK10KmcrvK8hzI9T7hUcH4CLzzPWrr0ehMx7jAedUrSaPx/B31AT4UC+p/qI0xtC4UasMQ14JXb+CthqxNBIMAzDAMW1b73fkzjQViS/MtmUy7KlF61QQY5DZRZ4NZpHW37vFnqYp2D6FSiDZ0g5nauQDgXb33JoWJP4bxGkTdGIaxtybGFN+M9q1GUklizMzgH/YVxcdeeZLTtbuMSJuicsIjvrVV1xHN0FvVA5KM+6URT6jo/86ZMVfUwztPCYZRJnPG1cnS4qGgOQnP4byu32VEY4amm3eENlb3s8MaPOY52etdQyGXCFLxZ2QMEmTZoir5fDugeOiAMdnnW+XAEa4fS+GWCB5PMgXHQg3ZCoLXTnZwRY9nHeFijw+Bz5NEx+vzPU4wKn7s/TC2TUPyemsvtB9ibAH6Tjg0Qed+GkuLFKH2NMHxbKI8guDZyCPAty0Fr6c8IRdhHLDmFA0T3voNwsVcvuvQBY8NBPssxbPig2n8xmOWuKLSHiMsryzj1x22pJUfVjp7pbRyz0v8puOWvHLPw51pYtwyu2955SNOn7yicia/oZZo7spZ+UGBuLxyI7/hmCWn4Ti4nZ9YvRxkOMq/VW35V+0dL8ZOn24A0vepo1t+fk6U44bpltVTU1V79RAB27KdKMETmMBx7xDYw/NHVI5rwiLoBo9rEUKhrSwSYZxpHkobZi/pM2fORACbTnAkQZmNZ8/egD24lx6PFwPUkdTBi85jW1xcLB8+evQleFwPGguvW3XL+PELli5dyrDvXgXeOQTHvvunqVOnNiICsG7dup2K1NR8hLkVwTO7JXX9ymy//zDhUYGsjR5R3JBA2yNJlfBokMFC5jiJnilvt3Gdv4zlu/oqJt8bwmWCrs/UrcEsqcBtzfaECQOZjqmiFh+YZNU+t0vaoxmDWTRGQb1FXrb7aktvD9+cKFqWbjQwVWembpQ4PHrIgfLjrc3DTdk3ojUZ32R6PAP4lqa3+5ne8ioHKfXMWVkdijNtgGGZ3SxdDfo4MYDQ5LNl0AzJFoq8xxOP/9xsaZkARXwiM+xE036lecAIdrjOyMjgIomEE5O0DJ43yYrYv3nGd9gM+mgv5EnxZHLWRw0Nt4NfgQPvQHAAac/a48cfgHFHEwiG+djv8fw3jGsnMOk00BefNWuWRQbev3+/hkgRQT9+kFfXC7Ewqh2yMFI0GAwqLS0tBCL2VLkOiQmaKsFXdxYnRb48qrIxzCNL2EeAbeIHERJYIprejkkxmkcb77NmMeupFULASmpZUBbTOH4h5GNMlB2yYAgpVI32i0Iwh9swq2Z8PwUL6e5oIr7NEOUBXCJ+Muzxfq9qYlrShROOZ8D4qc1q7IeMF/vYeuZlrDPMFbyYgkWn60xkublBLRpdhn3q34krFPCGIIqvAClX0/XnOiS5cAPydJT0dgewc4OOQvsOHHjJ2edoD0em/iLwDESNJZjkJ442SPKGou+RFC1dEsVpMMqJzrTpnTyvE5yMZhfM7c/dDdO4zmPtcYFXd+QldH2lJYf6IjveJOptLzNOPAFbxU3NmKh6Az9hGsUBFBVrnkfuCK+kwp3CmcmrW0wUmcTUF7DiayDkOT4IObQY8P8xgB2yGhHLx3hTgGtjsRiyokUyQa7OJoo/T98xP9AE+WkumYwLvDoHYec96LJFwkqKaearFgejKyoTu4rixLMp4wLh/5C9TqLQ5AuHr+fo0A7OKB2MHQYX+6w6dIiycHuPBr/q3OzsifX19XFvZmZPKMeZuAVb2DzJ26PJ5CLIYZ/vkGy90j0/f3VNTU26CCdpYLukVrikpIQikzZixAgkrFaAEGlLQf9JLKwLOSihfGoxkuoEywvjtrYc6trVN6F+QkncQfYufncgE+ExjgeT95BHagZ7FGxHZdVGl0fyG
5AcFPCKsilx142vOWM/7ZmzeGuoRUv+1hLELoIS3W5I3qGKor6Qt2TPVxvvvCZWXFEhH4oY30eqzkRVe0T57o3zHVoQjJNeeDeJ1Bgug2weIWoh9kC7H+Gjxtl34NFDUsYlI2ellObQuegneBU5dEGkjoxLxDhdL4EBPNTGM4QQbxuUQjlkuYbgVLBPlp6oq9uLpHAtjnfTcPzJFoPBfbCgnQVj7GW1tbXdCHfv3r359E5t9J+WeX6fqqpwr7+u6KpeAMZkuI/SjUtULE2HY8LLUh5rp9HUVnRrFhyFzsGcpr9De6Km6Pc4idefk6BVUeeasr+ET8SX9fTIY/lYdAver2uOnplN406qPbyWqnVhiSTJdP62jAljsQkkD+RGUmNZS+3IDzgUORoXECORBX8N+9VPiRgVeFz//QcP3tX+9rf9w2vbj04gA2MPpowbPEfpmtZx5gM8FxvjVMjK4VKlNJ0j+vJxVu1PcFymPJ3E8Sfa2HgKZ1r73IrFE4olk4+FQqEboonE43jPoPFICF+jczDe/2oPljQki7EEjGkN8j617iaSaxCSJ8+C9aW6Zv2IxRGeVaM9/NMerGg4Juk5Gb/ecnnus5uDXt18jmttaTUtYfiKxuyFXRZt7E40qAYXvdfV98z62wiP5PQ9ve6bBhOmcNiCsBLvr7ljSJvXFO7jWtuaDEu837Ng3bgmwDhVr2NghePYZPJ4ohV4dnW+vGDDE0j8+tuJH2Tia2tq5kDpbUQcky+EwjYoqroCRjdxq/MBwSm84axrKwpI6R5gKwsLgCKkA5dBxwm1HcoEDbuf9/u3gJ+9l0CoTEVRliO7XY92PvhtJ35UgB8qw24GWj60dfDfi7D9Jsb+DsZcg/dakgshN4/wgz7fo+g7SG3A7kio6lY8v0XvoLtLzsr6X2qnZHOyHEdmgpOsttyQpQNOY3yW+hYfjbxvcVKmphpvS+Wrt+1pPbVHV621fFJTWAIJjapRVs15LLOGi0aR6HCFsYRW2RJLPhmZOuqwrGv3Yr+MGpznnpZIcj/RQN2RiLQeUgXfG7FkfLhnzur+iO6/xgKxRF2bkrhv5MfEPzpl2EFJUx7CHbeoK+ZiX/maQsm0fsHF4pZh8rc3n2mtlOateQ+78UFT0e7mEkods0RmqXqIJnUUR4UyKGcbQnQjKR8KfNMriiMzAoEJUM47uIS4NxGLzSNmUOo+Ujbqh1D4GYLh4kIBofdtOGM7EQYxYyiW55sIL4W/j2CxU6cacd6dhL6N4NeAWg/vWivJcmmoe/ex4P0Wkryf4fpxxlIsUCRUc3ERUqImkwNx6/R1LZn8Ntr/ccPQoQNkWb4FCRrQGDt79uzHAa93HMYuAc2TxIrmAtkXyqHQzZH6eltWZPZ0BftHkgnP3TgBwA/s0oJxu1PwPSmY/WiaNr5NMtltQiyymGlmg+0hhpUUNfUHXTRzmJhIrOQ1c1+/Z9bI8WmjKkVN/yGvqpWcotYxHdkXFnzigXG/92jaCCHatoRp1llTNwdYutkTIWq/GI08FEzquI0yvsnzUqWYiP9MmT52dboMyQfGvihFW+fznHwEoe/WxPTSZbKS/E8+mdgGmcKg1Ys3jLd9BjdUNswZvKptzpeEc98BaGXTxwFcDoTTCadWPOuVfnE/aZLg3DSl4XKTAMe74712F+Gl4Gmo8CTghQsLs+luOL3D4ZcOQwgfKAcCD+MCYgmOPUth8IXOBUU6ntNGeO4Csr0603b68bygrIBfUNa0cSx3dkUwPH9t9gXmn47GaK8tnl3hRIrz+grnV/hCc97skv3MmjDpoaMTNjjvvaPjXMPuPxch2/U4e212TvnK0Dmsdv1SVDpHPL33Am1vMDgTR6eAlJHxRLSh4fQFUC4FiAdf+nhwG/ZeuktV4Q252IP7wvtafbLc1/mIcCmYf6lokjfIfv9v4UV/wnMmfd77RyiA+NKqJc+mMzTdaOE6cjtq3V/6FPmPkO8LxwN3tnfT/S5qBIZ+CeGzlO6XL9VE6SsR7rT/CwtrK+7MnyVDSx5PFUL18kvF80tPF3tcfyh4GYxsUYXya6D857Ev3o5z6eXkZRerpLy8vEAgK+tq0LsXPN4E/RbiQQb1ZmT0Ifp4N3G7devF8vgyjfvMe/CFlELei8+DD8KrRqLaKNgbYyB6DBnpQeyZ1chKjyHzqjcFoRkZbgxZLK55cA5BwVk7CNwcU9e7Y1+9HOAi7LVFoNQb9DCU7v25bUB9PB6JrKJxiBqPoq+0f58+N1VVVeFe0C2XVANQNofbsBFQ/MvYFxudT27nPVOeDs9TAU9QpTZ55nl4qc+ToHMGHvtaIBwuTc/AKZNHuF4CfkWXdFJfIOJ/kwd31gN9h8Ut0jBcfIxC5jsYntgLOGHybvJvE84rSLgOx1cOnK/JO+2KrjYIcpzDhQTOxBv9Xu8WOtcC3rkI+E7sca43O3e675/UwN/VwOnkKeM9evRoAW6peuJoc1lGVtZlA/r1e+iPO3dmXTtwYOJUY2N5/cmTNbBwHS5VTvTt27d+9+7ddvhOp+O2XQ24GnA14GrA1YCrAVcDrgZcDbgacDXgasDVgKsBVwOuBlwNuBpwNeBq4F9eA/8P/d6pgTGZCKcAAAAASUVORK5CYII=
+tests:
+- Alexa Test Playbook
+fromversion: 3.5.0
diff --git a/Integrations/Alexa/Alexa_description.md b/Integrations/Alexa/Alexa_description.md
new file mode 100644
index 000000000000..6226f17d3a0c
--- /dev/null
+++ b/Integrations/Alexa/Alexa_description.md
@@ -0,0 +1,3 @@
+To use this integration, run the ```!domain``` command. The command accepts a
+ "domain" argument specifying the domain to query. The threshold is the Alexa
+ rank above which a domain is considered suspicious rather than unknown.
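+ For example: ```!domain domain="google.com"```.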
\ No newline at end of file
diff --git a/Integrations/Alexa/Alexa_image.png b/Integrations/Alexa/Alexa_image.png
new file mode 100644
index 000000000000..b2d12bef5ce1
Binary files /dev/null and b/Integrations/Alexa/Alexa_image.png differ
diff --git a/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.py b/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.py
new file mode 100644
index 000000000000..a67acb1886f7
--- /dev/null
+++ b/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.py
@@ -0,0 +1,469 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+
+import json
+import requests
+import dateparser
+from typing import Dict
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+CLIENT_ID = demisto.params().get('client_id')
+CLIENT_SECRET = demisto.params().get('client_secret')
+# Strip leading/trailing slashes to prevent a malformed URL path to the service
+SERVER = demisto.params().get('url', '').strip('/')
+
+# Should we use SSL
+USE_SSL = not demisto.params().get('insecure', False)
+IS_FETCH = demisto.params().get('isFetch')
+# How much time before the first fetch to retrieve incidents
+FETCH_TIME = demisto.params().get('fetch_time', '3 days')
+# Service base URL
+BASE_URL = SERVER + '/api/2.0'
+# Headers to be sent in requests
+HEADERS = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
+TIME_FORMAT = demisto.params().get('time_format', 'auto-discovery')
+AUTH_TOKEN = ''
+
+
+''' HELPER FUNCTIONS '''
+
+
+def parse_time(time_str):
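+    # Return the user-configured format, or guess between the two ISO-8601
+    # variants the API emits (with and without milliseconds).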
+ if TIME_FORMAT != 'auto-discovery':
+ return TIME_FORMAT
+
+ regex_to_format = {
+ r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z': '%Y-%m-%dT%H:%M:%SZ',
+        r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z': '%Y-%m-%dT%H:%M:%S.%fZ'
+ }
+
+ selected_format = '%Y-%m-%dT%H:%M:%SZ'
+ for regex, date_format in regex_to_format.items():
+ if re.match(regex, time_str):
+ selected_format = date_format
+ break
+
+ return selected_format
+
+
+def http_request(method, url_suffix, params=None, headers=None, data=None, **kwargs):
+ data = data if data else {}
+ if not headers:
+ headers = HEADERS
+ headers['Authorization'] = 'Bearer ' + AUTH_TOKEN
+ # A wrapper for requests lib to send our requests and handle requests and responses better
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ headers=headers,
+ **kwargs
+ )
+
+ # Handle error responses gracefully
+ if res.status_code == 401:
+ raise Exception('UnauthorizedError: please validate your credentials.')
+ if res.status_code not in {200}:
+        raise Exception('Error in API call to AlienVault USM Anywhere [{}] - {}'.format(res.status_code, res.reason))
+
+ return res.json()
+
+
+@logger
+def get_token():
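+    # OAuth2 client-credentials flow: the client ID and secret are sent via HTTP
+    # basic auth, and the returned access token is used as a bearer token.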
+ basic_auth_credentials = (CLIENT_ID, CLIENT_SECRET)
+
+ res = http_request('POST', '/oauth/token',
+ params={'grant_type': 'client_credentials'},
+                       headers={'Content-Type': 'application/x-www-form-urlencoded'},
+ auth=basic_auth_credentials)
+
+ return res.get('access_token')
+
+
+@logger
+def get_time_range(time_frame=None, start_time=None, end_time=None):
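+    # Translates a named time frame (or custom start/end times) into a pair of
+    # epoch-millisecond timestamps; returns (None, None) when no frame is given.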
+ if time_frame is None:
+ return None, None
+
+ if time_frame == 'Custom':
+ if start_time is None and end_time is None:
+ raise ValueError('invalid custom time frame: need to specify one of start_time, end_time')
+
+ if start_time is None:
+ start_time = datetime.now()
+ else:
+ start_time = dateparser.parse(start_time)
+
+ if end_time is None:
+ end_time = datetime.now()
+ else:
+ end_time = dateparser.parse(end_time)
+
+ return date_to_timestamp(start_time), date_to_timestamp(end_time)
+
+ end_time = datetime.now()
+ if time_frame == 'Today':
+ start_time = datetime.now().date()
+
+ elif time_frame == 'Yesterday':
+ start_time = (end_time - timedelta(days=1)).date()
+
+ elif time_frame == 'Last Hour':
+ start_time = end_time - timedelta(hours=1)
+ elif time_frame == 'Last 24 Hours':
+ start_time = end_time - timedelta(hours=24)
+ elif time_frame == 'Last 48 Hours':
+ start_time = end_time - timedelta(hours=48)
+ elif time_frame == 'Last 7 Days':
+ start_time = end_time - timedelta(days=7)
+ elif time_frame == 'Last 30 Days':
+ start_time = end_time - timedelta(days=30)
+ else:
+ raise ValueError('Could not parse time frame: {}'.format(time_frame))
+
+ return date_to_timestamp(start_time), date_to_timestamp(end_time)
+
+
+@logger
+def parse_alarms(alarms_data):
+ if not isinstance(alarms_data, list):
+ alarms_data = [alarms_data]
+
+ alarms = []
+ for alarm in alarms_data:
+ events = []
+ for event in alarm.get('events', []):
+ # search command return the event object under sub-key message
+ if 'message' in event:
+ event = event['message']
+
+ events.append({
+ 'ID': event['uuid'],
+ 'OccurredTime': event['timestamp_occured_iso8601'],
+ 'ReceivedTime': event['timestamp_received_iso8601'],
+ })
+
+ alarms.append({
+ 'ID': alarm['uuid'],
+ 'Priority': alarm['priority_label'],
+ 'OccurredTime': alarm['timestamp_occured_iso8601'],
+ 'ReceivedTime': alarm['timestamp_received_iso8601'],
+
+ 'RuleAttackID': alarm.get('rule_attack_id'),
+ 'RuleAttackTactic': alarm.get('rule_attack_tactic'),
+ 'RuleAttackTechnique': alarm.get('rule_attack_technique'),
+ 'RuleDictionary': alarm.get('rule_dictionary'),
+ 'RuleID': alarm.get('rule_id'),
+ 'RuleIntent': alarm.get('rule_intent'),
+ 'RuleMethod': alarm.get('rule_method'),
+ 'RuleStrategy': alarm.get('rule_strategy'),
+
+ 'Source': {
+ 'IPAddress': alarm.get('alarm_source_names') or alarm.get('source_name'),
+ 'Organization': alarm.get('alarm_source_organisations') or alarm.get('source_organisation'),
+ 'Country': alarm.get('alarm_source_countries') or alarm.get('source_country'),
+ },
+ 'Destination': {
+ 'IPAddress': alarm.get('alarm_destination_names') or alarm.get('destination_name'),
+ },
+ 'Event': events
+ })
+
+ return alarms
+
+
+@logger
+def parse_events(events_data):
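+    # The event name only appears inside the raw "log" string, so extract the
+    # "signature" field with a regex.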
+ regex = re.compile(r'.*"signature": "([\w\s]*)"')
+ events = []
+ for event in events_data:
+ event_name = ''
+ match = regex.match(event.get('log', ''))
+ if match:
+ event_name = match.group(1)
+
+ events.append({
+ 'ID': event.get('uuid'),
+ 'Name': event_name,
+ 'OccurredTime': event.get('timestamp_occured_iso8601'),
+ 'ReceivedTime': event.get('timestamp_received_iso8601'),
+ 'Suppressed': event.get('suppressed'),
+
+ 'AccessControlOutcome': event.get('access_control_outcome'),
+ 'Category': event.get('event_category'),
+ 'Severity': event.get('event_severity'),
+ 'Subcategory': event.get('event_subcategory'),
+
+ 'Source': {
+ 'IPAddress': event.get('source_name'),
+ 'Port': event.get('source_port'),
+ },
+ 'Destination': {
+ 'IPAddress': event.get('destination_name'),
+ 'Port': event.get('destination_port')
+ },
+ })
+
+ return events
+
+
+def dict_value_to_int(target_dict: Dict, key: str):
+ """
+    :param target_dict: A dictionary that contains the given key
+    :param key: The key whose value should be converted to an integer
+    :return: The integer representation of the key's value in target_dict
+ """
+ try:
+ if target_dict:
+ value = target_dict.get(key)
+ if value:
+ target_dict[key] = int(value)
+ return target_dict[key]
+ except ValueError:
+ raise ValueError(f'The value for {key} must be an integer.')
+
+
+def item_to_incident(item):
+ incident = {
+ 'Type': 'AlienVault USM',
+ 'name': 'Alarm: ' + item.get('uuid'),
+ 'occurred': item.get('timestamp_occured_iso8601'),
+ 'rawJSON': json.dumps(item),
+ }
+
+ return incident
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+ Performs basic get request to get alarm samples
+ """
+ # the login is executed in the switch panel code
+ if IS_FETCH:
+ # just check the correctness of the parameter
+ parse_date_range(FETCH_TIME)
+ search_alarms(limit=2)
+ demisto.results('ok')
+
+
+def get_alarm_command():
+ """
+ Gets alarm details by ID
+ """
+ args = demisto.args()
+ alarm_id = args['alarm_id']
+
+ # Make request and get raw response
+ response = get_alarm(alarm_id)
+
+ # Parse response into context & content entries
+ alarm_details = parse_alarms(response)
+
+ return_outputs(tableToMarkdown('Alarm {}'.format(alarm_id), alarm_details),
+ {'AlienVault.Alarm(val.ID && val.ID == obj.ID)': alarm_details},
+ response)
+
+
+def get_alarm(alarm_id):
+ res = http_request('GET', '/alarms/' + alarm_id)
+
+ return res
+
+
+def search_alarms_command():
+ args = demisto.args()
+ time_frame = args.get('time_frame')
+ start_time = args.get('start_time', 'now-7d')
+ end_time = args.get('end_time', 'now')
+ show_suppressed = args.get('show_suppressed', 'false')
+ limit = int(args.get('limit', 100))
+
+ start_time, end_time = get_time_range(time_frame, start_time, end_time)
+
+ result = search_alarms(start_time=start_time, end_time=end_time, show_suppressed=show_suppressed, limit=limit)
+ alarms = parse_alarms(result)
+
+ return_outputs(tableToMarkdown('Alarms:', alarms),
+ {'AlienVault.Alarm(val.ID && val.ID == obj.ID)': alarms}, result)
+
+
+@logger
+def search_alarms(start_time=None, end_time=None, status=None, priority=None, show_suppressed=None,
+ limit=100, rule_intent=None, rule_method=None, rule_strategy=None, direction='desc'):
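+    # Only the first page is requested; "limit" is passed as the page size.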
+ params = {
+ 'page': 1,
+ 'size': limit,
+ 'sort': 'timestamp_occured,{}'.format(direction),
+ 'suppressed': show_suppressed
+ }
+
+ if status:
+ params['status'] = status
+ if priority:
+ params['priority_label'] = priority
+ if rule_intent:
+ params['rule_intent'] = rule_intent
+ if rule_method:
+ params['rule_method'] = rule_method
+ if rule_strategy:
+ params['rule_strategy'] = rule_strategy
+
+ if start_time:
+ params['timestamp_occured_gte'] = start_time
+ if end_time:
+ params['timestamp_occured_lte'] = end_time
+
+ res = http_request('GET', '/alarms', params=params)
+ if res['page']['totalElements'] == 0:
+ return []
+
+ return res.get('_embedded', {}).get('alarms', [])
+
+
+def search_events_command():
+ args = demisto.args()
+ time_frame = args.get('time_frame')
+ start_time = args.get('start_time', 'now-7d')
+ end_time = args.get('end_time', 'now')
+ account_name = args.get('account_name')
+ event_name = args.get('event_name')
+ source_name = args.get('source_name')
+ limit = int(args.get('limit', 100))
+
+ start_time, end_time = get_time_range(time_frame, start_time, end_time)
+
+ result = search_events(start_time=start_time, end_time=end_time, account_name=account_name, event_name=event_name,
+ source_name=source_name, limit=limit)
+ events = parse_events(result)
+
+ return_outputs(tableToMarkdown('Events:', events),
+ {'AlienVault.Event(val.ID && val.ID == obj.ID)': events},
+ result)
+
+
+@logger
+def search_events(start_time=None, end_time=None, account_name=None, event_name=None, source_name=None, limit=100,
+ direction='desc'):
+ params = {
+ 'page': 1,
+ 'size': limit,
+ 'sort': 'timestamp_occured,{}'.format(direction),
+ }
+
+ if account_name:
+ params['account_name'] = account_name
+ if event_name:
+ params['event_name'] = event_name
+ if source_name:
+ params['source_name'] = source_name
+
+ if start_time:
+ params['timestamp_occured_gte'] = start_time
+ if end_time:
+ params['timestamp_occured_lte'] = end_time
+
+ res = http_request('GET', '/events', params=params)
+ if res['page']['totalElements'] == 0:
+ return []
+
+ return res.get('_embedded', {}).get('eventResourceList', [])
+
+
+def get_events_by_alarm_command():
+ args = demisto.args()
+ alarm_id = args['alarm_id']
+
+ alarm = get_alarm(alarm_id)
+
+ events = parse_events(alarm['events'])
+
+ return_outputs(tableToMarkdown('Events of Alarm {}:'.format(alarm_id), events),
+ {'AlienVault.Event(val.ID && val.ID == obj.ID)': events},
+ alarm)
+
+
+def fetch_incidents():
+ last_run = demisto.getLastRun()
+    # Get the last fetch time, if it exists
+ last_fetch = last_run.get('timestamp')
+
+    # Handle a first-time fetch (fetch incidents retroactively), or the first
+    # run after an upgrade, where lastRun still holds the legacy 'time' key
+ if last_fetch is None:
+ time_field = last_run.get('time')
+ if time_field:
+ last_fetch = date_to_timestamp(time_field, parse_time(time_field))
+ else:
+ last_fetch, _ = parse_date_range(FETCH_TIME, to_timestamp=True)
+
+ incidents = []
+ limit = dict_value_to_int(demisto.params(), 'fetch_limit')
+ items = search_alarms(start_time=last_fetch, direction='asc', limit=limit)
+ for item in items:
+ incident = item_to_incident(item)
+ incidents.append(incident)
+
+ if incidents:
+ # updating according to latest incident
+ time_str = str(incidents[-1].get('occurred'))
+ last_fetch = str(date_to_timestamp(time_str, date_format=parse_time(time_str)))
+
+ demisto.setLastRun({'timestamp': last_fetch})
+ demisto.incidents(incidents)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+COMMANDS = {
+ 'test-module': test_module,
+ 'fetch-incidents': fetch_incidents,
+ 'alienvault-search-alarms': search_alarms_command,
+ 'alienvault-get-alarm': get_alarm_command,
+ 'alienvault-search-events': search_events_command,
+ 'alienvault-get-events-by-alarm': get_events_by_alarm_command,
+}
+
+
+def main():
+ global AUTH_TOKEN
+ cmd = demisto.command()
+ LOG('Command being called is {}'.format(cmd))
+
+ try:
+ handle_proxy()
+ AUTH_TOKEN = get_token()
+
+ if cmd in COMMANDS:
+ COMMANDS[cmd]()
+
+ # Log exceptions
+ except Exception as e:
+ import traceback
+ LOG(traceback.format_exc())
+
+ if demisto.command() == 'fetch-incidents':
+ LOG(str(e))
+ LOG.print_log()
+ raise
+ else:
+ return_error('An error occurred: {}'.format(str(e)))
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.yml b/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.yml
new file mode 100644
index 000000000000..db89ae6574cd
--- /dev/null
+++ b/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere.yml
@@ -0,0 +1,422 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: AlienVault USM Anywhere
+ version: -1
+configuration:
+- defaultvalue: https://www.example.com
+ display: Server URL (e.g., https://www.example.com)
+ name: url
+ required: true
+ type: 0
+- display: Client ID
+ name: client_id
+ required: true
+ type: 0
+- display: Client Secret
+ name: client_secret
+ required: true
+ type: 4
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: 3 days
+  display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days)
+ name: fetch_time
+ required: false
+ type: 0
+- defaultvalue: '10'
+ display: Fetch Limit
+ name: fetch_limit
+ required: false
+ type: 0
+- defaultvalue: auto-discovery
+  display: Time format, e.g., %Y-%m-%dT%H:%M:%SZ. Select "auto-discovery" to try to determine the format automatically.
+ name: time_format
+ required: false
+ type: 0
+description: Searches for and monitors alarms and events from AlienVault USM Anywhere.
+display: AlienVault USM Anywhere
+name: AlienVault USM Anywhere
+script:
+ commands:
+ - arguments:
+ - default: false
+ defaultValue: '100'
+ description: The maximum number of alarms to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: The alarm status by which to filter the results.
+ isArray: false
+ name: status
+ required: false
+ secret: false
+ - default: false
+ description: The alarm priority by which to filter the results.
+ isArray: false
+ name: priority
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether to include suppressed alarms in the search. Can be "true" or "false".
+ isArray: false
+ name: show_suppressed
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The time frame by which to filter the results, for example: Last 48 Hours.'
+ isArray: false
+ name: time_frame
+ predefined:
+ - Today
+ - Yesterday
+ - Last 7 Days
+ - Last Hour
+ - Last 24 Hours
+ - Last 48 Hours
+ - Last 30 Days
+ - Custom
+ required: false
+ secret: false
+ - default: false
+ description: 'If the value of the time_frame argument is "Custom", specify the start time for the time range,
+ for example: 2017-06-01T12:48:16Z.'
+ isArray: false
+ name: start_time
+ required: false
+ secret: false
+ - default: false
+ description: 'If the value of the time_frame argument is "Custom", specify the end time for the time range,
+ for example: 2017-06-01T12:48:16Z.'
+ isArray: false
+ name: end_time
+ required: false
+ secret: false
+ - default: false
+ description: The rule intention by which to filter the results.
+ isArray: false
+ name: rule_intent
+ predefined:
+ - ''
+ required: false
+ secret: false
+ - default: false
+ description: The rule method by which to filter the results.
+ isArray: false
+ name: rule_method
+ required: false
+ secret: false
+ - default: false
+ description: The rule strategy by which to filter the results.
+ isArray: false
+ name: rule_strategy
+ predefined:
+ - ''
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves alarms from AlienVault.
+ execution: false
+ name: alienvault-search-alarms
+ outputs:
+ - contextPath: AlienVault.Alarm.ID
+ description: The alarm ID.
+ type: String
+ - contextPath: AlienVault.Alarm.Priority
+ description: The alarm priority.
+ type: String
+ - contextPath: AlienVault.Alarm.OccurredTime
+ description: The time that the alarm occurred.
+ type: Date
+ - contextPath: AlienVault.Alarm.ReceivedTime
+ description: The time that the alarm was received.
+ type: Date
+ - contextPath: AlienVault.Alarm.Source
+ description: The alarm source object.
+ type: Unknown
+ - contextPath: AlienVault.Alarm.Source.IPAddress
+ description: The alarm source IP address.
+ type: String
+ - contextPath: AlienVault.Alarm.Source.Organization
+ description: The source organization.
+ type: String
+ - contextPath: AlienVault.Alarm.Source.Country
+ description: The source country.
+ type: String
+ - contextPath: AlienVault.Alarm.Destination
+ description: The alarm destination object.
+ type: Unknown
+ - contextPath: AlienVault.Alarm.Destination.IPAddress
+ description: The alarm destination IP address.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleAttackID
+ description: The rule attack ID.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleStrategy
+ description: The rule strategy.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleIntent
+ description: The rule intent.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleID
+ description: The rule ID.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleDictionary
+ description: The rule dictionary.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleMethod
+ description: The rule method.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleAttackTactic
+ description: The rule attack tactic.
+ type: Unknown
+ - contextPath: AlienVault.Alarm.RuleAttackTechnique
+ description: The rule attack technique.
+ type: String
+ - arguments:
+ - default: true
+ description: The alarm ID. Can be obtained by running the `alienvault-search-alarms`
+ command.
+ isArray: false
+ name: alarm_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves details for an alarm.
+ execution: false
+ name: alienvault-get-alarm
+ outputs:
+ - contextPath: AlienVault.Alarm.ID
+ description: The alarm ID.
+ type: String
+ - contextPath: AlienVault.Alarm.Priority
+ description: The alarm priority.
+ type: String
+ - contextPath: AlienVault.Alarm.OccurredTime
+ description: The time that the alarm occurred.
+ type: Date
+ - contextPath: AlienVault.Alarm.ReceivedTime
+ description: The time that the alarm was received.
+ type: Date
+ - contextPath: AlienVault.Alarm.Source
+ description: The alarm source object.
+ type: Unknown
+ - contextPath: AlienVault.Alarm.Source.IPAddress
+ description: The alarm source IP address.
+ type: String
+ - contextPath: AlienVault.Alarm.Source.Organization
+ description: The source organization.
+ type: String
+ - contextPath: AlienVault.Alarm.Source.Country
+ description: The source country.
+ type: String
+ - contextPath: AlienVault.Alarm.Destination
+ description: The alarm destination object.
+ type: Unknown
+ - contextPath: AlienVault.Alarm.Destination.IPAddress
+ description: The alarm destination IP address.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleAttackID
+ description: The rule attack ID.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleStrategy
+ description: The rule strategy.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleIntent
+ description: The rule intent.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleID
+ description: The rule ID.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleDictionary
+ description: The rule dictionary.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleMethod
+ description: The rule method.
+ type: String
+ - contextPath: AlienVault.Alarm.RuleAttackTactic
+ description: The rule attack tactic.
+ type: Unknown
+ - contextPath: AlienVault.Alarm.RuleAttackTechnique
+ description: The rule attack technique.
+ type: String
+ - arguments:
+ - default: false
+ defaultValue: '100'
+ description: The maximum number of alarms to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: The account name.
+ isArray: false
+ name: account_name
+ required: false
+ secret: false
+ - default: false
+ description: The event name.
+ isArray: false
+ name: event_name
+ required: false
+ secret: false
+ - default: false
+ description: The source name.
+ isArray: false
+ name: source_name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The time frame by which to filter the results, for example: Last 48 Hours.'
+ isArray: false
+ name: time_frame
+ predefined:
+ - Today
+ - Yesterday
+ - Last 7 Days
+ - Last Hour
+ - Last 24 Hours
+ - Last 48 Hours
+ - Last 30 Days
+ - Custom
+ required: false
+ secret: false
+ - default: false
+ description: 'If the value of the time_frame argument is "Custom", specify the start time for the time range,
+ for example: 2017-06-01T12:48:16Z.'
+ isArray: false
+ name: start_time
+ required: false
+ secret: false
+ - default: false
+ description: 'If the value of the time_frame argument is "Custom", specify the end time for the time range,
+ for example: 2017-06-01T12:48:16Z.'
+ isArray: false
+ name: end_time
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for events in AlienVault USM Anywhere.
+ execution: false
+ name: alienvault-search-events
+ outputs:
+ - contextPath: AlienVault.Event.Category
+ description: The event category.
+ type: String
+ - contextPath: AlienVault.Event.Source.IPAddress
+ description: The source IP address.
+ type: String
+ - contextPath: AlienVault.Event.Source.Port
+ description: The source port.
+ type: Number
+ - contextPath: AlienVault.Event.Destination.IPAddress
+ description: The destination IP address.
+ type: String
+ - contextPath: AlienVault.Event.Destination.Port
+ description: The destination port.
+ type: Number
+ - contextPath: AlienVault.Event.Severity
+ description: The event severity.
+ type: String
+ - contextPath: AlienVault.Event.OccurredTime
+ description: The time that the event occurred.
+ type: String
+ - contextPath: AlienVault.Event.ReceivedTime
+ description: The time that the event was received.
+ type: String
+ - contextPath: AlienVault.Event.AccessControlOutcome
+ description: The access control outcome.
+ type: String
+ - contextPath: AlienVault.Event.Suppressed
+ description: Whether the event is suppressed.
+ type: Bool
+ - contextPath: AlienVault.Event.ID
+ description: The event ID.
+ type: String
+ - contextPath: AlienVault.Event.Name
+ description: The event name.
+ type: String
+ - contextPath: AlienVault.Event.Subcategory
+ description: The event subcategory.
+ type: String
+ - arguments:
+ - default: true
+ description: The ID of the alarm for which to get events. Can be obtained by running the `alienvault-search-alarms`
+ command.
+ isArray: false
+ name: alarm_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves events associated with the specified alarm.
+ execution: false
+ name: alienvault-get-events-by-alarm
+ outputs:
+ - contextPath: AlienVault.Event.Category
+ description: The event category.
+ type: String
+ - contextPath: AlienVault.Event.Source.IPAddress
+ description: The source IP address.
+ type: String
+ - contextPath: AlienVault.Event.Source.Port
+ description: The source port.
+ type: Number
+ - contextPath: AlienVault.Event.Destination.IPAddress
+ description: The destination IP address.
+ type: String
+ - contextPath: AlienVault.Event.Destination.Port
+ description: The destination port.
+ type: Number
+ - contextPath: AlienVault.Event.Severity
+ description: The event severity.
+ type: String
+ - contextPath: AlienVault.Event.OccurredTime
+ description: The time that the event occurred.
+ type: String
+ - contextPath: AlienVault.Event.ReceivedTime
+ description: The time that the event was received.
+ type: String
+ - contextPath: AlienVault.Event.AccessControlOutcome
+ description: The access control outcome.
+ type: String
+ - contextPath: AlienVault.Event.Suppressed
+ description: Whether the event is suppressed.
+ type: Bool
+ - contextPath: AlienVault.Event.ID
+ description: The event ID.
+ type: String
+ - contextPath: AlienVault.Event.Name
+ description: The event name.
+ type: String
+ - contextPath: AlienVault.Event.Subcategory
+ description: The event subcategory.
+ type: String
+ dockerimage: demisto/python3:3.7.3.286
+ isfetch: true
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
diff --git a/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere_description.md b/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere_image.png b/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere_image.png
new file mode 100644
index 000000000000..2d48ff0d1c56
Binary files /dev/null and b/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere_image.png differ
diff --git a/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere_test.py b/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere_test.py
new file mode 100644
index 000000000000..43294f9f1922
--- /dev/null
+++ b/Integrations/AlienVault_USM_Anywhere/AlienVault_USM_Anywhere_test.py
@@ -0,0 +1,71 @@
+import demistomock as demisto
+import dateparser
+from datetime import datetime, timedelta
+
+server_url = 'https://vigilant.alienvault.cloud/api/2.0/alarms?page=1&size=1' \
+ '&sort=timestamp_occured%2Casc&timestamp_occured_gte=1547567249000'
+
+
+def approximate_compare(time1, time2):
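+    # Integer inputs are epoch milliseconds; the asymmetric window absorbs time
+    # that elapses while the test runs.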
+ if isinstance(time1, int):
+ time1 = datetime.fromtimestamp(time1 / 1000)
+ if isinstance(time2, int):
+ time2 = datetime.fromtimestamp(time2 / 1000)
+
+ return timedelta(seconds=-30) <= time1 - time2 <= timedelta(seconds=3)
+
+
+def test_fetch_incidents(mocker, requests_mock):
+ mocker.patch.object(demisto, 'params', return_value={
+ 'fetch_limit': '1',
+ 'url': 'https://vigilant.alienvault.cloud/'
+ })
+ mocker.patch.object(demisto, 'getLastRun', return_value={'timestamp': '1547567249000'})
+ mocker.patch.object(demisto, 'setLastRun')
+ mocker.patch.object(demisto, 'incidents')
+ from AlienVault_USM_Anywhere import fetch_incidents
+ requests_mock.get(
+ server_url,
+ json={
+ '_embedded': {
+ 'alarms': [
+ {
+ 'uuid': '4444444444',
+ 'timestamp_occured_iso8601': '2019-07-12T06:00:38.000Z',
+ }
+ ]
+ },
+ 'page': {
+ 'totalElements': 1861
+ }
+ }
+ )
+ fetch_incidents()
+
+
+def test_get_time_range():
+ from AlienVault_USM_Anywhere import get_time_range
+ from CommonServerPython import date_to_timestamp
+
+ assert get_time_range(None, None, None) == (None, None)
+
+ dt = datetime.now()
+ start, end = get_time_range('Today', None, None)
+ assert datetime.fromtimestamp(start / 1000).date() == dt.date() and approximate_compare(dt, end)
+
+ dt = datetime.now()
+ # should ignore the start/end time values
+ start, end = get_time_range('Today', 'asfd', 'asdf')
+ assert datetime.fromtimestamp(start / 1000).date() == dt.date() and approximate_compare(dt, end)
+
+ dt = datetime.now()
+ start, end = get_time_range('Yesterday', None, None)
+ assert datetime.fromtimestamp(start / 1000).date() == (dt.date() - timedelta(days=1)) and approximate_compare(dt, end)
+
+ start, end = get_time_range('Custom', '2019-12-30T01:02:03Z', '2019-12-30T04:05:06Z')
+ assert ((start, end) == (date_to_timestamp(dateparser.parse('2019-12-30T01:02:03Z')),
+ date_to_timestamp(dateparser.parse('2019-12-30T04:05:06Z'))))
+
+ start, end = get_time_range('Custom', '2019-12-30T01:02:03Z', None)
+ assert (start == date_to_timestamp(dateparser.parse('2019-12-30T01:02:03Z'))
+ and approximate_compare(end, datetime.now()))
diff --git a/Integrations/AlienVault_USM_Anywhere/CHANGELOG.md b/Integrations/AlienVault_USM_Anywhere/CHANGELOG.md
new file mode 100644
index 000000000000..822364321974
--- /dev/null
+++ b/Integrations/AlienVault_USM_Anywhere/CHANGELOG.md
@@ -0,0 +1,7 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+ - Improved implementation of the fetch incidents function.
+ - Improved integration documentation.
+ - Added the *Fetch limit* and *Time format* parameters to the instance configuration.
diff --git a/Integrations/AlienVault_USM_Anywhere/Pipfile b/Integrations/AlienVault_USM_Anywhere/Pipfile
new file mode 100644
index 000000000000..04bc6b26fa3e
--- /dev/null
+++ b/Integrations/AlienVault_USM_Anywhere/Pipfile
@@ -0,0 +1,17 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+pytest-asyncio = "*"
+dateparser = "*"
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/Anomali_ThreatStream_v2/Anomali_ThreatStream_v2.py b/Integrations/Anomali_ThreatStream_v2/Anomali_ThreatStream_v2.py
new file mode 100644
index 000000000000..6079a06ec08f
--- /dev/null
+++ b/Integrations/Anomali_ThreatStream_v2/Anomali_ThreatStream_v2.py
@@ -0,0 +1,854 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import requests
+from requests.exceptions import MissingSchema, ConnectionError
+import json
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+USERNAME = demisto.params().get('username')
+API_KEY = demisto.params().get('apikey')
+SERVER = demisto.params().get('url', '').strip('/')
+USE_SSL = not demisto.params().get('insecure', False)
+BASE_URL = SERVER + '/api/'
+DEFAULT_THRESHOLD = demisto.params().get('default_threshold', 'high')
+
+HEADERS = {
+ 'Content-Type': 'application/json'
+}
+
+CREDENTIALS = {
+ 'username': USERNAME,
+ 'api_key': API_KEY
+}
+
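+# ThreatStream severity to DBot score (2 = suspicious, 3 = malicious)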
+DBOT_SCORE = {
+ 'low': 2,
+ 'medium': 2,
+ 'high': 3,
+ 'very-high': 3
+}
+
+SEVERITY_SCORE = {
+ 'low': 0,
+ 'medium': 1,
+ 'high': 2,
+ 'very-high': 3
+}
+
+DBOT_MAPPING = {
+ 'value': 'Indicator',
+ 'type': 'Type',
+ 'source': 'Vendor',
+}
+
+INDICATOR_MAPPING = {
+ 'asn': 'ASN',
+ 'value': 'Address',
+ 'country': 'Country',
+ 'type': 'Type',
+ 'modified_ts': 'Modified',
+ 'confidence': 'Confidence',
+ 'status': 'Status',
+ 'org': 'Organization',
+ 'source': 'Source',
+}
+
+INDICATOR_EXTENDED_MAPPING = {
+ 'value': 'Value',
+ 'id': 'ID',
+ 'itype': 'IType',
+ 'meta': 'Meta',
+ 'confidence': 'Confidence',
+ 'country': 'Country',
+ 'org': 'Organization',
+ 'asn': 'ASN',
+ 'status': 'Status',
+ 'tags': 'Tags',
+ 'modified_ts': 'Modified',
+ 'source': 'Source',
+ 'type': 'Type',
+}
+
+THREAT_MODEL_MAPPING = {
+ 'name': 'Name',
+ 'id': 'ID',
+ 'created_ts': 'CreatedTime',
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, params=None, data=None, headers=None, files=None):
+ """
+ A wrapper for requests lib to send our requests and handle requests and responses better.
+ """
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ headers=headers,
+ files=files,
+ )
+ # Handle error responses gracefully
+ if res.status_code in {401}:
+ return_error("Got unauthorized from the server. Check the credentials.")
+ elif res.status_code in {404}:
+ command = demisto.command()
+ if command in ['threatstream-get-model-description', 'threatstream-get-indicators-by-model',
+ 'threatstream-get-analysis-status', 'threatstream-analysis-report']:
+            # in order to prevent raising an error in case the model/indicator/report was not found
+ return {}
+ else:
+ return_error("The resource not found. Check the endpoint.")
+ elif res.status_code not in {200, 201, 202}:
+ return_error(F"Error in API call to ThreatStream {res.status_code} - {res.text}")
+
+ return res.json()
+
+
+def find_worst_indicator(indicators):
+ """
+ Sorts list of indicators by severity score and returns one indicator with the highest severity.
+ In case the indicator has no severity value, the indicator severity score is set to 0 (low).
+ """
+ indicators.sort(key=lambda ioc: SEVERITY_SCORE[ioc.get('meta', {}).get('severity', 'low')], reverse=True)
+ return indicators[0]
+
+
+def prepare_args(args):
+ # removing empty keys that can be passed from playbook input
+ args = {k: v for (k, v) in args.items() if v}
+ if 'include_inactive' in args:
+ # special handling for ip, domain, file, url and threatstream-email-reputation commands
+ args['status'] = "active,inactive" if args.pop('include_inactive') == 'True' else "active"
+ if 'indicator_severity' in args:
+ # special handling for threatstream-get-indicators
+ args['meta.severity'] = args.pop('indicator_severity', None)
+ if 'tags_name' in args:
+ # special handling for threatstream-get-indicators
+ args['tags.name'] = args.pop('tags_name', None)
+ if 'indicator_value' in args:
+ # special handling for threatstream-get-indicators
+ args['value'] = args.pop('indicator_value', None)
+
+ return args
+
+
+def build_params(**params):
+ """
+ Builds query string from key word arguments and appends to it username and api key.
+ """
+ params.update(CREDENTIALS)
+ return params
+
+
+def get_dbot_context(indicator, threshold):
+ """
+ Builds and returns dictionary with Indicator, Type, Vendor and Score keys
+ and values from the indicator that will be returned to context.
+ """
+ dbot_context = {DBOT_MAPPING[k]: v for (k, v) in indicator.items() if k in DBOT_MAPPING.keys()}
+ indicator_score = DBOT_SCORE[indicator.get('meta', {}).get('severity', 'low')]
+    # the indicator is considered malicious when its score is greater than or equal to the threshold
+ dbot_context['Score'] = 3 if indicator_score >= DBOT_SCORE[threshold] else indicator_score
+
+ return dbot_context
+
+
+def mark_as_malicious(indicator, threshold, context):
+ """
+    Marks the indicator as malicious if its severity is greater than or equal to the
+    threshold, adding a Malicious key to the given context dictionary.
+ """
+ severity = indicator.get('meta', {}).get('severity', 'low')
+
+ if SEVERITY_SCORE[severity] >= SEVERITY_SCORE[threshold]:
+ context['Malicious'] = {
+ 'Vendor': 'ThreatStream'
+ }
+
+
+def search_indicator_by_params(params, searchable_value):
+ """
+ Generic function that searches for indicators from ThreatStream by given query string.
+ Returns indicator with the highest severity score.
+ """
+    indicators_data = http_request("GET", "v2/intelligence/", params=params, headers=HEADERS)
+
+ if not indicators_data['objects']:
+ demisto.results(F"No intelligence has been found for {searchable_value}")
+ sys.exit()
+
+ return find_worst_indicator(indicators_data['objects'])
+
+
+def get_ip_context(indicator, threshold):
+ """
+ Builds and returns dictionary that will be set to IP generic context.
+ """
+ ip_context = {}
+ ip_context['ASN'] = indicator.get('asn', '')
+ ip_context['Address'] = indicator.get('value', '')
+ ip_context['Geo'] = {
+ 'Country': indicator.get('country', ''),
+ 'Location': F"{indicator.get('latitude', '')},{indicator.get('longitude', '')}"
+ }
+ mark_as_malicious(indicator, threshold, ip_context)
+
+ return ip_context
+
+
+def get_domain_context(indicator, threshold):
+ """
+ Builds and returns dictionary that will be set to Domain generic context.
+ """
+ domain_context = {}
+ whois_context = {}
+ domain_context['Name'] = indicator.get('value', '')
+ domain_context['DNS'] = indicator.get('ip', '')
+
+ whois_context['CreationDate'] = indicator.get('created_ts', '')
+ whois_context['UpdatedDate'] = indicator.get('modified_ts', '')
+ meta = indicator.get('meta', None)
+
+ if meta:
+ registrant = {}
+ registrant['Name'] = meta.get('registrant_name', '')
+ registrant['Email'] = meta.get('registrant_email', '')
+ registrant['Phone'] = meta.get('registrant_phone', '')
+ whois_context['Registrant'] = registrant
+ domain_context['WHOIS'] = whois_context
+ mark_as_malicious(indicator, threshold, domain_context)
+
+ return domain_context
+
+
+def get_file_context(indicator, threshold):
+ """
+ Builds and returns dictionary that will be set to File generic context.
+ """
+ file_context = {'MD5': indicator.get('value', '')}
+ mark_as_malicious(indicator, threshold, file_context)
+
+ return file_context
+
+
+def get_url_context(indicator, threshold):
+ """
+ Builds and returns dictionary that will be set to URL generic context.
+ """
+ url_context = {'Data': indicator.get('value', '')}
+ mark_as_malicious(indicator, threshold, url_context)
+
+ return url_context
+
+
+def get_threat_generic_context(indicator):
+ """
+    Receives an indicator and builds a new dictionary from the values whose keys are
+    defined in INDICATOR_MAPPING, adding a Severity key with the indicator's severity.
+ """
+ threat_ip_context = {INDICATOR_MAPPING[k]: v for (k, v) in indicator.items() if
+ k in INDICATOR_MAPPING.keys()}
+ try:
+ threat_ip_context['Severity'] = indicator['meta']['severity']
+ except KeyError:
+ threat_ip_context['Severity'] = 'low'
+ finally:
+ return threat_ip_context
+
+
+def parse_network_elem(element_list, context_prefix):
+ """
+    Parses the network elements list and returns a list of dictionaries.
+ """
+ return list(map(lambda e: {
+ F'{context_prefix}Source': e.get('src', ''),
+ F'{context_prefix}Destination': e.get('dst', ''),
+ F'{context_prefix}Port': e.get('dport', ''),
+ }, element_list))
+
+
+def parse_network_lists(network):
+ """
+ Parses the network part that was received from sandbox report json.
+    From each list, only the first 10 elements are taken.
+ """
+ hosts = [{'Hosts': h} for h in network.get('hosts', [])[:10]]
+
+ if 'packets' in network:
+ network = network['packets']
+
+ udp_list = parse_network_elem(network.get('udp', [])[:10], 'Udp')
+ icmp_list = parse_network_elem(network.get('icmp', [])[:10], 'Icmp')
+ tcp_list = parse_network_elem(network.get('tcp', [])[:10], 'Tcp')
+ http_list = parse_network_elem(network.get('http', [])[:10], 'Http')
+ https_list = parse_network_elem(network.get('https', [])[:10], 'Https')
+ network_result = udp_list + icmp_list + tcp_list + http_list + https_list + hosts
+
+ return network_result
+
+
+def parse_info(info):
+ """
+ Parses the info part that was received from sandbox report json
+ """
+ info.update(info.pop('machine', {}))
+ parsed_info = {
+ 'Category': info.get('category', '').title(),
+ 'Started': info.get('started', ''),
+ 'Completed': info.get('ended', ''),
+ 'Duration': info.get('duration', ''),
+ 'VmName': info.get('name', ''),
+ 'VmID': info.get('id', '')
+
+ }
+ return parsed_info
+
+
+def get_report_outputs(report, report_id):
+ """
+ Returns human readable and entry context of the sandbox report
+ """
+ info = parse_info(report.get('info', {}))
+ info['ReportID'] = report_id
+ _, info['Verdict'] = get_submission_status(report_id, False)
+ network = parse_network_lists(report.get('network', {}))
+
+ hm = tableToMarkdown(F"Report {report_id} analysis results", info)
+ ec = {
+ 'ThreatStream.Analysis': info,
+ 'ThreatStream.Analysis.Network': network
+ }
+
+ return hm, ec
+
+
+def parse_indicators_list(iocs_list):
+ """
+    Parses the indicators list and returns a list of dictionaries that will be set to context.
+ """
+ iocs_context = list(map(lambda i: {INDICATOR_EXTENDED_MAPPING[k]: v for (k, v) in i.items() if
+ k in INDICATOR_EXTENDED_MAPPING.keys()}, iocs_list))
+
+ for indicator in iocs_context:
+ meta = indicator.pop('Meta', None)
+ if meta:
+ indicator['Severity'] = meta.get('severity', 'low')
+ tags = indicator.get('Tags', [])
+ if isinstance(tags, list):
+ indicator['Tags'] = ",".join(list(map(lambda t: t.get('name', ''), tags)))
+
+ return iocs_context
+
+
+def build_model_data(model, name, is_public, tlp, tags, intelligence, description):
+ """
+    Builds the data dictionary that is used in a threat model creation/update request.
+ """
+ if model == 'tipreport':
+ description_field_name = 'body'
+ else:
+ description_field_name = 'description'
+ data = {k: v for (k, v) in (('name', name), ('is_public', is_public), ('tlp', tlp),
+ (description_field_name, description)) if v}
+ if tags:
+ data['tags'] = tags if isinstance(tags, list) else [t.strip() for t in tags.split(',')]
+ if intelligence:
+ data['intelligence'] = intelligence if isinstance(intelligence, list) else [i.strip() for i in
+ intelligence.split(',')]
+ return data
+
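+# For example (hypothetical arguments), build_model_data('tipreport', 'My Report', 'true', 'red', 'apt,c2', None, 'details')
+# yields {'name': 'My Report', 'is_public': 'true', 'tlp': 'red', 'body': 'details', 'tags': ['apt', 'c2']}.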
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+    Performs a basic GET request for a sample item to verify connectivity.
+ """
+ params = build_params(limit=1)
+ http_request('GET', 'v2/intelligence/', params=params)
+ demisto.results('ok')
+
+
+def get_ip_reputation(ip, threshold=None, status="active,inactive"):
+ """
+    Checks the reputation of the given IP address in ThreatStream and
+    returns the indicator with the highest severity score.
+ """
+ params = build_params(value=ip, type="ip", status=status, limit=0)
+ indicator = search_indicator_by_params(params, ip)
+ threshold = threshold or DEFAULT_THRESHOLD
+ dbot_context = get_dbot_context(indicator, threshold)
+ ip_context = get_ip_context(indicator, threshold)
+ threat_ip_context = get_threat_generic_context(indicator)
+
+ ec = {
+ 'DBotScore(val.Indicator == obj.Indicator)': dbot_context,
+ 'IP(val.Address == obj.Address)': ip_context,
+ 'ThreatStream.IP(val.Address == obj.Address)': threat_ip_context
+ }
+ human_readable = tableToMarkdown(F"IP reputation for: {ip}", threat_ip_context)
+
+ return_outputs(human_readable, ec, indicator)
+
+
+def get_domain_reputation(domain, threshold=None, status="active,inactive"):
+ """
+    Checks the reputation of the given domain in ThreatStream and
+    returns the indicator with the highest severity score.
+ """
+ params = build_params(value=domain, type="domain", status=status, limit=0)
+ indicator = search_indicator_by_params(params, domain)
+ threshold = threshold or DEFAULT_THRESHOLD
+ dbot_context = get_dbot_context(indicator, threshold)
+ domain_context = get_domain_context(indicator, threshold)
+ threat_domain_context = get_threat_generic_context(indicator)
+
+ ec = {
+ 'DBotScore(val.Indicator == obj.Indicator)': dbot_context,
+ 'Domain(val.Name == obj.Name)': domain_context,
+ 'ThreatStream.Domain(val.Address == obj.Address)': threat_domain_context
+ }
+ human_readable = tableToMarkdown(F"Domain reputation for: {domain}", threat_domain_context)
+
+ return_outputs(human_readable, ec, indicator)
+
+
+def get_file_reputation(file, threshold=None, status="active,inactive"):
+ """
+    Checks the reputation of the given file MD5 hash in ThreatStream and
+    returns the indicator with the highest severity score.
+ """
+ params = build_params(value=file, type="md5", status=status, limit=0)
+ indicator = search_indicator_by_params(params, file)
+ threshold = threshold or DEFAULT_THRESHOLD
+ dbot_context = get_dbot_context(indicator, threshold)
+ file_context = get_file_context(indicator, threshold)
+ threat_file_context = get_threat_generic_context(indicator)
+ threat_file_context['MD5'] = threat_file_context.pop('Address')
+ threat_file_context.pop("ASN", None)
+ threat_file_context.pop("Organization", None)
+ threat_file_context.pop("Country", None)
+
+ ec = {
+ 'DBotScore(val.Indicator == obj.Indicator)': dbot_context,
+ 'File(val.MD5 == obj.MD5)': file_context,
+ 'ThreatStream.File(val.MD5 == obj.MD5)': threat_file_context
+ }
+ human_readable = tableToMarkdown(F"MD5 reputation for: {file}", threat_file_context)
+
+ return_outputs(human_readable, ec, indicator)
+
+
+def get_url_reputation(url, threshold=None, status="active,inactive"):
+ """
+    Checks the reputation of the given URL in ThreatStream and
+    returns the indicator with the highest severity score.
+ """
+ params = build_params(value=url, type="url", status=status, limit=0)
+ indicator = search_indicator_by_params(params, url)
+ threshold = threshold or DEFAULT_THRESHOLD
+ dbot_context = get_dbot_context(indicator, threshold)
+    url_context = get_url_context(indicator, threshold)
+    threat_url_context = get_threat_generic_context(indicator)
+    threat_url_context.pop('ASN', None)
+
+ ec = {
+ 'DBotScore(val.Indicator == obj.Indicator)': dbot_context,
+        'URL(val.Data == obj.Data)': url_context,
+ 'ThreatStream.URL(val.Address == obj.Address)': threat_url_context
+ }
+ human_readable = tableToMarkdown(F"URL reputation for: {url}", threat_url_context)
+
+ return_outputs(human_readable, ec, indicator)
+
+
+def get_email_reputation(email, threshold=None, status="active,inactive"):
+ """
+    Checks the reputation of the given email address in ThreatStream and
+    returns the indicator with the highest severity score.
+ """
+ params = build_params(value=email, type="email", status=status, limit=0)
+ indicator = search_indicator_by_params(params, email)
+ threshold = threshold or DEFAULT_THRESHOLD
+ dbot_context = get_dbot_context(indicator, threshold)
+ threat_email_context = get_threat_generic_context(indicator)
+ threat_email_context['Email'] = threat_email_context.pop('Address')
+ threat_email_context.pop("ASN", None)
+ threat_email_context.pop("Organization", None)
+ threat_email_context.pop("Country", None)
+
+ ec = {
+ 'DBotScore(val.Indicator == obj.Indicator)': dbot_context,
+ 'ThreatStream.EmailReputation(val.Email == obj.Email)': threat_email_context
+ }
+ human_readable = tableToMarkdown(F"Email reputation for: {email}", threat_email_context)
+
+ return_outputs(human_readable, ec, indicator)
+
+
+def get_passive_dns(value, type="ip", limit=50):
+ """
+    Receives an indicator value and type, and returns passive DNS
+    enrichment data for the domain or IP.
+ """
+ dns_results = http_request("GET", F"v1/pdns/{type}/{value}/", params=CREDENTIALS).get('results', None)
+
+ if not dns_results:
+ demisto.results(F"No Passive DNS enrichment data found for {value}")
+ sys.exit()
+
+ dns_results = dns_results[:int(limit)]
+ output = camelize(dns_results, delim='_')
+
+    ec = {
+        'ThreatStream.PassiveDNS': output
+    }
+ human_readable = tableToMarkdown(F"Passive DNS enrichment data for: {value}", output)
+
+ return_outputs(human_readable, ec, dns_results)
+
+
+def import_ioc_with_approval(import_type, import_value, confidence="50", classification="Private",
+ threat_type="exploit", severity="low"):
+ """
+    Imports indicator data into ThreatStream.
+    The data can be imported using one of three import types: datatext (plain text),
+    file-id (the entry ID of a file uploaded to the War Room), or url.
+ """
+ files = None
+ uploaded_file = None
+ data = {
+ 'confidence': confidence,
+ 'classification': classification,
+ 'threat_type': threat_type,
+ 'severity': severity
+ }
+
+ if import_type == 'file-id':
+ try:
+            # import_value is the entry ID of a file uploaded to the War Room
+ file_info = demisto.getFilePath(import_value)
+ except Exception:
+ return_error(F"Entry {import_value} does not contain a file.")
+
+ uploaded_file = open(file_info['path'], 'rb')
+ files = {'file': (file_info['name'], uploaded_file)}
+ else:
+ data[import_type] = import_value
+    # when import_type is not file-id, http_request receives None for files
+ res = http_request("POST", "v1/intelligence/import/", params=CREDENTIALS, data=data, files=files)
+    # close the opened file, if one was opened
+ if uploaded_file:
+ uploaded_file.close()
+    # check that the response contains the success key
+ if res.get('success', False):
+ imported_id = res.get('import_session_id', '')
+ ec = {'ThreatStream.Import.ImportID': imported_id}
+ return_outputs(F"The data was imported successfully. The ID of imported job is: {imported_id}", ec, res)
+ else:
+ return_outputs("The data was not imported. Check if valid arguments were passed", None)
+
+
+def get_model_list(model, limit="50"):
+ """
+    Returns a list of threat models of the specified type. By default, the limit is set to 50 results.
+    Possible values for model are: actor, campaign, incident, signature, ttp, vulnerability, tipreport.
+ """
+    # when limit=0, the results are not set in the context
+ params = build_params(limit=limit, skip_intelligence="true", skip_associations="true")
+ model_list = http_request("GET", F"v1/{model}/", params=params).get('objects', None)
+
+ if not model_list:
+ demisto.results(F"No Threat Model {model.title()} found.")
+ sys.exit()
+
+    threat_list_context = [{THREAT_MODEL_MAPPING[k]: v for (k, v) in m.items() if
+                            k in THREAT_MODEL_MAPPING} for m in model_list]
+ for m in threat_list_context:
+ m['Type'] = model.title()
+    # when limit=0 is passed, setting the results in the context is skipped
+ ec = {'ThreatStream.List': threat_list_context} if limit != '0' else None
+ human_readable = tableToMarkdown(F"List of {model.title()}s", threat_list_context)
+
+ return_outputs(human_readable, ec, model_list)
+
+
+def get_model_description(model, id):
+ """
+    Returns the description of a threat model as an HTML file to the War Room.
+ """
+ params = build_params(skip_intelligence="true", skip_associations="true")
+ description = http_request("GET", F"v1/{model}/{id}", params=params)
+
+ if model == 'signature':
+ description = description.get('notes', None)
+ elif model == 'tipreport':
+ description = description.get('body', None)
+ else:
+ description = description.get('description', None)
+
+ if not description:
+ demisto.results(F"No description found for Threat Model {model.title()} with id {id}")
+ sys.exit()
+
+ demisto.results(fileResult(F"{model}_{id}.html", description.encode(encoding='UTF-8')))
+
+
+def get_iocs_by_model(model, id, limit="20"):
+ """
+    Returns a list of indicators associated with the specified threat model, by model ID.
+ """
+ params = build_params(limit=limit)
+ iocs_list = http_request("GET", F"v1/{model}/{id}/intelligence/", params=params).get('objects', None)
+
+ if not iocs_list:
+ demisto.results(F"No indicators found for Threat Model {model.title()} with id {id}")
+ sys.exit()
+
+ iocs_context = parse_indicators_list(iocs_list)
+
+ ec = {
+ 'ThreatStream.Model(val.ModelID == obj.ModelID && val.ModelType == obj.ModelType)': {'ModelType': model.title(),
+ 'ModelID': id,
+ 'Indicators': iocs_context},
+ }
+ human_readable = tableToMarkdown(F"Indicators list for Threat Model {model.title()} with id {id}", iocs_context)
+
+ return_outputs(human_readable, ec, iocs_list)
+
+
+def create_model(model, name, is_public="false", tlp=None, tags=None, intelligence=None, description=None):
+ """
+    Creates a threat model with the given basic parameters.
+ """
+ data = build_model_data(model, name, is_public, tlp, tags, intelligence, description)
+ model_id = http_request("POST", F"v1/{model}/", data=json.dumps(data), params=CREDENTIALS).get('id', None)
+
+ if model_id:
+ get_iocs_by_model(model, model_id, limit="50")
+ else:
+ demisto.results(F"{model.title()} Threat Model was not created. Check the input parameters")
+
+
+def update_model(model, model_id, name=None, is_public="false", tlp=None, tags=None, intelligence=None,
+ description=None):
+ """
+    Updates a ThreatStream threat model. If one or more optional parameters are
+    defined, the previously stored data is overridden.
+ """
+ data = build_model_data(model, name, is_public, tlp, tags, intelligence, description)
+ http_request("PATCH", F"v1/{model}/{model_id}/", data=json.dumps(data), params=CREDENTIALS)
+ get_iocs_by_model(model, model_id, limit="50")
+
+
+def supported_platforms(sandbox_type="default"):
+ """
+    Returns the list of supported platforms for the default or premium sandbox.
+ """
+ platform_data = http_request("GET", "v1/submit/parameters/", params=CREDENTIALS)
+    if sandbox_type == 'default':
+        available_platforms = platform_data.get('platform_choices', [])
+    else:
+        available_platforms = platform_data.get('premium_platform_choices', [])
+
+ if available_platforms:
+ supported_output = camelize(available_platforms)
+ context_path = "ThreatStream.DefaultPlatforms" if sandbox_type == 'default' else "ThreatStream.PremiumPlatforms"
+
+ return_outputs(tableToMarkdown(F"Supported platforms for {sandbox_type} sandbox", supported_output),
+ {context_path: supported_output}, platform_data)
+ else:
+ demisto.results(F"No supported platforms found for {sandbox_type} sandbox")
+
+
+def get_submission_status(report_id, output=True):
+ """
+    Returns the sandbox submission status. If no status is received in report_info,
+    the status is set to "done". The output boolean controls whether the result is
+    printed to the War Room (True by default).
+ """
+ report_info = http_request("GET", F"v1/submit/{report_id}/", params=CREDENTIALS, headers=HEADERS)
+
+ if not report_info:
+ demisto.results(F"No report found with id {report_id}")
+ sys.exit()
+
+ status = report_info.get('status', "done")
+ verdict = report_info.get('verdict', '').title()
+ platform = report_info.get('platform', '')
+
+ if output:
+ report_outputs = {'ReportID': report_id, 'Status': status, 'Platform': platform, 'Verdict': verdict}
+ ec = {'ThreatStream.Analysis(val.ReportID == obj.ReportID)': report_outputs}
+ return_outputs(tableToMarkdown(F"The analysis status for id {report_id}", report_outputs), ec, report_info)
+ return status, verdict
+
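+# When get_submission_status is called with output=False (as done internally), it only returns
+# the (status, verdict) tuple, e.g. ('processing', '') for a hypothetical in-progress report.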
+
+def submit_report(submission_type, submission_value, submission_classification="private", report_platform="WINDOWS7",
+ premium_sandbox="false", detail=None):
+ """
+    Detonates a URL, or a file that was uploaded to the War Room, in the ThreatStream sandbox.
+ """
+ uploaded_file = None
+ files = None
+
+ data = {
+ 'report_radio-classification': submission_classification,
+ 'report_radio-platform': report_platform,
+ 'use_premium_sandbox': premium_sandbox,
+ }
+ if detail:
+ data['detail'] = detail
+
+ if submission_type == 'file':
+ try:
+            # submission_value is the entry ID of a file uploaded to the War Room
+ file_info = demisto.getFilePath(submission_value)
+ except Exception:
+ return_error(F"Entry {submission_value} does not contain a file.")
+
+ uploaded_file = open(file_info['path'], 'rb')
+ files = {'report_radio-file': (file_info['name'], uploaded_file)}
+ else:
+ data['report_radio-url'] = submission_value
+
+ res = http_request("POST", "v1/submit/new/", params=CREDENTIALS, data=data, files=files)
+    # close the opened file, if one was opened
+ if uploaded_file:
+ uploaded_file.close()
+
+ if str(res.get('success', '')).lower() == 'true':
+ report_info = res['reports'][report_platform]
+ report_id = report_info['id']
+ report_status, _ = get_submission_status(report_id, False)
+
+ report_outputs = {'ReportID': report_id, 'Status': report_status, 'Platform': report_platform}
+ ec = {'ThreatStream.Analysis': report_outputs}
+ return_outputs(tableToMarkdown(F"The submission info for {submission_value}", report_outputs), ec, report_info)
+ else:
+ demisto.results(F"The submission of {submission_value} failed")
+
+
+def get_report(report_id):
+ """
+    Returns the report from the ThreatStream sandbox by report ID.
+ """
+ report = http_request("GET", F"v1/submit/{report_id}/report", params=CREDENTIALS, headers=HEADERS)
+ if not report:
+ demisto.results(F"No report found with id {report_id}")
+ sys.exit()
+ hm, ec = get_report_outputs(report.get('results', {}), report_id)
+
+ return_outputs(hm, ec, report)
+
+
+def add_tag_to_model(model_id, tags, model="intelligence"):
+ """
+    Adds tags to a specific threat model. By default, the model is set to intelligence (indicators).
+ """
+ tags = tags if isinstance(tags, list) else tags.split(',')
+
+ data = {
+ 'tags': [{'name': t, 'tlp': 'red'} for t in tags]
+ }
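+    # e.g. tags="foo,bar" (hypothetical) produces
+    # {'tags': [{'name': 'foo', 'tlp': 'red'}, {'name': 'bar', 'tlp': 'red'}]}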
+
+ res = http_request("POST", F"v1/{model}/{model_id}/tag/", params=CREDENTIALS, data=json.dumps(data))
+
+ if str(res.get('success', '')).lower() == 'true':
+ return_outputs(F"Added successfully tags: {tags} to {model} with {model_id}", None, res)
+ else:
+ return_outputs(F"Failed to add {tags} to {model} with {model_id}", None, res)
+
+
+def get_indicators(**kwargs):
+ """
+    Returns indicators from ThreatStream, filtered by the given parameters.
+    By default, the limit is set to 20 results.
+ """
+ if 'query' in kwargs:
+ params = build_params(q=kwargs['query'], limit=kwargs.get('limit', 20))
+ else:
+ params = build_params(**kwargs)
+
+ iocs_list = http_request("GET", "v2/intelligence/", params=params).get('objects', None)
+
+ if not iocs_list:
+ demisto.results('No indicators found from ThreatStream')
+ sys.exit()
+
+ iocs_context = parse_indicators_list(iocs_list)
+ ec = {'ThreatStream.Indicators': iocs_context}
+ return_outputs(tableToMarkdown("The indicators results", iocs_context), ec, iocs_list)
+
+
+def main():
+ ''' COMMANDS MANAGER / SWITCH PANEL '''
+
+ LOG('Command being called is %s' % (demisto.command()))
+
+ try:
+ handle_proxy()
+ args = prepare_args(demisto.args())
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() == 'ip':
+ get_ip_reputation(**args)
+ elif demisto.command() == 'domain':
+ get_domain_reputation(**args)
+ elif demisto.command() == 'file':
+ get_file_reputation(**args)
+ elif demisto.command() == 'url':
+ get_url_reputation(**args)
+ elif demisto.command() == 'threatstream-email-reputation':
+ get_email_reputation(**args)
+ elif demisto.command() == 'threatstream-get-passive-dns':
+ get_passive_dns(**args)
+ elif demisto.command() == 'threatstream-import-indicator-with-approval':
+ import_ioc_with_approval(**args)
+ elif demisto.command() == 'threatstream-get-model-list':
+ get_model_list(**args)
+ elif demisto.command() == 'threatstream-get-model-description':
+ get_model_description(**args)
+ elif demisto.command() == 'threatstream-get-indicators-by-model':
+ get_iocs_by_model(**args)
+ elif demisto.command() == 'threatstream-create-model':
+ create_model(**args)
+ elif demisto.command() == 'threatstream-update-model':
+ update_model(**args)
+ elif demisto.command() == 'threatstream-submit-to-sandbox':
+ submit_report(**args)
+ elif demisto.command() == 'threatstream-get-analysis-status':
+ get_submission_status(**args)
+ elif demisto.command() == 'threatstream-analysis-report':
+ get_report(**args)
+ elif demisto.command() == 'threatstream-supported-platforms':
+ supported_platforms(**args)
+ elif demisto.command() == 'threatstream-get-indicators':
+ get_indicators(**args)
+ elif demisto.command() == 'threatstream-add-tag-to-model':
+ add_tag_to_model(**args)
+
+ except Exception as e:
+ if isinstance(e, MissingSchema):
+ return_error("Not valid server url. Check url format")
+ elif isinstance(e, ConnectionError):
+ return_error("The server is not reachable.")
+ else:
+ return_error(e)
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/Anomali_ThreatStream_v2/Anomali_ThreatStream_v2.yml b/Integrations/Anomali_ThreatStream_v2/Anomali_ThreatStream_v2.yml
new file mode 100644
index 000000000000..8977c9ce7e0f
--- /dev/null
+++ b/Integrations/Anomali_ThreatStream_v2/Anomali_ThreatStream_v2.yml
@@ -0,0 +1,1512 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: Anomali ThreatStream v2
+ version: -1
+configuration:
+- defaultvalue: https://api.threatstream.com
+ display: Server URL (e.g., https://www.test.com)
+ name: url
+ required: true
+ type: 0
+- display: Username
+ name: username
+ required: true
+ type: 0
+- display: API Key
+ name: apikey
+ required: true
+ type: 4
+- defaultvalue: high
+ display: Threshold of the indicator.
+ name: default_threshold
+ options:
+ - low
+ - medium
+ - high
+ - very-high
+ required: true
+ type: 15
+- defaultvalue: 'false'
+ display: Trust any certificate (insecure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+description: Use Anomali ThreatStream to query and submit threats.
+display: Anomali ThreatStream v2
+name: Anomali ThreatStream v2
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The IP to check.
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: If severity is greater than or equal to the threshold, then the IP address
+ will be considered malicious. This argument will override the default threshold defined as a parameter.
+ isArray: false
+ name: threshold
+ predefined:
+ - low
+ - medium
+ - high
+ - very-high
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'True'
+ description: Whether to include results with the status "Inactive". Default is "True".
+ isArray: false
+ name: include_inactive
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Checks the reputation of the given IP.
+ execution: false
+ name: ip
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: IP.ASN
+ description: Autonomous System (AS) number associated with the indicator.
+ type: String
+ - contextPath: IP.Address
+ description: IP address of the indicator.
+ type: String
+ - contextPath: IP.Geo.Country
+ description: Country associated with the indicator.
+ type: String
+ - contextPath: IP.Geo.Location
+ description: Longitude and latitude of the IP address.
+ type: String
+ - contextPath: ThreatStream.IP.ASN
+ description: Autonomous System (AS) number associated with the indicator.
+ type: String
+ - contextPath: ThreatStream.IP.Address
+ description: IP address of the indicator.
+ type: String
+ - contextPath: ThreatStream.IP.Country
+ description: Country associated with the indicator.
+ type: String
+ - contextPath: ThreatStream.IP.Type
+ description: The indicator type.
+ type: String
+ - contextPath: ThreatStream.IP.Modified
+ description: 'Time when the indicator was last updated. The date format is:
+ YYYYMMDDThhmmss, where "T" denotes the start of the value for time, in UTC
+ time.'
+ type: String
+ - contextPath: ThreatStream.IP.Severity
+ description: 'The indicator severity ("very-high", "high", "medium", or "low".'
+ type: String
+ - contextPath: ThreatStream.IP.Confidence
+ description: Level of certainty that an observable is of the reported indicator type. Confidence score can range from 0-100, in increasing order of
+ confidence.
+ type: String
+ - contextPath: ThreatStream.IP.Status
+ description: Status assigned to the indicator.
+ type: String
+ - contextPath: ThreatStream.IP.Organization
+ description: Name of the business that owns the IP address associated with the
+ indicator.
+ type: String
+ - contextPath: ThreatStream.IP.Source
+ description: The source of the indicator.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: IP.Malicious.Vendor
+ description: Vendor that reported the indicator as malicious.
+ type: String
+ - arguments:
+ - default: false
+ description: The domain name to check.
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: If severity is greater than or equal to the threshold, then the domain
+      will be considered malicious. This argument will override the default threshold defined as a parameter.
+ isArray: false
+ name: threshold
+ predefined:
+ - low
+ - medium
+ - high
+ - very-high
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'True'
+    description: Whether to include results with the status "Inactive". Default is "True".
+ isArray: false
+ name: include_inactive
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Checks the reputation of the given domain name.
+ execution: false
+ name: domain
+ outputs:
+ - contextPath: Domain.Name
+ description: The domain name.
+ type: String
+ - contextPath: Domain.DNS
+    description: IPs resolved by DNS.
+ type: String
+ - contextPath: Domain.WHOIS.CreationDate
+ description: |-
+      Date the domain was created. The date format is: YYYYMMDDThhmmss,
+      where "T" denotes the start of the value
+      for time, in UTC time.
+ type: Date
+ - contextPath: Domain.WHOIS.UpdatedDate
+ description: |-
+      Date the domain was last updated. The date format is: YYYYMMDDThhmmss,
+      where "T" denotes the start of the value
+      for time, in UTC time.
+ type: Date
+ - contextPath: Domain.WHOIS.Registrant.Name
+ description: Name of the registrant.
+ type: String
+ - contextPath: Domain.WHOIS.Registrant.Email
+ description: Email address of the registrant.
+ type: String
+ - contextPath: Domain.WHOIS.Registrant.Phone
+ description: Phone number of the registrant.
+ type: String
+ - contextPath: ThreatStream.Domain.ASN
+ description: Autonomous System (AS) number associated with the indicator.
+ type: String
+ - contextPath: ThreatStream.Domain.Address
+ description: The domain name of the indicator.
+ type: String
+ - contextPath: ThreatStream.Domain.Country
+ description: Country associated with the indicator.
+ type: String
+ - contextPath: ThreatStream.Domain.Type
+ description: The indicator type.
+ type: String
+ - contextPath: ThreatStream.Domain.Modified
+ description: |-
+ Date and time when the indicator was last updated. The date format is: YYYYMMDDThhmmss, where "T" denotes the start of the value
+ for time, in UTC time.
+ type: String
+ - contextPath: ThreatStream.Domain.Severity
+ description: 'The indicator severity ("very-high", "high", "medium", "low").'
+ type: String
+ - contextPath: ThreatStream.Domain.Confidence
+ description: Level of certainty that an observable is of the reported indicator type. Confidence score can range from 0-100, in increasing order of
+ confidence.
+ type: String
+ - contextPath: ThreatStream.Domain.Status
+ description: Status assigned to the indicator.
+ type: String
+ - contextPath: ThreatStream.Domain.Organization
+ description: Name of the business that owns the IP address associated with the
+ indicator.
+ type: String
+ - contextPath: ThreatStream.Domain.Source
+ description: The source of the indicator.
+ type: String
+ - contextPath: Domain.Malicious.Vendor
+ description: Vendor that reported the indicator as malicious.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - arguments:
+ - default: false
+    description: The MD5 hash of the file to check.
+ isArray: false
+ name: file
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: If severity is greater than or equal to the threshold, then the file
+      will be considered malicious. This argument will override the default threshold defined as a parameter.
+ isArray: false
+ name: threshold
+ predefined:
+ - low
+ - medium
+ - high
+ - very-high
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'True'
+ description: Whether to include results with the status "Inactive". Default is "True".
+ isArray: false
+ name: include_inactive
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Checks the reputation of the given MD5 hash of the file.
+ execution: false
+ name: file
+ outputs:
+ - contextPath: File.MD5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: File.Malicious.Vendor
+ description: Vendor that reported the indicator as malicious.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: ThreatStream.File.Severity
+ description: 'The indicator severity ("very-high", "high", "medium", "low").'
+ type: String
+ - contextPath: ThreatStream.File.Confidence
+ description: Level of certainty that an observable is of the reported indicator type. Confidence score can range from 0-100, in increasing order of
+ confidence.
+ type: String
+ - contextPath: ThreatStream.File.Status
+ description: Status assigned to the indicator.
+ type: String
+ - contextPath: ThreatStream.File.Type
+ description: The indicator type.
+ type: String
+ - contextPath: ThreatStream.File.MD5
+ description: The MD5 hash of the indicator.
+ type: String
+ - contextPath: ThreatStream.File.Modified
+ description: |-
+ Date and time when the indicator was last updated. The date format is: YYYYMMDDThhmmss, where "T" denotes the start of the value
+ for time, in UTC time.
+ type: String
+ - contextPath: ThreatStream.File.Source
+ description: The source of the indicator.
+ type: String
+ - arguments:
+ - default: false
+ description: The email address to check.
+ isArray: false
+ name: email
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: If severity is greater than or equal to the threshold, then the email address will be considered malicious. This argument will override the default threshold defined as a parameter.
+ isArray: false
+ name: threshold
+ predefined:
+ - low
+ - medium
+ - high
+ - very-high
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'True'
+ description: Whether to include results with the status "Inactive". Default is "True".
+ isArray: false
+ name: include_inactive
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Checks the reputation of the given email address.
+ execution: false
+ name: threatstream-email-reputation
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The tested indicator.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: ThreatStream.EmailReputation.Severity
+ description: 'The indicator severity ("very-high", "high", "medium", "low").'
+ type: String
+ - contextPath: ThreatStream.EmailReputation.Confidence
+ description: Level of certainty that an observable is of the reported indicator type. Confidence score can range from 0-100, in increasing order of
+ confidence.
+ type: String
+ - contextPath: ThreatStream.EmailReputation.Status
+ description: Status assigned to the indicator.
+ type: String
+ - contextPath: ThreatStream.EmailReputation.Type
+ description: The indicator type.
+ type: String
+ - contextPath: ThreatStream.EmailReputation.Email
+ description: The email address of the indicator.
+ type: String
+ - contextPath: ThreatStream.EmailReputation.Source
+ description: The source of the indicator.
+ type: String
+ - contextPath: ThreatStream.EmailReputation.Modified
+ description: |-
+ Date and time when the indicator was last updated. The date format is: YYYYMMDDThhmmss, where "T" denotes the start of the value
+ for time, in UTC time.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: ip
+ description: 'The type of passive DNS search ("ip", "domain").'
+ isArray: false
+ name: type
+ predefined:
+ - ip
+ - domain
+ required: true
+ secret: false
+ - default: false
+ description: Possible values are "IP" or "Domain".
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '50'
+ description: Maximum number of results to return. Default is 50.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+  description: Returns passive DNS enrichment data for the available Domain or IP observables.
+ execution: false
+ name: threatstream-get-passive-dns
+ outputs:
+ - contextPath: ThreatStream.PassiveDNS.Domain
+ description: The domain value.
+ type: String
+ - contextPath: ThreatStream.PassiveDNS.Ip
+ description: The IP value.
+ type: String
+ - contextPath: ThreatStream.PassiveDNS.Rrtype
+ description: The Rrtype value.
+ type: String
+ - contextPath: ThreatStream.PassiveDNS.Source
+ description: The source value.
+ type: String
+ - contextPath: ThreatStream.PassiveDNS.FirstSeen
+ description: |-
+ The first seen date. The date format is: YYYYMMDDThhmmss, where "T" denotes the start of the value
+ for time, in UTC time.
+ type: String
+ - contextPath: ThreatStream.PassiveDNS.LastSeen
+ description: |-
+ The last seen date. The date format is: YYYYMMDDThhmmss, where "T" denotes the start of the value
+ for time, in UTC time.
+ type: String
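+  # Example usage from the War Room (hypothetical value):
+  #   !threatstream-get-passive-dns type=domain value=example.org limit=10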
+ - arguments:
+ - default: false
+ defaultValue: '50'
+ description: The level of certainty that an observable is of the reported indicator
+ type. Default is 50.
+ isArray: false
+ name: confidence
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: private
+ description: Denotes whether the indicator data is public or private to the organization.
+ Default is "private".
+ isArray: false
+ name: classification
+ predefined:
+ - private
+ - public
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: exploit
+ description: Type of threat associated with the imported observables. Default is "exploit".
+ isArray: false
+ name: threat_type
+ predefined:
+ - adware
+ - anomalous
+ - anonymization
+ - apt
+ - bot
+ - brute
+ - c2
+ - compromised
+ - crypto
+ - data_leakage
+ - ddos
+ - dyn_dns
+ - exfil
+ - exploit
+ - hack_tool
+ - i2p
+ - informational
+ - malware
+ - p2p
+ - parked
+ - phish
+ - scan
+ - sinkhole
+ - spam
+ - suppress
+ - suspicious
+ - tor
+ - vps
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: low
+ description: Gauges the potential impact of the indicator type the observable
+ is thought to be associated with. Default is "low".
+ isArray: false
+ name: severity
+ predefined:
+ - low
+ - medium
+ - high
+ - very-high
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: The import type of the indicator.
+ isArray: false
+ name: import_type
+ predefined:
+ - datatext
+ - file-id
+ - url
+ required: true
+ secret: false
+ - default: false
+    description: 'The source of the imported data. Can be one of the following: url,
+      datatext, or the file-id of a file uploaded to the War Room. Supported formats
+      in case of file-id are: CSV, HTML, IOC, JSON, PDF, TXT.'
+ isArray: false
+ name: import_value
+ required: true
+ secret: false
+ deprecated: false
+ description: 'Imports indicators (observables) into ThreatStream. Approval
+    of the imported data is required, using the ThreatStream UI. The data can be imported using
+ one of three methods: plain-text, file, or URL. Only one argument can be used.'
+ execution: false
+ name: threatstream-import-indicator-with-approval
+ outputs:
+ - contextPath: ThreatStream.Import.ImportID
+ description: The ID of the imported data.
+ type: String
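+  # Example usage from the War Room (hypothetical value):
+  #   !threatstream-import-indicator-with-approval import_type=datatext import_value=1.2.3.4 severity=medium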
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The type of indicator to be imported.
+ isArray: false
+ name: indicator_type
+ predefined:
+ - srcip
+ - domain
+ - url
+ - email
+ - md5
+ required: true
+ secret: false
+ - default: false
+ description: The indicator value to be imported.
+ isArray: false
+ name: indicator_data
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Classification of the indicator.
+ isArray: false
+ name: classification
+ predefined:
+ - public
+    - private
+ required: false
+ secret: false
+ - default: false
+ description: Level of certainty that an observable is of the reported indicator
+ type.
+ isArray: false
+ name: confidence
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Severity you want to assign to the indicator when it is imported.
+ isArray: false
+ name: severity
+ predefined:
+ - low
+ - medium
+ - high
+ - very-high
+ required: false
+ secret: false
+ - default: false
+ description: Tags applied to the imported observables.
+ isArray: false
+ name: tags
+ required: false
+ secret: false
+ deprecated: true
+ description: Imports indicators (observables) into ThreatStream. Approval is not required for the imported data. You must have the
+ Approve Intel user permission to import without approval using the API.
+ execution: false
+ name: threatstream-import-indicator-without-approval
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Threat model of the returned list.
+ isArray: false
+ name: model
+ predefined:
+ - actor
+ - campaign
+ - incident
+ - signature
+ - ttp
+ - vulnerability
+ - tipreport
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '50'
+    description: Limits the size of the models list. Specifying limit=0 will return
+      up to a maximum of 1000 models. When limit=0, the output is not set
+      in the context.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+  description: Returns a list of threat models.
+ execution: false
+ name: threatstream-get-model-list
+ outputs:
+ - contextPath: ThreatStream.List.Type
+ description: The type of threat model.
+ type: String
+ - contextPath: ThreatStream.List.Name
+ description: The name of the threat model.
+ type: String
+ - contextPath: ThreatStream.List.ID
+ description: The ID of the threat model.
+ type: String
+ - contextPath: ThreatStream.List.CreatedTime
+ description: |-
+ Date and time of threat model creation. The date format is: YYYYMMDDThhmmss, where "T" denotes the start of the value for time, in UTC time.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The threat model.
+ isArray: false
+ name: model
+ predefined:
+ - actor
+ - campaign
+ - incident
+ - signature
+ - ttp
+ - vulnerability
+ - tipreport
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the threat model.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns an HTML file with a description of the threat model.
+ execution: false
+ name: threatstream-get-model-description
+ outputs:
+ - contextPath: File.Name
+    description: The file name of the model description.
+ type: String
+ - contextPath: File.EntryID
+    description: The entry ID of the model description.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The threat model.
+ isArray: false
+ name: model
+ predefined:
+ - actor
+ - campaign
+ - incident
+ - signature
+ - ttp
+ - vulnerability
+ - tipreport
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the model.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '20'
+ description: Maximum number of results to return. Default is 20.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+  description: Returns a list of indicators associated with the specified model
+    and model ID.
+ execution: false
+ name: threatstream-get-indicators-by-model
+ outputs:
+ - contextPath: ThreatStream.Model.ModelType
+ description: The type of the threat model.
+ type: String
+ - contextPath: ThreatStream.Model.ModelID
+ description: The ID of the threat model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Value
+    description: The value of the indicator associated with the specified model.
+    type: String
+  - contextPath: ThreatStream.Model.Indicators.ID
+    description: The ID of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.IType
+ description: The iType of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Severity
+ description: The severity of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Confidence
+ description: The confidence of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Country
+    description: The country of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Organization
+ description: The organization of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.ASN
+ description: The ASN of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Status
+ description: The status of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Tags
+ description: The tags of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Modified
+ description: The date and time the indicator was last modified.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Source
+    description: The source of the indicator.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Type
+    description: The type of the indicator.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: private
+ description: Classification of the Sandbox submission.
+ isArray: false
+ name: submission_classification
+ predefined:
+ - private
+ - public
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: WINDOWS7
+    description: Platform on which the submitted URL or file will be run. To obtain
+      a list of supported platforms, run the threatstream-supported-platforms command.
+ isArray: false
+ name: report_platform
+ predefined:
+ - WINDOWS7
+ - WINDOWSXP
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: file
+ description: The detonation type ("file" or "url".
+ isArray: false
+ name: submission_type
+ predefined:
+ - file
+ - url
+ required: true
+ secret: false
+ - default: false
+ description: The submission value. Possible values are a valid URL or a file ID that was uploaded to
+ the War Room to detonate.
+ isArray: false
+ name: submission_value
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Specifies whether the premium sandbox should be used for detonation.
+ Default is "false".
+ isArray: false
+ name: premium_sandbox
+ predefined:
+ - 'false'
+ - 'true'
+ required: false
+ secret: false
+ - default: false
+ description: A CSV list of additional details for the
+ indicator. This information is displayed in the Tag column of the ThreatStream
+ UI.
+ isArray: false
+ name: detail
+ required: false
+ secret: false
+ deprecated: false
+ description: Submits a file or URL to the ThreatStream-hosted Sandbox for detonation.
+ execution: false
+ name: threatstream-submit-to-sandbox
+ outputs:
+ - contextPath: ThreatStream.Analysis.ReportID
+ description: The report ID that was submitted to the sandbox.
+ type: String
+ - contextPath: ThreatStream.Analysis.Status
+ description: The analysis status.
+ type: String
+ - contextPath: ThreatStream.Analysis.Platform
+    description: The platform of the submission submitted to the sandbox.
+ type: String
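+  # Example usage from the War Room (hypothetical file entry ID):
+  #   !threatstream-submit-to-sandbox submission_type=file submission_value=23@4 report_platform=WINDOWS7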
+ - arguments:
+ - default: false
+ description: Report ID for which to check the status.
+ isArray: false
+ name: report_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the current status of the report that was submitted to the sandbox.
+      The report ID is returned from the threatstream-submit-to-sandbox command.
+ execution: false
+ name: threatstream-get-analysis-status
+ outputs:
+ - contextPath: ThreatStream.Analysis.ReportID
+    description: The report ID of the file or URL that was detonated in the sandbox.
+ type: String
+ - contextPath: ThreatStream.Analysis.Status
+ description: The report status of the file or URL that was detonated in the
+ sandbox.
+ type: String
+ - contextPath: ThreatStream.Analysis.Platform
+    description: The platform that was used for detonation.
+ type: String
+ - contextPath: ThreatStream.Analysis.Verdict
+ description: The report verdict of the file or URL that was detonated in the
+ sandbox. The verdict will remain "benign" until detonation is complete.
+ type: String
+ - arguments:
+ - default: false
+ description: Report ID to return.
+ isArray: false
+ name: report_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the report of a file or URL that was submitted to the sandbox.
+ execution: false
+ name: threatstream-analysis-report
+ outputs:
+ - contextPath: ThreatStream.Analysis.ReportID
+ description: The ID of the report submitted to the sandbox.
+ type: String
+ - contextPath: ThreatStream.Analysis.Category
+ description: The report category.
+ type: String
+ - contextPath: ThreatStream.Analysis.Started
+ description: Detonation start time.
+ type: String
+ - contextPath: ThreatStream.Analysis.Completed
+ description: Detonation completion time.
+ type: String
+ - contextPath: ThreatStream.Analysis.Duration
+ description: Duration of the detonation (in seconds).
+ type: Number
+ - contextPath: ThreatStream.Analysis.VmName
+ description: The name of the VM.
+ type: String
+ - contextPath: ThreatStream.Analysis.VmID
+ description: The ID of the VM.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.UdpSource
+ description: The source of UDP.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.UdpDestination
+ description: The destination of UDP.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.UdpPort
+ description: The port of the UDP.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.IcmpSource
+ description: The ICMP source.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.IcmpDestination
+    description: The destination of ICMP.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.IcmpPort
+ description: The port of the ICMP.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.TcpSource
+ description: The source of TCP.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.TcpDestination
+ description: The destination of TCP.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.TcpPort
+ description: The port of TCP.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.HttpSource
+ description: The source of HTTP.
+ type: String
+  - contextPath: ThreatStream.Analysis.Network.HttpDestination
+ description: The destination of HTTP.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.HttpPort
+ description: The port of HTTP.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.HttpsSource
+ description: The source of HTTPS.
+ type: String
+  - contextPath: ThreatStream.Analysis.Network.HttpsDestination
+ description: The destination of HTTPS.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.HttpsPort
+ description: The port of HTTPS.
+ type: String
+ - contextPath: ThreatStream.Analysis.Network.Hosts
+ description: The hosts of network analysis.
+ type: String
+ - contextPath: ThreatStream.Analysis.Verdict
+ description: The verdict of the sandbox detonation.
+ type: String
+ - arguments:
+ - default: false
+    description: Anomali Observable Search Filter Language query to filter indicator
+      results. If a query is passed as an argument, it overrides all other arguments.
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: Autonomous System (AS) number associated with the indicator.
+ isArray: false
+ name: asn
+ required: false
+ secret: false
+ - default: false
+ description: |-
+ Level of certainty that an observable
+      is of the reported indicator type. Confidence scores range from 0-100, in increasing order of confidence, and are assigned by ThreatStream based on several factors.
+ isArray: false
+ name: confidence
+ required: false
+ secret: false
+ - default: false
+ description: Country associated with the indicator.
+ isArray: false
+ name: country
+ required: false
+ secret: false
+ - default: false
+ description: |-
+ When the indicator was first seen on
+ the ThreatStream cloud platform. Date must be specified in this format:
+ YYYYMMDDThhmmss, where "T" denotes the start of the value for time, in UTC time.
+ For example, 2014-10-02T20:44:35.
+ isArray: false
+ name: created_ts
+ required: false
+ secret: false
+ - default: false
+ description: Unique ID for the indicator.
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Classification of the indicator.
+ isArray: false
+ name: is_public
+ predefined:
+ - 'false'
+ - 'true'
+ required: false
+ secret: false
+ - default: false
+ description: Severity assigned to the indicator by ThreatStream.
+ isArray: false
+ name: indicator_severity
+ required: false
+ secret: false
+ - default: false
+ description: Registered owner (organization) of the IP address associated with
+ the indicator.
+ isArray: false
+ name: org
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Status assigned to the indicator.
+ isArray: false
+ name: status
+ predefined:
+ - active
+ - inactive
+ - falsepos
+ required: false
+ secret: false
+ - default: false
+ description: Tag assigned to the indicator.
+ isArray: false
+ name: tags_name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Type of indicator.
+ isArray: false
+ name: type
+ predefined:
+ - domain
+ - email
+ - ip
+ - md5
+ - string
+ - url
+ required: false
+ secret: false
+ - default: false
+    description: Value of the indicator.
+ isArray: false
+ name: indicator_value
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '20'
+    description: Maximum number of results to return from ThreatStream. Default is 20.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+  description: Returns filtered indicators from ThreatStream. If a query is defined,
+    it overrides all other arguments that were passed to the command.
+ execution: false
+ name: threatstream-get-indicators
+ outputs:
+ - contextPath: ThreatStream.Indicators.IType
+ description: The indicator type.
+ type: String
+ - contextPath: ThreatStream.Indicators.Modified
+ description: |-
+      Date and time when the indicator was last updated on the ThreatStream platform. Format: YYYYMMDDThhmmss, where T denotes the start of the value
+ for time, in UTC time.
+ type: String
+ - contextPath: ThreatStream.Indicators.Confidence
+ description: Level of certainty that an observable is of the reported indicator
+ type.
+ type: String
+ - contextPath: ThreatStream.Indicators.Value
+ description: The indicator value.
+ type: String
+ - contextPath: ThreatStream.Indicators.Status
+ description: The indicator status.
+ type: String
+ - contextPath: ThreatStream.Indicators.Organization
+ description: Registered owner (organization) of the IP address associated with
+ the indicator.
+ type: String
+ - contextPath: ThreatStream.Indicators.Country
+ description: Country associated with the indicator.
+ type: String
+ - contextPath: ThreatStream.Indicators.Tags
+ description: Tag assigned to the indicator.
+ type: String
+ - contextPath: ThreatStream.Indicators.Source
+ description: The source of the indicator.
+ type: String
+ - contextPath: ThreatStream.Indicators.ID
+ description: The ID of the indicator.
+ type: String
+ - contextPath: ThreatStream.Indicators.ASN
+ description: Autonomous System (AS) number associated with the indicator.
+ type: String
+ - contextPath: ThreatStream.Indicators.Severity
+ description: The severity assigned to the indicator.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: intelligence
+ description: The type of threat model entity on which to add the tag. Default is "intelligence" (indicator).
+ isArray: false
+ name: model
+ predefined:
+ - actor
+ - campaign
+ - incident
+ - intelligence
+ - signature
+ - tipreport
+ - ttp
+ - vulnerability
+ required: false
+ secret: false
+ - default: false
+    description: A CSV list of tags applied to the specified threat model entities
+      or observables.
+ isArray: true
+ name: tags
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the model on which to add the tag.
+ isArray: false
+ name: model_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Add tags to intelligence for purposes of filtering for related entities.
+ execution: false
+ name: threatstream-add-tag-to-model
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The type of threat model to create.
+ isArray: false
+ name: model
+ predefined:
+ - actor
+ - campaign
+ - incident
+ - ttp
+ - vulnerability
+ - tipreport
+ required: true
+ secret: false
+ - default: false
+ description: The name of the threat model to create.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: The scope of threat model visibility.
+ isArray: false
+ name: is_public
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: red
+ description: Traffic Light Protocol designation for the threat model.
+ isArray: false
+ name: tlp
+ predefined:
+ - red
+ - amber
+ - green
+ - white
+ required: false
+ secret: false
+ - default: false
+ description: A CSV list of tags.
+ isArray: false
+ name: tags
+ required: false
+ secret: false
+ - default: false
+    description: A CSV list of indicator IDs associated with the threat model
+ on the ThreatStream platform.
+ isArray: false
+ name: intelligence
+ required: false
+ secret: false
+ - default: false
+ description: The description of the threat model.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a threat model with the specified parameters.
+ execution: false
+ name: threatstream-create-model
+ outputs:
+ - contextPath: ThreatStream.Model.ModelType
+ description: The type of the threat model.
+ type: String
+ - contextPath: ThreatStream.Model.ModelID
+ description: The ID of the threat model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Value
+    description: The value of the indicator associated with the specified model.
+    type: String
+  - contextPath: ThreatStream.Model.Indicators.ID
+    description: The ID of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.IType
+ description: The iType of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Severity
+ description: The severity of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Confidence
+ description: The confidence of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Country
+    description: The country of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Organization
+ description: The organization of the indicator associated with the specified
+ model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.ASN
+ description: The ASN of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Status
+ description: The status of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Tags
+ description: The tags of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Modified
+ description: The date and time the indicator was last modified.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Source
+    description: The source of the indicator.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Type
+    description: The type of the indicator.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The type of threat model to update.
+ isArray: false
+ name: model
+ predefined:
+ - actor
+ - campaign
+ - incident
+ - ttp
+ - vulnerability
+ - tipreport
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the threat model to update.
+ isArray: false
+ name: model_id
+ required: true
+ secret: false
+ - default: false
+ description: The name of the threat model to update.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: The scope of threat model visibility.
+ isArray: false
+ name: is_public
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: red
+ description: Traffic Light Protocol designation for the threat model.
+ isArray: false
+ name: tlp
+ predefined:
+ - red
+ - amber
+ - green
+ - white
+ required: false
+ secret: false
+ - default: false
+ description: A CSV list of tags.
+ isArray: false
+ name: tags
+ required: false
+ secret: false
+ - default: false
+    description: A CSV list of indicator IDs associated with the threat model
+ on the ThreatStream platform.
+ isArray: false
+ name: intelligence
+ required: false
+ secret: false
+ - default: false
+ description: The description of the threat model.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ deprecated: false
+  description: Updates a threat model with the specified parameters. If one or more
+    optional parameters are defined, the command overrides the previous data stored
+    in ThreatStream.
+ execution: false
+ name: threatstream-update-model
+ outputs:
+ - contextPath: ThreatStream.Model.ModelType
+ description: The type of the threat model.
+ type: String
+ - contextPath: ThreatStream.Model.ModelID
+ description: The ID of the threat model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Value
+    description: The value of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.ID
+    description: The ID of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.IType
+ description: The iType of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Severity
+ description: The severity of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Confidence
+ description: The confidence of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Country
+    description: The country of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Organization
+ description: The organization of the indicator associated with the specified
+ model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.ASN
+ description: The ASN of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Status
+ description: The status of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Tags
+ description: The tags of the indicator associated with the specified model.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Modified
+ description: The date and time the indicator was last modified.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Source
+    description: The source of the indicator.
+ type: String
+ - contextPath: ThreatStream.Model.Indicators.Type
+ description: The type of the inidicator.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: default
+ description: 'The type of sandbox ("default" or "premium").'
+ isArray: false
+ name: sandbox_type
+ predefined:
+ - default
+ - premium
+ required: false
+ secret: false
+ deprecated: false
+  description: Returns the list of supported platforms for the default or premium sandbox.
+ execution: false
+ name: threatstream-supported-platforms
+ outputs:
+ - contextPath: ThreatStream.PremiumPlatforms.Name
+    description: The name of the supported platform for the premium sandbox.
+ type: String
+ - contextPath: ThreatStream.PremiumPlatforms.Types
+    description: The type of supported submissions for the premium sandbox.
+ type: String
+ - contextPath: ThreatStream.PremiumPlatforms.Label
+    description: The display name of the supported platform of the premium sandbox.
+ type: String
+ - contextPath: ThreatStream.DefaultPlatforms.Name
+    description: The name of the supported platform for the standard sandbox.
+ type: String
+ - contextPath: ThreatStream.DefaultPlatforms.Types
+    description: The type of supported submissions for the standard sandbox.
+ type: String
+ - contextPath: ThreatStream.DefaultPlatforms.Label
+    description: The display name of the supported platform of the standard sandbox.
+ type: String
+ - arguments:
+ - default: false
+ description: The URL to check.
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: If the severity is greater than or equal to the threshold, the URL
+      is considered malicious. This argument overrides the default threshold defined
+      as a parameter.
+ isArray: false
+ name: threshold
+ predefined:
+ - low
+ - medium
+ - high
+ - very-high
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'True'
+ description: Whether to include results with the status "Inactive". Default is "True".
+ isArray: false
+ name: include_inactive
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Checks the reputation of the given URL.
+ execution: false
+ name: url
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: URL.Data
+ description: The URL of the indicator.
+ type: String
+ - contextPath: URL.Malicious.Vendor
+ description: Vendor that reported the indicator as malicious.
+ type: String
+ - contextPath: ThreatStream.URL.Modified
+ description: |-
+ Date and time when the indicator was last updated. The date format is: YYYYMMDDThhmmss, where "T" denotes the start of the value
+ for time, in UTC time.
+ type: String
+ - contextPath: ThreatStream.URL.Confidence
+    description: Level of certainty that an observable is of the reported indicator
+      type. The confidence score can range from 0 to 100, in increasing order of
+      confidence.
+ type: String
+ - contextPath: ThreatStream.URL.Status
+ description: The status of the indicator.
+ type: String
+ - contextPath: ThreatStream.URL.Organization
+ description: Name of the business that owns the IP address associated with the
+ indicator.
+ type: String
+ - contextPath: ThreatStream.URL.Address
+ description: URL of the indicator.
+ type: String
+ - contextPath: ThreatStream.URL.Country
+ description: Country associated with the indicator.
+ type: String
+ - contextPath: ThreatStream.URL.Type
+ description: The indicator type.
+ type: String
+ - contextPath: ThreatStream.URL.Source
+ description: The source of the indicator.
+ type: String
+ - contextPath: ThreatStream.URL.Severity
+ description: 'The indicator severity ("very-high", "high", "medium", or "low").'
+ type: String
+ dockerimage: demisto/python3:3.7.3.221
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- ThreatStream-Test
diff --git a/Integrations/Anomali_ThreatStream_v2/Anomali_ThreatStream_v2_description.md b/Integrations/Anomali_ThreatStream_v2/Anomali_ThreatStream_v2_description.md
new file mode 100644
index 000000000000..7ee897efc527
--- /dev/null
+++ b/Integrations/Anomali_ThreatStream_v2/Anomali_ThreatStream_v2_description.md
@@ -0,0 +1 @@
+To access ThreatStream using the API, you need a user ID and API key. To get these credentials, register at [http://ui.threatstream.com](http://ui.threatstream.com).
diff --git a/Integrations/Anomali_ThreatStream_v2/Anomali_ThreatStream_v2_image.png b/Integrations/Anomali_ThreatStream_v2/Anomali_ThreatStream_v2_image.png
new file mode 100644
index 000000000000..db0de05a40e4
Binary files /dev/null and b/Integrations/Anomali_ThreatStream_v2/Anomali_ThreatStream_v2_image.png differ
diff --git a/Integrations/Anomali_ThreatStream_v2/CHANGELOG.md b/Integrations/Anomali_ThreatStream_v2/CHANGELOG.md
new file mode 100644
index 000000000000..115c3fce8a45
--- /dev/null
+++ b/Integrations/Anomali_ThreatStream_v2/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.8.0] - 2019-08-06
+ - Fixed an issue with the *description* argument in the ***threatstream-create-model*** command.
diff --git a/Integrations/ArcSightESMv2/ArcSightESMv2.py b/Integrations/ArcSightESMv2/ArcSightESMv2.py
new file mode 100644
index 000000000000..82e9ea3cc452
--- /dev/null
+++ b/Integrations/ArcSightESMv2/ArcSightESMv2.py
@@ -0,0 +1,849 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+""" IMPORTS """
+from datetime import datetime
+import requests
+import base64
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+""" GLOBALS """
+MAX_UNIQUE = int(demisto.params().get('max_unique', 2000))
+FETCH_CHUNK_SIZE = int(demisto.params().get('fetch_chunk_size', 50))
+FETCH_CHUNK_SIZE = min(50, FETCH_CHUNK_SIZE)  # fetch size should not exceed 50
+
+BASE_URL = demisto.params().get('server').rstrip('/') + '/'
+VERIFY_CERTIFICATE = not demisto.params().get('insecure', True)
+HEADERS = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
+
+REQ_SOAP_BODY = """
+
+
+
+
+ {auth_token}
+ {resource_id}
+ {entryList}
+
+
+
+""".format
+
+ENTRY_LIST = "{} ".format
+ENTRY = "{} ".format
+COLUMN = "{} ".format
+BODY = "{} ".format
+
+if not demisto.params().get("proxy", False):
+ del os.environ["HTTP_PROXY"]
+ del os.environ["HTTPS_PROXY"]
+ del os.environ["http_proxy"]
+ del os.environ["https_proxy"]
+
+
+@logger
+def int_to_ip(num):
+ return "{}.{}.{}.{}".format((num >> 24) & 255, (num >> 16) & 255, (num >> 8) & 255, num & 255)
+
+
+@logger
+def decode_ip(address_by_bytes):
+ """ Decodes the enigmatic ways IPs are stored in ArcSight DB into IPv4/6 format """
+ if isinstance(address_by_bytes, int):
+ return int_to_ip(address_by_bytes)
+
+ try:
+ # if it's not an int, it should be Base64 encoded string
+ decoded_string = base64.b64decode(address_by_bytes).encode('hex')
+ if len(address_by_bytes) >= 20:
+ # split the IPv6 address into 8 chunks of 4
+ decoded_string = [decoded_string[i:i + 4] for i in range(0, len(decoded_string), 4)] # type: ignore
+ return "{}:{}:{}:{}:{}:{}:{}:{}".format(*decoded_string)
+ elif len(address_by_bytes) >= 6:
+ decoded_string = int(decoded_string, 16) # type: ignore
+ return int_to_ip(decoded_string)
+ else:
+ return address_by_bytes
+
+ except Exception as ex:
+ # sometimes ArcSight would not encode IPs, this will cause the decoder to
+ # throw an exception, and in turn, we will return the input in its original form.
+ demisto.debug(str(ex))
+ return address_by_bytes
+
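+# Illustrative example (assumed values): an int such as 167772161 decodes via
+# int_to_ip to '10.0.0.1', a Base64-encoded byte string goes through the
+# b64decode branch above, and anything undecodable is returned unchanged.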
+
+@logger
+def parse_timestamp_to_datestring(timestamp):
+ if timestamp and timestamp > 0:
+ try:
+ return datetime.fromtimestamp(timestamp / 1000.0).strftime("%Y-%m-%dT%H:%M:%S.000Z")
+ except (ValueError, TypeError) as e:
+ demisto.debug(str(e))
+ if timestamp == '31 Dec 1969 19:00:00 EST':
+ # Unix epoch 00:00:00 UTC
+ return 'None'
+ return timestamp
+
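+# Illustrative note: a millisecond epoch such as 1546300800000 renders as
+# '2019-01-01T00:00:00.000Z' when the container clock is UTC; fromtimestamp uses
+# local time, so the hard-coded 'Z' suffix assumes a UTC-configured container.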
+
+@logger
+def decode_arcsight_output(d, depth=0, remove_nones=True):
+ """ Converts some of the values from ArcSight DB into a more useful & readable format """
+ # ArcSight stores some None values as follows
+ NONE_VALUES = [-9223372036854776000, -9223372036854775808, -2147483648, 5e-324]
+ # ArcSight stores IP addresses as int, in the following keys
+ IP_FIELDS = ['address', 'addressAsBytes', 'Destination Address', 'Source Address']
+ # ArcSight stores Dates as timeStamps in the following keys, -> reformat into Date
+ TIMESTAMP_FIELDS = ['createdTimestamp', 'modifiedTimestamp', 'deviceReceiptTime', 'startTime', 'endTime',
+ 'stageUpdateTime', 'modificationTime', 'managerReceiptTime', 'createTime', 'agentReceiptTime']
+ if depth < 10:
+ if isinstance(d, list):
+ return [decode_arcsight_output(d_, depth + 1) for d_ in d]
+ if isinstance(d, dict):
+ for key, value in d.items():
+ if isinstance(value, dict):
+ decode_arcsight_output(value, depth + 1)
+ elif value in NONE_VALUES:
+ if remove_nones:
+ d.pop(key, None)
+ else:
+ d[key] = 'None'
+
+ elif key in IP_FIELDS:
+ key = 'decodedAddress' if key == 'addressAsBytes' else key
+ d[key] = decode_ip(value)
+ elif key in TIMESTAMP_FIELDS:
+ key = key.replace('Time', 'Date').replace('stamp', '')
+ d[key] = parse_timestamp_to_datestring(value)
+ return d
+
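+# Minimal sketch of the conversion above (values are illustrative):
+# decode_arcsight_output({'address': 167772161, 'endTime': -9223372036854775808})
+# returns {'address': '10.0.0.1'} -- the sentinel "None" value is dropped and
+# recognized timestamp keys are renamed from *Time/*Timestamp to *Date.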
+
+def login():
+ query_path = 'www/core-service/rest/LoginService/login'
+ headers = {
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ 'Accept': 'application/json'
+ }
+ params = {
+ 'login': demisto.get(demisto.params(), 'credentials.identifier'),
+ 'password': demisto.get(demisto.params(), 'credentials.password'),
+ 'alt': 'json'
+ }
+ res = send_request(query_path, headers=headers, params=params, is_login=True)
+ if not res.ok:
+ demisto.debug(res.text)
+ return_error('Failed to login, check integration parameters.')
+
+ try:
+ res_json = res.json()
+ if 'log.loginResponse' in res_json and 'log.return' in res_json.get('log.loginResponse'):
+ auth_token = res_json.get('log.loginResponse').get('log.return')
+ if demisto.command() not in ['test-module', 'fetch-incidents']:
+ # this is done to bypass setting integration context outside of the cli
+ demisto.setIntegrationContext({'auth_token': auth_token})
+ return auth_token
+
+ return_error('Failed to login. Have not received token after login')
+ except ValueError:
+ return_error('Failed to login. Please check integration parameters')
+
+
+def send_request(query_path, body=None, params=None, json=None, headers=None, method='post', is_login=False):
+ if headers is None:
+ headers = HEADERS
+ full_url = BASE_URL + query_path
+ try:
+ res = requests.request(
+ method,
+ full_url,
+ headers=headers,
+ verify=VERIFY_CERTIFICATE,
+ data=body,
+ params=params,
+ json=json
+ )
+
+ if not res.ok and not is_login:
+ if params and not body:
+ params['authToken'] = login()
+ else:
+ body = body.replace(demisto.getIntegrationContext().get('auth_token'), login())
+ return requests.request(
+ method,
+ full_url,
+ headers=headers,
+ verify=VERIFY_CERTIFICATE,
+ data=body,
+ params=params,
+ json=json
+ )
+ return res
+
+ except Exception as ex:
+ demisto.debug(str(ex))
+ return_error('Connection Error. Please check integration parameters')
+
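+# Design note: send_request retries a failed call exactly once after
+# re-authenticating -- the fresh token from login() replaces the stale one in
+# either the query params or the SOAP body, so an expired cached auth_token
+# recovers transparently.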
+
+def test():
+ """
+ Login (already done in global).
+ Test if fetch query viewers are valid.
+ Run query viewer if fetch defined.
+ """
+ events_query_viewer_id = demisto.params().get('viewerId')
+ cases_query_viewer_id = demisto.params().get('casesQueryViewerId')
+ is_fetch = demisto.params().get('isFetch')
+
+ if is_fetch and not events_query_viewer_id and not cases_query_viewer_id:
+ return_error('If fetch is enabled, you must provide query viewer Resource ID for Cases or Events')
+
+ if events_query_viewer_id:
+ fields, results = get_query_viewer_results(events_query_viewer_id)
+ if 'Event ID' not in fields or 'Start Time' not in fields:
+            return_error('Query "{}" must contain "Start Time" and "Event ID" fields'.format(events_query_viewer_id))
+
+ if cases_query_viewer_id:
+ fields, results = get_query_viewer_results(cases_query_viewer_id)
+ if 'ID' not in fields or 'Create Time' not in fields:
+ return_error('Query "{}" must contain "Create Time" and "ID" fields'.format(cases_query_viewer_id))
+
+
+@logger
+def get_query_viewer_results(query_viewer_id):
+ query_path = 'www/manager-service/rest/QueryViewerService/getMatrixData'
+ params = {
+ 'authToken': AUTH_TOKEN,
+ 'id': query_viewer_id,
+ 'alt': 'json'
+ }
+ res = send_request(query_path, params=params, method='get')
+
+ if not res.ok:
+ demisto.debug(res.text)
+ if 'ResourceNotFoundException' in res.text:
+            return_error('Invalid resource ID {} for Query Viewer (ResourceNotFoundException)'.format(query_viewer_id))
+ else:
+ return_error('Failed to get query viewer results.')
+
+ return_object = None
+ res_json = res.json()
+ if "qvs.getMatrixDataResponse" in res_json and "qvs.return" in res_json["qvs.getMatrixDataResponse"]:
+ # ArcSight ESM version 6.7 & 6.9 rest API supports qvs.getMatrixDataResponse
+ return_object = res_json.get("qvs.getMatrixDataResponse").get("qvs.return")
+
+ elif "que.getMatrixDataResponse" in res_json and "que.return" in res_json["que.getMatrixDataResponse"]:
+ # ArcSight ESM version 6.1 rest API supports que.getMatrixDataResponse
+ return_object = res_json.get("que.getMatrixDataResponse").get("que.return")
+
+ else:
+        return_error('Invalid response structure. Open a ticket with Demisto support and attach the logs')
+ return
+
+ fields = return_object.get('columnHeaders', [])
+ if not isinstance(fields, (list,)):
+ fields = [fields]
+
+ results = return_object.get("rows", [])
+ if not isinstance(results, (list,)):
+ results = [results]
+
+ if len(fields) == 0 or len(results) == 0:
+ return fields, results
+
+ """
+ we parse the rows by column headers and create formatted result
+
+ "columnHeaders": [
+ "ID",
+ "Event-Event ID",
+ ],
+ "rows": [
+ {
+ "@xsi.type": "listWrapper",
+ "value": [
+ {
+ "@xsi.type": "xs:string",
+ "$": ""
+ },
+ {
+ "@xsi.type": "xs:string",
+ "$": ""
+ }
+ ]
+ },
+ {
+ "@xsi.type": "listWrapper",
+ "value": [
+ {
+ "@xsi.type": "xs:string",
+ "$": ""
+ },
+ {
+ "@xsi.type": "xs:string",
+ "$": ""
+ }
+ ]
+ }
+ ]
+
+ convert to ===>
+
+    query_results = [
+        {
+            Event-Event ID: <value>,
+            ID: <value>
+        },
+        {
+            Event-Event ID: <value>,
+            ID: <value>
+        }
+    ]
+ """
+ results = [{field: result.get('value')[idx].get('$') for idx, field in enumerate(fields)} for result in results]
+ return fields, results
+
+
+@logger
+def get_query_viewer_results_command():
+ resource_id = demisto.args().get('id')
+ only_columns = demisto.args().get('onlyColumns')
+ columns, query_results = get_query_viewer_results(query_viewer_id=resource_id)
+
+ demisto.debug('printing Query Viewer column headers')
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': columns,
+ 'HumanReadable': tableToMarkdown(name='', headers='Column Headers', t=columns, removeNull=True)
+ })
+ if only_columns == 'false':
+ demisto.debug('printing Query Viewer results')
+
+ contents = query_results
+ human_readable = tableToMarkdown(name='Query Viewer Results: {}'.format(resource_id), t=contents,
+ removeNull=True)
+ outputs = {'ArcSightESM.QueryViewerResults': contents}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+
+@logger
+def fetch():
+ """
+    A query viewer must be defined in ArcSight ESM. fetch-incidents fetches the query viewer results
+    and converts them into Demisto incidents. Either Cases or Events can be queried. If Cases are fetched,
+    the query viewer query must return the "ID" and "Create Time" fields; if Events are fetched, it must
+    return the "Event ID" and "Start Time" fields.
+ """
+ events_query_viewer_id = demisto.params().get('viewerId')
+ cases_query_viewer_id = demisto.params().get('casesQueryViewerId')
+    type_of_incident = 'event' if events_query_viewer_id else 'case'
+ last_run = json.loads(demisto.getLastRun().get('value', '{}'))
+ already_fetched = last_run.get('already_fetched', [])
+
+ fields, query_results = get_query_viewer_results(events_query_viewer_id or cases_query_viewer_id)
+ # sort query_results by creation time
+ query_results.sort(key=lambda k: int(k.get('Start Time') or k.get('Create Time')))
+
+ incidents = []
+ for result in query_results:
+ # convert case or event to demisto incident
+ r_id = result.get('ID') or result.get('Event ID')
+ if r_id not in already_fetched:
+ create_time_epoch = int(result.get('Start Time') or result.get('Create Time'))
+ result['Create Time'] = parse_timestamp_to_datestring(create_time_epoch)
+ incident_name = result.get('Name') or 'New {} from arcsight at {}'.format(type_of_incident, datetime.now())
+ labels = [{'type': key.encode('utf-8'), 'value': value.encode('utf-8') if value else value} for key, value
+ in result.items()]
+ incident = {
+ 'name': incident_name,
+ 'occurred': result['Create Time'],
+ 'labels': labels,
+ 'rawJSON': json.dumps(result)
+ }
+
+ incidents.append(incident)
+ if len(incidents) >= FETCH_CHUNK_SIZE:
+ break
+
+ if len(already_fetched) > MAX_UNIQUE:
+ already_fetched.pop(0)
+ already_fetched.append(r_id)
+
+ last_run = {
+ 'already_fetched': already_fetched,
+ }
+ demisto.setLastRun({'value': json.dumps(last_run)})
+ decode_arcsight_output(incidents)
+
+ if demisto.command() == 'as-fetch-incidents':
+ contents = {
+ 'last_run': last_run,
+ 'last_run_updated': demisto.getLastRun(),
+ 'incidents': incidents,
+ 'already_fetched': already_fetched
+ }
+ return_outputs(readable_output='', outputs={}, raw_response=contents)
+ else:
+ demisto.incidents(incidents)
+
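+# Design note: already_fetched is a bounded deduplication window -- once it
+# grows past MAX_UNIQUE the oldest ID is evicted, so an ID older than the
+# window could in principle be fetched again if it reappears in the results.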
+
+@logger
+def get_case(resource_id, fetch_base_events=False):
+ query_path = 'www/manager-service/rest/CaseService/getResourceById'
+ params = {
+ 'authToken': AUTH_TOKEN,
+ 'resourceId': resource_id,
+ }
+ res = send_request(query_path, params=params, method='get')
+
+ if not res.ok:
+ demisto.debug(res.text)
+ if 'InvalidResourceIDException: Invalid resource ID' in res.text and 'for Case' in res.text:
+ return_error('Invalid resource ID {} for Case'.format(resource_id))
+ else:
+ return_error('Failed to get case. StatusCode: {}'.format(res.status_code))
+
+ res_json = res.json()
+ if 'cas.getResourceByIdResponse' in res_json and 'cas.return' in res_json.get('cas.getResourceByIdResponse'):
+ case = res_json.get('cas.getResourceByIdResponse').get('cas.return')
+
+ if case.get('eventIDs') and not isinstance(case['eventIDs'], list):
+ # if eventIDs is single id then convert to list
+ case['eventIDs'] = [case['eventIDs']]
+
+ if case.get('eventIDs') and fetch_base_events:
+ case['events'] = decode_arcsight_output(get_security_events(case['eventIDs'], ignore_empty=True),
+ remove_nones=False)
+
+ return case
+
+ return_error('Case {} not found'.format(resource_id))
+
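+# Note: getResourceById returns eventIDs as a scalar when a case holds a single
+# event; get_case normalizes it to a list so callers can always iterate over it.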
+
+@logger
+def get_case_command():
+ resource_id = demisto.args().get('resourceId')
+ with_base_events = demisto.args().get('withBaseEvents') == 'true'
+
+ raw_case = get_case(resource_id, fetch_base_events=with_base_events)
+ case = {
+ 'Name': raw_case.get('name'),
+ 'EventIDs': raw_case.get('eventIDs'),
+ 'Action': raw_case.get('action'),
+ 'Stage': raw_case.get('stage'),
+ 'CaseID': raw_case.get('resourceid'),
+ 'Severity': raw_case.get('consequenceSeverity'),
+ 'CreatedTime': epochToTimestamp(raw_case.get('createdTimestamp'))
+ }
+ if with_base_events:
+ case['events'] = raw_case.get('events')
+
+ contents = decode_arcsight_output(raw_case)
+ human_readable = tableToMarkdown(name='Case {}'.format(resource_id), t=case, removeNull=True)
+ outputs = {'ArcSightESM.Cases(val.resourceid===obj.resourceid)': contents}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+
+@logger
+def get_all_cases_command():
+ query_path = 'www/manager-service/rest/CaseService/findAllIds'
+ params = {
+ 'authToken': AUTH_TOKEN,
+ 'alt': 'json'
+ }
+ res = send_request(query_path, params=params, method='get')
+
+ if not res.ok:
+ demisto.debug(res.text)
+ return_error('Failed to get case list. StatusCode: {}'.format(res.status_code))
+
+ contents = res.json().get('cas.findAllIdsResponse').get('cas.return')
+ human_readable = tableToMarkdown(name='All cases', headers='caseID', t=contents, removeNull=True)
+ outputs = {'ArcSightESM.AllCaseIDs': contents}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+
+@logger
+def get_security_events_command():
+ ids = demisto.args().get('ids')
+ last_date_range = demisto.args().get('lastDateRange')
+ ids = argToList(str(ids) if isinstance(ids, int) else ids)
+ raw_events = get_security_events(ids, last_date_range)
+ if raw_events:
+ events = []
+ contents = decode_arcsight_output(raw_events)
+ for raw_event in contents:
+ event = {
+ 'Event ID': raw_event.get('eventId'),
+ 'Time': timestamp_to_datestring(raw_event.get('endTime'), '%Y-%m-%d, %H:%M:%S'),
+ 'Source Address': decode_ip(demisto.get(raw_event, 'source.address')),
+ 'Destination Address': decode_ip(demisto.get(raw_event, 'destination.address')),
+ 'Name': raw_event.get('name'),
+ 'Source Port': demisto.get(raw_event, 'source.port'),
+ 'Base Event IDs': raw_event.get('baseEventIds')
+ }
+ events.append(event)
+
+ human_readable = tableToMarkdown('Security Event: {}'.format(','.join(map(str, ids))), events, removeNull=True)
+ outputs = {'ArcSightESM.SecurityEvents(val.eventId===obj.eventId)': contents}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+ else:
+ demisto.results('No events were found')
+
+
+@logger
+def get_security_events(event_ids, last_date_range=None, ignore_empty=False):
+ start_time, end_time = -1, -1
+ if last_date_range:
+        # Must be of the format 'number date_range_unit'
+ # Examples: (2 hours, 4 minutes, 6 month, 1 day, etc.)
+ start_time, end_time = parse_date_range(last_date_range, to_timestamp=True)
+
+ query_path = 'www/manager-service/rest/SecurityEventService/getSecurityEvents'
+ params = {
+ 'alt': 'json'
+ }
+ json_ = {
+ "sev.getSecurityEvents": {
+ "sev.authToken": AUTH_TOKEN,
+ "sev.ids": event_ids,
+ "sev.startMillis": start_time,
+ "sev.endMillis": end_time
+ }
+ }
+ res = send_request(query_path, json=json_, params=params)
+
+ if not res.ok:
+ demisto.debug(res.text)
+ return_error(
+ 'Failed to get security events with ids {}.\nFull URL: {}\nStatus Code: {}\nResponse Body: {}'.format(
+ event_ids, BASE_URL + query_path, res.status_code, res.text))
+
+ res_json = res.json()
+ if res_json.get('sev.getSecurityEventsResponse') and res_json.get('sev.getSecurityEventsResponse').get(
+ 'sev.return'):
+ events = res_json.get('sev.getSecurityEventsResponse').get('sev.return')
+ return events if isinstance(events, list) else [events]
+
+ demisto.debug(res.text)
+ if not ignore_empty:
+ demisto.results('No events were found')
+
+
+@logger
+def update_case_command():
+ case_id = demisto.args().get('caseId')
+ stage = demisto.args().get('stage')
+ severity = demisto.args().get('severity')
+
+ raw_updated_case = update_case(case_id, stage, severity)
+ updated_case = {
+ 'Name': raw_updated_case.get('name'),
+ 'EventIDs': raw_updated_case.get('eventIDs'),
+ 'Action': raw_updated_case.get('action'),
+ 'Stage': raw_updated_case.get('stage'),
+ 'CaseID': raw_updated_case.get('resourceid'),
+ 'Severity': raw_updated_case.get('consequenceSeverity'),
+ 'CreatedTime': epochToTimestamp(raw_updated_case.get('createdTimestamp'))
+ }
+ contents = decode_arcsight_output(raw_updated_case)
+ human_readable = tableToMarkdown(name='Case {}'.format(case_id), t=updated_case, removeNull=True)
+ outputs = {'ArcSightESM.Cases(val.resourceid===obj.resourceid)': contents}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+
+@logger
+def update_case(case_id, stage, severity):
+ # get the case from arcsight
+ case = get_case(case_id)
+ case['stage'] = stage
+ case['consequenceSeverity'] = severity if severity else case['consequenceSeverity']
+
+ # update its stage and send it back to arcsight
+ query_path = 'www/manager-service/rest/CaseService/update'
+ params = {
+ 'alt': 'json'
+ }
+ json_ = {
+ "cas.update": {
+ "cas.authToken": AUTH_TOKEN,
+ "cas.resource": case,
+ }
+ }
+ res = send_request(query_path, json=json_, params=params)
+
+ if not res.ok:
+ demisto.debug(res.text)
+        return_error('Failed to update case {}.\nPlease make sure the user has edit permissions'
+                     ' and the case is unlocked.\nStatus Code: {}\nResponse Body: {}'.format(case_id, res.status_code,
+                                                                                             res.text))
+
+ res_json = res.json()
+ if 'cas.updateResponse' in res_json and 'cas.return' in res_json.get('cas.updateResponse'):
+ return case
+
+    return_error('Failed to update case; failed to parse the response. Response Body: {}'.format(res.text))
+
+
+@logger
+def get_correlated_events_ids(event_ids):
+ related_ids = set(event_ids)
+ correlated_events = decode_arcsight_output(get_security_events(event_ids, ignore_empty=True))
+
+ if correlated_events:
+ for raw_event in correlated_events:
+ base_event_ids = raw_event.get('baseEventIds')
+ if base_event_ids:
+ if isinstance(base_event_ids, list):
+ related_ids.update(base_event_ids)
+ else:
+ related_ids.add(base_event_ids)
+
+ return list(related_ids)
+
+
+@logger
+def get_case_event_ids_command():
+ case_id = demisto.args().get('caseId')
+ with_correlated_events = demisto.args().get('withCorrelatedEvents') == 'true'
+ query_path = 'www/manager-service/rest/CaseService/getCaseEventIDs'
+ params = {
+ 'authToken': AUTH_TOKEN,
+ 'caseId': case_id
+ }
+
+ res = send_request(query_path, params=params, method='get')
+ if not res.ok:
+ demisto.debug(res.text)
+ return_error("Failed to get Event IDs with:\nStatus Code: {}\nResponse: {}".format(res.status_code, res.text))
+
+ res_json = res.json()
+ if 'cas.getCaseEventIDsResponse' in res_json and 'cas.return' in res_json.get('cas.getCaseEventIDsResponse'):
+ event_ids = res_json.get('cas.getCaseEventIDsResponse').get('cas.return')
+ if not isinstance(event_ids, list):
+ event_ids = [event_ids]
+
+ if with_correlated_events:
+ event_ids = get_correlated_events_ids(event_ids)
+
+ contents = decode_arcsight_output(res_json)
+ human_readable = tableToMarkdown(name='', headers='Case {} Event IDs'.format(case_id), t=event_ids,
+ removeNull=True)
+ outputs = {'ArcSightESM.CaseEvents': event_ids}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+ else:
+ demisto.results('No result returned')
+
+
+@logger
+def delete_case_command():
+ case_id = demisto.args().get('caseId')
+
+ query_path = 'www/manager-service/rest/CaseService/deleteByUUID'
+ req_body = json.dumps({
+ 'cas.deleteByUUID': {
+ 'cas.authToken': AUTH_TOKEN,
+ 'cas.id': case_id
+ }
+ })
+ params = {
+ 'alt': 'json'
+ }
+ res = send_request(query_path, params=params, body=req_body)
+ if not res.ok:
+ demisto.debug(res.text)
+ return_error("Failed to delete case.\nStatus Code: {}\nResponse: {}".format(res.status_code, res.text))
+
+ entry_context = {
+ 'resourceid': case_id,
+ 'deleted': 'True'
+ }
+ contents = 'Case {} was deleted successfully'.format(case_id)
+ human_readable = 'Case {} successfully deleted'.format(case_id)
+ outputs = {'ArcSightESM.Cases(val.resourceid===obj.resourceid)': entry_context}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+
+@logger
+def get_entries_command():
+ resource_id = demisto.args().get('resourceId')
+ entry_filter = demisto.args().get('entryFilter')
+
+ query_path = 'www/manager-service/services/ActiveListService/'
+ body = REQ_SOAP_BODY(function='getEntries', auth_token=AUTH_TOKEN, resource_id=resource_id, entryList=None)
+
+ res = send_request(query_path, body=body)
+
+ if not res.ok:
+ demisto.debug(res.text)
+ return_error("Failed to get entries:\nResource ID: {}\nStatus Code: {}\nRequest Body: {}\nResponse: {}".format(
+ resource_id, res.status_code, body, res.text))
+
+ res_json = json.loads(xml2json(res.text))
+ raw_entries = demisto.get(res_json, 'Envelope.Body.getEntriesResponse.return')
+
+ # retrieve columns
+ cols = demisto.get(raw_entries, 'columns')
+ if cols:
+ hr_columns = tableToMarkdown(name='', headers=['Columns'], t=cols,
+ removeNull=True) if cols else 'Active list has no columns'
+ contents = cols
+ return_outputs(readable_output=hr_columns, outputs={}, raw_response=contents)
+
+ if 'entryList' in raw_entries:
+ entry_list = raw_entries['entryList'] if isinstance(raw_entries['entryList'], list) else [
+ raw_entries['entryList']]
+ entry_list = [d['entry'] for d in entry_list if 'entry' in d]
+ keys = raw_entries.get('columns')
+ entries = [dict(zip(keys, values)) for values in entry_list]
+
+ # if the user wants only entries that contain certain 'field:value' sets (filters)
+ # e.g., "name:myName,eventId:0,:ValueInUnknownField"
+ # if the key is empty, search in every key
+ filtered = entries
+ if entry_filter:
+ for f in entry_filter.split(','):
+ k, v = f.split(':')
+ filtered = [entry for entry in filtered if ((entry.get(k) == v) if k else (v in entry.values()))]
+
+ contents = decode_arcsight_output(filtered)
+ ActiveListContext = {
+ 'ResourceID': resource_id,
+ 'Entries': contents,
+ }
+ outputs = {
+ 'ArcSightESM.ActiveList.{id}'.format(id=resource_id): contents,
+ 'ArcSightESM.ActiveList(val.ResourceID===obj.ResourceID)': ActiveListContext
+ }
+ human_readable = tableToMarkdown(name='Active List entries: {}'.format(resource_id), t=filtered,
+ removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+ else:
+ demisto.results('Active List has no entries')
+
+
+@logger
+def clear_entries_command():
+ resource_id = demisto.args().get('resourceId')
+ query_path = 'www/manager-service/services/ActiveListService/'
+ body = REQ_SOAP_BODY(function='clearEntries', auth_token=AUTH_TOKEN, resource_id=resource_id, entryList=None)
+ res = send_request(query_path, body=body)
+
+ if not res.ok:
+ demisto.debug(res.text)
+ return_error(
+ "Failed to clear entries.\nResource ID: {}\nStatus Code: {}\nRequest Body: {}\nResponse: {}".format(
+ resource_id, res.status_code, body, res.text))
+
+ demisto.results("Success")
+
+
+@logger
+def add_entries_command():
+ resource_id = demisto.args().get('resourceId')
+ entries = demisto.args().get('entries')
+ query_path = 'www/manager-service/services/ActiveListService/'
+ if not isinstance(entries, dict):
+ try:
+ entries = json.loads(entries)
+ except ValueError as ex:
+ demisto.debug(str(ex))
+            return_error('Entries must be in JSON format and must be an array of objects.')
+ if not all([entry.keys() == entries[0].keys() for entry in entries[1:]]):
+ return_error('All entries must have the same fields')
+
+ columns = ''.join(COLUMN(column) for column in entries[0]) # the fields in the entryList matrix are the columns
+ entry_list = BODY(columns + ''.join(ENTRY_LIST(''.join(ENTRY(v) for v in en.values())) for en in entries))
+ body = REQ_SOAP_BODY(function='addEntries', auth_token=AUTH_TOKEN, resource_id=resource_id, entryList=entry_list)
+ res = send_request(query_path, body=body)
+
+ if not res.ok:
+ demisto.debug(res.text)
+ return_error("Failed to add entries. Please make sure to enter Active List resource ID"
+ "\nResource ID: {}\nStatus Code: {}\nRequest Body: {}\nResponse: {}".format(resource_id,
+ res.status_code, body,
+ res.text))
+
+ demisto.results("Success")
+
+
+@logger
+def get_all_query_viewers_command():
+ query_path = 'www/manager-service/rest/QueryViewerService/findAllIds'
+ params = {
+ 'authToken': AUTH_TOKEN,
+ 'alt': 'json'
+ }
+ headers = {
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ 'Accept': 'application/json'
+ }
+ res = send_request(query_path, headers=headers, params=params)
+ if not res.ok:
+ demisto.debug(res.text)
+ return_error("Failed to get query viewers:\nStatus Code: {}\nResponse: {}".format(res.status_code, res.text))
+
+ res_json = res.json()
+ if 'qvs.findAllIdsResponse' in res_json and 'qvs.return' in res_json.get('qvs.findAllIdsResponse'):
+ query_viewers = res_json.get('qvs.findAllIdsResponse').get('qvs.return')
+
+ contents = decode_arcsight_output(query_viewers)
+ outputs = {'ArcSightESM.AllQueryViewers': contents}
+ human_readable = tableToMarkdown(name='', t=query_viewers, headers='Query Viewers', removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+ else:
+ demisto.results('No Query Viewers were found')
+
+
+AUTH_TOKEN = demisto.getIntegrationContext().get('auth_token') or login()
+try:
+ if demisto.command() == 'test-module':
+ test()
+ demisto.results('ok')
+
+ elif demisto.command() == 'as-fetch-incidents' or demisto.command() == 'fetch-incidents':
+ fetch()
+
+ elif demisto.command() == 'as-get-matrix-data' or demisto.command() == 'as-get-query-viewer-results':
+ get_query_viewer_results_command()
+
+ elif demisto.command() == 'as-get-all-cases':
+ get_all_cases_command()
+
+ elif demisto.command() == 'as-get-case':
+ get_case_command()
+
+ elif demisto.command() == 'as-update-case':
+ update_case_command()
+
+ elif demisto.command() == 'as-case-delete':
+ delete_case_command()
+
+ elif demisto.command() == 'as-get-security-events':
+ get_security_events_command()
+
+ elif demisto.command() == 'as-get-entries':
+ get_entries_command()
+
+ elif demisto.command() == 'as-add-entries':
+ add_entries_command()
+
+ elif demisto.command() == 'as-clear-entries':
+ clear_entries_command()
+
+ elif demisto.command() == 'as-get-case-event-ids':
+ get_case_event_ids_command()
+
+ elif demisto.command() == 'as-get-all-query-viewers':
+ get_all_query_viewers_command()
+
+
+except Exception as e:
+ return_error(str(e))
diff --git a/Integrations/ArcSightESMv2/ArcSightESMv2.yml b/Integrations/ArcSightESMv2/ArcSightESMv2.yml
new file mode 100644
index 000000000000..f1a804ec26b1
--- /dev/null
+++ b/Integrations/ArcSightESMv2/ArcSightESMv2.yml
@@ -0,0 +1,356 @@
+commonfields:
+ id: ArcSight ESM v2
+ version: -1
+name: ArcSight ESM v2
+display: ArcSight ESM v2
+category: Analytics & SIEM
+description: ArcSight ESM SIEM by Micro Focus (formerly HPE Software).
+configuration:
+- display: Server full URL (e.g., https://192.168.0.1:8443)
+ name: server
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Credentials
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: true
+- display: Fetch events as incidents via a Query Viewer ID. Mandatory fields for
+  the query are "Start Time" and "Event ID".
+ name: viewerId
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Fetch cases as incidents via a Query Viewer ID. Mandatory fields for
+  the query are "Create Time" and "ID".
+ name: casesQueryViewerId
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: The maximum number of unique IDs expected to be fetched.
+ name: max_unique
+ defaultvalue: '2000'
+ type: 0
+ required: false
+- display: The maximum number of incidents to fetch each time. Maximum is 50.
+ name: fetch_chunk_size
+ defaultvalue: '50'
+ type: 0
+ required: false
+- display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+- display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: ''
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: "false"
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ subtype: python2
+ commands:
+ - name: as-get-all-cases
+ arguments: []
+ outputs:
+ - contextPath: ArcSightESM.AllCaseIDs
+ description: All case resource IDs
+ type: Unknown
+ description: (Deprecated) Retrieves all case resource IDs.
+ - name: as-get-case
+ arguments:
+ - name: resourceId
+ required: true
+ default: true
+ description: Resource ID of the case to get information for
+ - name: withBaseEvents
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: If "true", then will return case and base events of that case
+ defaultValue: "false"
+ outputs:
+ - contextPath: ArcSightESM.Cases.resourceid
+ description: Case ID
+ type: string
+ - contextPath: ArcSightESM.Cases.name
+ description: Case name
+ type: string
+ - contextPath: ArcSightESM.Cases.eventIDs
+ description: Related base event IDs
+ type: Unknown
+ - contextPath: ArcSightESM.Cases.createdTimestamp
+ description: Time the case was created (in milliseconds)
+ type: number
+ - contextPath: ArcSightESM.Cases.createdTime
+ description: Created time (dd-mm-yyyyTHH:MM:SS.SSS timezone)
+ type: string
+ - contextPath: ArcSightESM.Cases.modifiedTimestamp
+ description: Modified timestamp (in milliseconds)
+ type: number
+ - contextPath: ArcSightESM.Cases.modifiedTime
+ description: Modified time (dd-mm-yyyyTHH:MM:SS.SSS timezone)
+ type: date
+ - contextPath: ArcSightESM.Cases.action
+ description: Action (e.g., BLOCK_OR_SHUTDOWN)
+ type: string
+ - contextPath: ArcSightESM.Cases.associatedImpact
+ description: Associated impact (e.g., AVAILABILITY)
+ type: string
+ - contextPath: ArcSightESM.Cases.attackAgent
+ description: Attack agent (e.g., INSIDER)
+ type: string
+ - contextPath: ArcSightESM.Cases.attackMechanism
+ description: Attack mechanism (e.g., PHYSICAL)
+ type: string
+ - contextPath: ArcSightESM.Cases.consequenceSeverity
+ description: Consequence severity (e.g., NONE)
+ type: string
+ - contextPath: ArcSightESM.Cases.detectionTime
+ description: Detection time (dd-mm-yyyyTHH:MM:SS.SSS timezone)
+ type: date
+ - contextPath: ArcSightESM.Cases.displayID
+ description: Display ID
+ type: number
+ - contextPath: ArcSightESM.Cases.estimatedStartTime
+ description: Estimated start time (dd-mm-yyyyTHH:MM:SS.SSS timezone)
+ type: date
+ - contextPath: ArcSightESM.Cases.eventIDs
+ description: Base event IDs
+ type: unknown
+ - contextPath: ArcSightESM.Cases.frequency
+ description: Frequency (e.g., NEVER_OR_ONCE)
+ type: string
+ - contextPath: ArcSightESM.Cases.history
+ description: History (e.g., KNOWN_OCCURENCE)
+ type: Unknown
+ - contextPath: ArcSightESM.Cases.numberOfOccurences
+      description: Number of occurrences
+ type: number
+ - contextPath: ArcSightESM.Cases.resistance
+ description: Resistance (e.g., HIGH)
+ type: string
+ - contextPath: ArcSightESM.Cases.securityClassification
+ description: Security Classification (e.g., UNCLASSIFIED)
+ type: string
+ - contextPath: ArcSightESM.Cases.sensitivity
+ description: Sensitivity (e.g., UNCLASSIFIED)
+ type: string
+ - contextPath: ArcSightESM.Cases.stage
+ description: Stage (e.g., QUEUED,INITIAL,FOLLOW_UP,FINAL,CLOSED)
+ type: string
+ - contextPath: ArcSightESM.Cases.ticketType
+ description: Ticket type (e.g., INTERNAL,CLIENT,INCIDENT)
+ type: string
+ - contextPath: ArcSightESM.Cases.vulnerability
+ description: Vulnerability (e.g., DESIGN)
+ type: string
+ description: Gets information about a single case.
+ - name: as-get-matrix-data
+ arguments:
+ - name: id
+ required: true
+ default: true
+ description: Resource ID of a query viewer
+ - name: onlyColumns
+ description: If "true", will return only the columns of the query. If "false", will return the column headers and all query results.
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ defaultValue: "true"
+ description: Retrieves query viewer results (query viewer must be configured to
+ be refreshed every minute, see documentation)
+ - name: as-add-entries
+ arguments:
+ - name: resourceId
+ required: true
+ description: Resource ID of the Active List
+ - name: entries
+ required: true
+ description: 'Entries are in JSON format. JSON must be an array of entries. Each entry
+ must contain the same columns as they appear in the Active List, e.g., [{ "UserName":
+ "john", "IP":"19.12.13.11"},{ "UserName": "bob", "IP":"22.22.22.22"}]'
+ isArray: true
+ description: Adds new entries to the Active List.
+ execution: true
+ - name: as-clear-entries
+ arguments:
+ - name: resourceId
+ description: Resource ID of a specific Active List
+ required: true
+ description: Deletes all entries in the Active List.
+ execution: true
+ - name: as-get-entries
+ arguments:
+ - name: resourceId
+ description: Resource ID of a specific Active List
+ required: true
+ - name: entryFilter
+ description: 'Filters the entries, e.g., entryFilter="moo:moo1"'
+ outputs:
+ - contextPath: ArcSightESM.ActiveList
+      description: A map of Active List resource ID => Active List entries
+ type: Unknown
+ - contextPath: ArcSightESM.ActiveList.ListID
+ description: The ActiveList ID
+ type: list
+ - contextPath: ArcSightESM.ActiveList.Entry
+      description: An entry in the Active List
+ type: Unknown
+ description: Returns all entries in the Active List
+ - name: as-get-security-events
+ arguments:
+ - name: ids
+      description: 'One or more security event IDs, separated by commas. An event ID in ArcSight is always a number. Example: 13906590'
+ required: true
+ isArray: true
+ - name: lastDateRange
+      description: 'Queries the last events. The format follows ''number date_range_unit'', e.g., 2 hours, 4 minutes, 6 months, 1 day'
+ outputs:
+ - contextPath: ArcSightESM.SecurityEvents
+ description: List of security events
+ type: Unknown
+ - contextPath: ArcSightESM.SecurityEvents.name
+ description: Event name
+ type: string
+ - contextPath: ArcSightESM.SecurityEvents.eventId
+ description: Event ID
+ type: number
+ - contextPath: ArcSightESM.SecurityEvents.type
+ description: Event type (e.g., CORRELATION)
+ type: string
+ - contextPath: ArcSightESM.SecurityEvents.baseEventIds
+ description: Base event IDs
+ type: Unknown
+ - contextPath: ArcSightESM.SecurityEvents.source.address
+ description: Event source address
+ type: Unknown
+ - contextPath: ArcSightESM.SecurityEvents.destination.address
+ description: Event destination address
+ type: Unknown
+ - contextPath: ArcSightESM.SecurityEvents.startTime
+ description: Start time in milliseconds
+ type: date
+ description: Returns the security event details
+ - name: as-get-case-event-ids
+ arguments:
+ - name: caseId
+ required: true
+ description: 'Case ID, e.g., 7e6LEbF8BABCfA-dlp1rl1A=='
+ - name: withCorrelatedEvents
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: If "true", then will return case and correlated events
+ defaultValue: "false"
+ outputs:
+ - contextPath: ArcSightESM.CaseEvents
+ description: Map of caseId => related event ids
+ type: Unknown
+ - contextPath: ArcSightESM.CaseEvents.LatestResult
+ description: Event IDs of the last execution of this command
+ type: Unknown
+ description: Returns all case event IDs.
+ - name: as-update-case
+ arguments:
+ - name: caseId
+ required: true
+ description: Case resource ID to update. The case must be unlocked, and the user should have edit permissions.
+ - name: stage
+ auto: PREDEFINED
+ predefined:
+ - CLOSED
+ - QUEUED
+ - FINAL
+ - FOLLOW_UP
+ - INITIAL
+ description: Stage the case is in
+ - name: severity
+ auto: PREDEFINED
+ predefined:
+ - NONE
+ - INSIGNIFICANT
+ - MARGINAL
+ - CRITICAL
+ - CATASTROPHIC
+      description: Ticket consequence severity
+ outputs:
+ - contextPath: ArcSightESM.Cases
+ description: List of cases
+ type: unknown
+ - contextPath: ArcSightESM.Cases.resourceid
+ description: Case resource ID
+ type: string
+ - contextPath: ArcSightESM.Cases.stage
+      description: Case stage
+ type: string
+ - contextPath: ArcSightESM.Cases.consequenceSeverity
+ description: Case severity
+ type: string
+ description: Updates a specific case.
+ execution: true
+ - name: as-get-all-query-viewers
+ arguments: []
+ outputs:
+ - contextPath: ArcSightESM.AllQueryViewers
+ description: List of all query viewer IDs
+ type: Unknown
+ description: Returns all the query viewer IDs.
+ - name: as-case-delete
+ arguments:
+ - name: caseId
+ required: true
+ description: Resource ID of the case
+ outputs:
+ - contextPath: ArcSightESM.Cases.resourceid
+ description: Resource ID of case
+ type: string
+ - contextPath: ArcSightESM.Cases.Deleted
+ description: Boolean flag. "True" if deleted.
+ type: boolean
+ description: Deletes a case
+ execution: true
+ - name: as-get-query-viewer-results
+ arguments:
+ - name: id
+ required: true
+ description: Resource ID of the query viewer
+ - name: onlyColumns
+ description: If "true", will return only the columns of the query. If "false", will return the column headers and all query results.
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ defaultValue: "false"
+ outputs:
+ - contextPath: ArcSight.QueryViewerResults
+ type: Unknown
+ description: Query viewer results
+ description: Retrieves query viewer results (query viewer must be configured to
+ be refreshed every minute, see documentation)
+ - name: as-fetch-incidents
+ arguments:
+ - name: last_run
+ description: Last run to start fetching incidents from
+ description: Fetches incidents
+ isfetch: true
+ runonce: false
+tests:
+ - ArcSight ESM v2 Test
diff --git a/Integrations/ArcSightESMv2/ArcSightESMv2_description.md b/Integrations/ArcSightESMv2/ArcSightESMv2_description.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Integrations/ArcSightESMv2/ArcSightESMv2_image.png b/Integrations/ArcSightESMv2/ArcSightESMv2_image.png
new file mode 100644
index 000000000000..bd0c57866183
Binary files /dev/null and b/Integrations/ArcSightESMv2/ArcSightESMv2_image.png differ
diff --git a/Integrations/ArcSightESMv2/CHANGELOG.md b/Integrations/ArcSightESMv2/CHANGELOG.md
new file mode 100644
index 000000000000..5b14bc7fa35b
--- /dev/null
+++ b/Integrations/ArcSightESMv2/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+Limited the incident fetch limit to 50 incidents per fetch.
+
diff --git a/Integrations/AttackIQFireDrill/AttackIQFireDrill.py b/Integrations/AttackIQFireDrill/AttackIQFireDrill.py
new file mode 100644
index 000000000000..5633d3620d19
--- /dev/null
+++ b/Integrations/AttackIQFireDrill/AttackIQFireDrill.py
@@ -0,0 +1,564 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+from requests import HTTPError
+from typing import Dict, Any
+from json.decoder import JSONDecodeError
+
+import json
+import traceback
+import requests
+import math
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+TOKEN = demisto.params().get('token')
+# Remove trailing slash to prevent wrong URL path to service
+SERVER = demisto.params()['url'][:-1] \
+ if ('url' in demisto.params() and demisto.params()['url'].endswith('/')) else demisto.params().get('url')
+# Should we use SSL
+USE_SSL = not demisto.params().get('insecure', False)
+# Headers to be sent in requests
+HEADERS = {
+ 'Authorization': f'Token {TOKEN}',
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
+
+# Error messages
+INVALID_ID_ERR_MSG = 'Error in API call. This may happen if you provided an invalid ID.'
+API_ERR_MSG = 'Error in API call to AttackIQ. '
+DEFAULT_PAGE_SIZE = 10
+
+# Transformation dicts
+ASSESSMENTS_TRANS = {
+ 'id': 'Id',
+ 'name': 'Name',
+ 'user': 'User',
+ 'users': 'Users',
+ 'owner': 'Owner',
+ 'groups': 'Groups',
+ 'creator': 'Creator',
+ 'created': 'Created',
+ 'end_date': 'EndDate',
+ 'modified': 'Modified',
+ 'start_date': 'StartDate',
+ 'description': 'Description',
+ 'project_state': 'AssessmentState',
+ 'master_job_count': 'MasterJobCount',
+ 'default_schedule': 'DefaultSchedule',
+ 'default_asset_count': 'DefaultAssetCount',
+ 'project_template.id': 'AssessmentTemplateId',
+ 'default_asset_group_count': 'DefaultAssetGroupCount',
+ 'project_template.company': 'AssessmentTemplateCompany',
+ 'project_template.created': 'AssessmentTemplateCreated',
+ 'project_template.modified': 'AssessmentTemplateModified',
+ 'project_template.template_name': 'AssessmentTemplateName',
+ 'project_template.default_schedule': 'AssessmentTemplateDefaultSchedule',
+ 'project_template.template_description': 'AssessmentTemplateDescription'
+}
+
+TESTS_TRANS = {
+ 'id': 'Id',
+ 'name': 'Name',
+ 'description': 'Description',
+ 'project': 'Assessment',
+ 'total_asset_count': 'TotalAssetCount',
+ 'cron_expression': 'CronExpression',
+ 'runnable': 'Runnable',
+ 'last_result': 'LastResult',
+ 'user': 'User',
+ 'created': 'Created',
+ 'modified': 'Modified',
+ 'using_default_schedule': 'UsingDefaultSchedule',
+ 'using_default_assets': 'UsingDefaultAssets',
+ 'latest_instance_id': 'LatestInstanceId',
+ 'scenarios': {
+ 'name': 'Name',
+ 'id': 'Id'
+ },
+ 'assets': {
+ 'id': 'Id',
+ 'ipv4_address': 'Ipv4Address',
+ 'hostname': 'Hostname',
+ 'product_name': 'ProductName',
+ 'modified': 'Modified',
+ 'status': 'Status'
+ }
+}
+
+TEST_STATUS_TRANS = {
+ 'detected': 'Detected',
+ 'failed': 'Failed',
+ 'finished': 'Finished',
+ 'passed': 'Passed',
+ 'errored': 'Errored',
+ 'total': 'Total'
+}
+
+TEST_RESULT_TRANS = {
+ 'id': 'Id',
+ 'modified': 'Modified',
+ 'project.id': 'Assessment.Id',
+ 'project.name': 'Assessment.Name',
+ 'scenario.id': 'Scenario.Id',
+ 'scenario.name': 'Scenario.Name',
+ 'scenario.description': 'Scenario.Description',
+ 'asset.id': 'Asset.Id',
+ 'asset.ipv4_address': 'Asset.Ipv4Address',
+ 'asset.hostname': 'Asset.Hostname',
+ 'asset.product_name': 'Asset.ProductName',
+ 'asset.modified': 'Asset.Modified',
+ 'asset_group': 'Asset.AssetGroup',
+ 'job_state_name': 'JobState',
+ 'outcome_name': 'Outcome'
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, params=None, data=None):
+ url = f'{SERVER}/{url_suffix}'
+ LOG(f'attackiq is attempting {method} request sent to {url} with params:\n{json.dumps(params, indent=4)}')
+ try:
+ res = requests.request(
+ method,
+ url,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ headers=HEADERS
+ )
+ # Handle error responses gracefully
+ if res.status_code not in {200, 201}:
+ error_reason = get_http_error_reason(res)
+ raise HTTPError(f'[{res.status_code}] - {error_reason}')
+ try:
+ return res.json()
+ except JSONDecodeError:
+ return_error('Response contained no valid body. See logs for more information.',
+ error=f'attackiq response body:\n{res.content}')
+ except requests.exceptions.ConnectionError as e:
+ LOG(str(e))
+ return_error('Encountered issue reaching the endpoint, please check that you entered the URL correctly.')
+
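+# Design note: non-2xx responses surface as HTTPError carrying the server's
+# "detail" text (see get_http_error_reason below), letting command handlers map
+# specific status codes to friendlier messages via create_invalid_id_err_msg.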
+
+def get_http_error_reason(res):
+ """
+ Get error reason from an AttackIQ http error
+ Args:
+ res: AttackIQ response
+
+ Returns: Reason for http error
+ """
+ err_reason = res.reason
+ try:
+ res_json = res.json()
+ if 'detail' in res_json:
+ err_reason = f'{err_reason}. {res_json["detail"]}'
+ except JSONDecodeError:
+ pass
+ return err_reason
+
+
+def build_transformed_dict(src, trans_dict):
+ """Builds a dictionary according to a conversion map
+
+ Args:
+ src (dict): original dictionary to build from
+ trans_dict (dict): dict in the format { 'OldKey': 'NewKey', ...}
+
+ Returns: src copy with changed keys
+ """
+ if isinstance(src, list):
+ return [build_transformed_dict(x, trans_dict) for x in src]
+ res: Dict[str, Any] = {}
+ for key, val in trans_dict.items():
+ if isinstance(val, dict):
+ # handle nested list
+ sub_res = res
+ item_val = [build_transformed_dict(item, val) for item in (demisto.get(src, key) or [])]
+ key = underscoreToCamelCase(key)
+ for sub_key in key.split('.')[:-1]:
+ if sub_key not in sub_res:
+ sub_res[sub_key] = {}
+ sub_res = sub_res[sub_key]
+ sub_res[key.split('.')[-1]] = item_val
+ elif '.' in val:
+ # handle nested vals
+ update_nested_value(res, val, to_val=demisto.get(src, key))
+ else:
+ res[val] = demisto.get(src, key)
+ return res
+
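+# Hedged example with a tiny map (values are made up):
+# build_transformed_dict({'project': {'id': 7}}, {'project.id': 'Assessment.Id'})
+# returns {'Assessment': {'Id': 7}} -- dotted keys on either side are resolved
+# or created as nested dictionaries.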
+
+def create_invalid_id_err_msg(orig_err, error_codes):
+ """
+ Creates an 'invalid id' error message
+ Args:
+ orig_err (str): The original error message
+ error_codes (list): List of error codes to look for
+
+ Returns (str): Error message for invalid id
+ """
+ err_msg = API_ERR_MSG
+ if any(err_code in orig_err for err_code in error_codes):
+        err_msg += 'This may happen if you provided an invalid ID.\n'
+ err_msg += orig_err
+ return err_msg
+
+
+def update_nested_value(src_dict, to_key, to_val):
+ """
+ Updates nested value according to transformation dict structure where 'a.b' key will create {'a': {'b': val}}
+ Args:
+ src_dict (dict): The original dict
+ to_key (str): Key to transform to (expected to contain '.' to mark nested)
+ to_val: The value that'll be put under the nested key
+ """
+ sub_res = src_dict
+ to_key_lst = to_key.split('.')
+ for sub_to_key in to_key_lst[:-1]:
+ if sub_to_key not in sub_res:
+ sub_res[sub_to_key] = {}
+ sub_res = sub_res[sub_to_key]
+ sub_res[to_key_lst[-1]] = to_val
+
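+# Hedged example: update_nested_value({}, 'Asset.Hostname', 'srv01') mutates the
+# dict in place into {'Asset': {'Hostname': 'srv01'}}.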
+
+def get_page_number_and_page_size(args):
+ """
+ Get arguments page_number and page_size from args
+ Args:
+ args (dict): Argument dictionary, with possible page_number and page_size keys
+
+ Returns (int, int): Return a tuple of (page_number, page_size)
+
+ """
+ page = args.get('page_number', 1)
+ page_size = args.get('page_size', DEFAULT_PAGE_SIZE)
+    err_msg_format = 'Error: Invalid {arg} value. "{val}" is not a valid value. Please enter a positive integer.'
+ try:
+ page = int(page)
+ if page <= 0:
+ raise ValueError()
+ except (ValueError, TypeError):
+ return_error(err_msg_format.format(arg='page_number', val=page))
+ try:
+ page_size = int(page_size)
+ if page_size <= 0:
+ raise ValueError()
+ except (ValueError, TypeError):
+ return_error(err_msg_format.format(arg='page_size', val=page_size))
+ return page, page_size
+
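+# Illustrative example: get_page_number_and_page_size({'page_number': '2'})
+# returns (2, 10) with the default page size; a non-positive or non-numeric
+# value short-circuits through return_error.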
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+ Performs a basic GET request (fetching assessments) to verify connectivity and authentication
+ """
+ http_request('GET', '/v1/assessments')
+ demisto.results('ok')
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+
+def activate_assessment_command():
+ """ Implements attackiq-activate-assessment command
+ """
+ ass_id = demisto.getArg('assessment_id')
+ try:
+ raw_res = http_request('POST', f'/v1/assessments/{ass_id}/activate')
+ hr = raw_res['message'] if 'message' in raw_res else f'Assessment {ass_id} activation was sent successfully.'
+ demisto.results(hr)
+ except HTTPError as e:
+ return_error(create_invalid_id_err_msg(str(e), ['403']))
+
+
+def get_assessment_execution_status_command():
+ """ Implements attackiq-get-assessment-execution-status command
+ """
+ ass_id = demisto.getArg('assessment_id')
+ try:
+ raw_res = http_request('GET', f'/v1/assessments/{ass_id}/is_on_demand_running')
+ ex_status = raw_res.get('message')
+ hr = f'Assessment {ass_id} execution is {"" if ex_status else "not "}running.'
+ ec = {
+ 'AttackIQ.Assessment(val.Id === obj.Id)': {
+ 'Running': ex_status,
+ 'Id': ass_id
+ }
+ }
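+ # The '(val.Id === obj.Id)' suffix in the context key is the platform's
+ # merge syntax: entries sharing the same Id are merged instead of duplicated.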
+ return_outputs(hr, ec, raw_res)
+ except HTTPError as e:
+ return_error(create_invalid_id_err_msg(str(e), ['403']))
+
+
+def get_test_execution_status_command():
+ """ Implements attackiq-get-test-execution-status command
+ """
+ test_id = demisto.getArg('test_id')
+ try:
+ raw_test_status = http_request('GET', f'/v1/tests/{test_id}/get_status')
+ test_status = build_transformed_dict(raw_test_status, TEST_STATUS_TRANS)
+ test_status['Id'] = test_id
+ hr = tableToMarkdown(f'Test {test_id} status', test_status)
+ return_outputs(hr, {'AttackIQTest(val.Id === obj.Id)': test_status}, raw_test_status)
+ except HTTPError as e:
+ return_error(create_invalid_id_err_msg(str(e), ['500']))
+
+
+def build_test_results_hr(test_results, test_id, page, tot_pages):
+ """
+ Builds the human-readable output for test results
+ Args:
+ page (int): Current page
+ tot_pages (int): Total pages
+ test_results (list): Results of the test (after being transformed)
+ test_id (str): ID of the test
+
+ Returns: Human-readable string (markdown format) of the test results
+ """
+ keys = ['Assessment Name', 'Scenario Name', 'Hostname', 'Asset IP', 'Job State', 'Modified', 'Outcome']
+ test_results_mod = []
+ for t_res in test_results:
+ assessment = t_res.get('Assessment')
+ asset = t_res.get('Asset')
+ scenario = t_res.get('Scenario')
+ hr_items = {
+ keys[0]: assessment.get('Name'),
+ keys[1]: scenario.get('Name'),
+ keys[2]: asset.get('Hostname'),
+ keys[3]: asset.get('Ipv4Address'),
+ keys[4]: demisto.get(t_res, 'JobState.Name'),
+ keys[5]: t_res.get('Modified'),
+ keys[6]: demisto.get(t_res, 'Outcome.Name')
+ }
+ test_results_mod.append(hr_items)
+ return tableToMarkdown(f'Test Results for {test_id}\n ### Page {page}/{tot_pages}', test_results_mod, keys)
+
+
+def get_test_results(page, page_size, test_id, show_last_res):
+ """
+ Get test results response
+ Args:
+ page (int): Page number
+ page_size (int): Page size
+ test_id (int): ID of test
+ show_last_res (bool): Flag for showing only last result
+
+ Returns: Test results
+ """
+ params = {
+ 'page': page,
+ 'page_size': page_size,
+ 'test_id': test_id,
+ 'show_last_result': show_last_res
+ }
+ return http_request('GET', '/v1/results', params=params)
+
+
+def get_test_results_command():
+ """ Implements attackiq-get-test-results command
+ """
+ args = demisto.args()
+ test_id = args.get('test_id')
+ outcome_filter = args.get('outcome_filter')
+ page, page_size = get_page_number_and_page_size(args)
+ try:
+ raw_test_res = get_test_results(page, page_size, test_id, args.get('show_last_result') == 'True')
+ test_cnt = raw_test_res.get('count')
+ if test_cnt == 0:
+ return_outputs('No results were found', {})
+ else:
+ total_pages = math.ceil(test_cnt / page_size)
+ remaining_pages = total_pages - page
+ if remaining_pages < 0:
+ remaining_pages = 0
+ test_res = build_transformed_dict(raw_test_res['results'], TEST_RESULT_TRANS)
+ if outcome_filter:
+ test_res = list(filter(lambda x: x.get('Outcome') == outcome_filter, test_res))
+ context = {
+ 'AttackIQTestResult(val.Id === obj.Id)': test_res,
+ 'AttackIQTestResult(val.Count).Count': test_cnt,
+ 'AttackIQTestResult(val.RemainingPages).RemainingPages': remaining_pages
+ }
+ hr = build_test_results_hr(test_res, test_id, page, total_pages)
+ return_outputs(hr, context, raw_test_res)
+ except HTTPError as e:
+ return_error(create_invalid_id_err_msg(str(e), ['500']))
+
+
+def get_assessments(page='1', assessment_id=None, page_size=DEFAULT_PAGE_SIZE):
+ """
+ Fetches assessments from AttackIQ
+ Args:
+ page (str or int): Page number to fetch
+ assessment_id (str): (Optional) If provided, fetches only the assessment with the matching ID
+ page_size (str or int): Number of assessments to return per page
+
+ Returns: Assessments from AttackIQ
+ """
+ params = {
+ 'page_size': page_size,
+ 'page': page
+ }
+ if assessment_id:
+ return http_request('GET', f'/v1/assessments/{assessment_id}')
+ return http_request('GET', '/v1/assessments', params=params)
+
+
+def list_assessments_command():
+ """ Implements attackiq-list-assessments command
+ """
+ page, page_size = get_page_number_and_page_size(demisto.args())
+ raw_assessments = get_assessments(page=page, page_size=page_size)
+ assessments_res = build_transformed_dict(raw_assessments.get('results'), ASSESSMENTS_TRANS)
+ ass_cnt = raw_assessments.get('count')
+ total_pages = math.ceil(ass_cnt / page_size)
+ remaining_pages = total_pages - page
+ if remaining_pages < 0:
+ remaining_pages = 0
+ context = {
+ 'AttackIQ.Assessment(val.Id === obj.Id)': assessments_res,
+ 'AttackIQ.Assessment(val.Count).Count': ass_cnt,
+ 'AttackIQ.Assessment(val.RemainingPages).RemainingPages': remaining_pages
+ }
+ hr = tableToMarkdown(f'AttackIQ Assessments Page {page}/{total_pages}', assessments_res,
+ headers=['Id', 'Name', 'Description', 'User', 'Created', 'Modified'])
+ return_outputs(hr, context, raw_assessments)
+
+
+def get_assessment_by_id_command():
+ """ Implements attackiq-get-assessment-by-id command
+ """
+ assessment_id = demisto.getArg('assessment_id')
+ try:
+ raw_assessments = get_assessments(assessment_id=assessment_id)
+ assessments_res = build_transformed_dict(raw_assessments, ASSESSMENTS_TRANS)
+ hr = tableToMarkdown(f'AttackIQ Assessment {assessment_id}', assessments_res,
+ headers=['Id', 'Name', 'Description', 'User', 'Created', 'Modified'])
+ return_outputs(hr, {'AttackIQ.Assessment(val.Id === obj.Id)': assessments_res}, raw_assessments)
+ except HTTPError as e:
+ return_error(create_invalid_id_err_msg(str(e), ['403']))
+
+
+def build_tests_hr(tests_res, ass_id, page_num, tot_pages):
+ """
+ Builds the human-readable output for an assessment's tests
+ Args:
+ tot_pages (int): Total pages
+ page_num (int): Current page
+ ass_id (str): Assessment ID
+ tests_res (list): Transformed test results
+
+ Returns: Human-readable string (markdown format) of the tests
+ """
+ hr = f'# Assessment {ass_id} tests\n## Page {page_num} / {tot_pages}\n'
+ for test in tests_res:
+ test = dict(test)
+ assets = test.pop('Assets', {})
+ scenarios = test.pop('Scenarios', {})
+ test_name = test.get('Name')
+ hr += tableToMarkdown(f'Test - {test_name}', test,
+ headers=['Id', 'Name', 'Created', 'Modified', 'Runnable', 'LastResult'],
+ headerTransform=pascalToSpace)
+ hr += tableToMarkdown(f'Assets ({test_name})', assets)
+ hr += tableToMarkdown(f'Scenarios ({test_name})', scenarios)
+ if not hr:
+ hr = 'Found no tests'
+ return hr
+
+
+def list_tests_by_assessment(params):
+ return http_request('GET', '/v1/tests', params=params)
+
+
+def list_tests_by_assessment_command():
+ """ Implements attackiq-list-tests-by-assessment command
+ """
+ page, page_size = get_page_number_and_page_size(demisto.args())
+ ass_id = demisto.getArg('assessment_id')
+ params = {
+ 'project': ass_id,
+ 'page_size': page_size,
+ 'page': page
+ }
+ raw_res = list_tests_by_assessment(params)
+ test_cnt = raw_res.get('count')
+ if test_cnt == 0:
+ return_outputs('No results were found', {})
+ else:
+ tests_res = build_transformed_dict(raw_res.get('results'), TESTS_TRANS)
+ total_pages = math.ceil(test_cnt / page_size)
+ remaining_pages = total_pages - page
+ if remaining_pages < 0:
+ remaining_pages = 0
+ context = {
+ 'AttackIQTest(val.Id === obj.Id)': tests_res,
+ 'AttackIQTest(val.Count).Count': test_cnt,
+ 'AttackIQTest(val.RemainingPages).RemainingPages': remaining_pages
+ }
+ hr = build_tests_hr(tests_res, ass_id, page, total_pages)
+ return_outputs(hr, context, raw_res)
+
+
+def run_all_tests_in_assessment_command():
+ """ Implements attackiq-run-all-tests-in-assessment command
+ """
+ args = demisto.args()
+ ass_id = args.get('assessment_id')
+ on_demand_only = args.get('on_demand_only')
+ try:
+ params = {'on_demand_only': on_demand_only == 'True'}
+ raw_res = http_request('POST', f'/v1/assessments/{ass_id}/run_all_tests', params=params)
+ hr = raw_res['message'] if 'message' in raw_res else \
+ f'Request to run all tests for assessment {ass_id} was sent successfully.'
+ demisto.results(hr)
+ except HTTPError as e:
+ return_error(create_invalid_id_err_msg(str(e), ['403']))
+
+
+def main():
+ handle_proxy()
+ command = demisto.command()
+ LOG(f'Command being called is {command}')
+ try:
+ if command == 'test-module':
+ test_module()
+ elif command == 'attackiq-activate-assessment':
+ activate_assessment_command()
+ elif command == 'attackiq-get-assessment-execution-status':
+ get_assessment_execution_status_command()
+ elif command == 'attackiq-get-test-execution-status':
+ get_test_execution_status_command()
+ elif command == 'attackiq-get-test-results':
+ get_test_results_command()
+ elif command == 'attackiq-list-assessments':
+ list_assessments_command()
+ elif command == 'attackiq-get-assessment-by-id':
+ get_assessment_by_id_command()
+ elif command == 'attackiq-list-tests-by-assessment':
+ list_tests_by_assessment_command()
+ elif command == 'attackiq-run-all-tests-in-assessment':
+ run_all_tests_in_assessment_command()
+ else:
+ return_error(f'Command {command} is not supported.')
+ except HTTPError as e:
+ # e is expected to contain parsed error message
+ err = f'{API_ERR_MSG}{str(e)}'
+ return_error(err)
+ except Exception as e:
+ message = f'Unexpected error: {str(e)}, traceback: {traceback.format_exc()}'
+ return_error(message)
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/AttackIQFireDrill/AttackIQFireDrill.yml b/Integrations/AttackIQFireDrill/AttackIQFireDrill.yml
new file mode 100644
index 000000000000..07c993764635
--- /dev/null
+++ b/Integrations/AttackIQFireDrill/AttackIQFireDrill.yml
@@ -0,0 +1,499 @@
+category: Vulnerability Management
+commonfields:
+ id: AttackIQFireDrill
+ version: -1
+configuration:
+- display: Server URL (e.g. https://example.net)
+ name: url
+ required: true
+ type: 0
+- display: API Token
+ name: token
+ required: true
+ type: 4
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
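+# Note: the numeric parameter types above follow the standard integration field
+# schema (an assumption, not defined in this file): 0 = short text, 4 = encrypted,
+# 8 = boolean.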
+description: An attack simulation platform that validates security controls, responses, and remediation exercises.
+display: AttackIQ Platform
+name: AttackIQFireDrill
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: The ID of the assessment to return.
+ isArray: false
+ name: assessment_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns an assessment by its ID from the AttackIQ FireDrill platform.
+ execution: false
+ name: attackiq-get-assessment-by-id
+ outputs:
+ - contextPath: AttackIQ.Assessment.Id
+ description: The ID of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.Name
+ description: The name of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.Description
+ description: The description of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.StartDate
+ description: The start date of the assessment.
+ type: Date
+ - contextPath: AttackIQ.Assessment.EndDate
+ description: The end date of the assessment.
+ type: Date
+ - contextPath: AttackIQ.Assessment.AssessmentState
+ description: The state of the assessment. Can be Active or Inactive.
+ type: String
+ - contextPath: AttackIQ.Assessment.DefaultSchedule
+ description: The default schedule timing (cron) of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateId
+ description: The template ID of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateName
+ description: The template name of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateDescription
+ description: The template description of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateDefaultSchedule
+ description: The assessment's template default schedule timing (cron).
+ type: Unknown
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateCompany
+ description: The owner of the template.
+ type: String
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateCreated
+ description: The date that the template was created.
+ type: Date
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateModified
+ description: The date the template was last modified.
+ type: Date
+ - contextPath: AttackIQ.Assessment.Creator
+ description: The user who created the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.Owner
+ description: The user who owns the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.User
+ description: The user who ran the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.Created
+ description: The time that the assessment was created.
+ type: String
+ - contextPath: AttackIQ.Assessment.Modified
+ description: The time that the assessment was last modified.
+ type: String
+ - contextPath: AttackIQ.Assessment.Users
+ description: The user IDs that can access the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.Groups
+ description: The user groups who can access the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.DefaultAssetCount
+ description: The number of machines (assets) that are connected to the assessment.
+ type: Number
+ - contextPath: AttackIQ.Assessment.DefaultAssetGroupCount
+ description: The number of asset groups that are connected to the assessment.
+ type: Number
+ - contextPath: AttackIQ.Assessment.MasterJobCount
+ description: The number of tests that ran in the assessment.
+ type: Number
+ - contextPath: AttackIQ.Assessment.Count
+ description: The total number of assessments.
+ type: Number
+ - contextPath: AttackIQ.Assessment.RemainingPages
+ description: The number of remaining pages to return. For example, if the total number of pages is
+ 6, and the last fetch was page 5, the value is 1.
+ type: Number
+ - arguments:
+ - default: false
+ defaultValue: '1'
+ description: The page number to return.
+ isArray: false
+ name: page_number
+ required: false
+ secret: false
+ - default: false
+ description: The maximum page size of the results.
+ isArray: false
+ name: page_size
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns all assessments in a page.
+ execution: false
+ name: attackiq-list-assessments
+ outputs:
+ - contextPath: AttackIQ.Assessment.Id
+ description: The ID of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.Name
+ description: The name of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.Description
+ description: The description of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.StartDate
+ description: The start date of the assessment.
+ type: Date
+ - contextPath: AttackIQ.Assessment.EndDate
+ description: The end date of the assessment.
+ type: Date
+ - contextPath: AttackIQ.Assessment.AssessmentState
+ description: The state of the assessment. Can be Active or Inactive.
+ type: String
+ - contextPath: AttackIQ.Assessment.DefaultSchedule
+ description: The default schedule timing (cron) of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateId
+ description: The template ID of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateName
+ description: The template name of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateDescription
+ description: The template description of the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateDefaultSchedule
+ description: The default schedule timing (cron) of the template assessment.
+ type: Unknown
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateCompany
+ description: The owner of the template.
+ type: String
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateCreated
+ description: The date that the template was created.
+ type: Date
+ - contextPath: AttackIQ.Assessment.AssessmentTemplateModified
+ description: The date the template was last modified.
+ type: Date
+ - contextPath: AttackIQ.Assessment.Creator
+ description: The user who created the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.Owner
+ description: The user who owns the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.User
+ description: The user that ran the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.Created
+ description: The time that the assessment was created.
+ type: String
+ - contextPath: AttackIQ.Assessment.Modified
+ description: The time that the assessment was last modified.
+ type: String
+ - contextPath: AttackIQ.Assessment.Users
+ description: The user IDs that can access the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.Groups
+ description: The user groups who can access the assessment.
+ type: String
+ - contextPath: AttackIQ.Assessment.DefaultAssetCount
+ description: The number of machines (assets) that are connected to the assessment.
+ type: Number
+ - contextPath: AttackIQ.Assessment.DefaultAssetGroupCount
+ description: The number of asset groups that are connected to the assessment.
+ type: Number
+ - contextPath: AttackIQ.Assessment.MasterJobCount
+ description: The number of tests that ran in the assessment.
+ type: Number
+ - arguments:
+ - default: true
+ description: ID of the assessment to activate.
+ isArray: false
+ name: assessment_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Activates the assessment, which is required for execution.
+ execution: false
+ name: attackiq-activate-assessment
+ - arguments:
+ - default: false
+ description: The ID of the assessment.
+ isArray: false
+ name: assessment_id
+ required: true
+ secret: false
+ - default: false
+ auto: PREDEFINED
+ description: Whether to run only on-demand tests in the assessment. If True, executes
+ only tests in the assessment that are not scheduled to run. If False, executes all
+ tests in the assessment, including scheduled tests. Default is False.
+ isArray: false
+ name: on_demand_only
+ required: false
+ secret: false
+ predefined:
+ - 'True'
+ - 'False'
+ deprecated: false
+ description: Runs all tests in the assessment.
+ execution: false
+ name: attackiq-run-all-tests-in-assessment
+ - arguments:
+ - default: true
+ description: The ID of the assessment for which to check the execution status.
+ isArray: false
+ name: assessment_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the execution status of an assessment. Supported for on-demand executions only.
+ execution: false
+ name: attackiq-get-assessment-execution-status
+ outputs:
+ - contextPath: AttackIQ.Assessment.Running
+ description: Whether the assessment is running.
+ type: Boolean
+ - contextPath: AttackIQ.Assessment.Id
+ description: The ID of the assessment.
+ type: String
+ - arguments:
+ - default: true
+ description: The ID of the test.
+ isArray: false
+ name: test_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the status of the test.
+ execution: false
+ name: attackiq-get-test-execution-status
+ outputs:
+ - contextPath: AttackIQTest.Detected
+ description: The number of detections in the test.
+ type: Number
+ - contextPath: AttackIQTest.Failed
+ description: The number of failures in the test.
+ type: Number
+ - contextPath: AttackIQTest.Finished
+ description: Whether the test is finished.
+ type: Boolean
+ - contextPath: AttackIQTest.Passed
+ description: The number of passed tests.
+ type: Number
+ - contextPath: AttackIQTest.Errored
+ description: The number of tests that returned errors.
+ type: Number
+ - contextPath: AttackIQTest.Total
+ description: The total number of tests that ran.
+ type: Number
+ - contextPath: AttackIQTest.Id
+ description: The ID of the assessment test.
+ type: String
+ - arguments:
+ - default: true
+ description: The ID of the assessment that contains the tests.
+ isArray: false
+ name: assessment_id
+ required: true
+ secret: false
+ - default: false
+ description: The maximum page size of the results.
+ isArray: false
+ name: page_size
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '1'
+ description: The page number to return.
+ isArray: false
+ name: page_number
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of tests in an assessment.
+ execution: false
+ name: attackiq-list-tests-by-assessment
+ outputs:
+ - contextPath: AttackIQTest.Id
+ description: ID of the test.
+ type: String
+ - contextPath: AttackIQTest.Name
+ description: The name of the test.
+ type: String
+ - contextPath: AttackIQTest.Description
+ description: The description of the test.
+ type: String
+ - contextPath: AttackIQTest.Scenarios.Id
+ description: The ID of the test scenario.
+ type: String
+ - contextPath: AttackIQTest.Scenarios.Name
+ description: The name of the test scenario.
+ type: String
+ - contextPath: AttackIQTest.Assets.Id
+ description: The ID of the test asset.
+ type: String
+ - contextPath: AttackIQTest.Assets.Ipv4Address
+ description: The IP version 4 address of the test asset.
+ type: String
+ - contextPath: AttackIQTest.Assets.Hostname
+ description: The host name of the test asset.
+ type: String
+ - contextPath: AttackIQTest.Assets.ProductName
+ description: The product name of the test asset.
+ type: String
+ - contextPath: AttackIQTest.Assets.Modified
+ description: The last modified date of the test asset.
+ type: String
+ - contextPath: AttackIQTest.Assets.Status
+ description: The status of the test asset. Can be Active or Inactive.
+ type: String
+ - contextPath: AttackIQTest.TotalAssetCount
+ description: The number of assets in which the test ran.
+ type: Number
+ - contextPath: AttackIQTest.CronExpression
+ description: The Cron expression of the test.
+ type: String
+ - contextPath: AttackIQTest.Runnable
+ description: Whether the test can run.
+ type: Boolean
+ - contextPath: AttackIQTest.LastResult
+ description: The last result of the test.
+ type: String
+ - contextPath: AttackIQTest.User
+ description: The name of the user that ran the test in the assessment.
+ type: String
+ - contextPath: AttackIQTest.Created
+ description: The date that the test was created.
+ type: Date
+ - contextPath: AttackIQTest.Modified
+ description: The date that the test was last modified.
+ type: Date
+ - contextPath: AttackIQTest.LatestInstanceId
+ description: The ID of the most recent run of the test.
+ type: Number
+ - contextPath: AttackIQTest.UsingDefaultAssets
+ description: Whether the test uses default assets.
+ type: Boolean
+ - contextPath: AttackIQTest.UsingDefaultSchedule
+ description: Whether the test uses the default schedule.
+ type: Boolean
+ - contextPath: AttackIQTest.RemainingPages
+ description: The number of remaining pages to return. For example, if the total number of pages is
+ 6, and the last fetch was page 5, the value is 1.
+ type: Number
+ - contextPath: AttackIQTest.Count
+ description: The total number of tests.
+ type: Number
+ - arguments:
+ - default: false
+ description: The ID of the test for which to show results.
+ isArray: false
+ name: test_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: Whether to show only the last result. If True, shows only the last result.
+ isArray: false
+ name: show_last_result
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '1'
+ description: The page number of the test results.
+ isArray: false
+ name: page_number
+ required: false
+ secret: false
+ - default: false
+ description: The maximum page size of the results.
+ isArray: false
+ name: page_size
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Filters results by outcome. If set to Passed, returns only passed tests; if set to Failed, returns only failed tests.
+ isArray: false
+ name: outcome_filter
+ predefined:
+ - Passed
+ - Failed
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns the test results of an assessment.
+ execution: false
+ name: attackiq-get-test-results
+ outputs:
+ - contextPath: AttackIQTestResult.Id
+ description: ID of the test result.
+ type: String
+ - contextPath: AttackIQTestResult.Modified
+ description: The date the test result was last modified.
+ type: Date
+ - contextPath: AttackIQTestResult.Assessment.Id
+ description: The ID of the test assessment.
+ type: String
+ - contextPath: AttackIQTestResult.Assessment.Name
+ description: The name of the test assessment.
+ type: String
+ - contextPath: AttackIQTestResult.LastResult
+ description: The result of the test's last run.
+ type: String
+ - contextPath: AttackIQTestResult.Scenario.Id
+ description: The scenario ID of the test results.
+ type: String
+ - contextPath: AttackIQTestResult.Scenario.Name
+ description: The scenario name of the test results.
+ type: String
+ - contextPath: AttackIQTestResult.Scenario.Description
+ description: The scenario description of the test results.
+ type: String
+ - contextPath: AttackIQTestResult.Asset.Id
+ description: The ID of the test results asset.
+ type: String
+ - contextPath: AttackIQTestResult.Asset.Ipv4Address
+ description: The IP address of the test results scenario asset.
+ type: String
+ - contextPath: AttackIQTestResult.Asset.Hostname
+ description: The host name of the test results asset.
+ type: String
+ - contextPath: AttackIQTestResult.Asset.ProductName
+ description: The product name of the test results asset.
+ type: String
+ - contextPath: AttackIQTestResult.Asset.Modified
+ description: The date that the asset was last modified.
+ type: Date
+ - contextPath: AttackIQTestResult.AssetGroup
+ description: The asset group of the test.
+ type: String
+ - contextPath: AttackIQTestResult.JobState
+ description: The state of the job.
+ type: String
+ - contextPath: AttackIQTestResult.Outcome
+ description: The result outcome of the test.
+ type: String
+ - contextPath: AttackIQTestResult.RemainingPages
+ description: The number of remaining pages to return. For example, if the total number of pages is
+ 6, and the last fetch was page 5, the value is 1.
+ type: Number
+ - contextPath: AttackIQTestResult.Count
+ description: The total number of tests.
+ type: Number
+ dockerimage: demisto/python3:3.7.4.1502
+ isfetch: false
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- AttackIQ - Test
diff --git a/Integrations/AttackIQFireDrill/AttackIQFireDrill_description.md b/Integrations/AttackIQFireDrill/AttackIQFireDrill_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/AttackIQFireDrill/AttackIQFireDrill_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/AttackIQFireDrill/AttackIQFireDrill_image.png b/Integrations/AttackIQFireDrill/AttackIQFireDrill_image.png
new file mode 100644
index 000000000000..6f1870bbb1b0
Binary files /dev/null and b/Integrations/AttackIQFireDrill/AttackIQFireDrill_image.png differ
diff --git a/Integrations/AttackIQFireDrill/AttackIQFireDrill_test.py b/Integrations/AttackIQFireDrill/AttackIQFireDrill_test.py
new file mode 100644
index 000000000000..ce6b390290d9
--- /dev/null
+++ b/Integrations/AttackIQFireDrill/AttackIQFireDrill_test.py
@@ -0,0 +1,95 @@
+import pytest
+
+from AttackIQFireDrill import build_transformed_dict, activate_assessment_command, create_invalid_id_err_msg, \
+ get_assessment_execution_status_command, get_test_execution_status_command, get_test_results_command, \
+ list_assessments_command, get_assessment_by_id_command, list_tests_by_assessment_command, \
+ run_all_tests_in_assessment_command, get_page_number_and_page_size
+
+from test_data.constants import DICT_1to5, TRANS_DICT_134, DICT_NESTED_123, TRANS_DICT_NESTED_12, \
+ TRANS_DICT_NESTED_VAL_12, DICT_LST_AAB2B, TRANS_DICT_LST_A2B, DICT_LST_NESTED, TRANS_DICT_LST_NESTED, \
+ ACTIVATE_ASS_RESP, ACTIVATE_ASS_RES, GET_ASS_EXECUTION_STATUS_RESP, GET_ASS_EXECUTION_RESULT, \
+ GET_TEST_STATUS_RESP, GET_TEST_STATUS_RESULT, GET_TEST_RESULT_RESP, GET_TEST_RESULT_RESULT, GET_ASS_RESP, \
+ GET_ASS_RESULT, GET_ASS_BY_ID_RESULT, GET_TESTS_RESP, GET_TEST_RESULT, RUN_ALL_TESTS_RESP, RUN_ALL_TESTS_RESULT
+
+import requests
+import demistomock as demisto
+from test_data.constants import ResponseMock
+
+
+def test_build_transformed_dict_basic():
+ assert build_transformed_dict(DICT_1to5, TRANS_DICT_134) == {'one': 1, 'three': 3, 'four': 4}
+ assert 'one' not in DICT_1to5
+
+
+def test_build_transformed_dict_nested_keys():
+ assert build_transformed_dict(DICT_NESTED_123, TRANS_DICT_NESTED_12) == {'one': 1, 'two': 2}
+
+
+def test_build_transformed_dict_nested_vals():
+ assert build_transformed_dict(DICT_1to5, TRANS_DICT_NESTED_VAL_12) == {'one': {'1': 1}, 'two': 2}
+
+
+def test_build_transformed_dict_list():
+ assert build_transformed_dict(DICT_LST_AAB2B, TRANS_DICT_LST_A2B) == {'AaB': [{'two': 2}, {'two': 3}], 'four': 4}
+ assert build_transformed_dict(DICT_LST_NESTED, TRANS_DICT_LST_NESTED) == {
+ 'Master': {'ID': 1, 'Assets': [{'ID': 1, 'Name': 'a'}, {'ID': 2, 'Name': 'b'}]}}
+
+
+def test_activate_assessment_command(mocker):
+ mocker.patch.object(requests, 'request', return_value=ResponseMock(ACTIVATE_ASS_RESP))
+ mocker.patch.object(demisto, 'results')
+ activate_assessment_command()
+ demisto.results.assert_called_with('Successfully activated project c4e352ae-1506-4c74-bd90-853f02dd765a')
+
+
+def test_create_invalid_id_err_msg_no_err():
+ orig_err = 'Error test'
+ actual = create_invalid_id_err_msg(orig_err, ['403'])
+ assert actual == 'Error in API call to AttackIQ. Error test'
+
+
+def test_create_invalid_id_err_msg_with_err():
+ orig_err = 'Error test 403'
+ actual = create_invalid_id_err_msg(orig_err, ['500', '403'])
+ assert actual == 'Error in API call to AttackIQ. This may happen if you provided an invalid ID.\nError test 403'
+
+
+def test_create_invalid_id_err_msg_with_irrelevant_err():
+ orig_err = 'Error test 403'
+ actual = create_invalid_id_err_msg(orig_err, ['500'])
+ assert actual == 'Error in API call to AttackIQ. Error test 403'
+
+
+def test_get_page_number_and_page_size_valid_int():
+ args = {
+ 'page_size': 5,
+ 'page_number': 4
+ }
+ assert get_page_number_and_page_size(args) == (args.get('page_number'), args.get('page_size'))
+
+
+def test_get_page_number_and_page_size_valid_str():
+ args = {
+ 'page_size': '5',
+ 'page_number': '4'
+ }
+ assert get_page_number_and_page_size(args) == (4, 5)
+
+
+@pytest.mark.parametrize('command,args,response,expected_result', [
+ (activate_assessment_command, {}, ACTIVATE_ASS_RESP, ACTIVATE_ASS_RES),
+ (get_assessment_execution_status_command, {'assessment_id': 1}, GET_ASS_EXECUTION_STATUS_RESP,
+ GET_ASS_EXECUTION_RESULT),
+ (get_test_execution_status_command, {'test_id': 1}, GET_TEST_STATUS_RESP, GET_TEST_STATUS_RESULT),
+ (get_test_results_command, {'test_id': 1}, GET_TEST_RESULT_RESP, GET_TEST_RESULT_RESULT),
+ (list_assessments_command, {'page_number': 1}, GET_ASS_RESP, GET_ASS_RESULT),
+ (get_assessment_by_id_command, {'assessment_id': 1}, GET_ASS_RESP, GET_ASS_BY_ID_RESULT),
+ (list_tests_by_assessment_command, {}, GET_TESTS_RESP, GET_TEST_RESULT),
+ (run_all_tests_in_assessment_command, {}, RUN_ALL_TESTS_RESP, RUN_ALL_TESTS_RESULT)
+]) # noqa: E124
+def test_commands(command, args, response, expected_result, mocker):
+ mocker.patch.object(requests, 'request', return_value=ResponseMock(response))
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(demisto, 'args', return_value=args)
+ command()
+ demisto.results.assert_called_with(expected_result)
diff --git a/Integrations/AttackIQFireDrill/CHANGELOG.md b/Integrations/AttackIQFireDrill/CHANGELOG.md
new file mode 100644
index 000000000000..d62413a494d4
--- /dev/null
+++ b/Integrations/AttackIQFireDrill/CHANGELOG.md
@@ -0,0 +1,17 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+ - Changed the integration name from "AttackIQ FireDrill" to "AttackIQ Platform".
+
+
+## [19.9.1] - 2019-09-18
+ - New Integration AttackIQ - FireDrill:
+ - Command ***attackiq-get-assessment-by-id***: Gets an assessment by its ID.
+ - Command ***attackiq-list-assessments***: Gets all assessments in a page (up to 10 assessments per page).
+ - Command ***attackiq-activate-assessment***: Activates the assessment. This is required for execution.
+ - Command ***attackiq-run-all-tests-in-assessment***: Runs all of the tests in the assessment.
+ - Command ***attackiq-get-assessment-execution-status***: Gets the assessment execution status. Supports on-demand runs only.
+ - Command ***attackiq-get-test-execution-status***: Gets the test run status.
+ - Command ***attackiq-list-tests-by-assessment***: Gets an assessment's tests. By default, returns up to 10 tests per call.
+ - Command ***attackiq-get-test-results***: Gets an assessment's test results by page (by default, a page consists of 10 results).
diff --git a/Integrations/AttackIQFireDrill/Pipfile b/Integrations/AttackIQFireDrill/Pipfile
new file mode 100644
index 000000000000..59173fae1c47
--- /dev/null
+++ b/Integrations/AttackIQFireDrill/Pipfile
@@ -0,0 +1,16 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+pytest-asyncio = "*"
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/AttackIQFireDrill/Pipfile.lock b/Integrations/AttackIQFireDrill/Pipfile.lock
new file mode 100644
index 000000000000..0ec11860c105
--- /dev/null
+++ b/Integrations/AttackIQFireDrill/Pipfile.lock
@@ -0,0 +1,244 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "05cf99fdc8697b46431a0adb31d839d373002b73121efd93428adae96f6d475d"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7",
+ "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db"
+ ],
+ "version": "==0.18"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832",
+ "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"
+ ],
+ "version": "==7.2.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
+ "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
+ ],
+ "version": "==19.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:43c5486cefefa536c9aab528881c992328f020eefe4f6d06332449c365218580",
+ "sha256:d6c5ffe9d0305b9b977f7a642d36b9370954d1da7ada4c62393382cbadad4265"
+ ],
+ "version": "==2.4.1.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6ef6d06de77ce2961156013e9dff62f1b2688aa04d0dc244299fe7d67e09370d",
+ "sha256:a736fed91c12681a7b34617c8fcefe39ea04599ca72c608751c31d89579a3f77"
+ ],
+ "index": "pypi",
+ "version": "==5.0.1"
+ },
+ "pytest-asyncio": {
+ "hashes": [
+ "sha256:9fac5100fd716cbecf6ef89233e8590a4ad61d729d1732e0a96b84182df1daaf",
+ "sha256:d734718e25cfc32d2bf78d346e99d33724deeba774cc4afdf491530c6184b63b"
+ ],
+ "index": "pypi",
+ "version": "==0.10.0"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "version": "==2.22.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
+ "sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e",
+ "sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0",
+ "sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c",
+ "sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631",
+ "sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4",
+ "sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34",
+ "sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b",
+ "sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a",
+ "sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233",
+ "sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1",
+ "sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36",
+ "sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d",
+ "sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a",
+ "sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.4.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/AttackIQFireDrill/test_data/constants.py b/Integrations/AttackIQFireDrill/test_data/constants.py
new file mode 100644
index 000000000000..d8a0564e858c
--- /dev/null
+++ b/Integrations/AttackIQFireDrill/test_data/constants.py
@@ -0,0 +1,452 @@
+class ResponseMock:
+ def __init__(self, _json=None):
+ self.status_code = 200
+ self._json = _json if _json is not None else {}
+
+ def json(self):
+ return self._json
+
+
+# Dictionaries
+DICT_1to5 = {'1': 1, '2': 2, '3': 3, '4': 4, '5': 5}
+DICT_NESTED_123 = {'nested': {'1': 1, '2': 2, '3': 3}}
+DICT_LST_AAB2B = {'aa_b': [{'2': 2}, {'2': 3}], 'b': 4}
+DICT_LST_NESTED = {'master': {'id': 1, 'assets': [{'id': 1, 'name': 'a'}, {'id': 2, 'name': 'b'}]}}
+
+TRANS_DICT_134 = {'1': 'one', '3': 'three', '4': 'four'}
+TRANS_DICT_NESTED_12 = {'nested.1': 'one', 'nested.2': 'two'}
+TRANS_DICT_NESTED_VAL_12 = {'1': 'one.1', '2': 'two'}
+TRANS_DICT_LST_A2B = {'aa_b': {'2': 'two'}, 'b': 'four'}
+TRANS_DICT_LST_NESTED = {'master.id': 'Master.ID', 'master.assets': {'id': 'ID', 'name': 'Name'}}
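+
+# Example pairing, as asserted in AttackIQFireDrill_test.py:
+# build_transformed_dict(DICT_LST_NESTED, TRANS_DICT_LST_NESTED) returns
+# {'Master': {'ID': 1, 'Assets': [{'ID': 1, 'Name': 'a'}, {'ID': 2, 'Name': 'b'}]}}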
+
+# Requests
+ACTIVATE_ASS_RESP = {
+ "message": "Successfully activated project c4e352ae-1506-4c74-bd90-853f02dd765a"
+}
+GET_ASS_RESP = {
+ "count": 1,
+ "next": None,
+ "previous": None,
+ "results": [
+ {
+ "id": "2e53e597-0388-48bb-8eb8-00bb28874434",
+ "name": "Arseny's ransomware project",
+ "description": "Test of common ransomware variants",
+ "start_date": None,
+ "end_date": None,
+ "project_state": "Inactive",
+ "default_schedule": None,
+ "project_template": {
+ "id": "59d35f4a-2da0-4c4a-a08a-c30cb41dae6b",
+ "template_name": "Ransomware Project",
+ "template_description": "Variety of common ransomware variants",
+ "project_name": "Ransomware Project",
+ "project_description": "Test of common ransomware variants",
+ "icon": "ransomware_template_icon.svg",
+ "project_template_type": {
+ "id": "b1e7ac80-1417-4f7b-a387-35fb49f218c8",
+ "name": "Use Cases",
+ "description": "Showcase different use cases in which FireDrill can help"
+ },
+ "default_schedule": None,
+ "report_types": [
+ {
+ "id": "38f24061-a70f-415a-b378-bc9575b7ac6a",
+ "name": "Security Assessment Differential Report"
+ },
+ {
+ "id": "986fce3c-89a5-47f0-843d-99ba269b576b",
+ "name": "Security Assessment Detailed Report"
+ },
+ {
+ "id": "fdb6a5b9-ec10-4a5b-b387-7433ed4e78df",
+ "name": "Ransomware Executive Summary"
+ }
+ ],
+ "widgets": [
+ "b955b352-e59f-4b8f-8c93-f88a7d5aa026",
+ "938589ec-653c-45be-a7cc-6cd632387bb7"
+ ],
+ "meta_data": {
+ "hidden": True
+ },
+ "company": "906d5ec6-101c-4ae6-8906-b93ce0529060",
+ "created": "2016-07-01T20:26:43.494459Z",
+ "modified": "2019-02-19T03:31:54.393885Z"
+ },
+ "creator": "akrupnik@paloaltonetworks.com",
+ "owner": "akrupnik@paloaltonetworks.com",
+ "user": "akrupnik@paloaltonetworks.com",
+ "created": "2019-09-02T11:51:57.507486Z",
+ "modified": "2019-09-02T11:51:59.769959Z",
+ "users": [
+ "71e92cf9-5159-466c-8050-142d1ba279ea"
+ ],
+ "groups": [],
+ "default_asset_count": 0,
+ "default_asset_group_count": 0,
+ "master_job_count": 3,
+ "meta_data": {
+ "hidden": True
+ }
+ }
+ ]
+}
+GET_ASS_EXECUTION_STATUS_RESP = {
+ "message": False
+}
+GET_TESTS_RESP = {
+ "count": 1,
+ "next": None,
+ "previous": None,
+ "results": [
+ {
+ "id": "9aed2cef-8c64-4e29-83b4-709de5963b66",
+ "name": "Most Used Threat Actor Techniques",
+ "description": None,
+ "project": "8978fe24-607a-4815-a36a-89fb6191b318",
+ "scenarios": [
+ {
+ "id": "fdef9f60-d933-4158-bfde-81c2d791b2a2",
+ "name": "Persistence Through Startup Folder",
+ "model_json": {}
+ },
+ {
+ "id": "04ed47b9-145c-46f6-9434-f9f5af27a2d2",
+ "name": "Execute Encoded Powershell Command",
+ "model_json": {
+ "run_as_logged_in_user": False,
+ "timeout": 10000
+ }
+ },
+ {
+ "id": "a3098773-f2c1-4b32-8cba-2ed6d7ec0ba1",
+ "name": "Standard Application Layer Protocol",
+ "model_json": {
+ "ports_no_standard_protocols": [
+ "443"
+ ],
+ "payload_type": [
+ "safe",
+ "malicious"
+ ],
+ "timeout": 30,
+ "ports_standard_protocols": [
+ "21",
+ "25",
+ "53",
+ "80"
+ ]
+ }
+ },
+ {
+ "id": "59699d35-b268-41b5-bc00-ed8acc222b64",
+ "name": "Scheduled Task Execution",
+ "model_json": {}
+ },
+ {
+ "id": "cfbbd145-28a2-4ac3-a1e0-79abddfc9881",
+ "name": "Dump Windows Passwords with Original Mimikatz",
+ "model_json": {
+ "mimikatz_cred_types": [],
+ "show_all_cred_types": False,
+ "wce_cred_types": "lm_ntlm",
+ "use_custom_parameters": False,
+ "mimikatz_module": "sekurlsa",
+ "user_type": "all",
+ "gsecdump_cred_types": "sam_ad",
+ "pwdump7_cred_types": [],
+ "cred_types": [
+ "all"
+ ],
+ "undetectable_mimikatz_cred_types": [],
+ "print_output": False,
+ "lazagne_cred_types": "browsers",
+ "pwd_dumping_tool": "mimikatz"
+ }
+ },
+ {
+ "id": "f73dd965-dc8c-4230-9745-a530b21c5333",
+ "name": "Remote File Copy Script",
+ "model_json": {
+ "scripts": [
+ {
+ "script_hash": "1ed3ee9d6aa12e67241be44f5e284de65c8ca297025cde2ee79bc4dc7f1f425a",
+ "exit_code": 0,
+ "platform": "windows",
+ "success_type": "with_exit_code",
+ "interpreter": "powershell.exe",
+ "script_files": "67211eac-1745-43c3-9fc9-9b99049b088c/remote_file_copy.ps1"
+ }
+ ]
+ }
+ },
+ {
+ "id": "8ca3ca07-b52b-4ede-af05-ce1eb8834454",
+ "name": "Command-Line Interface Script",
+ "model_json": {
+ "scripts": [
+ {
+ "script_hash": "4851bb8fdee02a8935a3ded79e39b6a0c2c9ab6bd5a94534a2524e50009c50e2",
+ "exit_code": 0,
+ "platform": "windows",
+ "success_type": "with_exit_code",
+ "interpreter": "cmd.exe",
+ "script_files": "8a354ed9-fc5e-4c5c-8b8b-47e5e66a3c4b/command_line_interface.bat"
+ }
+ ]
+ }
+ },
+ {
+ "id": "8e39c23c-aca4-4940-96bf-247723026e46",
+ "name": "File Deletion Script",
+ "model_json": {
+ "scripts": [
+ {
+ "script_hash": "6c670f90fba2fc5d6449c1948a5497ea7d0f53f1a3d4f1d26590d211b860adf6",
+ "exit_code": 0,
+ "platform": "windows",
+ "success_type": "with_exit_code",
+ "interpreter": "cmd.exe",
+ "script_files": "029d27bb-dc6d-4510-922b-9e564df1eca4/file_deletion.bat"
+ }
+ ]
+ }
+ },
+ {
+ "id": "5fbb5e71-6e35-4e2c-8dc6-7ee55be563dd",
+ "name": "System Information Discovery Script",
+ "model_json": {
+ "scripts": [
+ {
+ "script_hash": "d51e34a47a79465a0ef3916fe01fe667e8e4281ef3b676569e6a1a33419e51ea",
+ "exit_code": 0,
+ "platform": "windows",
+ "success_type": "with_exit_code",
+ "interpreter": "cmd.exe",
+ "script_files": "4be17c81-a0de-4a7e-acd2-b9bd9f9aeb1c/system_information_discovery.bat"
+ },
+ {
+ "script_hash": "b4e7c8a463c04cd1e45e1455af358185c09d144ef3c276ebd4a0fa4c628f153e",
+ "exit_code": 0,
+ "execute_as_user": False,
+ "platform": "linux",
+ "success_type": "with_exit_code",
+ "interpreter": "/bin/bash",
+ "script_files": "3b33ee2d-04a6-4a33-b0d5-15d0c91e5857/system_information_discovery.sh"
+ }
+ ]
+ }
+ },
+ {
+ "id": "1e46e621-2453-4aaa-85b7-ab67d0b37b8c",
+ "name": "Persistence Through Windows Registry",
+ "model_json": {
+ "registry": [
+ {
+ "data": "%SystemRoot%/attackiq_data.exe",
+ "value": "attackiq_value",
+ "key": "HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Run"
+ },
+ {
+ "data": "%SystemRoot%/attackiq_data.exe",
+ "value": "attackiq_value",
+ "key": "HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Run"
+ },
+ {
+ "data": "%APPDATA%/attackiq_data.dll",
+ "value": "attackiq_value",
+ "key": "HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Browser Helper Objects" # noqa: E501
+ },
+ {
+ "data": "%PROGRAMFILES%/attackiq_texteditor.exe",
+ "value": "attackiq_texteditor",
+ "key": "HKEY_CLASSES_ROOT\\txtfile\\Shell\\Open\\command"
+ }
+ ]
+ }
+ }
+ ],
+ "assets": [
+ {
+ "id": "03e17460-849e-4b86-b6c6-ef0db72823ff",
+ "ipv4_address": "172.31.39.254",
+ "hostname": "ec2amaz-g4iu5no",
+ "product_name": "Windows Server 2016 Datacenter",
+ "modified": "2019-09-05T13:33:34.062040Z",
+ "status": "Active"
+ }
+ ],
+ "asset_groups": [],
+ "total_asset_count": 1,
+ "cron_expression": None,
+ "runnable": True,
+ "last_result": "Failed",
+ "scheduled_count": 10,
+ "user": "akrupnik@paloaltonetworks.com",
+ "created": "2019-09-05T08:47:38.273306Z",
+ "modified": "2019-09-05T08:56:42.496002Z",
+ "latest_instance_id": "0de2caab-1ec0-4907-948b-dca3dc65fe2c",
+ "using_default_assets": True,
+ "using_default_schedule": True
+ }
+ ]
+}
+GET_TEST_STATUS_RESP = {
+ "detected": 0,
+ "failed": 9,
+ "finished": True,
+ "passed": 1,
+ "errored": 0,
+ "total": 10
+}
+GET_TEST_RESULT_RESP = {
+ "count": 1,
+ "next": None,
+ "previous": None,
+ "results": [
+ {
+ "id": "5f044657-d0bc-48ab-afaf-98c6ae5a9e7f",
+ "modified": "2019-09-03T14:22:46.747664Z",
+ "project": {
+ "id": "c4e352ae-1506-4c74-bd90-853f02dd765a",
+ "name": "Arseny's ransomware project"
+ },
+ "project_run_id": "74fc59ba-ec33-41c2-a63f-9a0188e3b4bb",
+ "master_job": {
+ "id": "1c350a5a-84f2-4938-93d8-cc31f0a99482",
+ "name": "Ransomware Download",
+ "assets": [
+ {
+ "id": "03e17460-849e-4b86-b6c6-ef0db72823ff",
+ "ipv4_address": "172.31.39.254",
+ "hostname": "ec2amaz-g4iu5no",
+ "product_name": "Windows Server 2016 Datacenter",
+ "modified": "2019-09-05T12:10:01.590138Z"
+ }
+ ],
+ "scenarios": [
+ {
+ "id": "ef72cfc8-796c-4a35-abea-547f0d898713",
+ "name": "Download Coverton Ransomware",
+ "description": "The Coverton ransomware has no known infection vector. After encryption, the ransomware deletes shadow volume copies and system restore points. A ransom note will then be created, explaining the the victim how to use the tor network and how to buy bitcoin. The authors demand a price of 1 bitcoin to decrypt and will threaten to double the price every week you do not pay. Unfortunately, the cryptography is solid so there is no decrypter available. This led some victims to pay the ransom. However, the decrypter they receive did not properly decrypt the files." # noqa: E501
+ }
+ ]
+ },
+ "master_job_name": "Ransomware Download",
+ "master_job_metadata": None,
+ "instance_job": "24178034-cb69-442d-afd8-a7d87ae78eda",
+ "instance_job_on_demand": True,
+ "instance_job_run_all": True,
+ "scenario_job_ref": 3874153,
+ "scenario_scheduled_job_uuid": "6c757c3d-6e80-426e-94cb-625113845d8e",
+ "scenario": {
+ "id": "fc057ae4-c56d-4e9a-8c0f-9f22ec1e5576",
+ "name": "Download SNSLock Ransomware",
+ "description": "The SNSLock ransomware is spread through email spam campaigns. Upon infection, the ransomware will connect to it's C2 server and send user information such as system date and time, IP address, and MAC address. During infection, the ransomware will add a .RSNSlocked extension. After infection, it will drop an html file that contains all the information to pay $300 dollars using bitcoin." # noqa: E501
+ },
+ "scenario_args": {
+ "check_if_executable": True,
+ "sha256_hash": "597a14a76fc4d6315afa877ef87b68401de45d852e38f98c2f43986b4dca1c3a",
+ "download_url": "https://malware.scenarios.aiqscenarioinfra.com/597a14a76fc4d6315afa877ef87b68401de45d852e38f98c2f43986b4dca1c3a/SNSLock" # noqa: E501
+ },
+ "scenario_exe": "ai_python",
+ "scenario_name": "Download SNSLock Ransomware",
+ "scenario_type": 1,
+ "asset": {
+ "id": "03e17460-849e-4b86-b6c6-ef0db72823ff",
+ "ipv4_address": "172.31.39.254",
+ "hostname": "ec2amaz-g4iu5no",
+ "product_name": "Windows Server 2016 Datacenter",
+ "modified": "2019-09-05T12:10:01.590138Z"
+ },
+ "asset_hostname": "ec2amaz-g4iu5no",
+ "asset_ipv4_address": "172.31.39.254",
+ "asset_group": None,
+ "asset_group_name": None,
+ "scheduled_time": "2019-09-03T14:16:00Z",
+ "sent_to_agent": True,
+ "done": True,
+ "canceled": False,
+ "job_state_id": 7,
+ "job_state_name": "Finished",
+ "scenario_result_value": {
+ "ai_scenario_outcome": 1,
+ "ai_log_time": 1567520565441,
+ "ai_python_process_id": 100,
+ "ai_total_time_taken": 2.687,
+ "ai_critical_phases_successful": 0,
+ "ai_tracker_id": "125"
+ },
+ "outcome_id": 1,
+ "outcome_name": "Passed",
+ "sent_to_siem_connector": False,
+ "result_id": "5f044657-d0bc-48ab-afaf-98c6ae5a9e7f",
+ "company": "55b4e4cf-9cf9-4bef-8c21-6eb17f5bfc7d",
+ "user": "efcc433f-c954-4855-b9f0-3c3beeefdbf6",
+ "created": "2019-09-03T14:16:24.560022Z",
+ "run_count": "",
+ "scenario_scheduled_job": {
+ "id": "5f044657-d0bc-48ab-afaf-98c6ae5a9e7f",
+ "scenario_scheduled_job_uuid": "6c757c3d-6e80-426e-94cb-625113845d8e",
+ "scenario_job": {
+ "id": "5f044657-d0bc-48ab-afaf-98c6ae5a9e7f",
+ "master_job": "1c350a5a-84f2-4938-93d8-cc31f0a99482",
+ "master_job_name": "Ransomware Download",
+ "project": {
+ "id": "c4e352ae-1506-4c74-bd90-853f02dd765a",
+ "name": "Arseny's ransomware project"
+ },
+ "scenario": {
+ "id": "fc057ae4-c56d-4e9a-8c0f-9f22ec1e5576",
+ "name": "Download SNSLock Ransomware",
+ "description": "The SNSLock ransomware is spread through email spam campaigns. Upon infection, the ransomware will connect to it's C2 server and send user information such as system date and time, IP address, and MAC address. During infection, the ransomware will add a .RSNSlocked extension. After infection, it will drop an html file that contains all the information to pay $300 dollars using bitcoin.", # noqa: E501
+ "scenario_type": "Attack",
+ "scenario_template": {
+ "id": "4f89d738-d253-452d-b944-99b41f8b2e07",
+ "zip_file": "https://static.attackiq.com/scenarios/4f89d738-d253-452d-b944-99b41f8b2e07/download_and_save_file-1.0.120.dev0.zip?v=1.0.8&Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiaHR0cHM6Ly9zdGF0aWMuYXR0YWNraXEuY29tL3NjZW5hcmlvcy80Zjg5ZDczOC1kMjUzLTQ1MmQtYjk0NC05OWI0MWY4YjJlMDcvZG93bmxvYWRfYW5kX3NhdmVfZmlsZS0xLjAuMTIwLmRldjAuemlwP3Y9MS4wLjgiLCJDb25kaXRpb24iOnsiRGF0ZUxlc3NUaGFuIjp7IkFXUzpFcG9jaFRpbWUiOjE1Njc2OTA5OTN9fX1dfQ__&Signature=H3KgpyE69Ysg3NzfkJIO-vYP1zpbqakKJZhnToPZ2PnzKw~x9~ihmQz1AKU6AGowwBN2l9fFHdigCZQ0wBdwt346MxUXVJpcjb6Wz4AVBieN9qmkfARA3SB7WCBF48HiOSLRqWJtpzBc~jqLrcGS4T-UPM5S~TEXX79~dTXg2ZJoor7FbqL-kaLX09N08r4o6XsKzB0HoVmleZ8x9b8AotgLYjbExYdLctgPnOcxWgGuJKRUtdYgW-loPf9V56yg1ngl59aA1Emgo74-BfUXGl5tgK4LPbvGQw7kg5rjM310vh3oze~h0oiE3IHHVNSW2pcsl4U7ELofUpFwE~-sUg__&Key-Pair-Id=APKAJY2DPWILXHPNCJTA" # noqa: E501
+ },
+ "supported_platforms": {
+ "osx": ">=0.0",
+ "centos": ">=0.0",
+ "redhat": ">=0.0",
+ "windows": ">=0.0",
+ "linuxmint": ">=0.0",
+ "ubuntu": ">=0.0",
+ "debian": ">=0.0",
+ "fedora": ">=0.0"
+ }
+ },
+ "asset": {
+ "id": "03e17460-849e-4b86-b6c6-ef0db72823ff",
+ "ipv4_address": "172.31.39.254",
+ "hostname": "ec2amaz-g4iu5no",
+ "product_name": "Windows Server 2016 Datacenter",
+ "modified": "2019-09-05T12:10:01.590138Z"
+ },
+ "modified": "2019-09-03T14:22:46.747664Z"
+ },
+ "job_state": "Finished",
+ "modified": "2019-09-03T14:22:46.747664Z"
+ },
+ "config_map_values": {},
+ "cancellable": True
+ }
+ ]
+}
+RUN_ALL_TESTS_RESP = {
+ "message": "Successfully started running all tests in project: ATT&CK by the Numbers @ NOVA BSides 2019",
+ "started_at": "2019-09-05T13:33:29.621693"
+}
+
+# Results
+ACTIVATE_ASS_RES = ACTIVATE_ASS_RESP['message']
+GET_ASS_RESULT = {'Type': 1, 'HumanReadable': "### AttackIQ Assessments Page 1/1\n|Id|Name|Description|User|Created|Modified|\n|---|---|---|---|---|---|\n| 2e53e597-0388-48bb-8eb8-00bb28874434 | Arseny's ransomware project | Test of common ransomware variants | akrupnik@paloaltonetworks.com | 2019-09-02T11:51:57.507486Z | 2019-09-02T11:51:59.769959Z |\n", 'ContentsFormat': 'json', 'Contents': {'count': 1, 'next': None, 'previous': None, 'results': [{'id': '2e53e597-0388-48bb-8eb8-00bb28874434', 'name': "Arseny's ransomware project", 'description': 'Test of common ransomware variants', 'start_date': None, 'end_date': None, 'project_state': 'Inactive', 'default_schedule': None, 'project_template': {'id': '59d35f4a-2da0-4c4a-a08a-c30cb41dae6b', 'template_name': 'Ransomware Project', 'template_description': 'Variety of common ransomware variants', 'project_name': 'Ransomware Project', 'project_description': 'Test of common ransomware variants', 'icon': 'ransomware_template_icon.svg', 'project_template_type': {'id': 'b1e7ac80-1417-4f7b-a387-35fb49f218c8', 'name': 'Use Cases', 'description': 'Showcase different use cases in which FireDrill can help'}, 'default_schedule': None, 'report_types': [{'id': '38f24061-a70f-415a-b378-bc9575b7ac6a', 'name': 'Security Assessment Differential Report'}, {'id': '986fce3c-89a5-47f0-843d-99ba269b576b', 'name': 'Security Assessment Detailed Report'}, {'id': 'fdb6a5b9-ec10-4a5b-b387-7433ed4e78df', 'name': 'Ransomware Executive Summary'}], 'widgets': ['b955b352-e59f-4b8f-8c93-f88a7d5aa026', '938589ec-653c-45be-a7cc-6cd632387bb7'], 'meta_data': {'hidden': True}, 'company': '906d5ec6-101c-4ae6-8906-b93ce0529060', 'created': '2016-07-01T20:26:43.494459Z', 'modified': '2019-02-19T03:31:54.393885Z'}, 'creator': 'akrupnik@paloaltonetworks.com', 'owner': 'akrupnik@paloaltonetworks.com', 'user': 'akrupnik@paloaltonetworks.com', 'created': '2019-09-02T11:51:57.507486Z', 'modified': '2019-09-02T11:51:59.769959Z', 'users': ['71e92cf9-5159-466c-8050-142d1ba279ea'], 'groups': [], 'default_asset_count': 0, 'default_asset_group_count': 0, 'master_job_count': 3, 'meta_data': {'hidden': True}}]}, 'EntryContext': {'AttackIQ.Assessment(val.Id === obj.Id)': [{'Id': '2e53e597-0388-48bb-8eb8-00bb28874434', 'Name': "Arseny's ransomware project", 'User': 'akrupnik@paloaltonetworks.com', 'Users': ['71e92cf9-5159-466c-8050-142d1ba279ea'], 'Owner': 'akrupnik@paloaltonetworks.com', 'Groups': [], 'Creator': 'akrupnik@paloaltonetworks.com', 'Created': '2019-09-02T11:51:57.507486Z', 'EndDate': None, 'Modified': '2019-09-02T11:51:59.769959Z', 'StartDate': None, 'Description': 'Test of common ransomware variants', 'AssessmentState': 'Inactive', 'MasterJobCount': 3, 'DefaultSchedule': None, 'DefaultAssetCount': 0, 'AssessmentTemplateId': '59d35f4a-2da0-4c4a-a08a-c30cb41dae6b', 'DefaultAssetGroupCount': 0, 'AssessmentTemplateCompany': '906d5ec6-101c-4ae6-8906-b93ce0529060', 'AssessmentTemplateCreated': '2016-07-01T20:26:43.494459Z', 'AssessmentTemplateModified': '2019-02-19T03:31:54.393885Z', 'AssessmentTemplateName': 'Ransomware Project', 'AssessmentTemplateDefaultSchedule': None, 'AssessmentTemplateDescription': 'Variety of common ransomware variants'}], 'AttackIQ.Assessment(val.Count).Count': 1, 'AttackIQ.Assessment(val.RemainingPages).RemainingPages': 0}} # noqa: E501
+GET_ASS_EXECUTION_RESULT = {'Type': 1, 'HumanReadable': 'Assessment 1 execution is not running.',
+ 'ContentsFormat': 'json', 'Contents': {'message': False},
+ 'EntryContext': {'AttackIQ.Assessment(val.Id === obj.Id)': {'Running': False, 'Id': 1}}}
+GET_TEST_RESULT = {'Type': 1, 'HumanReadable': '# Assessment None tests\n## Page 1 / 1\n### Test - Most Used Threat Actor Techniques\n|Id|Name|Created|Modified|Runnable|Last Result|\n|---|---|---|---|---|---|\n| 9aed2cef-8c64-4e29-83b4-709de5963b66 | Most Used Threat Actor Techniques | 2019-09-05T08:47:38.273306Z | 2019-09-05T08:56:42.496002Z | true | Failed |\n### Assets (Most Used Threat Actor Techniques)\n|Hostname|Id|Ipv4Address|Modified|ProductName|Status|\n|---|---|---|---|---|---|\n| ec2amaz-g4iu5no | 03e17460-849e-4b86-b6c6-ef0db72823ff | 172.31.39.254 | 2019-09-05T13:33:34.062040Z | Windows Server 2016 Datacenter | Active |\n### Scenarios (Most Used Threat Actor Techniques)\n|Id|Name|\n|---|---|\n| fdef9f60-d933-4158-bfde-81c2d791b2a2 | Persistence Through Startup Folder |\n| 04ed47b9-145c-46f6-9434-f9f5af27a2d2 | Execute Encoded Powershell Command |\n| a3098773-f2c1-4b32-8cba-2ed6d7ec0ba1 | Standard Application Layer Protocol |\n| 59699d35-b268-41b5-bc00-ed8acc222b64 | Scheduled Task Execution |\n| cfbbd145-28a2-4ac3-a1e0-79abddfc9881 | Dump Windows Passwords with Original Mimikatz |\n| f73dd965-dc8c-4230-9745-a530b21c5333 | Remote File Copy Script |\n| 8ca3ca07-b52b-4ede-af05-ce1eb8834454 | Command-Line Interface Script |\n| 8e39c23c-aca4-4940-96bf-247723026e46 | File Deletion Script |\n| 5fbb5e71-6e35-4e2c-8dc6-7ee55be563dd | System Information Discovery Script |\n| 1e46e621-2453-4aaa-85b7-ab67d0b37b8c | Persistence Through Windows Registry |\n', 'ContentsFormat': 'json', 'Contents': {'count': 1, 'next': None, 'previous': None, 'results': [{'id': '9aed2cef-8c64-4e29-83b4-709de5963b66', 'name': 'Most Used Threat Actor Techniques', 'description': None, 'project': '8978fe24-607a-4815-a36a-89fb6191b318', 'scenarios': [{'id': 'fdef9f60-d933-4158-bfde-81c2d791b2a2', 'name': 'Persistence Through Startup Folder', 'model_json': {}}, {'id': '04ed47b9-145c-46f6-9434-f9f5af27a2d2', 'name': 'Execute Encoded Powershell Command', 'model_json': {'run_as_logged_in_user': False, 'timeout': 10000}}, {'id': 'a3098773-f2c1-4b32-8cba-2ed6d7ec0ba1', 'name': 'Standard Application Layer Protocol', 'model_json': {'ports_no_standard_protocols': ['443'], 'payload_type': ['safe', 'malicious'], 'timeout': 30, 'ports_standard_protocols': ['21', '25', '53', '80']}}, {'id': '59699d35-b268-41b5-bc00-ed8acc222b64', 'name': 'Scheduled Task Execution', 'model_json': {}}, {'id': 'cfbbd145-28a2-4ac3-a1e0-79abddfc9881', 'name': 'Dump Windows Passwords with Original Mimikatz', 'model_json': {'mimikatz_cred_types': [], 'show_all_cred_types': False, 'wce_cred_types': 'lm_ntlm', 'use_custom_parameters': False, 'mimikatz_module': 'sekurlsa', 'user_type': 'all', 'gsecdump_cred_types': 'sam_ad', 'pwdump7_cred_types': [], 'cred_types': ['all'], 'undetectable_mimikatz_cred_types': [], 'print_output': False, 'lazagne_cred_types': 'browsers', 'pwd_dumping_tool': 'mimikatz'}}, {'id': 'f73dd965-dc8c-4230-9745-a530b21c5333', 'name': 'Remote File Copy Script', 'model_json': {'scripts': [{'script_hash': '1ed3ee9d6aa12e67241be44f5e284de65c8ca297025cde2ee79bc4dc7f1f425a', 'exit_code': 0, 'platform': 'windows', 'success_type': 'with_exit_code', 'interpreter': 'powershell.exe', 'script_files': '67211eac-1745-43c3-9fc9-9b99049b088c/remote_file_copy.ps1'}]}}, {'id': '8ca3ca07-b52b-4ede-af05-ce1eb8834454', 'name': 'Command-Line Interface Script', 'model_json': {'scripts': [{'script_hash': '4851bb8fdee02a8935a3ded79e39b6a0c2c9ab6bd5a94534a2524e50009c50e2', 'exit_code': 0, 'platform': 'windows', 'success_type': 'with_exit_code', 'interpreter': 'cmd.exe', 'script_files': '8a354ed9-fc5e-4c5c-8b8b-47e5e66a3c4b/command_line_interface.bat'}]}}, {'id': '8e39c23c-aca4-4940-96bf-247723026e46', 'name': 'File Deletion Script', 'model_json': {'scripts': [{'script_hash': '6c670f90fba2fc5d6449c1948a5497ea7d0f53f1a3d4f1d26590d211b860adf6', 'exit_code': 0, 'platform': 'windows', 'success_type': 'with_exit_code', 'interpreter': 'cmd.exe', 'script_files': '029d27bb-dc6d-4510-922b-9e564df1eca4/file_deletion.bat'}]}}, {'id': '5fbb5e71-6e35-4e2c-8dc6-7ee55be563dd', 'name': 'System Information Discovery Script', 'model_json': {'scripts': [{'script_hash': 'd51e34a47a79465a0ef3916fe01fe667e8e4281ef3b676569e6a1a33419e51ea', 'exit_code': 0, 'platform': 'windows', 'success_type': 'with_exit_code', 'interpreter': 'cmd.exe', 'script_files': '4be17c81-a0de-4a7e-acd2-b9bd9f9aeb1c/system_information_discovery.bat'}, {'script_hash': 'b4e7c8a463c04cd1e45e1455af358185c09d144ef3c276ebd4a0fa4c628f153e', 'exit_code': 0, 'execute_as_user': False, 'platform': 'linux', 'success_type': 'with_exit_code', 'interpreter': '/bin/bash', 'script_files': '3b33ee2d-04a6-4a33-b0d5-15d0c91e5857/system_information_discovery.sh'}]}}, {'id': '1e46e621-2453-4aaa-85b7-ab67d0b37b8c', 'name': 'Persistence Through Windows Registry', 'model_json': {'registry': [{'data': '%SystemRoot%/attackiq_data.exe', 'value': 'attackiq_value', 'key': 'HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Run'}, {'data': '%SystemRoot%/attackiq_data.exe', 'value': 'attackiq_value', 'key': 'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Run'}, {'data': '%APPDATA%/attackiq_data.dll', 'value': 'attackiq_value', 'key': 'HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Browser Helper Objects'}, {'data': '%PROGRAMFILES%/attackiq_texteditor.exe', 'value': 'attackiq_texteditor', 'key': 'HKEY_CLASSES_ROOT\\txtfile\\Shell\\Open\\command'}]}}], 'assets': [{'id': '03e17460-849e-4b86-b6c6-ef0db72823ff', 'ipv4_address': '172.31.39.254', 'hostname': 'ec2amaz-g4iu5no', 'product_name': 'Windows Server 2016 Datacenter', 'modified': '2019-09-05T13:33:34.062040Z', 'status': 'Active'}], 'asset_groups': [], 'total_asset_count': 1, 'cron_expression': None, 'runnable': True, 'last_result': 'Failed', 'scheduled_count': 10, 'user': 'akrupnik@paloaltonetworks.com', 'created': '2019-09-05T08:47:38.273306Z', 'modified': '2019-09-05T08:56:42.496002Z', 'latest_instance_id': '0de2caab-1ec0-4907-948b-dca3dc65fe2c', 'using_default_assets': True, 'using_default_schedule': True}]}, 'EntryContext': {'AttackIQTest(val.Id === obj.Id)': [{'Id': '9aed2cef-8c64-4e29-83b4-709de5963b66', 'Name': 'Most Used Threat Actor Techniques', 'Description': None, 'Assessment': '8978fe24-607a-4815-a36a-89fb6191b318', 'TotalAssetCount': 1, 'CronExpression': None, 'Runnable': True, 'LastResult': 'Failed', 'User': 'akrupnik@paloaltonetworks.com', 'Created': '2019-09-05T08:47:38.273306Z', 'Modified': '2019-09-05T08:56:42.496002Z', 'UsingDefaultSchedule': True, 'UsingDefaultAssets': True, 'LatestInstanceId': '0de2caab-1ec0-4907-948b-dca3dc65fe2c', 'Scenarios': [{'Name': 'Persistence Through Startup Folder', 'Id': 'fdef9f60-d933-4158-bfde-81c2d791b2a2'}, {'Name': 'Execute Encoded Powershell Command', 'Id': '04ed47b9-145c-46f6-9434-f9f5af27a2d2'}, {'Name': 'Standard Application Layer Protocol', 'Id': 'a3098773-f2c1-4b32-8cba-2ed6d7ec0ba1'}, {'Name': 'Scheduled Task Execution', 'Id': '59699d35-b268-41b5-bc00-ed8acc222b64'}, {'Name': 'Dump Windows Passwords with Original Mimikatz', 'Id': 'cfbbd145-28a2-4ac3-a1e0-79abddfc9881'}, {'Name': 'Remote File Copy Script', 'Id': 'f73dd965-dc8c-4230-9745-a530b21c5333'}, {'Name': 'Command-Line Interface Script', 'Id': '8ca3ca07-b52b-4ede-af05-ce1eb8834454'}, {'Name': 'File Deletion Script', 'Id': '8e39c23c-aca4-4940-96bf-247723026e46'}, {'Name': 'System Information Discovery Script', 'Id': '5fbb5e71-6e35-4e2c-8dc6-7ee55be563dd'}, {'Name': 'Persistence Through Windows Registry', 'Id': '1e46e621-2453-4aaa-85b7-ab67d0b37b8c'}], 'Assets': [{'Id': '03e17460-849e-4b86-b6c6-ef0db72823ff', 'Ipv4Address': '172.31.39.254', 'Hostname': 'ec2amaz-g4iu5no', 'ProductName': 'Windows Server 2016 Datacenter', 'Modified': '2019-09-05T13:33:34.062040Z', 'Status': 'Active'}]}], 'AttackIQTest(val.Count).Count': 1, 'AttackIQTest(val.RemainingPages).RemainingPages': 0}} # noqa: E501
+GET_TEST_STATUS_RESULT = {'Type': 1, 'HumanReadable': '### Test 1 status\n|Detected|Errored|Failed|Finished|Id|Passed|Total|\n|---|---|---|---|---|---|---|\n| 0 | 0 | 9 | true | 1 | 1 | 10 |\n', 'ContentsFormat': 'json', 'Contents': {'detected': 0, 'failed': 9, 'finished': True, 'passed': 1, 'errored': 0, 'total': 10}, 'EntryContext': {'AttackIQTest(val.Id === obj.Id)': {'Detected': 0, 'Failed': 9, 'Finished': True, 'Passed': 1, 'Errored': 0, 'Total': 10, 'Id': 1}}} # noqa: E501
+GET_TEST_RESULT_RESULT = {'Type': 1, 'HumanReadable': "### Test Results for None\n ### Page 1/1\n|Assessment Name|Scenario Name|Hostname|Asset IP|Job State|Modified|Outcome|\n|---|---|---|---|---|---|---|\n| Arseny's ransomware project | Download SNSLock Ransomware | ec2amaz-g4iu5no | 172.31.39.254 | | 2019-09-03T14:22:46.747664Z | |\n", 'ContentsFormat': 'json', 'Contents': {'count': 1, 'next': None, 'previous': None, 'results': [{'id': '5f044657-d0bc-48ab-afaf-98c6ae5a9e7f', 'modified': '2019-09-03T14:22:46.747664Z', 'project': {'id': 'c4e352ae-1506-4c74-bd90-853f02dd765a', 'name': "Arseny's ransomware project"}, 'project_run_id': '74fc59ba-ec33-41c2-a63f-9a0188e3b4bb', 'master_job': {'id': '1c350a5a-84f2-4938-93d8-cc31f0a99482', 'name': 'Ransomware Download', 'assets': [{'id': '03e17460-849e-4b86-b6c6-ef0db72823ff', 'ipv4_address': '172.31.39.254', 'hostname': 'ec2amaz-g4iu5no', 'product_name': 'Windows Server 2016 Datacenter', 'modified': '2019-09-05T12:10:01.590138Z'}], 'scenarios': [{'id': 'ef72cfc8-796c-4a35-abea-547f0d898713', 'name': 'Download Coverton Ransomware', 'description': 'The Coverton ransomware has no known infection vector. After encryption, the ransomware deletes shadow volume copies and system restore points. A ransom note will then be created, explaining the the victim how to use the tor network and how to buy bitcoin. The authors demand a price of 1 bitcoin to decrypt and will threaten to double the price every week you do not pay. Unfortunately, the cryptography is solid so there is no decrypter available. This led some victims to pay the ransom. However, the decrypter they receive did not properly decrypt the files.'}]}, 'master_job_name': 'Ransomware Download', 'master_job_metadata': None, 'instance_job': '24178034-cb69-442d-afd8-a7d87ae78eda', 'instance_job_on_demand': True, 'instance_job_run_all': True, 'scenario_job_ref': 3874153, 'scenario_scheduled_job_uuid': '6c757c3d-6e80-426e-94cb-625113845d8e', 'scenario': {'id': 'fc057ae4-c56d-4e9a-8c0f-9f22ec1e5576', 'name': 'Download SNSLock Ransomware', 'description': "The SNSLock ransomware is spread through email spam campaigns. Upon infection, the ransomware will connect to it's C2 server and send user information such as system date and time, IP address, and MAC address. During infection, the ransomware will add a .RSNSlocked extension. After infection, it will drop an html file that contains all the information to pay $300 dollars using bitcoin."}, 'scenario_args': {'check_if_executable': True, 'sha256_hash': '597a14a76fc4d6315afa877ef87b68401de45d852e38f98c2f43986b4dca1c3a', 'download_url': 'https://malware.scenarios.aiqscenarioinfra.com/597a14a76fc4d6315afa877ef87b68401de45d852e38f98c2f43986b4dca1c3a/SNSLock'}, 'scenario_exe': 'ai_python', 'scenario_name': 'Download SNSLock Ransomware', 'scenario_type': 1, 'asset': {'id': '03e17460-849e-4b86-b6c6-ef0db72823ff', 'ipv4_address': '172.31.39.254', 'hostname': 'ec2amaz-g4iu5no', 'product_name': 'Windows Server 2016 Datacenter', 'modified': '2019-09-05T12:10:01.590138Z'}, 'asset_hostname': 'ec2amaz-g4iu5no', 'asset_ipv4_address': '172.31.39.254', 'asset_group': None, 'asset_group_name': None, 'scheduled_time': '2019-09-03T14:16:00Z', 'sent_to_agent': True, 'done': True, 'canceled': False, 'job_state_id': 7, 'job_state_name': 'Finished', 'scenario_result_value': {'ai_scenario_outcome': 1, 'ai_log_time': 1567520565441, 'ai_python_process_id': 100, 'ai_total_time_taken': 2.687, 'ai_critical_phases_successful': 0, 'ai_tracker_id': '125'}, 'outcome_id': 1, 'outcome_name': 'Passed', 'sent_to_siem_connector': False, 'result_id': '5f044657-d0bc-48ab-afaf-98c6ae5a9e7f', 'company': '55b4e4cf-9cf9-4bef-8c21-6eb17f5bfc7d', 'user': 'efcc433f-c954-4855-b9f0-3c3beeefdbf6', 'created': '2019-09-03T14:16:24.560022Z', 'run_count': '', 'scenario_scheduled_job': {'id': '5f044657-d0bc-48ab-afaf-98c6ae5a9e7f', 'scenario_scheduled_job_uuid': '6c757c3d-6e80-426e-94cb-625113845d8e', 'scenario_job': {'id': '5f044657-d0bc-48ab-afaf-98c6ae5a9e7f', 'master_job': '1c350a5a-84f2-4938-93d8-cc31f0a99482', 'master_job_name': 'Ransomware Download', 'project': {'id': 'c4e352ae-1506-4c74-bd90-853f02dd765a', 'name': "Arseny's ransomware project"}, 'scenario': {'id': 'fc057ae4-c56d-4e9a-8c0f-9f22ec1e5576', 'name': 'Download SNSLock Ransomware', 'description': "The SNSLock ransomware is spread through email spam campaigns. Upon infection, the ransomware will connect to it's C2 server and send user information such as system date and time, IP address, and MAC address. During infection, the ransomware will add a .RSNSlocked extension. After infection, it will drop an html file that contains all the information to pay $300 dollars using bitcoin.", 'scenario_type': 'Attack', 'scenario_template': {'id': '4f89d738-d253-452d-b944-99b41f8b2e07', 'zip_file': 'https://static.attackiq.com/scenarios/4f89d738-d253-452d-b944-99b41f8b2e07/download_and_save_file-1.0.120.dev0.zip?v=1.0.8&Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiaHR0cHM6Ly9zdGF0aWMuYXR0YWNraXEuY29tL3NjZW5hcmlvcy80Zjg5ZDczOC1kMjUzLTQ1MmQtYjk0NC05OWI0MWY4YjJlMDcvZG93bmxvYWRfYW5kX3NhdmVfZmlsZS0xLjAuMTIwLmRldjAuemlwP3Y9MS4wLjgiLCJDb25kaXRpb24iOnsiRGF0ZUxlc3NUaGFuIjp7IkFXUzpFcG9jaFRpbWUiOjE1Njc2OTA5OTN9fX1dfQ__&Signature=H3KgpyE69Ysg3NzfkJIO-vYP1zpbqakKJZhnToPZ2PnzKw~x9~ihmQz1AKU6AGowwBN2l9fFHdigCZQ0wBdwt346MxUXVJpcjb6Wz4AVBieN9qmkfARA3SB7WCBF48HiOSLRqWJtpzBc~jqLrcGS4T-UPM5S~TEXX79~dTXg2ZJoor7FbqL-kaLX09N08r4o6XsKzB0HoVmleZ8x9b8AotgLYjbExYdLctgPnOcxWgGuJKRUtdYgW-loPf9V56yg1ngl59aA1Emgo74-BfUXGl5tgK4LPbvGQw7kg5rjM310vh3oze~h0oiE3IHHVNSW2pcsl4U7ELofUpFwE~-sUg__&Key-Pair-Id=APKAJY2DPWILXHPNCJTA'}, 'supported_platforms': {'osx': '>=0.0', 'centos': '>=0.0', 'redhat': '>=0.0', 'windows': '>=0.0', 'linuxmint': '>=0.0', 'ubuntu': '>=0.0', 'debian': '>=0.0', 'fedora': '>=0.0'}}, 'asset': {'id': '03e17460-849e-4b86-b6c6-ef0db72823ff', 'ipv4_address': '172.31.39.254', 'hostname': 'ec2amaz-g4iu5no', 'product_name': 'Windows Server 2016 Datacenter', 'modified': '2019-09-05T12:10:01.590138Z'}, 'modified': '2019-09-03T14:22:46.747664Z'}, 'job_state': 'Finished', 'modified': '2019-09-03T14:22:46.747664Z'}, 'config_map_values': {}, 'cancellable': True}]}, 'EntryContext': {'AttackIQTestResult(val.Id === obj.Id)': [{'Id': '5f044657-d0bc-48ab-afaf-98c6ae5a9e7f', 'Modified': '2019-09-03T14:22:46.747664Z', 'Assessment': {'Id': 'c4e352ae-1506-4c74-bd90-853f02dd765a', 'Name': "Arseny's ransomware project"}, 'Scenario': {'Id': 'fc057ae4-c56d-4e9a-8c0f-9f22ec1e5576', 'Name': 'Download SNSLock Ransomware', 'Description': "The SNSLock ransomware is spread through email spam campaigns. Upon infection, the ransomware will connect to it's C2 server and send user information such as system date and time, IP address, and MAC address. During infection, the ransomware will add a .RSNSlocked extension. After infection, it will drop an html file that contains all the information to pay $300 dollars using bitcoin."}, 'Asset': {'Id': '03e17460-849e-4b86-b6c6-ef0db72823ff', 'Ipv4Address': '172.31.39.254', 'Hostname': 'ec2amaz-g4iu5no', 'ProductName': 'Windows Server 2016 Datacenter', 'Modified': '2019-09-05T12:10:01.590138Z', 'AssetGroup': None}, 'JobState': 'Finished', 'Outcome': 'Passed'}], 'AttackIQTestResult(val.Count).Count': 1, 'AttackIQTestResult(val.RemainingPages).RemainingPages': 0}} # noqa: E501
+GET_ASS_BY_ID_RESULT = {'Type': 1, 'HumanReadable': '### AttackIQ Assessment 1\n|Id|Name|Description|User|Created|Modified|\n|---|---|---|---|---|---|\n| | | | | | |\n', 'ContentsFormat': 'json', 'Contents': {'count': 1, 'next': None, 'previous': None, 'results': [{'id': '2e53e597-0388-48bb-8eb8-00bb28874434', 'name': "Arseny's ransomware project", 'description': 'Test of common ransomware variants', 'start_date': None, 'end_date': None, 'project_state': 'Inactive', 'default_schedule': None, 'project_template': {'id': '59d35f4a-2da0-4c4a-a08a-c30cb41dae6b', 'template_name': 'Ransomware Project', 'template_description': 'Variety of common ransomware variants', 'project_name': 'Ransomware Project', 'project_description': 'Test of common ransomware variants', 'icon': 'ransomware_template_icon.svg', 'project_template_type': {'id': 'b1e7ac80-1417-4f7b-a387-35fb49f218c8', 'name': 'Use Cases', 'description': 'Showcase different use cases in which FireDrill can help'}, 'default_schedule': None, 'report_types': [{'id': '38f24061-a70f-415a-b378-bc9575b7ac6a', 'name': 'Security Assessment Differential Report'}, {'id': '986fce3c-89a5-47f0-843d-99ba269b576b', 'name': 'Security Assessment Detailed Report'}, {'id': 'fdb6a5b9-ec10-4a5b-b387-7433ed4e78df', 'name': 'Ransomware Executive Summary'}], 'widgets': ['b955b352-e59f-4b8f-8c93-f88a7d5aa026', '938589ec-653c-45be-a7cc-6cd632387bb7'], 'meta_data': {'hidden': True}, 'company': '906d5ec6-101c-4ae6-8906-b93ce0529060', 'created': '2016-07-01T20:26:43.494459Z', 'modified': '2019-02-19T03:31:54.393885Z'}, 'creator': 'akrupnik@paloaltonetworks.com', 'owner': 'akrupnik@paloaltonetworks.com', 'user': 'akrupnik@paloaltonetworks.com', 'created': '2019-09-02T11:51:57.507486Z', 'modified': '2019-09-02T11:51:59.769959Z', 'users': ['71e92cf9-5159-466c-8050-142d1ba279ea'], 'groups': [], 'default_asset_count': 0, 'default_asset_group_count': 0, 'master_job_count': 3, 'meta_data': {'hidden': True}}]}, 'EntryContext': {'AttackIQ.Assessment(val.Id === obj.Id)': {'Id': None, 'Name': None, 'User': None, 'Users': None, 'Owner': None, 'Groups': None, 'Creator': None, 'Created': None, 'EndDate': None, 'Modified': None, 'StartDate': None, 'Description': None, 'AssessmentState': None, 'MasterJobCount': None, 'DefaultSchedule': None, 'DefaultAssetCount': None, 'AssessmentTemplateId': None, 'DefaultAssetGroupCount': None, 'AssessmentTemplateCompany': None, 'AssessmentTemplateCreated': None, 'AssessmentTemplateModified': None, 'AssessmentTemplateName': None, 'AssessmentTemplateDefaultSchedule': None, 'AssessmentTemplateDescription': None}}} # noqa: E501
+RUN_ALL_TESTS_RESULT = 'Successfully started running all tests in project: ATT&CK by the Numbers @ NOVA BSides 2019'
diff --git a/Integrations/AutofocusV2/AutofocusV2.py b/Integrations/AutofocusV2/AutofocusV2.py
new file mode 100644
index 000000000000..f65b7ef50449
--- /dev/null
+++ b/Integrations/AutofocusV2/AutofocusV2.py
@@ -0,0 +1,1021 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import json
+import requests
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+PARAMS = demisto.params()
+API_KEY = PARAMS.get('api_key')
+# The AutoFocus service URL (no trailing slash, so the API path is built correctly)
+SERVER = 'https://autofocus.paloaltonetworks.com'
+# Whether to verify SSL certificates
+USE_SSL = not PARAMS.get('insecure', False)
+# Service base URL
+BASE_URL = SERVER + '/api/v1.0'
+# Headers to be sent in requests
+HEADERS = {
+ 'Content-Type': 'application/json'
+}
+
+API_PARAM_DICT = {
+ 'scope': {
+ 'Private': 'private',
+ 'Public': 'public',
+ 'Global': 'global'
+ },
+ 'order': {
+ 'Ascending': 'asc',
+ 'Descending': 'desc'
+ },
+ 'sort': {
+ 'App Name': 'app_name',
+ 'App Packagename': 'app_packagename',
+ 'File type': 'filetype',
+ 'Size': 'size',
+ 'Finish Date': 'finish_date',
+ 'First Seen (Create Date)': 'create_date',
+ 'Last Updated (Update Date)': 'update_date',
+ 'MD5': 'md5',
+ 'SHA1': 'sha1',
+ 'SHA256': 'sha256',
+ 'Ssdeep Fuzzy Hash': 'ssdeep',
+ 'Application': 'app',
+ 'Device Country': 'device_country',
+ 'Device Country Code': 'device_countrycode',
+ 'Device Hostname': 'device_hostname',
+ 'Device Serial': 'device_serial',
+ 'Device vsys': 'vsys',
+ 'Destination Country': 'dst_country',
+ 'Destination Country Code': 'dst_countrycode',
+ 'Destination IP': 'dst_ip',
+ 'Destination Port': 'dst_port',
+ 'Email Charset': 'emailsbjcharset',
+ 'Industry': 'device_industry',
+ 'Source Country': 'src_country',
+ 'Source Country Code': 'src_countrycode',
+ 'Source IP': 'src_ip',
+ 'Source Port': 'src_port',
+ 'Time': 'tstamp',
+        'Upload source': 'upload_src'
+ },
+ 'tag_class': {
+ 'Actor': 'actor',
+ 'Campaign': 'campaign',
+ 'Exploit': 'exploit',
+ 'Malicious Behavior': 'malicious_behavior',
+ 'Malware Family': 'malware_family'
+ },
+ 'search_arguments': {
+ 'file_hash': {
+ 'api_name': 'alias.hash',
+ 'operator': 'contains'
+ },
+ 'domain': {
+ 'api_name': 'alias.domain',
+ 'operator': 'contains'
+ },
+ 'ip': {
+ 'api_name': 'alias.ip_address',
+ 'operator': 'contains'
+ },
+ 'url': {
+ 'api_name': 'alias.url',
+ 'operator': 'contains'
+ },
+ 'wildfire_verdict': {
+ 'api_name': 'sample.malware',
+ 'operator': 'is',
+ 'translate': {
+ 'Malware': 1,
+ 'Grayware': 2,
+ 'Benign': 3,
+ 'Phishing': 4,
+ }
+ },
+ 'first_seen': {
+ 'api_name': 'sample.create_date',
+ 'operator': 'is in the range'
+ },
+ 'last_updated': {
+ 'api_name': 'sample.update_date',
+ 'operator': 'is in the range'
+ },
+ 'time_range': {
+ 'api_name': 'session.tstamp',
+ 'operator': 'is in the range'
+ },
+ 'time_after': {
+ 'api_name': 'session.tstamp',
+ 'operator': 'is after'
+ },
+ 'time_before': {
+ 'api_name': 'session.tstamp',
+ 'operator': 'is before'
+ }
+ },
+
+ 'file_indicators': {
+ 'Size': 'Size',
+ 'SHA1': 'SHA1',
+ 'SHA256': 'SHA256',
+ 'FileType': 'Type',
+ 'Tags': 'Tags',
+ 'FileName': 'Name'
+ },
+ 'search_results': {
+ 'sha1': 'SHA1',
+ 'sha256': 'SHA256',
+ 'filetype': 'FileType',
+ 'malware': 'Verdict',
+ 'size': 'Size',
+ 'create_date': 'Created',
+ 'finish_date': 'Finished',
+ 'md5': 'MD5',
+ 'region': 'Region',
+ 'tag': 'Tags',
+ '_id': 'ID',
+ 'tstamp': 'Seen',
+ 'filename': 'FileName',
+ 'device_industry': 'Industry',
+ 'upload_src': 'UploadSource',
+ 'fileurl': 'FileURL'
+ }
+}
+SAMPLE_ANALYSIS_LINE_KEYS = {
+ 'behavior': {
+ 'display_name': 'behavior',
+ 'indexes': {
+ 'risk': 0,
+ 'behavior': -1
+ }
+ },
+ 'process': {
+ 'display_name': 'processes',
+ 'indexes': {
+ 'parent_process': 0,
+ 'action': 1
+ }
+ },
+ 'file': {
+ 'display_name': 'files',
+ 'indexes': {
+ 'parent_process': 0,
+ 'action': 1
+ }
+ },
+ 'registry': {
+ 'display_name': 'registry',
+ 'indexes': {
+ 'action': 1,
+ 'parameters': 2
+ }
+ },
+ 'dns': {
+ 'display_name': 'DNS',
+ 'indexes': {
+ 'query': 0,
+ 'response': 1
+ }
+ },
+ 'http': {
+ 'display_name': 'HTTP',
+ 'indexes': {
+ 'host': 0,
+ 'method': 1,
+ 'url': 2
+ }
+ },
+ 'connection': {
+ 'display_name': 'connections',
+ 'indexes': {
+ 'destination': 2
+ }
+ },
+ 'mutex': {
+ 'display_name': 'mutex',
+ 'indexes': {
+ 'process': 0,
+ 'action': 1,
+ 'parameters': 2
+ }
+ }
+}
+SAMPLE_ANALYSIS_COVERAGE_KEYS = {
+ 'wf_av_sig': {
+ 'display_name': 'wildfire_signatures',
+ 'fields': ['name', 'create_date']
+ },
+ 'fileurl_sig': {
+ 'display_name': 'fileurl_signatures',
+ 'fields': ['name', 'create_date']
+ },
+ 'dns_sig': {
+ 'display_name': 'dns_signatures',
+ 'fields': ['name', 'create_date']
+ },
+ 'url_cat': {
+ 'display_name': 'url_categories',
+ 'fields': ['url', 'cat']
+ }
+}
+''' HELPER FUNCTIONS '''
+
+
+def parse_response(resp, err_operation):
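+    """
+    Returns the JSON body of an AutoFocus response. On an HTTP error, reports a
+    message prefixed with err_operation; known 'not found' errors end the run
+    gracefully instead of failing the command.
+    """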
+ try:
+ # Handle error responses gracefully
+ res_json = resp.json()
+ resp.raise_for_status()
+ return res_json
+ # Errors returned from AutoFocus
+ except requests.exceptions.HTTPError:
+        message = res_json.get('message') or ''
+        err_msg = f'{err_operation}: {message}'
+        if 'Requested sample not found' in message or 'AF Cookie Not Found' in message:
+            demisto.results(err_msg)
+            sys.exit(0)
+        elif err_operation == 'Tag details operation failed' and 'Tag' in message and 'not found' in message:
+            demisto.results(err_msg)
+            sys.exit(0)
+        else:
+            return return_error(err_msg)
+ # Unexpected errors (where no json object was received)
+ except Exception as err:
+ err_msg = f'{err_operation}: {err}'
+ return return_error(err_msg)
+
+
+def http_request(url_suffix, method='POST', data=None, err_operation=None):
+    # A wrapper for the requests lib to send our requests and handle responses consistently
+    # (data defaults to None to avoid a mutable default argument)
+    data = {} if data is None else data
+    data.update({'apiKey': API_KEY})
+ res = requests.request(
+ method=method,
+ url=BASE_URL + url_suffix,
+ verify=USE_SSL,
+ data=json.dumps(data),
+ headers=HEADERS
+ )
+ return parse_response(res, err_operation)
+
+
+def validate_sort_and_order(sort, order):
+ if sort and not order:
+ return_error('Please specify the order of sorting (Ascending or Descending).')
+ if order and not sort:
+ return_error('Please specify a field to sort by.')
+ return sort and order
+
+
+def do_search(search_object, query, scope, size=None, sort=None, order=None, err_operation=None):
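+    """
+    Posts a search query to the AutoFocus samples/sessions search endpoint and
+    returns the raw response, including the AF Cookie used to poll for results.
+    """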
+ path = '/samples/search' if search_object == 'samples' else '/sessions/search'
+ data = {
+ 'query': query,
+ 'size': size
+ }
+ if scope:
+ data.update({'scope': API_PARAM_DICT['scope'][scope]}) # type: ignore
+ if validate_sort_and_order(sort, order):
+ data.update({'sort': {API_PARAM_DICT['sort'][sort]: {'order': API_PARAM_DICT['order'][order]}}}) # type: ignore
+
+ # Remove nulls
+ data = createContext(data, removeNull=True)
+ result = http_request(path, data=data, err_operation=err_operation)
+ return result
+
+
+def run_search(search_object, query, scope=None, size=None, sort=None, order=None):
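+    """
+    Starts a samples/sessions search and returns its AF Cookie and whether the
+    search is still in progress or complete.
+    """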
+ result = do_search(search_object, query=json.loads(query), scope=scope, size=size, sort=sort, order=order,
+ err_operation='Search operation failed')
+ in_progress = result.get('af_in_progress')
+ status = 'in progress' if in_progress else 'complete'
+ search_info = {
+ 'AFCookie': result.get('af_cookie'),
+ 'Status': status
+ }
+ return search_info
+
+
+def run_get_search_results(search_object, af_cookie):
+ path = f'/samples/results/{af_cookie}' if search_object == 'samples' else f'/sessions/results/{af_cookie}'
+ results = http_request(path, err_operation='Fetching search results failed')
+ return results
+
+
+def get_fields_from_hit_object(result_object, response_dict_name):
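+    """
+    Renames the keys of a result object according to the mapping in
+    API_PARAM_DICT[response_dict_name]; unmapped keys are kept as-is.
+    """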
+ new_object = {}
+ af_params_dict = API_PARAM_DICT.get(response_dict_name)
+ for key, value in result_object.items():
+ if key in af_params_dict: # type: ignore
+ new_key = af_params_dict.get(key) # type: ignore
+ new_object[new_key] = value
+ else:
+ new_object[key] = value
+ return new_object
+
+
+def parse_hits_response(hits, response_dict_name):
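+    """
+    Flattens each hit's _source fields (plus its _id) into a single object and
+    renames the fields to their display names.
+    """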
+ parsed_objects = [] # type: ignore
+ if not hits:
+ return parsed_objects
+ else:
+ for hit in hits:
+ flattened_obj = {} # type: ignore
+ flattened_obj.update(hit.get('_source'))
+ flattened_obj['_id'] = hit.get('_id')
+ parsed_obj = get_fields_from_hit_object(flattened_obj, response_dict_name)
+ parsed_objects.append(parsed_obj)
+ return parsed_objects
+
+
+def get_search_results(search_object, af_cookie):
+ results = run_get_search_results(search_object, af_cookie)
+ parsed_results = parse_hits_response(results.get('hits'), 'search_results')
+ in_progress = results.get('af_in_progress')
+ status = 'in progress' if in_progress else 'complete'
+ return parsed_results, status
+
+
+def get_session_details(session_id):
+ path = f'/session/{session_id}'
+ result = http_request(path, err_operation='Get session failed')
+ parsed_result = parse_hits_response(result.get('hits'), 'search_results')
+ return parsed_result
+
+
+def validate_if_line_needed(category, info_line):
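+    """
+    Returns True if an analysis line is interesting enough to display, based on
+    per-category rules (behavior risk, registry/file/process actions).
+    """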
+ line = info_line.get('line')
+ line_values = line.split(',')
+ category_indexes = SAMPLE_ANALYSIS_LINE_KEYS.get(category).get('indexes') # type: ignore
+ if category == 'behavior':
+ risk_index = category_indexes.get('risk') # type: ignore
+ risk = line_values[risk_index].strip()
+        # Only lines with a risk higher than informational are considered
+        return risk != 'informational'
+ elif category == 'registry':
+ action_index = category_indexes.get('action') # type: ignore
+ action = line_values[action_index].strip()
+        # Only lines with the actions SetValueKey, CreateKey, or RegSetValueEx are considered
+        return action in ('SetValueKey', 'CreateKey', 'RegSetValueEx')
+ elif category == 'file':
+ action_index = category_indexes.get('action') # type: ignore
+ action = line_values[action_index].strip()
+        benign_count = info_line.get('b') or 0
+        malicious_count = info_line.get('m') or 0
+        # Only Create or CreateFileW lines where the malicious count is greater than the benign count are considered
+        return action in ('Create', 'CreateFileW') and malicious_count > benign_count
+ elif category == 'process':
+ action_index = category_indexes.get('action') # type: ignore
+ action = line_values[action_index].strip()
+        # Only lines with the actions created or CreateProcessInternalW are considered
+        return action in ('created', 'CreateProcessInternalW')
+ else:
+ return True
+
+
+def get_data_from_line(line, category_name):
+ category_indexes = SAMPLE_ANALYSIS_LINE_KEYS.get(category_name).get('indexes') # type: ignore
+ values = line.split(',')
+ sub_categories = {} # type: ignore
+ if not category_indexes:
+ return sub_categories
+ else:
+ for sub_category in category_indexes: # type: ignore
+ sub_category_index = category_indexes.get(sub_category) # type: ignore
+ sub_categories.update({
+ sub_category: values[sub_category_index]
+ })
+ return sub_categories
+
+
+def get_data_from_coverage_sub_category(sub_category_name, sub_category_data):
+ sub_categories_list = []
+ for item in sub_category_data:
+ new_sub_category = {}
+ fields_to_extract = SAMPLE_ANALYSIS_COVERAGE_KEYS.get(sub_category_name).get('fields') # type: ignore
+ for field in fields_to_extract: # type: ignore
+ new_sub_category[field] = item.get(field) # type: ignore
+ sub_categories_list.append(new_sub_category)
+ return sub_categories_list
+
+
+def parse_coverage_sub_categories(coverage_data):
+ new_coverage = {}
+ for sub_category_name, sub_category_data in coverage_data.items():
+ if sub_category_name in SAMPLE_ANALYSIS_COVERAGE_KEYS:
+ new_sub_category_data = get_data_from_coverage_sub_category(sub_category_name, sub_category_data)
+ new_sub_category_name = SAMPLE_ANALYSIS_COVERAGE_KEYS.get(sub_category_name).get( # type: ignore
+ 'display_name') # type: ignore
+ new_coverage[new_sub_category_name] = new_sub_category_data
+ return {'coverage': new_coverage}
+
+
+def parse_lines_from_os(category_name, data, filter_data_flag):
+ new_lines = []
+ for info_line in data:
+ if not filter_data_flag or validate_if_line_needed(category_name, info_line):
+ new_sub_categories = get_data_from_line(info_line.get('line'), category_name)
+ new_lines.append(new_sub_categories)
+ return new_lines
+
+
+def parse_sample_analysis_response(resp, filter_data_flag):
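+    """
+    Converts a raw sample analysis response into a dict of display categories,
+    filtering out uninteresting lines when filter_data_flag is set.
+    """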
+ analysis = {}
+ for category_name, category_data in resp.items():
+ if category_name in SAMPLE_ANALYSIS_LINE_KEYS:
+ new_category = {}
+ for os_name, os_data in category_data.items():
+ os_sanitized_data = parse_lines_from_os(category_name, os_data, filter_data_flag)
+ new_category[os_name] = os_sanitized_data
+
+ category_dict = SAMPLE_ANALYSIS_LINE_KEYS.get(category_name)
+ analysis.update({category_dict['display_name']: new_category}) # type: ignore
+
+ elif category_name == 'coverage':
+ new_category = parse_coverage_sub_categories(category_data)
+ analysis.update(new_category)
+
+ return analysis
+
+
+def sample_analysis(sample_id, os, filter_data_flag):
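+    """
+    Fetches the analysis report of a sample, optionally limited to a single OS,
+    parsed into display categories.
+    """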
+ path = f'/sample/{sample_id}/analysis'
+ data = {
+ 'coverage': 'true'
+ }
+ if os:
+ data['platforms'] = [os] # type: ignore
+ result = http_request(path, data=data, err_operation='Sample analysis failed')
+ analysis_obj = parse_sample_analysis_response(result, filter_data_flag)
+ return analysis_obj
+
+
+def parse_tag_details_response(resp):
+ tag_details = resp.get('tag')
+ fields_to_extract_from_tag_details = [
+ 'public_tag_name',
+ 'tag_name',
+ 'customer_name',
+ 'source',
+ 'tag_definition_scope',
+ 'tag_definition_status',
+ 'tag_class',
+ 'count',
+ 'lasthit',
+ ]
+ new_tag_info = {}
+ for field in fields_to_extract_from_tag_details:
+ new_tag_info[field] = tag_details.get(field)
+
+ tag_group_details = resp.get('tag_groups')
+ if tag_group_details:
+ new_tag_info['tag_group'] = tag_group_details
+
+ return new_tag_info
+
+
+def autofocus_tag_details(tag_name):
+ path = f'/tag/{tag_name}'
+ resp = http_request(path, err_operation='Tag details operation failed')
+ tag_info = parse_tag_details_response(resp)
+ return tag_info
+
+
+def validate_tag_scopes(private, public, commodity, unit42):
+ if not private and not public and not commodity and not unit42:
+ return_error('Add at least one Tag scope by setting `commodity`, `private`, `public` or `unit42` to True')
+
+
+def autofocus_top_tags_search(scope, tag_class_display, private, public, commodity, unit42):
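+    """
+    Starts a top-tags search for the given tag class, limited to the requested
+    tag scopes, and returns its AF Cookie and initial status.
+    """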
+ validate_tag_scopes(private, public, commodity, unit42)
+ tag_class = API_PARAM_DICT['tag_class'][tag_class_display] # type: ignore
+ query = {
+ "operator": "all",
+ "children": [
+ {
+ "field": "sample.tag_class",
+ "operator": "is",
+ "value": tag_class
+ }
+ ]
+ }
+ tag_scopes = list()
+ if private:
+ tag_scopes.append('private')
+ if public:
+ tag_scopes.append('public')
+ if commodity:
+ tag_scopes.append('commodity')
+ if unit42:
+ tag_scopes.append('unit42')
+ data = {
+ 'query': query,
+ 'scope': scope,
+ 'tagScopes': tag_scopes
+ }
+ path = '/top-tags/search/'
+ resp = http_request(path, data=data, err_operation='Top tags operation failed')
+ in_progress = resp.get('af_in_progress')
+ status = 'in progress' if in_progress else 'complete'
+ search_info = {
+ 'AFCookie': resp.get('af_cookie'),
+ 'Status': status
+ }
+ return search_info
+
+
+def parse_top_tags_response(response):
+ top_tags_list = [] # type: ignore
+ top_tags = response.get('top_tags')
+ if not top_tags:
+ return top_tags_list
+ else:
+ for tag in top_tags:
+ fields_to_extract_from_top_tags = ['tag_name', 'public_tag_name', 'count', 'lasthit']
+ new_tag = {}
+ for field in fields_to_extract_from_top_tags:
+ new_tag[field] = tag[field]
+ top_tags_list.append(new_tag)
+ return top_tags_list
+
+
+def get_top_tags_results(af_cookie):
+ path = f'/top-tags/results/{af_cookie}'
+ results = http_request(path, err_operation='Fetching top tags results failed')
+ top_tags = parse_top_tags_response(results)
+ in_progress = results.get('af_in_progress')
+ status = 'in progress' if in_progress else 'complete'
+ return top_tags, status
+
+
+def print_hr_by_category(category_name, category_data):
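+    """
+    Outputs a war-room entry for each analysis category (per OS for OS-keyed
+    categories), rendered as a markdown table or a 'No entries' note.
+    """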
+ hr = content = f'### {string_to_table_header(category_name)}:\nNo entries'
+ if category_name == 'coverage':
+ content = category_data
+ if category_data:
+ hr = tableToMarkdown(f'{string_to_table_header(category_name)}:', category_data,
+ headerTransform=string_to_table_header)
+ else:
+ hr = f'### {string_to_table_header(category_name)}:\nNo entries'
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': content,
+ 'HumanReadable': hr
+ })
+ else:
+ for os_name, os_data in category_data.items():
+ content = os_data
+ table_header = f'{category_name}_{os_name}'
+ if os_data:
+ hr = tableToMarkdown(f'{string_to_table_header(table_header)}:', os_data,
+ headerTransform=string_to_table_header)
+ else:
+ hr = f'### {string_to_table_header(table_header)}:\nNo entries'
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': content,
+ 'HumanReadable': hr
+ })
+
+
+def get_files_data_from_results(results):
+ """
+    Gets a list of results and, for each result, returns a file object containing
+    all the relevant file indicators that exist in that result
+ :param results: a list of dictionaries
+ :return: a list of file objects
+ """
+ files = []
+ if results:
+ for result in results:
+ raw_file = get_fields_from_hit_object(result, 'file_indicators')
+ file_data = filter_object_entries_by_dict_values(raw_file, 'file_indicators')
+ files.append(file_data)
+ return files
+
+
+def filter_object_entries_by_dict_values(result_object, response_dict_name):
+ """
+    Gets a dictionary (result_object) and filters its keys by the values of the
+    API_PARAM_DICT entry named by response_dict_name
+ input: response_dict_name = 'file_indicators' - see API_PARAM_DICT above
+ result_object = {
+ "app": "web-browsing",
+ "vsys": 1,
+ "SHA256": "18c9acd34a3aea09121f027857e0004a3ea33a372b213a8361e8a978330f0dc8",
+ "UploadSource": "Firewall",
+ "src_port": 80,
+ "device_serial": "007051000050926",
+ "Seen": "2019-07-24T09:37:04",
+ "Name": "wildfire-test-pe-file.exe",
+ "user_id": "unknown",
+ "src_country": "United States",
+ "src_countrycode": "US",
+ "dst_port": 65168,
+ "device_countrycode": "US",
+ "Industry": "High Tech",
+ "Region": "us",
+ "device_country": "United States",
+ "ID": "179972200903"
+ }
+ output: {
+ "SHA256": "18c9acd34a3aea09121f027857e0004a3ea33a372b213a8361e8a978330f0dc8",
+ "Name": "wildfire-test-pe-file.exe"
+ }
+ :param result_object: a dictionary representing an object
+    :param response_dict_name: the name of an API_PARAM_DICT entry whose values are the relevant fields (filters)
+ :return: the result_object filtered by the relevant fields
+ """
+ af_params_dict = API_PARAM_DICT.get(response_dict_name)
+ result_object_filtered = {}
+ if af_params_dict and isinstance(result_object, dict) and isinstance(af_params_dict, dict):
+ for key in result_object.keys():
+ if key in af_params_dict.values(): # type: ignore
+ result_object_filtered[key] = result_object.get(key)
+ return result_object_filtered
+
+
+def search_samples(query=None, scope=None, size=None, sort=None, order=None, file_hash=None, domain=None, ip=None,
+ url=None, wildfire_verdict=None, first_seen=None, last_updated=None):
+ validate_no_query_and_indicators(query, [file_hash, domain, ip, url, wildfire_verdict, first_seen, last_updated])
+ if not query:
+ validate_no_multiple_indicators_for_search([file_hash, domain, ip, url])
+ query = build_sample_search_query(file_hash, domain, ip, url, wildfire_verdict, first_seen, last_updated)
+ return run_search('samples', query=query, scope=scope, size=size, sort=sort, order=order)
+
+
+def build_sample_search_query(file_hash, domain, ip, url, wildfire_verdict, first_seen, last_updated):
+ indicator_args_for_query = {
+ 'file_hash': file_hash,
+ 'domain': domain,
+ 'ip': ip,
+ 'url': url
+ }
+ indicator_list = build_indicator_children_query(indicator_args_for_query)
+ indicator_query = build_logic_query('OR', indicator_list)
+ filtering_args_for_search = {} # type: ignore
+ if wildfire_verdict:
+ filtering_args_for_search['wildfire_verdict'] = \
+ demisto.get(API_PARAM_DICT, f'search_arguments.wildfire_verdict.translate.{wildfire_verdict}')
+ if first_seen:
+ filtering_args_for_search['first_seen'] = first_seen
+ if last_updated:
+ filtering_args_for_search['last_updated'] = last_updated
+ filters_list = build_children_query(filtering_args_for_search)
+ filters_list.append(indicator_query)
+ logic_query = build_logic_query('AND', filters_list)
+ return json.dumps(logic_query)
+
+
+def search_sessions(query=None, size=None, sort=None, order=None, file_hash=None, domain=None, ip=None, url=None,
+ from_time=None, to_time=None):
+ validate_no_query_and_indicators(query, [file_hash, domain, ip, url, from_time, to_time])
+ if not query:
+ validate_no_multiple_indicators_for_search([file_hash, domain, ip, url])
+ query = build_session_search_query(file_hash, domain, ip, url, from_time, to_time)
+ return run_search('sessions', query=query, size=size, sort=sort, order=order)
+
+
+def build_session_search_query(file_hash, domain, ip, url, from_time, to_time):
+ indicator_args_for_query = {
+ 'file_hash': file_hash,
+ 'domain': domain,
+ 'ip': ip,
+ 'url': url
+ }
+ indicator_list = build_indicator_children_query(indicator_args_for_query)
+ indicator_query = build_logic_query('OR', indicator_list)
+ time_filters_for_search = {} # type: ignore
+ if from_time and to_time:
+ time_filters_for_search = {'time_range': [from_time, to_time]}
+ elif from_time:
+ time_filters_for_search = {'time_after': [from_time]}
+ elif to_time:
+ time_filters_for_search = {'time_before': [to_time]}
+
+ filters_list = build_children_query(time_filters_for_search)
+ filters_list.append(indicator_query)
+ logic_query = build_logic_query('AND', filters_list)
+ return json.dumps(logic_query)
+
+
+def build_logic_query(logic_operator, condition_list):
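+    """
+    Wraps a list of query conditions in an AutoFocus logic node, for example
+    {'operator': 'all', 'children': [...]} for AND, or 'any' for OR.
+    """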
+ operator = None
+ if logic_operator == 'AND':
+ operator = 'all'
+ elif logic_operator == 'OR':
+ operator = 'any'
+ return {
+ 'operator': operator,
+ 'children': condition_list
+ }
+
+
+def build_children_query(args_for_query):
+ children_list = [] # type: ignore
+ for key, val in args_for_query.items():
+ field_api_name = API_PARAM_DICT['search_arguments'][key]['api_name'] # type: ignore
+ operator = API_PARAM_DICT['search_arguments'][key]['operator'] # type: ignore
+ children_list += children_list_generator(field_api_name, operator, [val])
+ return children_list
+
+
+def build_indicator_children_query(args_for_query):
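+    """
+    Builds the query conditions for the single indicator argument that is set.
+    Callers validate that exactly one indicator was given, so children_list is
+    always bound before being returned.
+    """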
+ for key, val in args_for_query.items():
+ if val:
+ field_api_name = API_PARAM_DICT['search_arguments'][key]['api_name'] # type: ignore
+ operator = API_PARAM_DICT['search_arguments'][key]['operator'] # type: ignore
+ children_list = children_list_generator(field_api_name, operator, val)
+ return children_list
+
+
+def children_list_generator(field_name, operator, val_list):
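+    """
+    Builds one {'field', 'operator', 'value'} condition per value in val_list.
+    """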
+ query_list = []
+ for value in val_list:
+ query_list.append({
+ 'field': field_name,
+ 'operator': operator,
+ 'value': value
+ })
+ return query_list
+
+
+def validate_no_query_and_indicators(query, arg_list):
+ if query:
+ for arg in arg_list:
+ if arg:
+                return_error('The search command can either run a search using a custom query '
+                             'or use the built-in arguments, but not both')
+
+
+def validate_no_multiple_indicators_for_search(arg_list):
+ used_arg = None
+ for arg in arg_list:
+ if arg and used_arg:
+ return_error(f'The search command can receive one indicator type at a time, two were given: {used_arg}, '
+                         f'{arg}. For multiple indicator types, use the custom query.')
+ elif arg:
+ used_arg = arg
+ if not used_arg:
+ return_error('In order to perform a samples/sessions search, a query or an indicator must be given.')
+ return
+
+
+''' COMMANDS '''
+
+
+def test_module():
+ """
+    Performs a basic samples search to verify that the API key and connectivity are valid
+ """
+ query = {
+ 'operator': 'all',
+ 'children': [
+ {
+ 'field': 'sample.malware',
+ 'operator': 'is',
+ 'value': 1
+ }
+ ]
+ }
+
+ do_search('samples', query=query, scope='Public', err_operation='Test module failed')
+ return
+
+
+def search_samples_command():
+ args = demisto.args()
+ file_hash = argToList(args.get('file_hash'))
+ domain = argToList(args.get('domain'))
+ ip = argToList(args.get('ip'))
+ url = argToList(args.get('url'))
+ wildfire_verdict = args.get('wildfire_verdict')
+ first_seen = argToList(args.get('first_seen'))
+ last_updated = argToList(args.get('last_updated'))
+ query = args.get('query')
+ scope = args.get('scope')
+ max_results = args.get('max_results')
+ sort = args.get('sort')
+ order = args.get('order')
+ info = search_samples(query=query, scope=scope, size=max_results, sort=sort, order=order, file_hash=file_hash,
+ domain=domain, ip=ip, url=url, wildfire_verdict=wildfire_verdict, first_seen=first_seen,
+ last_updated=last_updated)
+    md = tableToMarkdown('Search Samples Info:', info)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': info,
+ 'EntryContext': {'AutoFocus.SamplesSearch(val.AFCookie == obj.AFCookie)': info},
+ 'HumanReadable': md
+ })
+
+
+def search_sessions_command():
+ args = demisto.args()
+ file_hash = argToList(args.get('file_hash'))
+ domain = argToList(args.get('domain'))
+ ip = argToList(args.get('ip'))
+ url = argToList(args.get('url'))
+ from_time = args.get('from_time')
+ to_time = args.get('to_time')
+ query = args.get('query')
+ max_results = args.get('max_results')
+ sort = args.get('sort')
+ order = args.get('order')
+ info = search_sessions(query=query, size=max_results, sort=sort, order=order, file_hash=file_hash, domain=domain,
+ ip=ip, url=url, from_time=from_time, to_time=to_time)
+    md = tableToMarkdown('Search Sessions Info:', info)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': info,
+ 'EntryContext': {'AutoFocus.SessionsSearch(val.AFCookie == obj.AFCookie)': info},
+ 'HumanReadable': md
+ })
+
+
+def samples_search_results_command():
+ args = demisto.args()
+ af_cookie = args.get('af_cookie')
+ results, status = get_search_results('samples', af_cookie)
+ files = get_files_data_from_results(results)
+    if not results:
+ md = results = 'No entries found that match the query'
+ status = 'complete'
+ else:
+ md = tableToMarkdown(f'Search Samples Results is {status}', results)
+ context = {
+ 'AutoFocus.SamplesResults(val.ID === obj.ID)': results,
+        'AutoFocus.SamplesSearch(val.AFCookie == obj.AFCookie)': {'Status': status, 'AFCookie': af_cookie},
+ outputPaths['file']: files
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': results,
+ 'EntryContext': context,
+ 'HumanReadable': md
+ })
+
+
+def sessions_search_results_command():
+ args = demisto.args()
+ af_cookie = args.get('af_cookie')
+ results, status = get_search_results('sessions', af_cookie)
+ files = get_files_data_from_results(results)
+    if not results:
+ md = results = 'No entries found that match the query'
+ status = 'complete'
+ else:
+ md = tableToMarkdown(f'Search Sessions Results is {status}', results)
+ context = {
+ 'AutoFocus.SessionsResults(val.ID === obj.ID)': results,
+ 'AutoFocus.SessionsSearch(val.AFCookie === obj.AFCookie)': {'Status': status, 'AFCookie': af_cookie},
+ outputPaths['file']: files
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': results,
+ 'EntryContext': context,
+ 'HumanReadable': md
+ })
+
+
+def get_session_details_command():
+ args = demisto.args()
+ session_id = args.get('session_id')
+ result = get_session_details(session_id)
+ files = get_files_data_from_results(result)
+ md = tableToMarkdown(f'Session {session_id}:', result)
+ context = {
+ 'AutoFocus.Sessions(val.ID === obj.ID)': result,
+ outputPaths['file']: files
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': result,
+ 'EntryContext': context,
+ 'HumanReadable': md
+ })
+
+
+def sample_analysis_command():
+ args = demisto.args()
+ sample_id = args.get('sample_id')
+ os = args.get('os')
+    filter_data = args.get('filter_data') != 'False'
+ analysis = sample_analysis(sample_id, os, filter_data)
+ context = createContext(analysis, keyTransform=string_to_context_key)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': {'ID': sample_id, 'Analysis': analysis},
+ 'HumanReadable': f'### Sample Analysis results for {sample_id}:',
+        'EntryContext': {'AutoFocus.SampleAnalysis(val.ID == obj.ID)': {'ID': sample_id, 'Analysis': context}},
+ })
+ for category_name, category_data in analysis.items():
+ print_hr_by_category(category_name, category_data)
+
+
+def tag_details_command():
+ args = demisto.args()
+ tag_name = args.get('tag_name')
+ result = autofocus_tag_details(tag_name)
+ md = tableToMarkdown(f'Tag {tag_name} details:', result, headerTransform=string_to_table_header)
+ context = createContext(result, keyTransform=string_to_context_key)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': result,
+ 'EntryContext': {'AutoFocus.Tag(val.ID == obj.ID)': context},
+ 'HumanReadable': md
+ })
+
+
+def top_tags_search_command():
+ args = demisto.args()
+ scope = args.get('scope')
+ tag_class = args.get('class')
+ private = args.get('private') == 'True'
+ public = args.get('public') == 'True'
+ commodity = args.get('commodity') == 'True'
+ unit42 = args.get('unit42') == 'True'
+ info = autofocus_top_tags_search(scope, tag_class, private, public, commodity, unit42)
+    md = tableToMarkdown('Top tags search Info:', info)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': info,
+ 'EntryContext': {'AutoFocus.TopTagsSearch(val.AFCookie == obj.AFCookie)': info},
+ 'HumanReadable': md
+ })
+
+
+def top_tags_results_command():
+ args = demisto.args()
+ af_cookie = args.get('af_cookie')
+ results, status = get_top_tags_results(af_cookie)
+ md = tableToMarkdown(f'Search Top Tags Results is {status}:', results, headerTransform=string_to_table_header)
+ context = createContext(results, keyTransform=string_to_context_key)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': results,
+ 'EntryContext': {'AutoFocus.TopTagsResults(val.PublicTagName == obj.PublicTagName)': context,
+ 'AutoFocus.TopTagsSearch(val.AFCookie == obj.AFCookie)': {'Status': status,
+ 'AFCookie': af_cookie}},
+ 'HumanReadable': md
+ })
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG(f'Command being called is {demisto.command()}')
+
+try:
+ # Remove proxy if not set to true in params
+ handle_proxy()
+ active_command = demisto.command()
+ if active_command == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+ demisto.results('ok')
+ elif active_command == 'autofocus-search-samples':
+ search_samples_command()
+ elif active_command == 'autofocus-search-sessions':
+ search_sessions_command()
+ elif active_command == 'autofocus-samples-search-results':
+ samples_search_results_command()
+ elif active_command == 'autofocus-sessions-search-results':
+ sessions_search_results_command()
+ elif active_command == 'autofocus-get-session-details':
+ get_session_details_command()
+ elif active_command == 'autofocus-sample-analysis':
+ sample_analysis_command()
+ elif active_command == 'autofocus-tag-details':
+ tag_details_command()
+ elif active_command == 'autofocus-top-tags-search':
+ top_tags_search_command()
+ elif active_command == 'autofocus-top-tags-results':
+ top_tags_results_command()
+
+
+# Log exceptions
+except Exception as e:
+ LOG(e)
+ LOG.print_log()
+ return_error(f'Unexpected error: {e}')
diff --git a/Integrations/AutofocusV2/AutofocusV2.yml b/Integrations/AutofocusV2/AutofocusV2.yml
new file mode 100644
index 000000000000..6347a96f72e6
--- /dev/null
+++ b/Integrations/AutofocusV2/AutofocusV2.yml
@@ -0,0 +1,653 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: AutoFocus V2
+ version: -1
+configuration:
+- display: API Key
+ name: api_key
+ required: true
+ type: 4
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Use the Palo Alto Networks AutoFocus integration to distinguish the most
+ important threats from everyday commodity attacks.
+display: Palo Alto Networks AutoFocus V2
+name: AutoFocus V2
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The query for which to retrieve samples. For additional information
+ on how to build your query using the AF GUI, see the detailed description section.
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '30'
+      description: The maximum number of results to return. Default is 30.
+ isArray: false
+ name: max_results
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The field by which to sort the results.
+ isArray: false
+ name: sort
+ predefined:
+ - App Name
+ - App Packagename
+ - File type
+ - Size
+ - Finish Date
+ - First Seen (Create Date)
+ - Last Updated (Update Date)
+ - MD5
+ - SHA1
+ - SHA256
+ - Ssdeep Fuzzy Hash
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The order of the results. Can be "Ascending" or "Descending".
+ isArray: false
+ name: order
+ predefined:
+ - Ascending
+ - Descending
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: The scope of the search. Can be "Private", "Public", or "Global".
+ isArray: false
+ name: scope
+ predefined:
+ - Private
+ - Public
+ - Global
+ required: true
+ secret: false
+ - default: false
+ description: The MD5, SHA1, or SHA256 hash of the file.
+ isArray: false
+ name: file_hash
+ required: false
+ secret: false
+ - default: false
+ description: The domain to search.
+ isArray: false
+ name: domain
+ required: false
+ secret: false
+ - default: false
+ description: The IP address to search.
+ isArray: false
+ name: ip
+ required: false
+ secret: false
+ - default: false
+ description: The URL to search.
+ isArray: false
+ name: url
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The Wildfire verdict. Can be "Malware", "Grayware", "Benign", or "Phishing".
+ isArray: false
+ name: wildfire_verdict
+ predefined:
+ - Malware
+ - Grayware
+ - Benign
+ - Phishing
+ required: false
+ secret: false
+ - default: false
+ description: 'The date range of the creation date.
+ Format: YYYY-MM-DDTHH:MM:SS,YYYY-MM-DDTHH:MM:SS where the first date is the beginning and the second is the end.
+ Example: 2019-09-09T00:00:00,2019-09-09T23:01:59'
+ isArray: false
+ name: first_seen
+ required: false
+ secret: false
+ - default: false
+ description: 'The date range of the last updated date.
+ Format: YYYY-MM-DDTHH:MM:SS,YYYY-MM-DDTHH:MM:SS where the first date is the beginning and the second is the end.
+ Example: 2019-09-09T00:00:00,2019-09-09T23:01:59'
+ isArray: false
+ name: last_updated
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for samples in AutoFocus. To view results, run the autofocus-samples-search-results
+ command with the returned AF Cookie. The AF Cookie expires 120 seconds after
+ the search completes.
+ execution: false
+ name: autofocus-search-samples
+ outputs:
+ - contextPath: AutoFocus.SamplesSearch.AFCookie
+ description: The AutoFocus search ID. Use this ID to retrieve search results. The AF
+ Cookie expires 120 seconds after the search completes.
+ type: String
+ - contextPath: AutoFocus.SamplesSearch.Status
+ description: The search status. Can be "in progress" or "complete".
+ type: String
+ - arguments:
+ - default: false
+ description: The query for which to retrieve samples. For additional information
+ on how to build your query using the AF GUI, see the detailed description section.
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '30'
+ description: The maximum number of results to return. Default is 30.
+ isArray: false
+ name: max_results
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The field by which to sort the results.
+ isArray: false
+ name: sort
+ predefined:
+ - Application
+ - Device Country
+ - Device Country Code
+ - Device Hostname
+ - Device Serial
+ - Device vsys
+ - Destination Country
+ - Destination Country Code
+ - Destination IP
+ - Destination Port
+ - Email Charset
+ - Industry
+ - Source Country
+ - Source Country Code
+ - Source IP
+ - Source Port
+ - SHA256
+ - Time
+ - Upload source
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The order of the results. Can be "Ascending" or "Descending".
+ isArray: false
+ name: order
+ predefined:
+ - Ascending
+ - Descending
+ required: false
+ secret: false
+ - default: false
+ description: The MD5, SHA1, or SHA256 hash of the file.
+ isArray: false
+ name: file_hash
+ required: false
+ secret: false
+ - default: false
+ description: The domain to search.
+ isArray: false
+ name: domain
+ required: false
+ secret: false
+ - default: false
+ description: The IP address to search.
+ isArray: false
+ name: ip
+ required: false
+ secret: false
+ - default: false
+ description: The URL to search.
+ isArray: false
+ name: url
+ required: false
+ secret: false
+ - default: false
+ description: 'The date range in which to search for sessions.
+ Format: YYYY-MM-DDTHH:MM:SS,YYYY-MM-DDTHH:MM:SS where the first date is the beginning and the second is the end.
+ Example: 2019-09-09T00:00:00,2019-09-09T23:01:59'
+ isArray: false
+ name: time_range
+ required: false
+ secret: false
+ - default: false
+ description: 'The date after which to search for sessions.
+ Format: YYYY-MM-DDTHH:MM:SS
+ Example: 2019-09-09T23:01:59'
+ isArray: false
+ name: time_after
+ required: false
+ secret: false
+ - default: false
+ description: 'The date before which to search for sessions.
+ Format: YYYY-MM-DDTHH:MM:SS
+ Example: 2019-09-09T23:01:59'
+ isArray: false
+ name: time_before
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for sessions in AutoFocus. To view results, run the autofocus-sessions-search-results
+ command with the returned AF Cookie. The AF Cookie expires 120 seconds after
+ the search completes.
+ execution: false
+ name: autofocus-search-sessions
+ outputs:
+ - contextPath: AutoFocus.SessionsSearch.AFCookie
+ description: The AutoFocus search ID. Use this ID to get search results. The AF
+ Cookie expires 120 seconds after the search completes.
+ type: String
+ - contextPath: AutoFocus.SessionsSearch.Status
+ description: The search status. Can be "in progress" or "complete".
+ type: String
+ - arguments:
+ - default: false
+ description: The AF Cookie for retrieving results of previous searches. The
+ AF Cookie expires 120 seconds after the search completes.
+ isArray: false
+ name: af_cookie
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns results of a previous samples search.
+ execution: false
+ name: autofocus-samples-search-results
+ outputs:
+ - contextPath: AutoFocus.SamplesResults.Size
+ description: The file size in bytes.
+ type: String
+ - contextPath: AutoFocus.SamplesResults.SHA1
+ description: The SHA1 hash of the file.
+ type: String
+ - contextPath: AutoFocus.SamplesResults.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - contextPath: AutoFocus.SamplesResults.Created
+ description: The date that the file was created.
+ type: Date
+ - contextPath: AutoFocus.SamplesResults.Finished
+ description: The date that the analysis of the sample finished.
+ type: Date
+ - contextPath: AutoFocus.SamplesResults.Region
+ description: Region of the sample.
+ type: String
+ - contextPath: AutoFocus.SamplesResults.FileType
+ description: The file type.
+ type: String
+ - contextPath: AutoFocus.SamplesResults.Tags
+ description: The tags attached to the sample.
+ type: String
+ - contextPath: AutoFocus.SamplesResults.Verdict
+ description: The verdict of the sample.
+ type: Number
+ - contextPath: AutoFocus.SamplesResults.TagGroups
+ description: Groups of relevant tags.
+ type: String
+ - contextPath: AutoFocus.SamplesSearch.Status
+ description: The search status. Can be "in progress" or "complete".
+ type: String
+ - contextPath: File.Size
+ description: The size of the file in bytes.
+ type: Number
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: String
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - contextPath: File.Type
+ description: The file type, as determined by libmagic (same as displayed in
+ file entries).
+ type: String
+ - contextPath: File.Tags
+ description: Tags of the file.
+ type: String
+ - arguments:
+ - default: false
+ description: The AF Cookie for retrieving the results of a previous search.
+ The AF Cookie expires 120 seconds after the search completes.
+ isArray: false
+ name: af_cookie
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns results of a previous sessions search.
+ execution: false
+ name: autofocus-sessions-search-results
+ outputs:
+ - contextPath: AutoFocus.SessionsResults.FileName
+ description: The name of the file.
+ type: String
+ - contextPath: AutoFocus.SessionsResults.ID
+ description: The session ID. Used to get session details.
+ type: String
+ - contextPath: AutoFocus.SessionsResults.Industry
+ description: The related industry.
+ type: String
+ - contextPath: AutoFocus.SessionsResults.Region
+ description: The regions of the sessions.
+ type: String
+ - contextPath: AutoFocus.SessionsResults.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - contextPath: AutoFocus.SessionsResults.Seen
+ description: Seen date.
+ type: Date
+ - contextPath: AutoFocus.SessionsResults.UploadSource
+ description: The source of the uploaded sample.
+ type: String
+ - contextPath: AutoFocus.SessionsResults.FileURL
+ description: The URL of the file.
+ type: String
+ - contextPath: AutoFocus.SessionsResults.Tags
+ description: Relevant tags.
+ type: String
+ - contextPath: AutoFocus.SessionsSearch.Status
+ description: The search status. Can be "in progress" or "complete".
+ type: String
+ - contextPath: File.Name
+ description: The full file name (including file extension).
+ type: String
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - contextPath: File.Tags
+ description: Tags of the file.
+ type: String
+ - arguments:
+ - default: false
+ description: The session ID.
+ isArray: false
+ name: session_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns session details for the specified session ID.
+ execution: false
+ name: autofocus-get-session-details
+ outputs:
+ - contextPath: AutoFocus.Sessions.FileName
+ description: The file name.
+ type: String
+ - contextPath: AutoFocus.Sessions.ID
+ description: The session ID.
+ type: String
+ - contextPath: AutoFocus.Sessions.Industry
+ description: The related industry.
+ type: String
+ - contextPath: AutoFocus.Sessions.Region
+ description: Session regions.
+ type: String
+ - contextPath: AutoFocus.Sessions.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - contextPath: AutoFocus.Sessions.Seen
+ description: Seen date.
+ type: Date
+ - contextPath: AutoFocus.Sessions.UploadSource
+ description: The source that uploaded the sample.
+ type: String
+ - contextPath: File.Name
+ description: The full file name (including file extension).
+ type: String
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - arguments:
+ - default: false
+ description: The SHA256 hash of the sample to analyze.
+ isArray: false
+ name: sample_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The analysis environment. Can be "win7", "winxp", "android", "static_analyzer", "mac", or "bare_metal".
+ isArray: false
+ name: os
+ predefined:
+ - win7
+ - winxp
+ - android
+ - static_analyzer
+ - mac
+ - bare_metal
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'True'
+ description: Whether to smartly filter the data. Setting this to "False" returns
+ the data unfiltered and significantly reduces integration performance. We recommend
+ keeping this set to "True".
+ isArray: false
+ name: filter_data
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns properties, behaviors, and activities observed for a sample.
+ Run the command a single time to get the fields and operating systems under
+ HTTP, Coverage, Behavior, Registry, Files, Processes, Connections, and DNS.
+ execution: false
+ name: autofocus-sample-analysis
+ outputs:
+ - contextPath: AutoFocus.SampleAnalysis.Analysis.Http
+ description: HTTP requests made when the sample was executed.
+ type: Unknown
+ - contextPath: AutoFocus.SampleAnalysis.Analysis.Coverage
+ description: WildFire signatures that matched to the sample.
+ type: Unknown
+ - contextPath: AutoFocus.SampleAnalysis.Analysis.Behavior
+ description: 'Sample behavior: created or modified files, started a process,
+ spawned new processes, modified the registry, or installed browser help objects.'
+ type: Unknown
+ - contextPath: AutoFocus.SampleAnalysis.Analysis.Registry
+ description: Registry settings and options that showed activity when the sample
+ was executed in the analysis environment.
+ type: Unknown
+ - contextPath: AutoFocus.SampleAnalysis.Analysis.Files
+ description: Files that showed activity as a result of the sample being executed.
+ type: Unknown
+ - contextPath: AutoFocus.SampleAnalysis.Analysis.Processes
+ description: Processes that showed activity when the sample was executed.
+ type: Unknown
+ - contextPath: AutoFocus.SampleAnalysis.Analysis.Connections
+ description: Connections to other hosts on the network when the sample was executed.
+ type: Unknown
+ - contextPath: AutoFocus.SampleAnalysis.Analysis.Dns
+ description: DNS activity observed when the sample was executed.
+ type: Unknown
+ - contextPath: AutoFocus.SampleAnalysis.Analysis.Mutex
+ description: The mutex created when the programs start is listed with the parent
+ process if the sample generates other program threads when executed in the
+ analysis environment.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The public tag name. Can be retrieved from the autofocus-top-tags-results command.
+ isArray: false
+ name: tag_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns details about the given tag.
+ execution: false
+ name: autofocus-tag-details
+ outputs:
+ - contextPath: AutoFocus.Tag.TagName
+ description: The simple name of the tag.
+ type: String
+ - contextPath: AutoFocus.Tag.PublicTagName
+ description: The public name of the tag. This is used as an ID of the tag.
+ type: String
+ - contextPath: AutoFocus.Tag.Count
+ description: The number of samples that matched this tag.
+ type: Number
+ - contextPath: AutoFocus.Tag.Lasthit
+ description: The date that the tag was last encountered.
+ type: Date
+ - contextPath: AutoFocus.Tag.TagDefinitionScope
+ description: The scope of the tag ("public", "private", or "Unit42").
+ type: String
+ - contextPath: AutoFocus.Tag.CustomerName
+ description: The organization that created the tag.
+ type: String
+ - contextPath: AutoFocus.Tag.Source
+ description: The organization or individual that discovered the threat that is defined
+ in the tag.
+ type: String
+ - contextPath: AutoFocus.Tag.TagClass
+ description: The classification of the tag.
+ type: String
+ - contextPath: AutoFocus.Tag.TagDefinitionStatus
+ description: The status of the tag definition ("enabled", "disabled", "removing",
+ or "rescoping").
+ type: String
+ - contextPath: AutoFocus.Tag.TagGroup
+ description: The tag group of the tag.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The scope of the search. Can be "industry", "organization", "all", or "global".
+ isArray: false
+ name: scope
+ predefined:
+ - industry
+ - organization
+ - all
+ - global
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'Tag class.
+ - Malware Family: group of malware that have shared properties or common functions.
+ - Campaign: targeted attack, which might include several incidents or sets of activities.
+ - Actor: individual or group that initiates a campaign using malware families.
+ - Exploit: an attack that takes advantage
+ of a software or network weakness, bug, or vulnerability to manipulate the behavior of the system.
+ - Malicious Behavior: behavior that is not specific to a malware family or campaign, but indicates that your system has been compromised.'
+ isArray: false
+ name: class
+ predefined:
+ - Actor
+ - Campaign
+ - Exploit
+ - Malicious Behavior
+ - Malware Family
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: Whether the tag scope is "private". If "True", the tag scope is private. Default is "False".
+ isArray: false
+ name: private
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: Whether the tag scope is "public". If "True", the tag scope is public. Default is "False".
+ isArray: false
+ name: public
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: Whether the tag scope is "commodity". If "True", the tag scope is commodity. Default is "False".
+ isArray: false
+ name: commodity
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: Whether the tag scope is "Unit42". If "True", the tag scope is unit42. Default is "False".
+ isArray: false
+ name: unit42
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Performs a search to identify the most popular tags.
+ execution: false
+ name: autofocus-top-tags-search
+ outputs:
+ - contextPath: AutoFocus.TopTagsSearch.AFCookie
+ description: The AutoFocus search ID. Use this ID to get search results. The AF
+ Cookie expires 120 seconds after the search completes.
+ type: String
+ - contextPath: AutoFocus.TopTagsSearch.Status
+ description: The search status. Can be "in progress" or "complete".
+ type: String
+ - arguments:
+ - default: false
+ description: 'The AF Cookie for retrieving the results of a previous search. Note:
+ The AF Cookie expires 120 seconds after the search completes.'
+ isArray: false
+ name: af_cookie
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the results of a previous top tags search.
+ execution: false
+ name: autofocus-top-tags-results
+ outputs:
+ - contextPath: AutoFocus.TopTagsResults.Count
+ description: The number of samples that matched this tag.
+ type: Number
+ - contextPath: AutoFocus.TopTagsResults.PublicTagName
+ description: The public name of the tag. This is used as an ID of the tag.
+ type: String
+ - contextPath: AutoFocus.TopTagsResults.TagName
+ description: The simple name of the tag.
+ type: String
+ - contextPath: AutoFocus.TopTagsResults.Lasthit
+ description: The last encounter date of the tag.
+ type: Date
+ - contextPath: AutoFocus.TopTagsSearch.Status
+ description: The search status. Can be "in progress" or "complete".
+ type: String
+ dockerimage: demisto/python3:3.7.2.214
+ subtype: python3
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- AutoFocus V2 test
\ No newline at end of file
diff --git a/Integrations/AutofocusV2/AutofocusV2_description.md b/Integrations/AutofocusV2/AutofocusV2_description.md
new file mode 100644
index 000000000000..e7919467183e
--- /dev/null
+++ b/Integrations/AutofocusV2/AutofocusV2_description.md
@@ -0,0 +1,32 @@
+## Get Your API Key
+To get your API key, you need to add an authorization code, and then activate the API.
+
+### Add your authorization code
+1. Go to the [Palo Alto Networks support site](https://support.paloaltonetworks.com).
+2. From the left-side navigation menu, select **Assets > Site Licenses**.
+3. Click the **Add Site License** button.
+4. Enter the authorization code.
+
+### Activate the API
+1. From the **Site Licenses** page, click **Enable**.
+2. Click the **API Key** link.
+
+Enter this API key when configuring the AutoFocus integration in Demisto.
+For more information about activating the license, see [Activating AutoFocus Licenses](https://docs.paloaltonetworks.com/autofocus/autofocus-admin/get-started-with-autofocus/activate-autofocus-licenses.html).
+
+## How to Build a Query
+These instructions explain how to build a query, which you can use as the value for the `query` argument. You can use this argument in the **autofocus-search-samples** and **autofocus-search-sessions** commands.
+1. Go to the [AutoFocus platform](https://autofocus.paloaltonetworks.com/#/samples/global).
+2. From the left-side navigation menu, click **Search**.
+3. From the top navigation bar, click **Advanced...**.
+4. Build a query by selecting fields, operators, and relevant values. You can always add an additional condition by selecting the **+** button on the right. For more information on how to use the search editor, see [Work with the Search Editor](https://docs.paloaltonetworks.com/autofocus/autofocus-admin/autofocus-search/work-with-the-search-editor.html#id791798e0-2277-41b5-a723-383bd0787816_id597cae40-646e-4a2f-acf5-5fe04d9e2cf0).
+5. To export the query, click the **>_API** button.
+6. Copy the query value and paste it as the value for the `query` argument for both search commands. For example:
+```
+{"operator":"all","children":[{"field":"sample.malware","operator":"is","value":1},{"field":"sample.create_date","operator":"is after","value":["2019-06-13","2019-06-13"]}]}
+```
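+You can then pass the exported query to a search command. This is an illustrative War Room invocation (the query value and argument choices are examples; wrap the JSON in backticks so it is parsed as a single argument value):
+```
+!autofocus-search-samples query=`{"operator":"all","children":[{"field":"sample.malware","operator":"is","value":1}]}` scope=Global
+```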
+
+## Note for the autofocus-sample-analysis Command
+Due to the large number of dynamic outputs, run the command once to get the fields and operating systems under HTTP, Coverage, Behavior, Registry, Files, Processes, Connections, and DNS.
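+
+For example, an illustrative invocation (replace the placeholder with a real SHA256 hash):
+```
+!autofocus-sample-analysis sample_id=<sample_sha256> os=win7 filter_data=True
+```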
diff --git a/Integrations/AutofocusV2/AutofocusV2_image.png b/Integrations/AutofocusV2/AutofocusV2_image.png
new file mode 100644
index 000000000000..ff44f2d6baac
Binary files /dev/null and b/Integrations/AutofocusV2/AutofocusV2_image.png differ
diff --git a/Integrations/AutofocusV2/CHANGELOG.md b/Integrations/AutofocusV2/CHANGELOG.md
new file mode 100644
index 000000000000..5ef33548c856
--- /dev/null
+++ b/Integrations/AutofocusV2/CHANGELOG.md
@@ -0,0 +1,29 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+ - Improved handling of empty responses for the ***autofocus-samples-search*** and ***autofocus-sessions-search*** commands.
+
+
+## [19.9.1] - 2019-09-18
+Added several arguments to the ***autofocus-samples-search*** and ***autofocus-sessions-search*** commands.
+ - *file_hash*
+ - *domain*
+ - *ip*
+ - *url*
+ - *wildfire_verdict*
+ - *first_seen*
+ - *last_updated*
+
+## [19.9.0] - 2019-09-04
+ - Updated Palo Alto Networks AutoFocus V2 Indicators context outputs to support version 5.0.
+
+## [19.8.2] - 2019-08-22
+ - Added *tagGroups* output to ***autofocus-samples-search-results*** command.
+ - Improved handling of cases in which unknown tags are retrieved from the ***autofocus-tag-details*** command.
+
+
+## [19.8.0] - 2019-08-06
+ - Added to context the status of commands with the following prefixes: ***autofocus-samples-search***, ***autofocus-sessions-search***, and ***autofocus-top-tags***.
+ - Improved error handling for cases of no report in the ***autofocus-sample-analysis*** command.
+ - Improved error handling for retrieving a pending query in the ***autofocus-samples-search-results*** command.
diff --git a/Integrations/AwakeSecurity/AwakeSecurity.py b/Integrations/AwakeSecurity/AwakeSecurity.py
new file mode 100644
index 000000000000..ae76b15fbd1b
--- /dev/null
+++ b/Integrations/AwakeSecurity/AwakeSecurity.py
@@ -0,0 +1,468 @@
+import demistomock as demisto
+from CommonServerPython import *
+''' IMPORTS '''
+import base64
+import re
+import requests
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS '''
+handle_proxy()
+params = demisto.params()
+server = params["server"]
+prefix = server + "/awakeapi/v1"
+verify = not params.get('unsecure', False)
+credentials = params["credentials"]
+identifier = credentials["identifier"]
+password = credentials["password"]
+# The thresholds arrive as strings from the integration parameters, so cast them
+# before comparing them against numeric percentiles in toDBotScore
+suspicious_threshold = int(params["suspicious_threshold"])
+malicious_threshold = int(params["malicious_threshold"])
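+# Authenticate once at module load time: exchange the configured credentials for
+# an access token, which every subsequent API request sends in the
+# "Authentication: access <token>" header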
+authTokenRequest = {
+ "loginUsername": identifier,
+ "loginPassword": password
+}
+authTokenResponse = requests.post(prefix + "/authtoken", json=authTokenRequest, verify=verify)
+authToken = authTokenResponse.json()["token"]["value"]
+headers = {
+ "Authentication": ("access " + authToken)
+}
+command = demisto.command()
+args = demisto.args()
+request = {}
+
+''' HELPERS '''
+
+
+# Convenient utility to marshal command arguments into the request body
+
+
+def slurp(fields):
+ for field in fields:
+ if field in args:
+ request[field] = args[field]
+
+# Render a subset of the fields of the Contents as a markdown table
+
+
+def displayTable(contents, fields):
+ # We don't use a set() because we want to preserve field order
+ #
+ # The fields are ordered to put the most relevant information first
+ presentFields = [] # type: List[str]
+ # Omit table columns that are all empty
+ for content in contents:
+ for field in fields:
+ if field in content and content[field] and field not in presentFields:
+ presentFields.append(field)
+ line0 = "| "
+ line1 = "| "
+ for field in presentFields:
+ # Translate camel-case field names to title-case space-separated words
+ tokens = re.findall("[a-zA-Z][A-Z]*[^A-Z]*", field)
+ name = " ".join(map(lambda token: token.title(), tokens))
+ line0 += name + " | "
+ line1 += "--- | "
+ line0 += "\n"
+ line1 += "\n"
+ body = ""
+ for content in contents:
+ body += "| "
+ for field in presentFields:
+ if field in content:
+ value = json.dumps(content[field])
+ else:
+ value = ""
+ body += value + " | "
+ body += "\n"
+ if presentFields:
+ return (line0 + line1 + body)
+ else:
+ return "Empty results"
+
+
+def returnResults(contents, outerKey, innerKey, humanReadable, dbotScore):
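+ # The "(val.innerKey === obj.innerKey)" suffix on the context path tells the
+ # Demisto context engine to merge these results with existing context entries
+ # that share the same innerKey value, rather than appending duplicates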
+ machineReadable = {
+ "AwakeSecurity": contents,
+ }
+ entryContext = {
+ ("AwakeSecurity." + outerKey + "(val." + innerKey + "===obj." + innerKey + ")"): contents,
+ }
+ if dbotScore is not None:
+ machineReadable["DBotScore"] = dbotScore
+ entryContext["DBotScore"] = dbotScore
+ demisto.results({
+ "Type": entryTypes['note'],
+ "ContentsFormat": formats['json'],
+ "Contents": json.dumps(machineReadable),
+ "HumanReadable": humanReadable,
+ "ReadableContentsFormat": formats['markdown'],
+ "EntryContext": entryContext,
+ })
+
+
+def toDBotScore(indicator_type, percentile, lookup_key):
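+ # Map the Awake notability percentile onto the DBot score scale:
+ # percentile <= suspicious_threshold -> 1 (good)
+ # percentile <= malicious_threshold -> 2 (suspicious)
+ # otherwise -> 3 (bad)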
+ if percentile <= suspicious_threshold:
+ score = 1
+ elif percentile <= malicious_threshold:
+ # Something doing something out of the ordinary
+ score = 2
+ else:
+ # Probably bad or at least not compliant with
+ # company policy.
+ score = 3
+ return {
+ "Vendor": "Awake Security",
+ "Type": indicator_type,
+ "Indicator": lookup_key,
+ "Score": score
+ }
+
+
+''' COMMANDS '''
+
+
+def lookup(lookup_type, lookup_key):
+ path = "/lookup/" + lookup_type
+ request["lookup_key"] = lookup_key
+ # default value of lookback_minutes is 480
+ if "lookback_minutes" not in args:
+ args["lookback_minutes"] = 480
+ request["lookback_minutes"] = int(args["lookback_minutes"])
+ response = requests.post(prefix + path, json=request, headers=headers, verify=verify)
+ if response.status_code < 200 or response.status_code >= 300:
+ return_error('Request Failed.\nStatus code: {} with body {} with headers {}'.format(
+ str(response.status_code),
+ response.content,
+ str(response.headers))
+ )
+
+ return response.json()
+
+
+def lookupDevice():
+ lookup_key = args["device"]
+ contents = lookup("device", lookup_key)
+ humanReadableFields = [
+ "deviceScore",
+ "deviceName",
+ "deviceType",
+ "os",
+ "osVersion",
+ "commonEmail",
+ "commonUsername",
+ "tags",
+ "recentIP",
+ "activeIP",
+ "nSimilarDevices",
+ "ipCount",
+ "applicationCount",
+ # "protocols",
+ "firstSeen",
+ "lastSeen",
+ ]
+ if "deviceScore" in contents:
+ dbotScore = toDBotScore("device", contents["deviceScore"], lookup_key)
+ else:
+ dbotScore = {
+ "Vendor": "Awake Security",
+ "Type": 'device',
+ "Indicator": lookup_key,
+ "Score": 0
+ }
+ humanReadable = displayTable([contents], humanReadableFields)
+ contents["device"] = lookup_key
+ returnResults(contents, "Devices", "device", humanReadable, dbotScore)
+
+
+def lookupDomain():
+ lookup_key = args["domain"]
+ contents = lookup("domain", lookup_key)
+ humanReadableFields = [
+ "notability",
+ "isAlexaTopOneMillion",
+ "isDGA",
+ "intelSources",
+ "numAssociatedDevices",
+ "numAssociatedActivities",
+ "approxBytesTransferred",
+ "protocols",
+ "firstSeen",
+ "lastSeen",
+ ]
+ if "notability" in contents:
+ dbotScore = toDBotScore("domain", contents["notability"], lookup_key)
+ else:
+ dbotScore = {
+ "Vendor": "Awake Security",
+ "Type": 'domain',
+ "Indicator": lookup_key,
+ "Score": 0
+ }
+ humanReadable = displayTable([contents], humanReadableFields)
+ contents["domain"] = lookup_key
+ returnResults(contents, "Domains", "domain", humanReadable, dbotScore)
+
+
+def lookupEmail():
+ lookup_key = args["email"]
+ contents = lookup("email", lookup_key)
+ humanReadableFields = [
+ "notabilityPercentile",
+ "deviceName",
+ "os",
+ "deviceType",
+ "application",
+ "numberSimilarDevices",
+ "numberSessions",
+ "firstSeen",
+ "lastSeen",
+ "duration",
+ "deviceId",
+ ]
+ if "notabilityPercentile" in contents:
+ dbotScore = toDBotScore("email", contents["notabilityPercentile"], lookup_key)
+ else:
+ dbotScore = {
+ "Vendor": "Awake Security",
+ "Type": 'email',
+ "Indicator": lookup_key,
+ "Score": 0
+ }
+ humanReadable = displayTable(contents, humanReadableFields)
+ for content in contents:
+ content["email"] = lookup_key
+ returnResults(contents, "Emails", "email", humanReadable, dbotScore)
+
+
+def lookupIp():
+ lookup_key = args["ip"]
+ contents = lookup("ip", lookup_key)
+ humanReadableFields = [
+ "deviceCount",
+ "activityCount",
+ "ipFirstSeen",
+ "ipLastSeen",
+ ]
+ dbotScore = {
+ "Vendor": "Awake Security",
+ "Type": 'ip',
+ "Indicator": lookup_key,
+ "Score": 0
+ }
+ # Note: No DBotScore for IP addresses as we do not score them.
+ # Our product scores devices rather than IP addresses.
+ humanReadable = displayTable([contents], humanReadableFields)
+ contents["ip"] = lookup_key
+ returnResults(contents, "IPs", "ip", humanReadable, dbotScore)
+
+
+def query(lookup_type):
+ # Default to an empty query if unset
+ request["queryExpression"] = ""
+ slurp(["queryExpression", "startTime", "endTime"])
+ nameMappings = [
+ ("ipAddress", "device.ip == {}"),
+ ("deviceName", "device.name like r/{}/"),
+ ("domainName", "domain.name like r/{}/"),
+ ("protocol", "activity.protocol == \"{}\""),
+ ("tags", "\"{}\" in device.tags"),
+ ]
+ for (name, mapping) in nameMappings:
+ if name in args:
+ if "queryExpression" in request and request["queryExpression"]:
+ request["queryExpression"] = request["queryExpression"] + " && " + mapping.format(args[name])
+ else:
+ request["queryExpression"] = mapping.format(args[name])
+ path = "/query/" + lookup_type
+ response = requests.post(prefix + path, json=request, headers=headers, verify=verify)
+ if response.status_code < 200 or response.status_code >= 300:
+ return_error('Request Failed.\nStatus code: {} with body {} with headers {}'.format(
+ str(response.status_code),
+ response.content,
+ str(response.headers))
+ )
+ contents = response.json()
+ return request["queryExpression"], contents
+
+
+def queryActivities():
+ q, contents = query("activities")
+ humanReadableFields = [
+ "sourceIP",
+ "sourceHost",
+ "sourcePort",
+ "destIP",
+ "destHost",
+ "destPort",
+ "activityDeviceName",
+ "activityStart",
+ "activityEnd",
+ "protocols",
+ ]
+ humanReadable = displayTable(contents, humanReadableFields)
+ for content in contents:
+ content["query"] = q
+ returnResults(contents, "Activities", "query", humanReadable, None)
+
+
+def queryDevices():
+ q, contents = query("devices")
+ humanReadableFields = [
+ "notabilityPercentile",
+ "deviceName",
+ "os",
+ "deviceType",
+ "application",
+ "numberSimilarDevices",
+ "numberSessions",
+ "firstSeen",
+ "lastSeen",
+ "duration",
+ "deviceId",
+ ]
+ humanReadable = displayTable(contents, humanReadableFields)
+ for content in contents:
+ content["query"] = q
+ returnResults(contents, "Devices", "query", humanReadable, None)
+
+
+def queryDomains():
+ q, contents = query("domains")
+ humanReadableFields = [
+ "name",
+ "notability",
+ "created",
+ "lastUpdated",
+ "expiration",
+ "registrantOrg",
+ "registrantCountry",
+ "registrarName",
+ "nameservers",
+ "deviceCount",
+ "intelCount",
+ "lastSeen",
+ ]
+ humanReadable = displayTable(contents, humanReadableFields)
+ for content in contents:
+ content["query"] = q
+ returnResults(contents, "Domains", "query", humanReadable, None)
+
+
+def pcapDownload():
+ slurp(["monitoringPointID"])
+ session = {}
+ for field in ["hostA", "hostB", "startTimeRFC3339Nano", "endTimeRFC3339Nano"]:
+ if field in args:
+ session[field] = args[field]
+ if "startTimeRFC3339Nano" in args:
+ session["startTimeRFC3339Nano"] = args["startTime"]
+ if "endTimeRFC3339Nano" in args:
+ session["endTimeRFC3339Nano"] = args["endTime"]
+ for field in ["protocol", "portA", "portB"]:
+ if field in args:
+ session[field] = int(args[field])
+ request["sessions"] = [session]
+ path = "/pcap/download"
+ response = requests.post(prefix + path, json=request, headers=headers, verify=verify)
+ if response.status_code < 200 or response.status_code >= 300:
+ return_error('Request Failed.\nStatus code: {} with body {} with headers {}'.format(
+ str(response.status_code),
+ response.content,
+ str(response.headers))
+ )
+ b64 = response.json()["pcap"]
+ pcap_bytes = base64.b64decode(b64)  # avoid shadowing the built-in "bytes"
+ demisto.results(fileResult("download.pcap", pcap_bytes))
+
+
+def fetchIncidents():
+ threatBehaviorsString = params.get("threat_behaviors") or ""
+ threatBehaviors = [threatBehavior.strip() for threatBehavior in threatBehaviorsString.split(",")]
+ if threatBehaviors == [""]:
+ threatBehaviors = []
+ lastRun = demisto.getLastRun()
+ formatString = "%Y-%m-%d %H:%M:%S+0000"
+ earlyTimeString = "1970-01-01 00:00:00+0000"
+ startTimeString = lastRun.get("time") or earlyTimeString
+ startTime = datetime.strptime(startTimeString, formatString)
+ endTime = datetime.utcnow()
+ endTimeString = datetime.strftime(endTime, formatString)
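+ # Only hit the API once at least fetch_interval minutes have elapsed since
+ # the last recorded fetch time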
+ if timedelta(minutes=int(params['fetch_interval'])) <= endTime - startTime:
+ jsonRequest = {
+ "startTime": startTimeString,
+ "endTime": endTimeString,
+ "threatBehaviors": threatBehaviors
+ }
+ response = requests.post(prefix + "/threat-behavior/matches", json=jsonRequest, headers=headers, verify=verify)
+ jsonResponse = response.json()
+ matchingThreatBehaviors = jsonResponse.get("matchingThreatBehaviors", [])
+
+ def toIncident(matchingThreatBehavior):
+ # Currently the threat behavior API doesn't allow us to retrieve metadata for
+ # the behaviors that matched, which is why this incident record is mostly empty
+ #
+ # However, we can provide the original query that the threat behavior corresponded
+ # to plus the date range so that a playbook can feed them back into
+ # `awake-query-{devices,activities}` to retrieve the matching devices or
+ # activities that triggered the match to the threat behavior.
+ return {
+ "Name": matchingThreatBehavior["name"],
+ "Query": matchingThreatBehavior["query"],
+ "StartTime": startTimeString,
+ "EndTime": endTimeString,
+ }
+ demisto.incidents(map(toIncident, matchingThreatBehaviors))
+ # Don't increase the low-water-mark until we actually find incidents
+ #
+ # This is a precaution because incidents sometimes appear in an old time
+ # bucket after a delay
+ if 0 < len(matchingThreatBehaviors):
+ lastRun = {"time": endTimeString}
+ else:
+ demisto.incidents([])
+ demisto.setLastRun(lastRun)
+
+
+''' EXECUTION '''
+LOG('command is %s' % (command))
+
+try:
+ if command == "test-module":
+ # If we got this far we already successfully authenticated against the server
+ demisto.results('ok')
+
+ elif command == "fetch-incidents":
+ fetchIncidents()
+
+ elif command == "awake-query-devices":
+ queryDevices()
+
+ elif command == "awake-query-activities":
+ queryActivities()
+
+ elif command == "awake-query-domains":
+ queryDomains()
+
+ elif command == "awake-pcap-download":
+ pcapDownload()
+
+ elif command == "domain":
+ lookupDomain()
+
+ elif command == "email":
+ lookupEmail()
+
+ elif command == "ip":
+ lookupIp()
+
+ elif command == "device":
+ lookupDevice()
+
+except Exception, e:
+ if command == "fetch-incidents":
+ raise
+ LOG(e)
+ LOG.print_log()
+ return_error(e.message)
diff --git a/Integrations/AwakeSecurity/AwakeSecurity.yml b/Integrations/AwakeSecurity/AwakeSecurity.yml
new file mode 100644
index 000000000000..6eee82397f47
--- /dev/null
+++ b/Integrations/AwakeSecurity/AwakeSecurity.yml
@@ -0,0 +1,641 @@
+commonfields:
+ id: Awake Security
+ version: -1
+name: Awake Security
+display: Awake Security
+category: Network Security
+description: Network Traffic Analysis
+detaileddescription: ""
+configuration:
+- display: Credentials
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: true
+- display: Awake Security server address
+ name: server
+ defaultvalue: https://example.awake.cloud
+ type: 0
+ required: true
+- display: Trust any certificate (not secure)
+ name: unsecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: true
+- display: 'Comma-separated list of threat behaviors to generate incidents for, e.g.,
+ "Exfiltration: SSL upload from non-browser to notable domain, Lateral Movement:
+ Unix-based PSEXEC, C2: Possible ICMP tunnel"'
+ name: threat_behaviors
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Incident fetch interval (in minutes)
+ name: fetch_interval
+ defaultvalue: "1"
+ type: 0
+ required: true
+- display: Minimum threshold to consider an indicator suspicious in Demisto
+ name: suspicious_threshold
+ defaultvalue: "33"
+ type: 0
+ required: true
+- display: Minimum threshold to consider an indicator malicious in Demisto
+ name: malicious_threshold
+ defaultvalue: "66"
+ type: 0
+ required: true
+script:
+ script: ''
+ type: python
+ commands:
+ - name: awake-query-devices
+ arguments:
+ - name: queryExpression
+ description: A query expression in the Awake Query Language
+ - name: startTime
+ required: true
+ description: Query start time ("2000-01-01T00:00:00Z")
+ - name: endTime
+ required: true
+ description: Query end time ("2000-01-01T00:00:00Z")
+ - name: ipAddress
+ description: IP address to filter by (exact match)
+ - name: deviceName
+ description: Device name to filter by (regular expression)
+ - name: domainName
+ description: Domain name to filter by (regular expression)
+ - name: protocol
+ description: Protocol to filter by (all uppercase, e.g., "TLS")
+ - name: tag
+ description: Tag to filter by (regular expression)
+ outputs:
+ - contextPath: AwakeSecurity.Devices.deviceId
+ description: Awake Security unique identifier for the specified device
+ type: string
+ - contextPath: AwakeSecurity.Devices.deviceName
+ description: Device name
+ type: string
+ - contextPath: AwakeSecurity.Devices.firstSeen
+ description: Date that the specified device was first seen
+ type: string
+ - contextPath: AwakeSecurity.Devices.lastSeen
+ description: Date that the specified device was last seen
+ type: string
+ - contextPath: AwakeSecurity.Devices.os
+ description: Operating system associated with the specified device
+ type: string
+ - contextPath: AwakeSecurity.Devices.deviceType
+ description: Device type
+ type: string
+ - contextPath: AwakeSecurity.Devices.ips
+ description: List of IP addresses associated with the specified device
+ type: unknown
+ - contextPath: AwakeSecurity.Devices.monitoringPointIds
+ description: List of monitoring point IDs the specified device was seen on
+ type: string
+ - contextPath: AwakeSecurity.Devices.application
+ description: List of applications the specified device was seen using
+ type: string
+ - contextPath: AwakeSecurity.Devices.notabilityPercentile
+ description: How the notability of this device compares to other devices
+ type: number
+ - contextPath: AwakeSecurity.Devices.numberSimilarDevices
+ description: Number of devices that are similar to this device
+ type: number
+ - contextPath: AwakeSecurity.Devices.numberSessions
+ description: Number of TCP sessions for this device
+ type: number
+ - contextPath: AwakeSecurity.Devices.ackTime
+ description: Date of the last TCP session acknowledgment of the specified device
+ type: number
+ - contextPath: AwakeSecurity.Devices.whiteListed
+ description: Whether the specified device is whitelisted
+ type: bool
+ description: Query devices in Awake Security
+ - name: awake-query-activities
+ arguments:
+ - name: queryExpression
+ description: A query expression in the Awake Query Language
+ - name: startTime
+ required: true
+ description: Query start time ("2000-01-01T00:00:00Z")
+ - name: endTime
+ required: true
+ description: Query end time ("2000-01-01T00:00:00Z")
+ - name: ipAddress
+ description: IP address to filter by (exact match)
+ - name: deviceName
+ description: Device name to filter by (regular expression)
+ - name: domainName
+ description: Domain name to filter by (regular expression)
+ - name: protocol
+ description: Protocol to filter by (all uppercase, e.g., "TLS")
+ - name: tag
+ description: Tag to filter by (regular expression)
+ outputs:
+ - contextPath: AwakeSecurity.Activities.activityId
+ description: UUID that uniquely identifies the activity
+ type: string
+ - contextPath: AwakeSecurity.Activities.sessionId
+ description: UUID that uniquely identifies the corresponding session
+ type: string
+ - contextPath: AwakeSecurity.Activities.sourceIP
+ description: IP address of the source
+ type: string
+ - contextPath: AwakeSecurity.Activities.sourceHost
+ description: Hostname of the source
+ type: string
+ - contextPath: AwakeSecurity.Activities.sourcePort
+ description: Port of the source
+ type: number
+ - contextPath: AwakeSecurity.Activities.destinationIP
+ description: IP address of the destination
+ type: string
+ - contextPath: AwakeSecurity.Activities.destinationHost
+ description: Hostname of the destination
+ type: string
+ - contextPath: AwakeSecurity.Activities.destinationPort
+ description: Port of the destination
+ type: number
+ - contextPath: AwakeSecurity.Activities.directionKnown
+ description: Whether the client endpoint is known with certainty
+ type: bool
+ - contextPath: AwakeSecurity.Activities.activityDeviceName
+ description: Device name for the endpoint within your network
+ type: string
+ - contextPath: AwakeSecurity.Activities.activityStart
+ description: Date when the activity began
+ type: string
+ - contextPath: AwakeSecurity.Activities.activityEnd
+ description: Date when the activity ended
+ type: string
+ - contextPath: AwakeSecurity.Activities.protocols
+ description: Protocols that the activity used
+ type: string
+ description: Query activities in Awake Security
+ - name: awake-query-domains
+ arguments:
+ - name: queryExpression
+ description: A query expression in the Awake Query Language
+ - name: startTime
+ required: true
+ description: Query start time ("2000-01-01T00:00:00Z")
+ - name: endTime
+ required: true
+ description: Query end time ("2000-01-01T00:00:00Z")
+ - name: ipAddress
+ description: IP address to filter by (exact match)
+ - name: deviceName
+ description: Device name to filter by (regular expression)
+ - name: domainName
+ description: Domain name to filter by (regular expression)
+ - name: protocol
+ description: Protocol to filter by (all uppercase, e.g., "TLS")
+ - name: tag
+ description: Tag to filter by (regular expression)
+ outputs:
+ - contextPath: AwakeSecurity.Domains.name
+ description: Domain name
+ type: string
+ - contextPath: AwakeSecurity.Domains.created
+ description: Date the specified domain was created
+ type: string
+ - contextPath: AwakeSecurity.Domains.lastUpdated
+ description: Date the specified domain was last updated
+ type: string
+ - contextPath: AwakeSecurity.Domains.registrantOrg
+ description: Organization of the registrant
+ type: string
+ - contextPath: AwakeSecurity.Domains.registrantCountry
+ description: Country of the registrant
+ type: string
+ - contextPath: AwakeSecurity.Domains.registrarName
+ description: Name of the registrar
+ type: string
+ - contextPath: AwakeSecurity.Domains.whoisServer
+ description: Address of the WHOIS server
+ type: string
+ - contextPath: AwakeSecurity.Domains.whoisFound
+ description: Whether the domain was found via WHOIS
+ type: bool
+ - contextPath: AwakeSecurity.Domains.deviceCount
+ description: Number of devices currently interacting with the specified domain
+ type: number
+ - contextPath: AwakeSecurity.Domains.totalDevices
+ description: Total number of devices that have interacted with the specified
+ domain
+ type: number
+ - contextPath: AwakeSecurity.Domains.intelCount
+ description: Number of matches of imported intel against the specified domain
+ type: number
+ - contextPath: AwakeSecurity.Domains.lastSeen
+ description: Date of the most recent interaction with the specified domain
+ type: number
+ - contextPath: AwakeSecurity.Domains.nameservers
+ description: List of authoritative nameservers for the specified domain
+ type: string
+ - contextPath: AwakeSecurity.Domains.notability
+ description: Notability score of the domain
+ type: number
+ - contextPath: AwakeSecurity.Domains.whiteListed
+ description: Whether the specified domain is whitelisted
+ type: bool
+ description: Query domains in Awake Security
+ - name: awake-pcap-download
+ arguments:
+ - name: protocol
+ description: Protocol to filter by (all uppercase, e.g., "TLS")
+ - name: hostA
+ description: First host's address
+ - name: portA
+ description: First host's port
+ - name: hostB
+ description: Second host's address
+ - name: portB
+ description: Second host's port
+ - name: startTime
+ description: Query start time ("2000-01-01T00:00:00Z")
+ - name: endTime
+ description: Query end time ("2000-01-01T00:00:00Z")
+ outputs:
+ - contextPath: File.Size
+ description: File size
+ type: number
+ - contextPath: File.MD5
+ description: MD5 of the file
+ type: string
+ - contextPath: File.SHA1
+ description: SHA-1 of the file
+ type: string
+ - contextPath: File.SHA256
+ description: SHA-256 of the file
+ type: string
+ - contextPath: File.Name
+ description: File name
+ type: string
+ - contextPath: File.SSDeep
+ description: SSDeep hash of the file
+ type: string
+ - contextPath: File.EntryID
+ description: War room Entry ID of the file
+ type: string
+ - contextPath: File.Info
+ description: File common metadata
+ type: string
+ - contextPath: File.Type
+ description: File type
+ type: string
+ - contextPath: File.Extension
+ description: 'The file extension, e.g., "pcap"'
+ type: string
+ description: Download a PCAP
+ - name: domain
+ arguments:
+ - name: domain
+ required: true
+ description: The domain name
+ - name: lookback_minutes
+ description: How many minutes of history to query from the current time. Default
+ is 480
+ defaultValue: "480"
+ outputs:
+ - contextPath: AwakeSecurity.Domains.approxBytesTransferred
+ description: Approximate bytes the indicator transferred
+ type: number
+ - contextPath: AwakeSecurity.Domains.DomainWithSameRegistrant
+ description: Domains with the same registrant
+ type: unknown
+ - contextPath: AwakeSecurity.Domains.domainsWithSameRegistrant.registrationDate
+ description: Date that the domain with the same registrant as the specified
+ domain was registered
+ type: string
+ - contextPath: AwakeSecurity.Domains.IntelSources
+ description: Indicators of compromise from Awake Security
+ type: string
+ - contextPath: AwakeSecurity.Domains.ipAddresses
+ description: IP addresses associated with the domain
+ type: string
+ - contextPath: AwakeSecurity.Domains.isAlexaTopOneMillion
+ description: Whether the domain appears in the Alexa top one million list
+ type: string
+ - contextPath: AwakeSecurity.Domains.isDGA
+ description: Whether the domain name was generated by a domain generation algorithm (DGA)
+ type: boolean
+ - contextPath: AwakeSecurity.Domains.lastSeen
+ description: Last time the domain was seen
+ type: string
+ - contextPath: AwakeSecurity.Domains.notability
+ description: Notability score of the domain
+ type: number
+ - contextPath: AwakeSecurity.Domains.numAssociatedActivities
+ description: Number of network activities associated with the domain
+ type: number
+ - contextPath: AwakeSecurity.Domains.numAssociatedDevices
+ description: Number of devices associated with the domain
+ type: number
+ - contextPath: AwakeSecurity.Domains.protocols
+ description: List of protocols used in the domain activities
+ type: string
+ - contextPath: AwakeSecurity.Domains.relatedSubdomains
+ description: Related subdomains of the specified domain
+ type: string
+ - contextPath: AwakeSecurity.Domains.subdomains
+ description: Subdomains of the specified domain
+ type: string
+ - contextPath: AwakeSecurity.Domains.topDevices
+ description: List of devices that accessed the specified domain (maximum is 10)
+ type: string
+ - contextPath: AwakeSecurity.Domains.totalNumDevices
+ description: Total number of devices that accessed the specified domain
+ type: number
+ - contextPath: AwakeSecurity.Domains.whiteListed
+ description: Whether the specified domain is whitelisted
+ type: boolean
+ - contextPath: DBotScore.Indicator
+ description: The specified domain
+ type: string
+ - contextPath: DBotScore.Score
+ description: Severity score of the specified domain in Demisto
+ type: number
+ - contextPath: DBotScore.Type
+ description: Indicator type in Demisto
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to assess the specified domain
+ type: string
+ description: Lookup and enrich a domain
+ - name: ip
+ arguments:
+ - name: ip
+ required: true
+ description: The IP address
+ - name: lookback_minutes
+ description: How many minutes of history to query from the current time. Default
+ is 480
+ defaultValue: "480"
+ outputs:
+ - contextPath: AwakeSecurity.IPs.activityCount
+ description: Number of activities associated with the specified IP address
+ type: number
+ - contextPath: AwakeSecurity.IPs.deviceCount
+ description: Number of devices associated with the specified IP address
+ type: number
+ - contextPath: AwakeSecurity.IPs.devices
+ description: Device object associated with the specified IP address
+ type: unknown
+ - contextPath: AwakeSecurity.IPs.domains
+ description: Domain object associated with the specified IP address
+ type: unknown
+ - contextPath: AwakeSecurity.IPs.ipFirstSeen
+ description: Date that the IP address was first seen in Awake Security
+ type: string
+ - contextPath: AwakeSecurity.IPs.ipLastSeen
+ description: Date that the IP address was last seen in Awake Security
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The specified IP address
+ type: string
+ - contextPath: DBotScore.Score
+ description: Severity score of the specified IP address in Demisto
+ type: number
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to assess the specified IP address
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type in Demisto
+ type: string
+ description: Lookup and enrich an IP address
+ - name: email
+ arguments:
+ - name: email
+ description: The email address
+ - name: lookback_minutes
+ description: How many minutes of history to query from the current time. Default
+ is 480
+ defaultValue: "480"
+ outputs:
+ - contextPath: AwakeSecurity.Emails.deviceId
+ description: Device ID associated with the specified email address
+ type: string
+ - contextPath: AwakeSecurity.Emails.deviceName
+ description: Device name associated with the specified email address
+ type: string
+ - contextPath: AwakeSecurity.Emails.firstSeen
+ description: Date that the email address was first seen in Awake Security
+ type: string
+ - contextPath: AwakeSecurity.Emails.lastSeen
+ description: Date that the email address was last seen in Awake Security
+ type: string
+ - contextPath: AwakeSecurity.Emails.duration
+ description: Time (in seconds) between the email address first seen date and
+ last seen date in Awake Security
+ type: string
+ - contextPath: AwakeSecurity.Emails.os
+ description: Operating system of the device associated with the specified email
+ address
+ type: string
+ - contextPath: AwakeSecurity.Emails.deviceType
+ description: Device type associated with the specified email address
+ type: string
+ - contextPath: AwakeSecurity.Emails.ips
+ description: IP addresses that the device associated with the specified email
+ address accessed
+ type: string
+ - contextPath: AwakeSecurity.Emails.monitoringPointIds
+ description: Monitoring point IDs on which the device associated with the specified
+ email address were seen
+ type: string
+ - contextPath: AwakeSecurity.Emails.application
+ description: Email applications associated with this email address
+ type: string
+ - contextPath: AwakeSecurity.Emails.notabilityPercentile
+ description: Notability (risk score) of the specified email address
+ type: number
+ - contextPath: AwakeSecurity.Emails.numberSimilarDevices
+ description: Number of similar devices associated with the device of the specified
+ email addresses
+ type: number
+ - contextPath: AwakeSecurity.Emails.numberSessions
+ description: Number of TCP sessions the device associated with this email address
+ initiated
+ type: number
+ - contextPath: AwakeSecurity.Emails.ackTime
+ description: Date of the last TCP session acknowledgment of the device associated
+ with the specified email address
+ type: string
+ - contextPath: AwakeSecurity.Emails.whiteListed
+ description: Whether the device associated with the specified email address is whitelisted
+ type: bool
+ - contextPath: DBotScore.Score
+ description: Severity score of the specified email address in Demisto
+ type: number
+ - contextPath: DBotScore.Type
+ description: Indicator type in Demisto
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to assess the specified email address
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The specified email address
+ type: string
+ description: Lookup and enrich an email address
+ - name: device
+ arguments:
+ - name: device
+ required: true
+ description: The device ID
+ - name: lookback_minutes
+ description: How many minutes of history to query from the current time. Default
+ is 480
+ defaultValue: "480"
+ outputs:
+ - contextPath: AwakeSecurity.Devices.deviceScore
+ description: Risk score of the specified device
+ type: number
+ - contextPath: AwakeSecurity.Devices.notableDomainCount.value
+ description: Number of suspicious domains accessed by the specified device
+ type: number
+ - contextPath: AwakeSecurity.Devices.notableDomainCount.percentile
+ description: Percentile of the specified device for notable domains accessed
+ type: number
+ - contextPath: AwakeSecurity.Devices.notableDomainCount.weight
+ description: Importance given to the suspicious domains when calculating the
+ specified device risk score
+ type: number
+ - contextPath: AwakeSecurity.Devices.iocCount.value
+ description: Number of suspicious IOCs associated with the specified device
+ type: number
+ - contextPath: AwakeSecurity.Devices.iocCount.percentile
+ description: Percentile of the specified device for notable IOCs
+ type: number
+ - contextPath: AwakeSecurity.Devices.iocCount.weight
+ description: Importance given to the IOCs when calculating the specified device
+ risk score
+ type: number
+ - contextPath: AwakeSecurity.Devices.watchlistCount.value
+ description: Total number of current threat behaviors associated with the specified
+ device
+ type: number
+ - contextPath: AwakeSecurity.Devices.watchlistCount.percentile
+ description: How this device compares to other devices for number of threat
+ behaviors
+ type: number
+ - contextPath: AwakeSecurity.Devices.watchlistCount.weight
+ description: Importance given to the threat behaviors when calculating the specified
+ device risk score
+ type: number
+ - contextPath: AwakeSecurity.Devices.activityCount.value
+ description: Number of characteristic artifacts associated with the specified
+ device
+ type: number
+ - contextPath: AwakeSecurity.Devices.activityCount.percentile
+ description: How this device compares to other devices for characteristic artifacts
+ type: number
+ - contextPath: AwakeSecurity.Devices.activityCount.weight
+ description: Importance given to the characteristic artifacts when calculating
+ the specified device risk score
+ type: number
+ - contextPath: AwakeSecurity.Devices.deviceName
+ description: Device name
+ type: string
+ - contextPath: AwakeSecurity.Devices.deviceType
+ description: Device type
+ type: string
+ - contextPath: AwakeSecurity.Devices.os
+ description: Operating system associated with the specified device
+ type: string
+ - contextPath: AwakeSecurity.Devices.recentIp
+ description: Most recent IP address associated with the specified device
+ type: string
+ - contextPath: AwakeSecurity.Devices.activeIp
+ description: Most common IP address associated with the specified device
+ type: string
+ - contextPath: AwakeSecurity.Devices.commonEmail
+ description: Most common email address associated with the specified device
+ type: string
+ - contextPath: AwakeSecurity.Devices.commonUsername
+ description: Most common username associated with the specified device
+ type: string
+ - contextPath: AwakeSecurity.Devices.commonMpid
+ description: Most common monitoring point ID the specified device was seen on
+ type: string
+ - contextPath: AwakeSecurity.Devices.nSimilarDevices
+ description: Number of devices that are similar to the specified device
+ type: number
+ - contextPath: AwakeSecurity.Devices.tags
+ description: Tags applied to the specified device
+ type: string
+ - contextPath: AwakeSecurity.Devices.ipCount
+ description: Number of IP addresses associated with the specified device
+ type: number
+ - contextPath: AwakeSecurity.Devices.emailCount
+ description: Number of email addresses associated with this device
+ type: number
+ - contextPath: AwakeSecurity.Devices.usernameCount
+ description: Number of usernames associated with the specified device
+ type: number
+ - contextPath: AwakeSecurity.Devices.applicationCount
+ description: Number of applications associated with the specified device
+ type: number
+ - contextPath: AwakeSecurity.Devices.mpids
+ description: List of monitoring point IDs associated with the specified device
+ type: string
+ - contextPath: AwakeSecurity.Devices.protocols.count
+ description: Number of times this protocol was used by the specified device
+ type: number
+ - contextPath: AwakeSecurity.Devices.firstSeen
+ description: Date that the specified device was first seen
+ type: string
+ - contextPath: AwakeSecurity.Devices.lastSeen
+ description: Date that the specified device was last seen
+ type: string
+ - contextPath: AwakeSecurity.Devices.osVersion
+ description: Operating system version of the specified device
+ type: string
+ - contextPath: AwakeSecurity.Devices.deviceGuid
+ description: Awake Security unique identifier for the specified device
+ type: string
+ - contextPath: AwakeSecurity.Devices.ips
+ description: List of IP addresses associated with the specified device
+ type: unknown
+ - contextPath: AwakeSecurity.Devices.usernames
+ description: List of usernames that were inferred as accounts on the specified
+ device
+ type: string
+ - contextPath: AwakeSecurity.Devices.emails
+ description: List of email addresses associated with the specified device
+ type: string
+ - contextPath: AwakeSecurity.Devices.ackTs
+ description: Date of the last TCP session acknowledgment of the specified device
+ type: string
+ - contextPath: AwakeSecurity.Devices.whiteListed
+ description: Whether the specified device is in the whitelist
+ type: bool
+ - contextPath: AwakeSecurity.Devices.protocols.name
+ description: Type of protocol used by the specified device
+ type: string
+ description: Look up and enrich a device
+ isfetch: true
+ runonce: false
+tests:
+- awake_security_test_pb
diff --git a/Integrations/AwakeSecurity/AwakeSecurity_image.png b/Integrations/AwakeSecurity/AwakeSecurity_image.png
new file mode 100644
index 000000000000..7a7ccb1dcc37
Binary files /dev/null and b/Integrations/AwakeSecurity/AwakeSecurity_image.png differ
diff --git a/Integrations/AwakeSecurity/CHANGELOG.md b/Integrations/AwakeSecurity/CHANGELOG.md
new file mode 100644
index 000000000000..3c618a892cca
--- /dev/null
+++ b/Integrations/AwakeSecurity/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.8.0] - 2019-08-06
+ - The ***Trust any certificate*** parameter now works as expected.
\ No newline at end of file
diff --git a/Integrations/AwakeSecurity/Pipfile b/Integrations/AwakeSecurity/Pipfile
new file mode 100644
index 000000000000..1f4881ed672c
--- /dev/null
+++ b/Integrations/AwakeSecurity/Pipfile
@@ -0,0 +1,15 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+
+[packages]
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/AwakeSecurity/Pipfile.lock b/Integrations/AwakeSecurity/Pipfile.lock
new file mode 100644
index 000000000000..bd5a86d9d4c5
--- /dev/null
+++ b/Integrations/AwakeSecurity/Pipfile.lock
@@ -0,0 +1,308 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "1fd564b978cf016eca093f3dfd295ed6ecae2fed0d591fcda830d512fa1fe4b8"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==3.7.4"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265",
+ "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7",
+ "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db"
+ ],
+ "version": "==0.18"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
+ "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
+ ],
+ "version": "==19.0"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:02c2b6d268695a8b64ad61847f92e611e6afcff33fd26c3a2125370c4662905d",
+ "sha256:ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93"
+ ],
+ "index": "pypi",
+ "version": "==1.9.4"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a",
+ "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03"
+ ],
+ "version": "==2.4.0"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae",
+ "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6"
+ ],
+ "index": "pypi",
+ "version": "==4.6.4"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "version": "==2.22.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/AzureCompute/AzureCompute.py b/Integrations/AzureCompute/AzureCompute.py
new file mode 100644
index 000000000000..b37fa694fc65
--- /dev/null
+++ b/Integrations/AzureCompute/AzureCompute.py
@@ -0,0 +1,910 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+'''IMPORTS'''
+
+import requests
+from datetime import datetime
+
+'''GLOBAL VARS'''
+
+PARAMS = demisto.params()
+USE_SSL = not demisto.params().get('unsecure')
+TENANT_ID = PARAMS.get('tenant_id')
+TOKEN = PARAMS.get('token')
+HOST = PARAMS.get('host', 'https://management.azure.com')
+SERVER = HOST[:-1] if HOST.endswith('/') else HOST
+API_VERSION = '2018-06-01'
+HEADERS = {}
+SUBSCRIPTION_ID = None
+BASE_URL = None
+
+# Image options to be used in the create_vm_command
+IMAGES = {
+ 'ubuntu server 14.04 lts': {
+ 'publisher': 'Canonical',
+ 'offer': 'UbuntuServer',
+ 'sku': '14.04-LTS',
+ 'version': 'latest'
+ },
+ 'ubuntu server 16.04 lts': {
+ 'publisher': 'Canonical',
+ 'offer': 'UbuntuServer',
+ 'sku': '16.04-LTS',
+ 'version': 'latest'
+ },
+ 'ubuntu server 18.04 lts': {
+ 'publisher': 'Canonical',
+ 'offer': 'UbuntuServer',
+ 'sku': '18.04-LTS',
+ 'version': 'latest'
+ },
+ 'red hat enterprise linux 7.6': {
+ 'publisher': 'RedHat',
+ 'offer': 'RHEL',
+ 'sku': '7-RAW',
+ 'version': 'latest'
+ },
+ 'centos-based 7.5': {
+ 'publisher': 'OpenLogic',
+ 'offer': 'CentOS',
+ 'sku': '7.5',
+ 'version': 'latest'
+ },
+ 'windows server 2012 r2 datacenter': {
+ 'publisher': 'MicrosoftWindowsServer',
+ 'offer': 'WindowsServer',
+ 'sku': '2012-R2-Datacenter',
+ 'version': 'latest'
+ },
+ 'windows server 2016 datacenter': {
+ 'publisher': 'MicrosoftWindowsServer',
+ 'offer': 'WindowsServer',
+ 'sku': '2016-Datacenter',
+ 'version': 'latest'
+ },
+ 'windows 10 pro version 1803': {
+ 'publisher': 'MicrosoftWindowsDesktop',
+ 'offer': 'Windows-10',
+ 'sku': 'rs4-pro',
+ 'version': 'latest'
+ },
+ 'windows 10 pro version 1809': {
+ 'publisher': 'MicrosoftWindowsDesktop',
+ 'offer': 'Windows-10',
+ 'sku': 'rs5-pro',
+ 'version': 'latest'
+ }
+}
+
+# Error messages for different provisioning states
+CREATING_OR_UPDATING_ERR = 'Please wait for the VM to finish being' \
+ ' {} before executing this command. To retrieve the ' \
+ 'last known state of the VM, execute the ' \
+ '`azure-vm-get-instance-details` command. '
+DELETING_ERR = 'You cannot execute this command because the VM is being deleted.'
+FAILED_ERR = 'Unable to power-off or power-on \'{}\' virtual machine ' \
+ 'because the following provisioning failure occurred during ' \
+ 'the vm\'s creation.\ncode: "{}"\nmessage: "{}"\nVisit the ' \
+ 'Azure Web Portal to take care of this issue.'
+
+# Error messages determined by the provisioning state of the VM
+PROVISIONING_STATE_TO_ERRORS = {
+ 'creating': CREATING_OR_UPDATING_ERR.format('created'),
+ 'updating': CREATING_OR_UPDATING_ERR.format('updated'),
+ 'deleting': DELETING_ERR,
+ 'failed': FAILED_ERR
+}
+
+'''SETUP'''
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+# Remove proxy if not set to true in params
+if not PARAMS.get('proxy'):
+ # Use pop() so a missing variable does not raise a KeyError
+ for env_var in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
+ os.environ.pop(env_var, None)
+
+
+'''HELPER FUNCTIONS'''
+
+
+def screen_errors(error_message, *args, **kwargs):
+ """
+ Make sure that the values passed as args and the keys in kwargs do not appear in error messages
+
+ parameter: (string) error_message
+ The error message that needs to be screened for the values in args and the keys
+ in kwargs
+
+ parameter: (list) *args
+ Arguments that need to be screened from error outputs and that will be replaced
+ by x's enclosed by a '<' symbol on the left, and a '>' symbol on the right
+
+ parameter: (dict) **kwargs
+ Key-value pairs for each of which the user wishes to screen the key identifier string
+ from the error_message and replace it with its assigned value string. Useful for
+ when the user wishes to replace sensitive data with a value of their choosing
+ instead of the default x's enclosed by '<', and '>' symbols on the left and right respectively
+
+ returns:
+ The error message free of sensitive information as determined by the values of
+ args and the keys of kwargs
+ """
+ if isinstance(error_message, Exception):
+ # Format Exception object as String
+ error_as_dict = vars(error_message)
+ updated_error_message = ''
+ for key, val in error_as_dict.iteritems():
+ if updated_error_message != '':
+ updated_error_message += '\n' + str(key) + ': ' + str(val)
+ else:
+ updated_error_message += str(key) + ': ' + str(val)
+ elif not isinstance(error_message, str):
+ # If not an Exception or a String, try to cast to a string
+ updated_error_message = str(error_message)
+ else:
+ updated_error_message = error_message
+
+ for argument in args:
+ if argument != '' and argument in updated_error_message:
+ length = len(argument)
+ placeholder = '<' + 'x' * length + '>'
+ updated_error_message = updated_error_message.replace(argument, placeholder)
+
+ for key, value in kwargs.items():
+ if key != '' and key in updated_error_message:
+ updated_error_message = updated_error_message.replace(key, value)
+
+ return updated_error_message
+
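+# Illustrative example (values are placeholders): screen_errors('bad token abc123',
+# 'abc123') returns 'bad token <xxxxxx>' - each screened value is replaced by x's
+# of the same length wrapped in angle brackets.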
+
+def validate_provisioning_state(args):
+ """
+ Ensure that the provisioning state of a VM is 'Succeeded'
+
+ For all provisioning states other than 'Succeeded', this method will raise an
+ exception with an informative error message.
+
+ parameter: (dict) args
+ The command arguments passed to either the `azure-vm-start-instance` or
+ `azure-vm-poweroff-instance` commands
+
+ returns:
+ None
+ """
+ response = get_vm(args)
+ # Retrieve relevant properties for checking provisioning state and returning
+ # informative error messages if necessary
+ vm_name = response.get('name')
+ properties = response.get('properties')
+ provisioning_state = properties.get('provisioningState')
+ statuses = properties.get('instanceView', {}).get('statuses')
+
+ # Check if the current ProvisioningState of the VM allows for executing this command
+ if provisioning_state.lower() == 'failed':
+ for status in statuses:
+ status_code = status.get('code')
+ if 'provisioningstate/failed' in status_code.lower():
+ message = status.get('message')
+ err_msg = PROVISIONING_STATE_TO_ERRORS.get('failed')
+ raise Exception(err_msg.format(vm_name, status_code, message)) # type: ignore
+ # If the Microsoft API changes and the expected status code is no longer
+ # present, the detailed exception above is never raised, so fall back to
+ # this more general error message
+ err_msg = 'Cannot execute this command because the ProvisioningState of the VM is \'Failed\'.'
+ raise Exception(err_msg)
+ elif provisioning_state.lower() in PROVISIONING_STATE_TO_ERRORS:
+ err_msg = PROVISIONING_STATE_TO_ERRORS.get(provisioning_state.lower())
+ raise Exception(err_msg)
+
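+# For example, if the VM's provisioningState is 'updating', this raises the
+# CREATING_OR_UPDATING_ERR message asking the user to wait until the update completes.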
+
+def epoch_seconds(d=None):
+ """
+ Return the number of seconds for given date. If no date, return current.
+
+ parameter: (date) d
+ The date to convert to seconds
+
+ returns:
+ The date in seconds
+ """
+ if not d:
+ d = datetime.utcnow()
+ return int((d - datetime.utcfromtimestamp(0)).total_seconds())
+
+
+def set_subscription_id():
+ """
+ Setting subscription ID to the context and returning it
+ """
+ headers = {
+ 'Authorization': TOKEN,
+ 'Accept': 'application/json'
+ }
+ token_retrieval_url = 'https://demistobot.demisto.com/azurecompute-token' # disable-secrets-detection
+ parameters = {'tenant': TENANT_ID, 'product': 'AzureCompute'}
+ r = requests.get(token_retrieval_url, headers=headers, params=parameters, verify=USE_SSL)
+ try:
+ response = r.json()
+ if r.status_code != requests.codes.ok:
+ return_error('Error: {}\nDescription:{}'.format(response.get('title'), response.get('detail')))
+ sub_id = response.get('subscription_id')
+ demisto.setIntegrationContext({
+ 'token': response.get('token'),
+ 'stored': epoch_seconds(),
+ 'subscription_id': sub_id
+ })
+ return sub_id
+ except ValueError as e:
+ if e.message == 'No JSON object could be decoded':
+ return_error(r.content)
+ else:
+ raise e
+
+
+def update_access_token():
+ """
+ Check if we have a valid token and if not get one and update global HEADERS
+ """
+ ctx = demisto.getIntegrationContext()
+ if ctx.get('token') and ctx.get('stored'):
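+ # Reuse the cached token while it is still fresh - tokens are treated as
+ # valid for one hour, with a 30-second safety margin before expiry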
+ if epoch_seconds() - ctx.get('stored') < 60 * 60 - 30:
+ HEADERS['Authorization'] = 'Bearer ' + ctx.get('token')
+ return
+ headers = {
+ 'Authorization': TOKEN,
+ 'Accept': 'application/json'
+ }
+ token_retrieval_url = 'https://demistobot.demisto.com/azurecompute-token' # disable-secrets-detection
+ parameters = {'tenant': TENANT_ID, 'product': 'AzureCompute'}
+ r = requests.get(token_retrieval_url, headers=headers, params=parameters, verify=USE_SSL)
+ if r.status_code not in {200, 201}:
+ return_error('Error in authentication with the application. Try checking the credentials you entered.')
+ try:
+ response = r.json()
+ except ValueError:
+ err_msg = 'There was a problem in retrieving an updated access token.'
+ err_msg += ' The response from the Demistobot server did not contain the expected content.'
+ return_error(err_msg)
+ demisto.setIntegrationContext({
+ 'token': response.get('token'),
+ 'stored': epoch_seconds(),
+ 'subscription_id': response.get('subscription_id')
+ })
+ HEADERS['Authorization'] = 'Bearer ' + response.get('token')
+
+
+def assign_image_attributes(image):
+ """
+ Retrieve image properties determined by the chosen image
+
+ returns:
+ Image Properties Tuple (sku, publisher, offer, version)
+ """
+ image = image.lower()
+ image_properties = IMAGES.get(image)
+ if not image_properties:
+ err_msg = 'Invalid value entered for the \'os_image\' argument. '
+ err_msg += 'Only values from the provided options are accepted.'
+ raise Exception(err_msg)
+ sku = image_properties.get('sku')
+ publisher = image_properties.get('publisher')
+ offer = image_properties.get('offer')
+ version = image_properties.get('version')
+ return sku, publisher, offer, version
+
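+# For example, assign_image_attributes('Ubuntu Server 18.04 LTS') returns
+# ('18.04-LTS', 'Canonical', 'UbuntuServer', 'latest'), per the IMAGES mapping above.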
+
+def create_vm_parameters(args):
+ """
+ Construct the VM object
+
+ Use the actual parameters passed to the 'azure-vm-create-instance' command
+ to build a vm object that will be sent in the body of the command's associated
+ API call.
+
+ parameter: (dict) args
+ Dictionary that contains the actual parameters that were passed to the
+ 'azure-vm-create-instance' command
+
+ returns:
+ Virtual Machine Object
+ """
+ # Retrieve relevant command arguments
+ location = args.get('virtual_machine_location')
+ vm_size = args.get('vm_size')
+ image = args.get('os_image')
+ sku = args.get('sku')
+ publisher = args.get('publisher')
+ version = args.get('version')
+ offer = args.get('offer')
+ vm_name = args.get('virtual_machine_name')
+ resource_group = args.get('resource_group')
+ admin_username = args.get('admin_username')
+ admin_password = args.get('admin_password')
+ nic_name = args.get('nic_name')
+ full_nic_id = '/subscriptions/' + SUBSCRIPTION_ID + '/resourceGroups/' # type: ignore
+ full_nic_id += resource_group + '/providers/Microsoft.Network/networkInterfaces/' + nic_name
+
+ if not image and not (sku and publisher and version and offer):
+ err_msg = 'You must enter a value for the \'os_image\' argument '
+ err_msg += 'or the group of arguments, \'sku\', \'publisher\', \'version\', and \'offer\'.'
+ raise Exception(err_msg)
+
+ if image:
+ sku, publisher, offer, version = assign_image_attributes(image)
+
+ # Construct VM object
+ vm = {
+ 'location': location,
+ 'properties': {
+ 'hardwareProfile': {
+ 'vmSize': vm_size
+ },
+ 'storageProfile': {
+ 'imageReference': {
+ 'sku': sku,
+ 'publisher': publisher,
+ 'version': version,
+ 'offer': offer
+ },
+ 'osDisk': {
+ 'caching': 'ReadWrite',
+ 'managedDisk': {
+ 'storageAccountType': 'Standard_LRS'
+ },
+ 'name': vm_name,
+ 'createOption': 'FromImage'
+ }
+ },
+ 'osProfile': {
+ 'adminUsername': admin_username,
+ 'computerName': vm_name,
+ 'adminPassword': admin_password
+ },
+ 'networkProfile': {
+ 'networkInterfaces': [
+ {
+ 'id': full_nic_id,
+ 'properties': {
+ 'primary': 'true'
+ }
+ }
+ ]
+ }
+ },
+ 'name': vm_name
+ }
+
+ return vm
+
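+# A sketch of a typical call, with placeholder values (argument names follow the
+# azure-vm-create-instance command definition):
+# create_vm_parameters({'resource_group': 'myGroup', 'virtual_machine_name': 'myVM',
+# 'virtual_machine_location': 'westus2', 'nic_name': 'myNic',
+# 'vm_size': 'Standard_D1_v2', 'os_image': 'Ubuntu Server 18.04 LTS',
+# 'admin_username': 'DemistoUser', 'admin_password': '<password>'})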
+
+def http_request(method, url_suffix=None, data=None, headers=HEADERS,
+ params=None, codes=None, full_url=None, j_son=None):
+ """
+ A wrapper for requests lib to send our requests and handle requests and responses better
+
+ parameter: (string) method
+ A string denoting the http request method to use.
+ Can be 'GET', 'POST, 'PUT', 'DELETE', etc.
+
+ parameter: (string) url_suffix
+ The API endpoint that determines which data we are trying to access/create/update
+ in our call to the API
+
+ parameter: (dict) data
+ The key/value pairs to be form-encoded
+
+ parameter: (dict) headers
+ The headers to use with the request
+
+ parameter: (dict) params
+ The parameters to use with this request
+
+ parameter: (set) codes
+ The set of status codes against which the status code of the response should be checked
+
+ parameter: (string) full_url
+ The full url to make a request to. Only necessary in the case that you need to make
+ an API request to an endpoint which differs in its base url from the majority of
+ the API calls in the integration
+
+ parameter: (dict) j_son
+ A JSON serializable Python object to send in the body of the request
+
+ returns:
+ JSON Response Object
+ """
+ update_access_token()
+ try:
+ if full_url:
+ url = full_url
+ else:
+ url = BASE_URL + url_suffix if url_suffix else BASE_URL
+ r = requests.request(
+ method,
+ url,
+ headers=headers,
+ data=data,
+ params=params,
+ verify=USE_SSL,
+ json=j_son
+ )
+ green_codes = codes if codes else {200, 201, 202, 204}
+ if r.status_code not in green_codes:
+ err_msg = 'Error in API call to Azure Compute Integration [{}] - {}'.format(r.status_code, r.reason)
+ err = r.json().get('error')
+ if err:
+ err_msg1 = '\nError code: {}\nError message: {}'.format(err.get('code'), err.get('message'))
+ err_msg += err_msg1
+ raise Exception(err_msg)
+ response = json.loads(r.content)
+ except ValueError:
+ response = r.content
+
+ return response
+
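+# Typical usage in this integration, e.g. from list_vms() below:
+# http_request('GET', url_endpoint, params={'api-version': API_VERSION}, codes={200})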
+
+'''MAIN FUNCTIONS / API CALLS'''
+
+# <---------- Test Module ----------> #
+
+
+def test_module():
+ # Implicitly will test TENANT_ID, TOKEN and SUBSCRIPTION_ID
+ list_resource_groups()
+ demisto.results('ok')
+
+# <-------- Resource Groups --------> #
+
+
+def list_resource_groups():
+ parameters = {'api-version': '2018-05-01'}
+ response = http_request('GET', params=parameters, codes={200})
+ return response
+
+
+def list_resource_groups_command():
+ """
+ List all Resource Groups belonging to your Azure subscription
+
+ returns:
+ Resource-Group Objects
+ """
+ response = list_resource_groups()
+ # Retrieve relevant properties to return to context
+ value = response.get('value')
+ resource_groups = []
+ for resource_group in value:
+ resource_group_context = {
+ 'Name': resource_group.get('name'),
+ 'ID': resource_group.get('id'),
+ 'Location': resource_group.get('location'),
+ 'ProvisioningState': resource_group.get('properties', {}).get('provisioningState')
+ }
+ resource_groups.append(resource_group_context)
+
+ title = 'List of Resource Groups'
+ human_readable = tableToMarkdown(title, resource_groups, removeNull=True)
+ entry_context = {'Azure.ResourceGroup(val.Name && val.Name === obj.Name)': resource_groups}
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': entry_context
+ })
+
+
+# <-------- Virtual Machines --------> #
+
+def list_vms(resource_group):
+ # Construct endpoint URI suffix
+ url_endpoint = resource_group + '/providers/Microsoft.Compute/virtualMachines'
+ parameters = {'api-version': API_VERSION}
+ # Call API
+ response = http_request('GET', url_endpoint, params=parameters, codes={200})
+ return response
+
+
+def list_vms_command():
+ """
+ List the VM instances in the specified Resource Group
+
+ demisto parameter: (string) resource_group
+ Resource Group of the VMs
+
+ returns:
+ Virtual Machine Objects
+ """
+ resource_group = demisto.args().get('resource_group')
+ response = list_vms(resource_group)
+
+ vm_objects_list = response.get('value')
+
+ vms = []
+ for vm_object in vm_objects_list:
+ vm_name = vm_object.get('name').lower()
+ location = vm_object.get('location')
+ properties = vm_object.get('properties')
+ provisioning_state = properties.get('provisioningState')
+ os_disk = properties.get('storageProfile', {}).get('osDisk')
+ datadisk = os_disk.get('diskSizeGB', 'NA')
+ vm_id = properties.get('vmId')
+ os_type = os_disk.get('osType')
+ vm = {
+ 'Name': vm_name,
+ 'ID': vm_id,
+ 'Size': datadisk,
+ 'OS': os_type,
+ 'Location': location,
+ 'ProvisioningState': provisioning_state,
+ 'ResourceGroup': resource_group
+ }
+ vms.append(vm)
+
+ title = 'Microsoft Azure - List of Virtual Machines in Resource Group "{}"'.format(resource_group)
+ table_headers = ['Name', 'ID', 'Size', 'OS', 'Location', 'ProvisioningState', 'ResourceGroup']
+ human_readable = tableToMarkdown(title, vms, headers=table_headers, removeNull=True)
+ entry_context = {'Azure.Compute(val.Name && val.Name === obj.Name)': vms}
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': entry_context
+ })
+
+
+def get_vm(args):
+ # Retrieve relevant command arguments
+ resource_group = args.get('resource_group')
+ vm_name = args.get('virtual_machine_name')
+
+ # Construct endpoint URI suffix
+ url_endpoint = resource_group + '/providers/Microsoft.Compute/virtualMachines/' + vm_name
+ parameters = {'$expand': 'instanceView', 'api-version': API_VERSION}
+
+ # Call API
+ response = http_request('GET', url_endpoint, params=parameters, codes={200})
+
+ return response
+
+
+def get_vm_command():
+ """
+ Get the properties of a specified Virtual Machine
+
+ demisto parameter: (string) resource_group
+ Resource Group to which the virtual machine belongs
+
+ demisto parameter: (string) virtual_machine_name
+ Name of the virtual machine you wish to view the details of
+
+ returns:
+ Virtual Machine Object
+ """
+ args = demisto.args()
+ response = get_vm(args)
+
+ # Retrieve relevant properties to return to context
+ vm_name = response.get('name').lower()
+ properties = response.get('properties')
+ os_disk = properties.get('storageProfile', {}).get('osDisk')
+ datadisk = os_disk.get('diskSizeGB', 'NA')
+ vm_id = properties.get('vmId')
+ os_type = os_disk.get('osType')
+ provisioning_state = properties.get('provisioningState')
+ location = response.get('location')
+ statuses = properties.get('instanceView', {}).get('statuses')
+ power_state = None
+ for status in statuses:
+ status_code = status.get('code')
+ status_code_prefix = status_code[:status_code.find('/')]
+ if status_code_prefix == 'PowerState':
+ power_state = status.get('displayStatus')
+
+ vm = {
+ 'Name': vm_name,
+ 'ID': vm_id,
+ 'Size': datadisk,
+ 'OS': os_type,
+ 'ProvisioningState': provisioning_state,
+ 'Location': location,
+ 'PowerState': power_state,
+ 'ResourceGroup': args.get('resource_group')
+ }
+
+ title = 'Properties of VM "{}"'.format(vm_name)
+ table_headers = ['Name', 'ID', 'Size', 'OS', 'ProvisioningState', 'Location', 'PowerState']
+ human_readable = tableToMarkdown(title, vm, headers=table_headers, removeNull=True)
+ entry_context = {'Azure.Compute(val.Name && val.Name === obj.Name)': vm}
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': entry_context
+ })
+
+
+def create_vm(args):
+ # Retrieve relevant command arguments
+ resource_group = args.get('resource_group')
+ vm_name = args.get('virtual_machine_name')
+
+ # Construct endpoint URI suffix
+ url_endpoint = resource_group + '/providers/Microsoft.Compute/virtualMachines/' + vm_name
+ parameters = {'api-version': API_VERSION}
+
+ # Construct VM object utilizing parameters passed as command arguments
+ payload = create_vm_parameters(args)
+
+ # Call API
+ response = http_request('PUT', url_endpoint, params=parameters, j_son=payload)
+
+ return response
+
+
+def create_vm_command():
+ """
+ Create a virtual machine instance with the specified OS image
+
+ demisto parameter: (string) resource_group
+ Resource group to which the new VM will belong
+
+ demisto parameter: (string) virtual_machine_name
+ Name to assign to the new virtual machine
+
+ demisto parameter: (string) virtual_machine_location
+ Region in which the VM will be hosted
+
+ demisto parameter: (string) nic_name
+ The name of the Network Interface to link the VM with. This must be created from the Azure Portal
+
+ demisto parameter: (string) vm_size
+ The name of a VirtualMachineSize, which determines the size of the deployed VM
+
+ demisto parameter: (string) os_image
+ The base operating system image of the VM
+
+ demisto parameter: (string) sku
+ SKU of the image to be used
+
+ demisto parameter: (string) publisher
+ Name of the publisher of the image
+
+ demisto parameter: (string) version
+ Version of the image to use
+
+ demisto parameter: (string) offer
+ Specifies the offer of the platform image or marketplace image used
+ to create the virtual machine
+
+ demisto parameter: (string) admin_username
+ Admin Username to be used when creating the VM
+
+ demisto parameter: (string) admin_password
+ Admin Password to be used when creating the VM
+
+ returns:
+ Virtual Machine Object
+ """
+ args = demisto.args()
+ response = create_vm(args)
+
+ # Retrieve relevant properties to return to context
+ vm_name = response.get('name').lower()
+ properties = response.get('properties')
+ os_disk = properties.get('storageProfile', {}).get('osDisk')
+ datadisk = os_disk.get('diskSizeGB', 'NA')
+ vm_id = properties.get('vmId')
+ os_type = os_disk.get('osType')
+ provisioning_state = properties.get('provisioningState')
+ location = response.get('location')
+
+ vm = {
+ 'Name': vm_name,
+ 'ID': vm_id,
+ 'Size': datadisk,
+ 'OS': os_type,
+ 'ProvisioningState': provisioning_state,
+ 'Location': location,
+ 'ResourceGroup': args.get('resource_group')
+ }
+
+ title = 'Created Virtual Machine "{}"'.format(vm_name)
+ human_readable = tableToMarkdown(title, vm, removeNull=True)
+ entry_context = {'Azure.Compute(val.Name && val.Name === obj.Name)': vm}
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': entry_context
+ })
+
+
+def delete_vm(args):
+ # Retrieve relevant command arguments
+ resource_group = args.get('resource_group')
+ vm_name = args.get('virtual_machine_name')
+
+ # Construct endpoint URI suffix (for de-allocation of compute resources)
+ url_endpoint = resource_group + '/providers/Microsoft.Compute/virtualMachines/' + vm_name + '/deallocate'
+ parameters = {'api-version': API_VERSION}
+
+ # Call API to deallocate compute resources
+ http_request('POST', url_endpoint, params=parameters, codes={200, 202})
+
+ # Construct endpoint URI suffix (for deletion)
+ url_endpoint = resource_group + '/providers/Microsoft.Compute/virtualMachines/' + vm_name
+ parameters = {'api-version': API_VERSION}
+
+ # Call API to delete
+ response = http_request('DELETE', url_endpoint, params=parameters, codes={200, 202, 204})
+
+ return response
+
+
+def delete_vm_command():
+ """
+ Delete a specified Virtual Machine
+
+ demisto parameter: (string) resource_group
+ Resource Group to which the virtual machine belongs
+
+ demisto parameter: (string) virtual_machine_name
+ Name of the virtual machine to delete
+
+ returns:
+ Success message to the war room
+ """
+ args = demisto.args()
+ delete_vm(args)
+ success_msg = '"{}" VM Deletion Successfully Initiated'.format(args.get('virtual_machine_name'))
+ demisto.results(success_msg)
+
+
+def start_vm(args):
+ # Retrieve relevant command arguments
+ resource_group = args.get('resource_group')
+ vm_name = args.get('virtual_machine_name')
+
+ # Construct endpoint URI suffix
+ url_endpoint = resource_group + '/providers/Microsoft.Compute/virtualMachines/' + vm_name + '/start'
+ parameters = {'api-version': API_VERSION}
+
+ # Call API
+ response = http_request('POST', url_endpoint, params=parameters, codes={202})
+
+ return response
+
+
+def start_vm_command():
+ """
+ Power-on a specified Virtual Machine
+
+ demisto parameter: (string) resource_group
+ Resource Group to which the virtual machine belongs
+
+ demisto parameter: (string) virtual_machine_name
+ Name of the virtual machine to power-on
+
+ returns:
+ Virtual Machine Object
+ """
+ args = demisto.args()
+ vm_name = args.get('virtual_machine_name').lower()
+
+ # Raise an exception if the VM isn't in the proper provisioning state
+ validate_provisioning_state(args)
+
+ start_vm(args)
+
+ vm = {
+ 'Name': vm_name,
+ 'ResourceGroup': args.get('resource_group'),
+ 'PowerState': 'VM starting'
+ }
+
+ title = 'Power-on of Virtual Machine "{}" Successfully Initiated'.format(vm_name)
+ human_readable = tableToMarkdown(title, vm, removeNull=True)
+ entry_context = {'Azure.Compute(val.Name && val.Name === obj.Name)': vm}
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': vm,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': entry_context
+ })
+
+
+def poweroff_vm(args):
+ # Retrieve relevant command arguments
+ resource_group = args.get('resource_group')
+ vm_name = args.get('virtual_machine_name')
+
+ # Construct endpoint URI suffix
+ url_endpoint = resource_group + '/providers/Microsoft.Compute/virtualMachines/' + vm_name + '/powerOff'
+ parameters = {'api-version': API_VERSION}
+
+ # Call API
+ response = http_request('POST', url_endpoint, params=parameters, codes={202})
+
+ return response
+
+
+def poweroff_vm_command():
+ """
+ Power-off a specified Virtual Machine
+
+ demisto parameter: (string) resource_group
+ Resource Group to which the virtual machine belongs
+
+ demisto parameter: (string) virtual_machine_name
+ Name of the virtual machine to power-off
+
+ returns:
+ Virtual Machine Object
+ """
+ args = demisto.args()
+ vm_name = args.get('virtual_machine_name').lower()
+
+ # Raise an exception if the VM isn't in the proper provisioning state
+ validate_provisioning_state(args)
+
+ poweroff_vm(args)
+
+ vm = {
+ 'Name': vm_name,
+ 'ResourceGroup': args.get('resource_group'),
+ 'PowerState': 'VM stopping'
+ }
+
+ title = 'Power-off of Virtual Machine "{}" Successfully Initiated'.format(vm_name)
+ human_readable = tableToMarkdown(title, vm, removeNull=True)
+ entry_context = {'Azure.Compute(val.Name && val.Name === obj.Name)': vm}
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': vm,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': entry_context
+ })
+
+
+'''COMMAND SWITCHBOARD'''
+
+commands = {
+ 'azure-vm-list-instances': list_vms_command,
+ 'azure-vm-get-instance-details': get_vm_command,
+ 'azure-vm-start-instance': start_vm_command,
+ 'azure-vm-poweroff-instance': poweroff_vm_command,
+ 'azure-vm-create-instance': create_vm_command,
+ 'azure-vm-delete-instance': delete_vm_command,
+ 'azure-list-resource-groups': list_resource_groups_command
+}
+
+'''EXECUTION'''
+
+try:
+ # Initial setup
+ SUBSCRIPTION_ID = set_subscription_id()
+ BASE_URL = SERVER + '/subscriptions/' + SUBSCRIPTION_ID + '/resourceGroups/'
+
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() in commands.keys():
+ commands[demisto.command()]()
+
+except Exception as e:
+ screened_error_message = screen_errors(e.message, TENANT_ID)
+ return_error(screened_error_message)
diff --git a/Integrations/AzureCompute/AzureCompute.yml b/Integrations/AzureCompute/AzureCompute.yml
new file mode 100644
index 000000000000..a539f6aae835
--- /dev/null
+++ b/Integrations/AzureCompute/AzureCompute.yml
@@ -0,0 +1,414 @@
+category: IT Services
+commonfields:
+ id: Azure Compute
+ version: -1
+configuration:
+- defaultvalue: https://management.azure.com
+ display: Host URL (e.g. https://management.azure.com)
+ name: host
+ required: true
+ type: 0
+- display: Tenant ID (received from the admin consent - see Detailed Instructions
+ (?) section)
+ name: tenant_id
+ required: true
+ type: 4
+- display: Token (received from the admin consent - see detailed instructions (?))
+ name: token
+ required: true
+ type: 4
+- display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+- defaultvalue: 'true'
+ display: Trust any certificate (unsecure)
+ name: unsecure
+ required: false
+ type: 8
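+# Note on the "type" codes above, as evident from this file's parameters: 0 is a
+# short text field, 4 is an encrypted field, and 8 is a boolean checkbox.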
+description: Create and Manage Azure Virtual Machines
+display: Azure Compute
+name: Azure Compute
+script:
+ script: ''
+ type: python
+ commands:
+ - arguments:
+ - default: false
+ description: The resource group of the virtual machines. To see all the resource
+ groups associated with your subscription, run the `azure-list-resource-groups`
+ command. If none are present, navigate to the Azure Web Portal to create resource
+ groups.
+ isArray: false
+ name: resource_group
+ required: true
+ secret: false
+ description: Lists the virtual machine instances in the given resource group.
+ execution: false
+ name: azure-vm-list-instances
+ outputs:
+ - contextPath: Azure.Compute.Name
+ description: The name of the virtual machine.
+ type: string
+ - contextPath: Azure.Compute.Location
+ description: The location of the virtual machine.
+ type: string
+ - contextPath: Azure.Compute.ProvisioningState
+ description: The provisioning state of the virtual machine.
+ type: string
+ - contextPath: Azure.Compute.ResourceGroup
+ description: The resource group in which the virtual machine resides.
+ type: string
+ - contextPath: Azure.Compute.ID
+ description: The ID of the virtual machine.
+ type: string
+ - contextPath: Azure.Compute.Size
+ description: The size of the deployed virtual machine (in gigabytes).
+ type: number
+ - contextPath: Azure.Compute.OS
+ description: The OS running on the virtual machine.
+ type: string
+ - arguments:
+ - default: false
+ description: The resource group to which the virtual machine belongs. To see all
+ the resource groups associated with your subscription, run the `azure-list-resource-groups`
+ command. If none are present, navigate to the Azure Web Portal to create resource
+ groups.
+ isArray: false
+ name: resource_group
+ required: true
+ secret: false
+ - default: false
+ description: The name of the virtual machine to power on. To see all virtual machines
+ and their associated names for a specific resource group, run the `azure-vm-list-instances`
+ command.
+ isArray: false
+ name: virtual_machine_name
+ required: true
+ secret: false
+ description: Powers on a given virtual machine.
+ execution: true
+ name: azure-vm-start-instance
+ outputs:
+ - contextPath: Azure.Compute.Name
+ description: The name of the virtual machine that was started.
+ type: string
+ - contextPath: Azure.Compute.ResourceGroup
+ description: The resource group in which the virtual machine resides.
+ type: string
+ - contextPath: Azure.Compute.PowerState
+ description: Whether the virtual machine instance is powered on or off.
+ type: string
+ - arguments:
+ - default: false
+ description: The resource group to which the virtual machine belongs. To see
+ all the resource groups associated with your subscription, run the `azure-list-resource-groups`
+ command. If none are present, navigate to the Azure Web Portal to create resource
+ groups.
+ isArray: false
+ name: resource_group
+ required: true
+ secret: false
+ - default: false
+ description: The name of the virtual machine to power off. To see all virtual
+ machines with their associated names for a specific resource group, run the
+ `azure-vm-list-instances` command.
+ isArray: false
+ name: virtual_machine_name
+ required: true
+ secret: false
+ description: Powers off a given virtual machine.
+ execution: true
+ name: azure-vm-poweroff-instance
+ outputs:
+ - contextPath: Azure.Compute.Name
+ description: The name of the virtual machine that was powered off.
+ type: string
+ - contextPath: Azure.Compute.ResourceGroup
+ description: The resource group in which the virtual machine resides.
+ type: string
+ - contextPath: Azure.Compute.PowerState
+ description: Whether the virtual machine instance is powered on or off.
+ type: string
+ - arguments:
+ - default: false
+ description: The resource group to which the virtual machine belongs. To see
+ all the resource groups associated with your subscription, run the `azure-list-resource-groups`
+ command. If none are present, navigate to the Azure Web Portal to create resource
+ groups.
+ isArray: false
+ name: resource_group
+ required: true
+ secret: false
+ - default: false
+ description: The name of the virtual machine you want to view the details of.
+ To see all the virtual machines with their associated names for a specific
+ resource group, run the `azure-vm-list-instances` command.
+ isArray: false
+ name: virtual_machine_name
+ required: true
+ secret: false
+ description: Gets the properties of a given virtual machine.
+ execution: false
+ name: azure-vm-get-instance-details
+ outputs:
+ - contextPath: Azure.Compute.Name
+ description: The name of the virtual machine you want to get details of.
+ type: string
+ - contextPath: Azure.Compute.ID
+ description: The ID of the virtual machine.
+ type: string
+ - contextPath: Azure.Compute.Size
+ description: The size of the deployed virtual machine (in gigabytes).
+ type: number
+ - contextPath: Azure.Compute.OS
+ description: The OS running on the given virtual machine.
+ type: string
+ - contextPath: Azure.Compute.ProvisioningState
+ description: The provisioning state of the deployed virtual machine.
+ type: string
+ - contextPath: Azure.Compute.Location
+ description: The region in which the virtual machine is hosted.
+ type: string
+ - contextPath: Azure.Compute.PowerState
+ description: Whether the virtual machine instance is powered on or off.
+ type: string
+ - contextPath: Azure.Compute.ResourceGroup
+ description: The resource group to which the virtual machine belongs.
+ type: string
+ - arguments:
+ - default: false
+ description: The resource group to which the new virtual machine will belong.
+ To see all the resource groups associated with your subscription, run the
+ `azure-list-resource-groups` command. If none are present, navigate to the
+ Azure Web Portal to create resource groups.
+ isArray: false
+ name: resource_group
+ required: true
+ secret: false
+ - default: false
+ description: The name of the virtual machine to create.
+ isArray: false
+ name: virtual_machine_name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The location in which to create the virtual machine.
+ isArray: false
+ name: virtual_machine_location
+ predefined:
+ - westus2
+ - westus
+ - westindia
+ - westeurope
+ - westcentralus
+ - uksouth
+ - ukwest
+ - southeastasia
+ - northcentralus
+ - northeurope
+ - southcentralus
+ - southindia
+ - francesouth
+ - francecentral
+ - japaneast
+ - japanwest
+ - koreacentral
+ - koreasouth
+ - brazilsouth
+ - canadacentral
+ - canadaeast
+ - centralindia
+ - eastus2
+ - eastasia
+ - centralus
+ - eastus
+ - australiacentral
+ - australiacentral2
+ - australiaeast
+ - australiasoutheast
+ required: true
+ secret: false
+ - default: false
+ description: The name of the Network Interface to link the virtual machine with.
+ A Network Interface has to be created in the Azure Portal. Note that the virtual
+ machine's location property must match that of the Network Interface you choose
+ to link it to. To see a list of available Network Interfaces visit the Azure
+ Web Portal, navigate to the search bar at the top of the page, type "network
+ interfaces", and in the dynamic drop-down menu that appears, click the 'Network
+ interfaces' option that appears under the 'Services' category. If none are
+ present, you will need to create a new Network Interface which you can do
+ by clicking the '+Add' button towards the top left of the page and following
+ the instructions. For more information regarding Network Interfaces, see the
+ [Azure documentation](https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-network-interface).
+ isArray: false
+ name: nic_name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The name of a VirtualMachineSize, which determines the size of
+ the deployed virtual machine. For more information, see the [Azure documentation](https://docs.microsoft.com/en-us/rest/api/compute/virtualmachines/listavailablesizes#virtualmachinesize).
+ isArray: false
+ name: vm_size
+ predefined:
+ - Standard_D1_v2
+ - Standard_D2_v2
+ - Standard_D2s_v3
+ - Standard_B1ms
+ - Standard_B1s
+ - Standard_B2s
+ - Standard_B4ms
+ - Standard_D4s_v3
+ - Standard_DS1_v2
+ - Standard_DS2_v2
+ - Standard_DS3_v2
+ - Promo_DS2_v2
+ - Promo_DS3_v2
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The base operating system image of the virtual machine.
+ isArray: false
+ name: os_image
+ predefined:
+ - Ubuntu Server 14.04 LTS
+ - Ubuntu Server 16.04 LTS
+ - Ubuntu Server 18.04 LTS
+ - Red Hat Enterprise Linux 7.6
+ - CentOS-based 7.5
+ - Windows Server 2012 R2 Datacenter
+ - Windows Server 2016 Datacenter
+ - Windows 10 Pro Version 1803
+ - Windows 10 Pro Version 1809
+ required: false
+ secret: false
+ - default: false
+ defaultValue: 2016-Datacenter
+ description: The SKU of the OS image to use. To see a list of available SKUs,
+ visit the Azure Web Portal and click the '>' symbol on the top bar of the
+ page to open a Cloud Shell (make sure it is a Bash shell). At the command
+ prompt, run `az vm image list-skus` with the arguments it prompts you for
+ to display the VM image SKUs available in the Azure Marketplace.
+ isArray: false
+ name: sku
+ required: false
+ secret: false
+ - default: false
+ defaultValue: MicrosoftWindowsServer
+ description: The name of the publisher of the OS image. To see a list of available
+ publishers, visit the Azure Web Portal and click the '>' symbol on the top
+ bar of the page to open a Cloud Shell (make sure it is a Bash shell). At the
+ command prompt, run `az vm image list-publishers` with the arguments it
+ prompts you for to display the VM image publishers available in the Azure
+ Marketplace.
+ isArray: false
+ name: publisher
+ required: false
+ secret: false
+ - default: false
+ defaultValue: latest
+ description: Version of the image to use. The supported formats are Major.Minor.Build
+ or 'latest'. Major, Minor, and Build are decimal numbers. Specify 'latest'
+ to use the latest version of an image available at deploy time.
+ isArray: false
+ name: version
+ required: false
+ secret: false
+ - default: false
+ defaultValue: WindowsServer
+ description: Specifies the offer of the platform image or marketplace image
+ used to create the virtual machine. To see a list of available offers, visit
+ the Azure Web Portal and click the '>' symbol on the top bar of the page to
+ open a Cloud Shell (make sure it is a Bash shell). At the command prompt,
+ run `az vm image list-offers` with the arguments it prompts you for to
+ display the VM image offers available in the Azure Marketplace.
+ isArray: false
+ name: offer
+ required: false
+ secret: false
+ - default: false
+ defaultValue: DemistoUser
+ description: The admin username to use when creating the virtual machine.
+ isArray: false
+ name: admin_username
+ required: false
+ secret: false
+ - default: false
+ defaultValue: Passw0rd@123
+ description: The admin password to use when creating the virtual machine.
+ isArray: false
+ name: admin_password
+ required: false
+ secret: false
+ description: Creates a virtual machine instance with the specified OS image.
+ execution: true
+ name: azure-vm-create-instance
+ outputs:
+ - contextPath: Azure.Compute.Name
+ description: The name of the created virtual machine instance.
+ type: string
+ - contextPath: Azure.Compute.ResourceGroup
+ description: The resource group in which the virtual machine resides.
+ type: string
+ - contextPath: Azure.Compute.ID
+ description: The ID of the virtual machine.
+ type: string
+ - contextPath: Azure.Compute.Size
+ description: The size of the deployed virtual machine (in gigabytes).
+ type: number
+ - contextPath: Azure.Compute.OS
+ description: The OS running on the specified virtual machine.
+ type: string
+ - contextPath: Azure.Compute.ProvisioningState
+ description: The provisioning state of the deployed virtual machine.
+ type: string
+ - contextPath: Azure.Compute.Location
+ description: The region in which the virtual machine is hosted.
+ type: string
+ - description: Lists all resource groups that belong to your Azure subscription.
+ execution: false
+ name: azure-list-resource-groups
+ outputs:
+ - contextPath: Azure.ResourceGroup.Name
+ description: The name of the resource group.
+ type: string
+ - contextPath: Azure.ResourceGroup.ID
+ description: The ID of the resource group.
+ type: string
+ - contextPath: Azure.ResourceGroup.Location
+ description: The location of the resource group.
+ type: string
+ - contextPath: Azure.ResourceGroup.ProvisioningState
+ description: The provisioning state of the resource group.
+ type: string
+ - arguments:
+ - default: false
+ description: The resource group to which the virtual machine belongs. To see
+ all the resource groups associated with your subscription, run the `azure-list-resource-groups`
+ command. If none are present, navigate to the Azure Web Portal to create resource
+ groups.
+ isArray: false
+ name: resource_group
+ required: true
+ secret: false
+ - default: false
+ description: The name of the virtual machine to delete. To see all the virtual
+ machines with their associated names for a specific resource group, run the
+ `azure-vm-list-instances` command.
+ isArray: false
+ name: virtual_machine_name
+ required: false
+ secret: false
+ description: Deletes a specified virtual machine.
+ execution: false
+ name: azure-vm-delete-instance
+ isfetch: false
+ runonce: false
+tests:
+- No test
\ No newline at end of file
diff --git a/Integrations/AzureCompute/AzureCompute_description.md b/Integrations/AzureCompute/AzureCompute_description.md
new file mode 100644
index 000000000000..4d69a6811dc1
--- /dev/null
+++ b/Integrations/AzureCompute/AzureCompute_description.md
@@ -0,0 +1,2 @@
+To grant access to Microsoft Azure Compute, an admin needs to approve Demisto using an admin consent flow. To start the admin consent flow, click [here](https://demistobot.demisto.com/azurecompute-sub).
+ After entering your Microsoft Azure Subscription ID, which you can find in the Azure Web Portal, and authorizing the Demisto app, you will get a tenant ID and authentication token. Enter both of these when you configure the integration instance.
\ No newline at end of file
diff --git a/Integrations/AzureCompute/AzureCompute_image.png b/Integrations/AzureCompute/AzureCompute_image.png
new file mode 100644
index 000000000000..f600381f86e4
Binary files /dev/null and b/Integrations/AzureCompute/AzureCompute_image.png differ
diff --git a/Integrations/AzureSecurityCenter/AzureSecurityCenter.py b/Integrations/AzureSecurityCenter/AzureSecurityCenter.py
new file mode 100644
index 000000000000..a8618b186fbc
--- /dev/null
+++ b/Integrations/AzureSecurityCenter/AzureSecurityCenter.py
@@ -0,0 +1,1574 @@
+from CommonServerPython import *
+
+""" IMPORTS """
+import os
+import requests
+import ast
+from datetime import datetime
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+# remove proxy if not set to true in params
+if not demisto.params().get("proxy"):
+ del os.environ["HTTP_PROXY"]
+ del os.environ["HTTPS_PROXY"]
+ del os.environ["http_proxy"]
+ del os.environ["https_proxy"]
+
+""" GLOBAL VARS """
+CONTEXT = demisto.getIntegrationContext()
+USE_SSL = not demisto.params().get("unsecure", False)
+DEMISTOBOT = "https://demistobot.demisto.com/azuresc-token"
+SUBSCRIPTION_ID = CONTEXT.get("subscription_id")
+SUBSCRIPTION_URL = "/subscriptions/{}".format(SUBSCRIPTION_ID)
+TOKEN = demisto.params().get("token")
+TENANT_ID = demisto.params().get("tenant_id")
+BASE_URL = demisto.params().get("server_url")
+RESOURCE = "https://management.azure.com/"
+AUTH_GRANT_TYPE = "client_credentials"
+
+# API Versions
+ALERT_API_VERSION = "2015-06-01-preview"
+LOCATION_API_VERSION = "2015-06-01-preview"
+ATP_API_VERSION = "2017-08-01-preview"
+APS_API_VERSION = "2017-08-01-preview"
+IPP_API_VERSION = "2017-08-01-preview"
+JIT_API_VERSION = "2015-06-01-preview"
+STORAGE_API_VERSION = "2018-07-01"
+
+""" HELPER FUNCTIONS """
+
+
+def set_subscription_id():
+ """
+ Setting subscription ID to the context and returning it
+ """
+ headers = {"Authorization": TOKEN, "Accept": "application/json"}
+ params = {"tenant": TENANT_ID, "product": "AzureSecurityCenter"}
+ r = requests.get(DEMISTOBOT, headers=headers, params=params, verify=USE_SSL)
+ try:
+ data = r.json()
+ if r.status_code != requests.codes.ok:
+ return_error(
+ "Error in API call to Azure Security Center [{}] - {}".format(
+ r.status_code, r.text
+ )
+ )
+ sub_id = data.get("subscription_id")
+ demisto.setIntegrationContext(
+ {
+ "token": data.get("token"),
+ "stored": epoch_seconds(),
+ "subscription_id": sub_id,
+ }
+ )
+ return sub_id
+ except ValueError:
+ return_error("There was problem with your request: {}".format(r.content))
+
+
+def epoch_seconds(d=None):
+ """
+ Return the number of seconds for given date. If no date, return current.
+ """
+ if not d:
+ d = datetime.utcnow()
+ return int((d - datetime.utcfromtimestamp(0)).total_seconds())
+
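+# Illustrative check: epoch_seconds(datetime(2019, 1, 1)) returns 1546300800,
+# the seconds elapsed between the Unix epoch and 2019-01-01 00:00:00 UTC.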
+
+def get_token():
+ """
+ Check if we have a valid token and if not get one
+ """
+ token = CONTEXT.get("token")
+ stored = CONTEXT.get("stored")
+ if token and stored:
+ if epoch_seconds() - stored < 60 * 60 - 30:
+ return token
+ headers = {"Authorization": TOKEN, "Accept": "application/json"}
+ r = requests.get(
+ DEMISTOBOT,
+ headers=headers,
+ params={"tenant": TENANT_ID, "product": "AzureSecurityCenter"},
+ verify=USE_SSL,
+ )
+ data = r.json()
+ if r.status_code != requests.codes.ok:
+ return_error(
+ "Error in API call to Azure Security Center [{}] - {}".format(
+ r.status_code, r.text
+ )
+ )
+ demisto.setIntegrationContext(
+ {
+ "token": data.get("token"),
+ "stored": epoch_seconds(),
+ "subscription_id": data.get("subscription_id"),
+ }
+ )
+ return data.get("token")
+
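+# Note: a cached token is reused while it is younger than 60 * 60 - 30 seconds,
+# i.e. it is refreshed roughly 30 seconds before the expected one-hour expiry,
+# so a request never goes out with a token that is about to lapse.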
+
+def http_request(method, url_suffix, body=None, params=None, add_subscription=True):
+ """
+    Generic request to the Azure Management API
+ """
+ token = get_token()
+ headers = {
+ "Authorization": "Bearer " + token,
+ "Content-Type": "application/json",
+ "Accept": "application/json",
+ }
+
+ if add_subscription:
+ url = BASE_URL + SUBSCRIPTION_URL + url_suffix
+ else:
+ url = BASE_URL + url_suffix
+
+ r = requests.request(method, url, json=body, params=params, headers=headers)
+ if r.status_code not in {200, 201, 202, 204}:
+ return_error(
+ "Error in API call to Azure Security Center [{}] - {}".format(
+ r.status_code, r.text
+ )
+ )
+ try:
+ r = r.json()
+ return r
+ except ValueError:
+ return dict()
+
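+# Illustrative usage: with add_subscription=True (the default), a call such as
+#   http_request("GET", "/providers/Microsoft.Security/alerts?api-version=" + ALERT_API_VERSION)
+# is sent to BASE_URL + SUBSCRIPTION_URL + the given suffix.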
+
+# Format ports in JIT access policy rule to (portNum, protocol, allowedAddress, maxDuration)
+def format_jit_port_rule(ports):
+ port_array = list()
+ for port in ports:
+        # values may be unicode; cast to str for consistent formatting
+ p_num = str(port.get("number"))
+ p_src_addr = (
+ str(port.get("allowedSourceAddressPrefix"))
+ if port.get("allowedSourceAddressPrefix") != "*"
+ else "any"
+ )
+ p_protocol = str(port.get("protocol")) if port.get("protocol") != "*" else "any"
+ p_max_duration = str(port.get("maxRequestAccessDuration"))
+ port_array.append(str((p_num, p_protocol, p_src_addr, p_max_duration)))
+ return ", ".join(port_array)
+
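+# Illustrative input/output: a rule port of
+#   {"number": 22, "protocol": "*", "allowedSourceAddressPrefix": "*",
+#    "maxRequestAccessDuration": "PT3H"}
+# is rendered by format_jit_port_rule as "('22', 'any', 'any', 'PT3H')".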
+
+# Format ports in JIT access request to (portNum, allowedAddress, endTime, status)
+def format_jit_port_request(ports):
+ port_array = list()
+ for port in ports:
+        # values may be unicode; cast to str for consistent formatting
+ p_num = str(port.get("number"))
+ p_src_addr = (
+ str(port.get("allowedSourceAddressPrefix"))
+ if port.get("allowedSourceAddressPrefix") != "*"
+ else "any"
+ )
+ p_status = str(port.get("status"))
+ p_end_time = str(port.get("endTimeUtc"))
+ port_array.append(str((p_num, p_src_addr, p_end_time, p_status)))
+ return ", ".join(port_array)
+
+
+def normalize_context_key(string):
+ """Normalize context keys
+    Function normalizes the string (capitalizes the first letter and removes whitespace)
+ Args:
+ string (str):
+ Returns:
+ Normalized string
+ """
+ tmp = string[:1].upper() + string[1:]
+ return tmp.replace(" ", "")
+
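+# Illustrative: normalize_context_key("resource type") returns "Resourcetype";
+# only the first character is upper-cased before spaces are removed.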
+
+""" FUNCTIONS """
+""" Alert Start """
+
+
+def get_alert_command(args):
+ """Getting specified alert from API
+    Args:
+ args (dict): dictionary containing commands args
+ """
+ resource_group_name = args.get("resource_group_name")
+ asc_location = args.get("asc_location")
+ alert_id = args.get("alert_id")
+ alert = get_alert(resource_group_name, asc_location, alert_id)
+ final_output = list()
+
+ # Basic Property Table
+ properties = alert.get("properties")
+ if properties:
+ basic_table_output = [
+ {
+ "DisplayName": properties.get("alertDisplayName"),
+ "CompromisedEntity": properties.get("compromisedEntity"),
+ "Description": properties.get("description"),
+ "DetectedTime": properties.get("detectedTimeUtc"),
+ "ReportedTime": properties.get("reportedTimeUtc"),
+ "ReportedSeverity": properties.get("reportedSeverity"),
+ "ConfidenceScore": properties.get("confidenceScore", "None"),
+ "State": properties.get("state"),
+ "ActionTaken": properties.get("actionTaken"),
+ "CanBeInvestigated": properties.get("canBeInvestigated"),
+ "RemediationSteps": properties.get("remediationSteps"),
+ "VendorName": properties.get("vendorName"),
+ "AssociatedResource": properties.get("associatedResource"),
+ "AlertName": properties.get("alertName"),
+ "InstanceID": properties.get("instanceId", "None"),
+ "ID": alert.get("name"),
+ "ExtendedProperties": properties.get("extendedProperties"),
+ "Entities": properties.get("entities"),
+ "SubscriptionID": properties.get("subscriptionId"),
+ }
+ ]
+
+ md = tableToMarkdown(
+ "Azure Security Center - Get Alert - Basic Property",
+ basic_table_output,
+ [
+ "DisplayName",
+ "CompromisedEntity",
+ "Description",
+ "DetectedTime",
+ "ReportedTime",
+ "ReportedSeverity",
+ "ConfidenceScore",
+ "State",
+ "ActionTaken",
+ "CanBeInvestigated",
+ "RemediationSteps",
+ "VendorName",
+ "AssociatedResource",
+ "AlertName",
+ "InstanceID",
+ "ID",
+ ],
+ removeNull=True,
+ )
+
+ ec = {
+ "AzureSecurityCenter.Alert(val.ID && val.ID === obj.ID)": basic_table_output
+ }
+
+ basic_table_entry = {
+ "Type": entryTypes["note"],
+ "Contents": alert,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+ final_output.append(basic_table_entry)
+
+ # Extended Properties Table
+    if alert.get("properties") and alert.get("properties").get("extendedProperties"):
+ extended_properties = dict()
+ properties = alert.get("properties")
+ if isinstance(properties.get("extendedProperties"), dict):
+ for key, value in alert["properties"]["extendedProperties"].items():
+ extended_properties[normalize_context_key(key)] = value
+ extended_table_entry = {
+ "Type": entryTypes["note"],
+ "Contents": alert["properties"]["extendedProperties"],
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": tableToMarkdown(
+ "Azure Security Center - Get Alert - Extended Property",
+ extended_properties,
+ removeNull=True,
+ ),
+ }
+ final_output.append(extended_table_entry)
+
+ # Entities Table
+ entities = properties.get("entities")
+ if entities:
+        # entities is returned as a list of entity objects
+        if isinstance(entities, list):
+ entities_table_output = list()
+ for entity in entities:
+ entities_table_output.append(
+ {
+ "Content": ast.literal_eval(str(entity)),
+ "Type": entity["type"],
+ }
+ )
+
+ md = tableToMarkdown(
+ "Azure Security Center - Get Alert - Entity",
+ entities_table_output,
+ removeNull=True,
+ )
+
+ entities_table_entry = {
+ "Type": entryTypes["note"],
+ "Contents": alert.get("properties").get("entities"),
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ }
+ final_output.append(entities_table_entry)
+ demisto.results(final_output)
+
+
+def get_alert(resource_group_name, asc_location, alert_id):
+ """Building query
+
+ Args:
+ resource_group_name (str): ResourceGroupName
+ asc_location (str): Azure Security Center location
+ alert_id (str): Alert ID
+
+ Returns:
+ response body (dict)
+
+ """
+ cmd_url = ""
+ if resource_group_name:
+ cmd_url += "/resourceGroups/{}".format(resource_group_name)
+ cmd_url += "/providers/Microsoft.Security/locations/{}/alerts/{}?api-version={}".format(
+ asc_location, alert_id, ALERT_API_VERSION
+ )
+ response = http_request("GET", cmd_url)
+ return response
+
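+# Illustrative request built by get_alert (argument values are hypothetical):
+#   get_alert("myGroup", "westeurope", "1234") issues a GET against
+#   {SUBSCRIPTION_URL}/resourceGroups/myGroup/providers/Microsoft.Security/
+#   locations/westeurope/alerts/1234?api-version=2015-06-01-preview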
+
+def list_alerts_command(args):
+ """Getting all alerts
+
+ Args:
+ args (dict): usually demisto.args()
+ """
+ resource_group_name = args.get("resource_group_name")
+ asc_location = args.get("asc_location")
+ filter_query = args.get("filter")
+ select_query = args.get("select")
+ expand_query = args.get("expand")
+
+ alerts = list_alerts(
+ resource_group_name, asc_location, filter_query, select_query, expand_query
+ ).get("value")
+ outputs = list()
+ for alert in alerts:
+ properties = alert.get("properties")
+ if properties:
+ outputs.append(
+ {
+ "DisplayName": properties.get("alertDisplayName"),
+ "CompromisedEntity": properties.get("compromisedEntity"),
+ "DetectedTime": properties.get("detectedTimeUtc"),
+ "ReportedSeverity": properties.get("reportedSeverity"),
+ "State": properties.get("state"),
+ "ActionTaken": properties.get("actionTaken"),
+ "Description": properties.get("description"),
+ "ID": alert.get("name"),
+ }
+ )
+
+ md = tableToMarkdown(
+ "Azure Security Center - List Alerts",
+ outputs,
+ [
+ "DisplayName",
+ "CompromisedEntity",
+ "DetectedTime",
+ "ReportedSeverity",
+ "State",
+ "ActionTaken",
+ "Description",
+ "ID",
+ ],
+ removeNull=True,
+ )
+ ec = {"AzureSecurityCenter.Alert(val.ID && val.ID === obj.ID)": outputs}
+ entry = {
+ "Type": entryTypes["note"],
+ "Contents": alerts,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+ demisto.results(entry)
+
+
+def get_alerts(
+ resource_group_name, asc_location, filter_query, select_query, expand_query
+):
+ """Building query
+
+ Args:
+ resource_group_name (str): ResourceGroupName
+ asc_location (str): Azure Security Center location
+ filter_query (str): what to filter
+ select_query (str): what to select
+ expand_query (str): what to expand
+
+ Returns:
+ dict: contains response body
+ """
+ cmd_url = ""
+ if resource_group_name:
+ cmd_url += "/resourceGroups/{}/providers/Microsoft.Security".format(
+ resource_group_name
+ )
+        # ascLocation must be used together with resourceGroupName
+ if asc_location:
+ cmd_url += "/locations/{}".format(asc_location)
+ else:
+ cmd_url += "/providers/Microsoft.Security"
+ cmd_url += "/alerts?api-version={}".format(ALERT_API_VERSION)
+
+ if filter_query:
+ cmd_url += "&$filter={}".format(filter_query)
+ if select_query:
+ cmd_url += "&$select={}".format(select_query)
+ if expand_query:
+ cmd_url += "&$expand={}".format(expand_query)
+
+ response = http_request("GET", cmd_url)
+ return response
+
+
+def list_alerts(
+ resource_group_name, asc_location, filter_query, select_query, expand_query
+):
+ """Listing alerts
+
+ Args:
+ resource_group_name (str): ResourceGroupName
+ asc_location (str): Azure Security Center location
+ filter_query (str): what to filter
+ select_query (str): what to select
+ expand_query (str): what to expand
+
+ Returns:
+ dict: contains response body
+ """
+ cmd_url = ""
+ if resource_group_name:
+ cmd_url += "/resourceGroups/{}/providers/Microsoft.Security".format(
+ resource_group_name
+ )
+        # ascLocation must be used together with resourceGroupName
+ if asc_location:
+ cmd_url += "/locations/{}".format(asc_location)
+ else:
+ cmd_url += "/providers/Microsoft.Security"
+ cmd_url += "/alerts?api-version={}".format(ALERT_API_VERSION)
+
+ if filter_query:
+ cmd_url += "&$filter={}".format(filter_query)
+ if select_query:
+ cmd_url += "&$select={}".format(select_query)
+ if expand_query:
+ cmd_url += "&$expand={}".format(expand_query)
+
+ response = http_request("GET", cmd_url)
+ return response
+
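+# Illustrative OData usage (the filter expression is a hypothetical example):
+#   list_alerts(None, None, "properties/state eq 'Active'", None, None)
+# appends &$filter=properties/state eq 'Active' to the alerts URL.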
+
+def update_alert_command(args):
+ """Update given alert
+
+ Args:
+ args (dict): usually demisto.args()
+ """
+ resource_group_name = args.get("resource_group_name")
+ asc_location = args.get("asc_location")
+ alert_id = args.get("alert_id")
+ alert_update_action_type = args.get("alert_update_action_type")
+ response = update_alert(
+ resource_group_name, asc_location, alert_id, alert_update_action_type
+ )
+ outputs = {"ID": response.get("id"), "ActionTaken": alert_update_action_type}
+
+ ec = {"AzureSecurityCenter.Alert(val.ID && val.ID === obj.ID)": outputs}
+
+ demisto.results(
+ {
+ "Type": entryTypes["note"],
+ "Contents": "Alert - {} has been set to {}.".format(
+ alert_id, alert_update_action_type
+ ),
+ "ContentsFormat": formats["text"],
+ "EntryContext": ec,
+ }
+ )
+
+
+def update_alert(resource_group_name, asc_location, alert_id, alert_update_action_type):
+ """Building query
+
+ Args:
+ resource_group_name (str): Resource Name Group
+ asc_location (str): Azure Security Center Location
+ alert_id (str): Alert ID
+        alert_update_action_type (str): The update action to apply to the alert
+
+ Returns:
+ dict: response body
+ """
+ cmd_url = ""
+ if resource_group_name:
+ cmd_url += "/resourceGroups/{}".format(resource_group_name)
+ cmd_url += "/providers/Microsoft.Security/locations/{}/alerts/{}/{}?api-version={}".format(
+ asc_location, alert_id, alert_update_action_type, ALERT_API_VERSION
+ )
+ return http_request("POST", cmd_url)
+
+
+""" Alert End """
+
+""" Location Start """
+
+
+def list_locations_command():
+ """Getting all locations
+ """
+ locations = list_locations().get("value")
+ outputs = list()
+ if locations:
+ for location in locations:
+ if location.get("properties") and location.get("properties").get(
+ "homeRegionName"
+ ):
+ home_region_name = location.get("properties").get("homeRegionName")
+ else:
+ home_region_name = None
+ outputs.append(
+ {
+ "HomeRegionName": home_region_name,
+ "Name": location.get("name"),
+ "ID": location.get("id"),
+ }
+ )
+ md = tableToMarkdown(
+ "Azure Security Center - List Locations",
+ outputs,
+ ["HomeRegionName", "Name", "ID"],
+ removeNull=True,
+ )
+ ec = {"AzureSecurityCenter.Location(val.ID && val.ID === obj.ID)": outputs}
+ entry = {
+ "Type": entryTypes["note"],
+ "Contents": locations,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+ demisto.results(entry)
+ else:
+ demisto.results("No locations found")
+
+
+def list_locations():
+ """Building query
+
+ Returns:
+ dict: response body
+ """
+ cmd_url = "/providers/Microsoft.Security/locations?api-version={}".format(
+ LOCATION_API_VERSION
+ )
+ response = http_request("GET", cmd_url)
+ return response
+
+
+""" Location End """
+
+""" Advanced Threat Protection Start """
+
+
+def update_atp_command(args):
+ """Updating given Advanced Threat Protection (enable/disable)
+
+ Args:
+ args (dict): usually demisto.args()
+ """
+ resource_group_name = args.get("resource_group_name")
+ setting_name = args.get("setting_name")
+ is_enabled = args.get("is_enabled")
+ storage_account = args.get("storage_account")
+ response = update_atp(
+ resource_group_name, storage_account, setting_name, is_enabled
+ )
+ outputs = {
+ "ID": response.get("id"),
+ "Name": response.get("name"),
+ "IsEnabled": response.get("properties").get("is_enabled"),
+ }
+ md = tableToMarkdown(
+ "Azure Security Center - Update Advanced Threat Detection Setting",
+ outputs,
+ ["ID", "Name", "IsEnabled"],
+ removeNull=True,
+ )
+ ec = {
+ "AzureSecurityCenter.AdvancedThreatProtection(val.ID && val.ID === obj.ID)": outputs
+ }
+
+ demisto.results(
+ {
+ "Type": entryTypes["note"],
+ "Contents": response,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+ )
+
+
+def update_atp(resource_group_name, storage_account, setting_name, is_enabled):
+ """Building query
+
+ Args:
+ resource_group_name (str): Resource Group Name
+        storage_account (str): Storage account
+ setting_name (str): Setting Name
+ is_enabled (str): true/false
+
+ Returns:
+        dict: response body
+ """
+ cmd_url = (
+ "/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}"
+ "/providers/Microsoft.Security/advancedThreatProtectionSettings/{}?api-version={}".format(
+ resource_group_name, storage_account, setting_name, ATP_API_VERSION
+ )
+ )
+ data = {
+ "id": "/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage"
+ "/storageAccounts/{}/providers/Microsoft.Security/advancedThreatProtectionSettings/{}".format(
+ SUBSCRIPTION_ID, resource_group_name, storage_account, setting_name
+ ),
+ "name": setting_name,
+ "type": "Microsoft.Security/advancedThreatProtectionSettings",
+ "properties": {"is_enabled": is_enabled},
+ }
+ response = http_request("PUT", cmd_url, body=data)
+ return response
+
+
+def get_atp_command(args):
+ """Get given Advanced Threat Protection settings
+
+ Args:
+ args (dict): usually demisto.args()
+ """
+ resource_group_name = args.get("resource_group_name")
+ setting_name = args.get("setting_name")
+ storage_account = args.get("storage_account")
+ response = get_atp(resource_group_name, storage_account, setting_name)
+ outputs = {
+ "ID": response.get("id"),
+ "Name": response.get("name"),
+ "IsEnabled": response["properties"]["isEnabled"]
+ if response.get("properties") and response.get("properties").get("isEnabled")
+ else None,
+ }
+ md = tableToMarkdown(
+ "Azure Security Center - Get Advanced Threat Detection Setting",
+ outputs,
+ ["ID", "Name", "IsEnabled"],
+ removeNull=True,
+ )
+ ec = {
+ "AzureSecurityCenter.AdvancedThreatProtection(val.ID && val.ID === obj.ID)": outputs
+ }
+ demisto.results(
+ {
+ "Type": entryTypes["note"],
+ "Contents": response,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+ )
+
+
+def get_atp(resource_group_name, storage_account, setting_name):
+ """Building query
+
+ Args:
+ resource_group_name (str): Resource Group Name
+        storage_account (str): Storage account
+ setting_name (str): Setting Name
+
+ Returns:
+        dict: response body
+ """
+ cmd_url = (
+ "/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts"
+ "/{}/providers/Microsoft.Security/advancedThreatProtectionSettings/{}?api-version={}".format(
+ resource_group_name, storage_account, setting_name, ATP_API_VERSION
+ )
+ )
+ response = http_request("GET", cmd_url)
+ return response
+
+
+""" Advanced Threat Protection End """
+
+""" Auto Provisioning Settings Start """
+
+
+def update_aps_command(args):
+ """Updating Analytics Platform System
+
+ Args:
+ args (dict): usually demisto.args()
+ """
+ setting_name = args.get("setting_name")
+ auto_provision = args.get("auto_provision")
+ setting = update_aps(setting_name, auto_provision)
+ outputs = [
+ {
+ "Name": setting.get("name"),
+ "AutoProvision": setting["properties"]["auto_provision"]
+ if setting.get("properties")
+ and setting.get("properties").get("auto_provision")
+ else None,
+ "ID": setting.get("id"),
+ }
+ ]
+
+ md = tableToMarkdown(
+ "Azure Security Center - Update Auto Provisioning Setting",
+ outputs,
+ ["Name", "AutoProvision", "ID"],
+ removeNull=True,
+ )
+ ec = {
+ "AzureSecurityCenter.AutoProvisioningSetting(val.ID && val.ID === obj.ID)": outputs
+ }
+ entry = {
+ "Type": entryTypes["note"],
+ "Contents": setting,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+ demisto.results(entry)
+
+
+def update_aps(setting_name, auto_provision):
+ """Building query
+
+ Args:
+ setting_name (str): Setting name
+ auto_provision (str): Auto provision setting (On/Off)
+
+ Returns:
+ dict: response body
+ """
+ cmd_url = "/providers/Microsoft.Security/autoProvisioningSettings/{}?api-version={}".format(
+ setting_name, APS_API_VERSION
+ )
+ data = {"properties": {"auto_provision": auto_provision}}
+ response = http_request("PUT", cmd_url, body=data)
+ return response
+
+
+def list_aps_command():
+ """List all Analytics Platform System
+
+ """
+ settings = list_aps().get("value")
+ outputs = []
+ for setting in settings:
+ outputs.append(
+ {
+ "Name": setting.get("name"),
+ "AutoProvision": setting.get("properties").get("autoProvision")
+ if setting.get("properties")
+ and setting.get("properties").get("autoProvision")
+ else None,
+ "ID": setting.get("id"),
+ }
+ )
+
+ md = tableToMarkdown(
+ "Azure Security Center - List Auto Provisioning Settings",
+ outputs,
+ ["Name", "AutoProvision", "ID"],
+ removeNull=True,
+ )
+
+ ec = {
+ "AzureSecurityCenter.AutoProvisioningSetting(val.ID && val.ID === obj.ID)": outputs
+ }
+
+ entry = {
+ "Type": entryTypes["note"],
+ "Contents": settings,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+ demisto.results(entry)
+
+
+def list_aps():
+ """Build query
+
+ Returns:
+ dict: response body
+ """
+ cmd_url = "/providers/Microsoft.Security/autoProvisioningSettings?api-version={}".format(
+ APS_API_VERSION
+ )
+ response = http_request("GET", cmd_url)
+ return response
+
+
+def get_aps_command(args):
+ """Get given Analytics Platform System setting
+
+ Args:
+ args (dict): usually demisto.args()
+ """
+ setting_name = args.get("setting_name")
+ setting = get_aps(setting_name)
+ outputs = [
+ {
+ "Name": setting.get("name"),
+ "AutoProvision": setting.get("properties").get("autoProvision")
+ if setting.get("properties")
+ and setting.get("properties").get("autoProvision")
+ else None,
+ "ID": setting["id"],
+ }
+ ]
+ md = tableToMarkdown(
+ "Azure Security Center - Get Auto Provisioning Setting",
+ outputs,
+ ["Name", "AutoProvision", "ID"],
+ removeNull=True,
+ )
+ ec = {
+ "AzureSecurityCenter.AutoProvisioningSetting(val.ID && val.ID === obj.ID)": outputs
+ }
+
+ entry = {
+ "Type": entryTypes["note"],
+ "Contents": setting,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+ demisto.results(entry)
+
+
+def get_aps(setting_name):
+ """Build query
+
+ Args:
+ setting_name: Setting name
+
+ Returns:
+ dict: response body
+ """
+ cmd_url = "/providers/Microsoft.Security/autoProvisioningSettings/{}?api-version={}".format(
+ setting_name, APS_API_VERSION
+ )
+ response = http_request("GET", cmd_url)
+ return response
+
+
+""" Auto Provisioning Settings End """
+
+""" Information Protection Policies Start """
+
+
+def list_ipp_command(args):
+ """Listing all Internet Presence Provider
+
+ Args:
+ args (dict): usually demisto.args()
+ """
+ management_group = args.get("management_group")
+ policies = list_ipp(management_group).get("value")
+ outputs = list()
+ if policies:
+ for policy in policies:
+ if policy.get("properties") and policy.get("properties").get("labels"):
+ label_names = ", ".join(
+ [
+ label.get("displayName")
+ for label in policy["properties"]["labels"].values()
+ ]
+ )
+ information_type_names = ", ".join(
+ [
+ it["displayName"]
+ for it in policy["properties"]["informationTypes"].values()
+ ]
+ )
+ else:
+ label_names, information_type_names = None, None
+ outputs.append(
+ {
+ "Name": policy.get("name"),
+ "Labels": label_names,
+ "InformationTypeNames": information_type_names,
+ "InformationTypes": policy.get("properties").get("informationTypes")
+ if policy.get("properties")
+ and policy.get("properties").get("informationTypes")
+ else None,
+ "ID": policy["id"],
+ }
+ )
+ md = tableToMarkdown(
+ "Azure Security Center - List Information Protection Policies",
+ outputs,
+ ["Name", "Labels", "InformationTypeNames", "ID"],
+ removeNull=True,
+ )
+
+ ec = {
+ "AzureSecurityCenter.InformationProtectionPolicy(val.ID && val.ID === obj.ID)": outputs
+ }
+
+ entry = {
+ "Type": entryTypes["note"],
+ "Contents": policies,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+ demisto.results(entry)
+ else:
+ demisto.results("no ")
+
+
+def list_ipp(management_group=None):
+ """Building query
+
+ Args:
+        management_group: Management group to query (optional)
+
+ Returns:
+ dict: response body
+
+ """
+ cmd_url = str()
+ scope_is_subscription = True
+ if management_group:
+ cmd_url += "/providers/Microsoft.Management/managementGroups/{}".format(
+ management_group
+ )
+ scope_is_subscription = False
+ cmd_url += "/providers/Microsoft.Security/informationProtectionPolicies?api-version={}".format(
+ IPP_API_VERSION
+ )
+ response = http_request("GET", cmd_url, add_subscription=scope_is_subscription)
+ return response
+
+
+def get_ipp_command(args):
+ """Getting Internet Presence Provider information
+ Args:
+ args (dict): usually demisto.args()
+ """
+ policy_name = args.get("policy_name")
+ management_group = args.get("management_group")
+ policy = get_ipp(policy_name, management_group)
+ properties = policy.get("properties")
+ labels = properties.get("labels")
+ if properties and isinstance(labels, dict):
+ # Basic Property table
+ labels = ", ".join(
+ [
+ (str(label.get("displayName")) + str(label.get("enabled")))
+ for label in labels.values()
+ ]
+ )
+ basic_table_output = [
+ {"Name": policy.get("name"), "Labels": labels, "ID": policy.get("id")}
+ ]
+
+ md = tableToMarkdown(
+ "Azure Security Center - Get Information Protection Policy - Basic Property",
+ basic_table_output,
+ ["Name", "Labels", "ID"],
+ removeNull=True,
+ )
+ ec = {
+ "AzureSecurityCenter.InformationProtectionPolicy(val.ID && val.ID === obj.ID)": basic_table_output
+ }
+
+ basic_table_entry = {
+ "Type": entryTypes["note"],
+ "Contents": policy,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+
+ # Information Type table
+ info_type_table_output = list()
+ for information_type_data in properties.get("informationTypes").values():
+ keywords = ", ".join(
+ [
+ (
+ str(keyword.get("displayName"))
+ + str(keyword.get("custom"))
+ + str(keyword.get("canBeNumeric"))
+ )
+ for keyword in information_type_data.get("keywords")
+ ]
+ )
+ info_type_table_output.append(
+ {
+ "DisplayName": information_type_data.get("displayname"),
+ "Enabled": information_type_data("enabled"),
+ "Custom": information_type_data("custom"),
+ "Keywords": keywords,
+ "RecommendedLabelID": information_type_data("recommendedLabelId"),
+ }
+ )
+ md = tableToMarkdown(
+ "Azure Security Center - Get Information Protection Policy - Information Types",
+ info_type_table_output,
+ ["DisplayName", "Enabled", "Custom", "Keywords", "RecommendedLabelID"],
+ removeNull=True,
+ )
+ info_type_table_entry = {
+ "Type": entryTypes["note"],
+ "Contents": properties.get("informationTypes"),
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ }
+ demisto.results([basic_table_entry, info_type_table_entry])
+ else:
+ demisto.results("No properties found in {}".format(management_group))
+
+
+def get_ipp(policy_name, management_group):
+ """Building query
+
+ Args:
+ policy_name (str): Policy name
+        management_group (str): Management group
+
+    Returns:
+        dict: response body
+ """
+ cmd_url = ""
+    scope_is_subscription = True
+ if management_group:
+ cmd_url += "/providers/Microsoft.Management/managementGroups/{}".format(
+ management_group
+ )
+        scope_is_subscription = False
+ cmd_url += "/providers/Microsoft.Security/informationProtectionPolicies/{}?api-version={}".format(
+ policy_name, IPP_API_VERSION
+ )
+ response = http_request("GET", cmd_url, add_subscription=score_is_subscription)
+ return response
+
+
+""" Information Protection Policies End """
+
+""" Jit Network Access Policies Start """
+
+
+def list_jit_command(args):
+ """Lists all Just-in-time Virtual Machines
+
+ Args:
+ args (dict): usually demisto.args()
+ """
+ asc_location = args.get("asc_location")
+ resource_group_name = args.get("resource_group_name")
+ policies = list_jit(asc_location, resource_group_name)["value"]
+ outputs = []
+ for policy in policies:
+ # summarize rules in (VMName: allowPort,...) format
+ if policy.get("properties") and policy.get("properties").get("virtualMachines"):
+ rules_data = policy["properties"]["virtualMachines"]
+ rules_summary_array = []
+ for rule in rules_data:
+ ID = rule.get("id")
+ if isinstance(ID, str):
+ vm_name = ID.split("/")[-1]
+ else:
+ vm_name = None # type: ignore
+ vm_ports = [str(port.get("number")) for port in rule.get("ports")]
+ rules_summary_array.append(
+ "({}: {})".format(vm_name, ", ".join(vm_ports))
+ )
+ rules = ", ".join(rules_summary_array)
+
+ outputs.append(
+ {
+ "Name": policy.get("name"),
+ "Rules": rules,
+ "Location": policy.get("location"),
+ "Kind": policy.get("kind"),
+ "ID": policy.get("id"),
+ }
+ )
+ md = tableToMarkdown(
+ "Azure Security Center - List JIT Access Policies",
+ outputs,
+ ["Name", "Rules", "Location", "Kind"],
+ removeNull=True,
+ )
+ ec = {"AzureSecurityCenter.JITPolicy(val.ID && val.ID === obj.ID)": outputs}
+ entry = {
+ "Type": entryTypes["note"],
+ "Contents": policies,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+ demisto.results(entry)
+
+
+def list_jit(asc_location, resource_group_name):
+ """Building query
+
+ Args:
+ asc_location: Machine location
+ resource_group_name: Resource group name
+
+ Returns:
+ dict: response body
+ """
+ cmd_url = ""
+ if resource_group_name:
+ cmd_url += "/resourceGroups/{}".format(resource_group_name)
+ if asc_location:
+ cmd_url += "/providers/Microsoft.Security/locations/{}".format(asc_location)
+ cmd_url += "/providers/Microsoft.Security/jitNetworkAccessPolicies?api-version={}".format(
+ JIT_API_VERSION
+ )
+ response = http_request("GET", cmd_url)
+ return response
+
+
+def get_jit_command(args):
+ """Getting given Just-in-time machine
+
+ Args:
+ args (dict): usually demisto.args()
+ """
+ policy_name = args.get("policy_name")
+ asc_location = args.get("asc_location")
+ resource_group_name = args.get("resource_group_name")
+ policy = get_jit(policy_name, asc_location, resource_group_name)
+
+ # Property table
+ property_table_output = [
+ {
+ "Name": policy.get("name"),
+ "Kind": policy.get("kind"),
+ "ProvisioningState": policy.get("properties").get("provisioningState")
+ if policy.get("properties")
+ and policy.get("properties").get("provisioningState")
+ else None,
+ "Location": policy.get("location"),
+ "Rules": policy.get("properties").get("virtualMachines")
+ if policy.get("properties")
+ and policy.get("properties").get("virtualMachines")
+ else None,
+ "Requests": policy.get("properties").get("requests")
+ if policy.get("properties") and policy.get("properties").get("requests")
+ else None,
+ "ID": policy.get("id"),
+ }
+ ]
+ md = tableToMarkdown(
+ "Azure Security Center - Get JIT Access Policy - Properties",
+ property_table_output,
+ ["Name", "Kind", "ProvisioningState", "Location", "ID"],
+ removeNull=True,
+ )
+
+ ec = {
+ "AzureSecurityCenter.JITPolicy(val.ID && val.ID === obj.ID)": property_table_output
+ }
+
+ property_table_entry = {
+ "Type": entryTypes["note"],
+ "Contents": policy,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+
+ # Rules table
+ rules_table_output = list()
+ properties = policy.get("properties")
+ virtual_machines = properties.get("virtualMachines")
+ if isinstance(properties, dict) and virtual_machines:
+ for rule in virtual_machines:
+ rules_table_output.append(
+ {
+ "VmID": rule.get("id"),
+ "Ports": format_jit_port_rule(rule.get("ports")),
+ }
+ )
+ md = tableToMarkdown(
+ "Azure Security Center - Get JIT Access Policy - Rules",
+ rules_table_output,
+ ["VmID", "Ports"],
+ removeNull=True,
+ )
+ rules_table_entry = {
+ "Type": entryTypes["note"],
+ "Contents": properties.get("virtualMachines"),
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ }
+
+ # Requests table
+ requests_table_output = list()
+
+ for requestData in properties.get("requests", []):
+ vms = list()
+ for vm in requestData.get("virtualMachines"):
+ vm_name = vm["id"].split("/")[-1]
+ vm_ports = format_jit_port_request(vm.get("ports"))
+ vms.append("[{}: {}]".format(vm_name, vm_ports))
+ requests_table_output.append(
+ {
+ "VirtualMachines": ", ".join(vms),
+ "Requestor": requestData.get("requestor")
+ if requestData.get("requestor")
+ else "service-account",
+ "StartTimeUtc": requestData.get("startTimeUtc"),
+ }
+ )
+ md = tableToMarkdown(
+ "Azure Security Center - Get JIT Access Policy - Requests",
+ requests_table_output,
+ ["VirtualMachines", "Requestor", "StartTimeUtc"],
+ removeNull=True,
+ )
+
+ requests_table_entry = {
+ "Type": entryTypes["note"],
+ "Contents": properties.get("requests"),
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ }
+ demisto.results([property_table_entry, rules_table_entry, requests_table_entry])
+
+
+def get_jit(policy_name, asc_location, resource_group_name):
+ """Building query
+
+ Args:
+ policy_name: Policy name
+ asc_location: Machine location
+ resource_group_name: Resource name group
+
+ Returns:
+ dict: response body
+ """
+ cmd_url = (
+ "/resourceGroups/{}/providers/Microsoft.Security/locations/{}/jitNetworkAccessPolicies/"
+ "{}?api-version={}".format(
+ resource_group_name, asc_location, policy_name, JIT_API_VERSION
+ )
+ )
+ response = http_request("GET", cmd_url)
+ return response
+
+
+def initiate_jit_command(args):
+ resource_group_name = args.get("resource_group_name")
+ asc_location = args.get("asc_location")
+ policy_name = args.get("policy_name")
+ vm_id = args.get("vmID")
+ port = args.get("port")
+ source_address = args.get("source_address")
+ duration = args.get("duration")
+ response = initiate_jit(
+ resource_group_name,
+ asc_location,
+ policy_name,
+ vm_id,
+ port,
+ source_address,
+ duration,
+ )
+ policy_id = (
+ "/subscriptions/{}/resourceGroups/{}/providers/"
+ "Microsoft.Security/locations/{}/jitNetworkAccessPolicies/{}".format(
+ SUBSCRIPTION_ID, resource_group_name, asc_location, policy_name
+ )
+ )
+ virtual_machines = response.get("virtualMachines")
+ if virtual_machines and len(virtual_machines) > 0:
+ machine = virtual_machines[0]
+ port = machine.get("ports")[0]
+
+ outputs = {
+ "VmID": machine.get("id"),
+ "PortNum": port.get("number"),
+ "AllowedSourceAddress": port.get("allowedSourceAddressPrefix"),
+ "EndTimeUtc": port.get("endTimeUtc"),
+ "Status": port.get("status"),
+ "Requestor": response.get("requestor"),
+ "PolicyID": policy_id,
+ }
+
+ md = tableToMarkdown(
+ "Azure Security Center - Initiate JIT Access Request",
+ outputs,
+ [
+ "VmID",
+ "PortNum",
+ "AllowedSourceAddress",
+ "EndTimeUtc",
+ "Status",
+ "Requestor",
+ ],
+ removeNull=True,
+ )
+
+        ec = {
+            "AzureSecurityCenter.JITPolicy(val.ID && val.ID === obj.{})"
+            ".Initiate(val.endTimeUtc === obj.EndTimeUtc)".format(policy_id): outputs
+        }
+
+ demisto.results(
+ {
+ "Type": entryTypes["note"],
+ "Contents": response,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+ )
+
+
+def initiate_jit(
+ resource_group_name,
+ asc_location,
+ policy_name,
+ vm_id,
+ port,
+ source_address,
+ duration,
+):
+ """Starting new Just-in-time machine
+
+ Args:
+ resource_group_name: Resource group name
+ asc_location: Machine location
+ policy_name: Policy name
+ vm_id: Virtual Machine ID
+ port: ports to be used
+ source_address: Source address
+        duration: Requested duration of access
+
+ Returns:
+ dict: response body
+ """
+ cmd_url = (
+ "/resourceGroups/{}/providers/Microsoft.Security/"
+ "locations/{}/jitNetworkAccessPolicies/{}/initiate?api-version={}".format(
+ resource_group_name, asc_location, policy_name, JIT_API_VERSION
+ )
+ )
+ # only supports init access for one vm and one port now
+ data = {
+ "virtualMachines": [
+ {
+ "ID": vm_id,
+ "ports": [
+ {
+ "number": port,
+ "duration": duration,
+ "allowedSourceAddressPrefix": source_address,
+ }
+ ],
+ }
+ ]
+ }
+ response = http_request("POST", cmd_url, body=data)
+ return response
+
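+# Illustrative request body produced by initiate_jit (values are hypothetical;
+# duration is an ISO 8601 duration string such as "PT1H"):
+#   {"virtualMachines": [{"id": "/subscriptions/<sub>/.../virtualMachines/vm1",
+#                         "ports": [{"number": 22, "duration": "PT1H",
+#                                    "allowedSourceAddressPrefix": "10.0.0.0/24"}]}]}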
+
+def delete_jit_command(args):
+ """Deletes a Just-in-time machine
+
+ Args:
+ args (dict): usually demisto.args()
+ """
+ asc_location = args.get("asc_location")
+ resource_group_name = args.get("resource_group_name")
+ policy_name = args.get("policy_name")
+ delete_jit(asc_location, resource_group_name, policy_name)
+
+ policy_id = (
+ "/subscriptions/{}/resourceGroups/"
+ "{}/providers/Microsoft.Security/locations/{}/jitNetworkAccessPolicies/{}".format(
+ SUBSCRIPTION_ID, resource_group_name, asc_location, policy_name
+ )
+ )
+
+ outputs = {"ID": policy_id, "Action": "deleted"}
+
+ ec = {"AzureSecurityCenter.JITPolicy(val.ID && val.ID === obj.ID)": outputs}
+ demisto.results(
+ {
+ "Type": entryTypes["note"],
+ "Contents": "Policy - {} has been deleted sucessfully.".format(policy_name),
+ "ContentsFormat": formats["text"],
+ "EntryContext": ec,
+ }
+ )
+
+
+def delete_jit(asc_location, resource_group_name, policy_name):
+ """Building query
+
+ Args:
+ asc_location: Machine location
+ resource_group_name: Resource group name
+ policy_name: Policy name
+ """
+ cmd_url = (
+ "/resourceGroups/{}/providers/Microsoft.Security/"
+ "locations/{}/jitNetworkAccessPolicies/{}?api-version={}"
+ "".format(resource_group_name, asc_location, policy_name, JIT_API_VERSION)
+ )
+ http_request("DELETE", cmd_url)
+
+
+""" Jit Network Access Policies End """
+
+""" Storage Start """
+
+
+# This command is part of the Security Center integration because the
+# ATP-related commands require storage account information
+def list_sc_storage_command():
+    """Listing all storage accounts under the subscription
+
+ """
+ accounts = list_sc_storage().get("value")
+ outputs = list()
+ for account in accounts:
+ account_id_array = account.get("id", str()).split("/")
+ resource_group_name = account_id_array[
+ account_id_array.index("resourceGroups") + 1
+ ]
+ outputs.append(
+ {
+ "Name": account.get("name"),
+ "ResourceGroupName": resource_group_name,
+ "Location": account.get("location"),
+ "ID": account.get("id"),
+ }
+ )
+ md = tableToMarkdown(
+ "Azure Security Center - List Storage Accounts",
+ outputs,
+ ["Name", "ResourceGroupName", "Location"],
+ removeNull=True,
+ )
+ ec = {"AzureSecurityCenter.Storage(val.ID && val.ID === obj.ID)": outputs}
+
+ entry = {
+ "Type": entryTypes["note"],
+ "Contents": accounts,
+ "ContentsFormat": formats["json"],
+ "ReadableContentsFormat": formats["markdown"],
+ "HumanReadable": md,
+ "EntryContext": ec,
+ }
+ demisto.results(entry)
+
+
+def list_sc_storage():
+ """Building query
+
+ Returns:
+ dict: response body
+
+ """
+ cmd_url = "/providers/Microsoft.Storage/storageAccounts?api-version={}".format(
+ STORAGE_API_VERSION
+ )
+ response = http_request("GET", cmd_url)
+ return response
+
+
+""" Storage End """
+
+""" Functions start """
+if not SUBSCRIPTION_ID:
+ SUBSCRIPTION_ID = set_subscription_id()
+ SUBSCRIPTION_URL = "/subscriptions/{}".format(SUBSCRIPTION_ID)
+
+try:
+ if demisto.command() == "test-module":
+        # If the command fails, an error is raised by the request itself
+ list_locations()
+ demisto.results("ok")
+ elif demisto.command() == "azure-sc-get-alert":
+ get_alert_command(demisto.args())
+ elif demisto.command() == "azure-sc-list-alert":
+ list_alerts_command(demisto.args())
+ elif demisto.command() == "azure-sc-update-alert":
+ update_alert_command(demisto.args())
+ elif demisto.command() == "azure-sc-list-location":
+ list_locations_command()
+ elif demisto.command() == "azure-sc-update-atp":
+ update_atp_command(demisto.args())
+ elif demisto.command() == "azure-sc-get-atp":
+ get_atp_command(demisto.args())
+ elif demisto.command() == "azure-sc-update-aps":
+ update_aps_command(demisto.args())
+ elif demisto.command() == "azure-sc-list-aps":
+ list_aps_command()
+ elif demisto.command() == "azure-sc-get-aps":
+ get_aps_command(demisto.args())
+ elif demisto.command() == "azure-sc-list-ipp":
+ list_ipp_command(demisto.args())
+ elif demisto.command() == "azure-sc-get-ipp":
+ get_ipp_command(demisto.args())
+ elif demisto.command() == "azure-sc-list-jit":
+ list_jit_command(demisto.args())
+ elif demisto.command() == "azure-sc-get-jit":
+ get_jit_command(demisto.args())
+ elif demisto.command() == "azure-sc-initiate-jit":
+ initiate_jit_command(demisto.args())
+ elif demisto.command() == "azure-sc-delete-jit":
+ delete_jit_command(demisto.args())
+ elif demisto.command() == "azure-sc-list-storage":
+ list_sc_storage_command()
+except Exception as e:
+    LOG(str(e))
+ LOG.print_log()
+ raise
diff --git a/Integrations/AzureSecurityCenter/AzureSecurityCenter.yml b/Integrations/AzureSecurityCenter/AzureSecurityCenter.yml
new file mode 100644
index 000000000000..75dfc45f67c9
--- /dev/null
+++ b/Integrations/AzureSecurityCenter/AzureSecurityCenter.yml
@@ -0,0 +1,280 @@
+category: Analytics & SIEM
+commonfields:
+ id: Azure Security Center
+ version: -1
+configuration:
+- defaultvalue: https://management.azure.com
+ display: Microsoft Azure Management URL
+ name: server_url
+ required: false
+ type: 0
+- display: Tenant ID (received from the admin consent - see Detailed Instructions
+ (?) section)
+ name: tenant_id
+ required: true
+ type: 4
+- display: Token (received from the admin consent - see detailed instructions (?))
+ name: token
+ required: true
+ type: 4
+- display: Trust any certificate (unsecure)
+ name: unsecure
+ required: false
+ type: 8
+- display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+description: Unified security management and advanced threat protection across hybrid
+ cloud workloads.
+display: Azure Security Center
+name: Azure Security Center
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The name of the resource group within the user's subscription.
+ The name is case insensitive.
+ isArray: false
+ name: resource_group_name
+ required: false
+ secret: false
+ - default: false
+ description: The location where Azure Security Center stores the data of the
+      subscription. Run the 'azure-sc-list-location' command to get the ascLocation.
+ This command requires the resourceGroupName argument.
+ isArray: false
+ name: asc_location
+ required: false
+ secret: false
+ - default: false
+ description: OData filter
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ - default: false
+ description: OData select
+ isArray: false
+ name: select
+ required: false
+ secret: false
+ - default: false
+ description: OData expand
+ isArray: false
+ name: expand
+ required: false
+ secret: false
+ description: Lists alerts for the subscription according to the specified filters.
+ execution: false
+ name: azure-sc-list-alert
+ outputs:
+ - contextPath: AzureSecurityCenter.Alert.AlertDisplayName
+ description: Alert display name
+ type: string
+ - contextPath: AzureSecurityCenter.Alert.CompromisedEntity
+ description: The entity on which the incident occurred
+ type: string
+ - contextPath: AzureSecurityCenter.Alert.DetectedTimeUtc
+ description: Time the vendor detected the incident
+ type: date
+ - contextPath: AzureSecurityCenter.Alert.ReportedSeverity
+ description: Estimated severity of this alert
+ type: string
+ - contextPath: AzureSecurityCenter.Alert.State
+ description: Alert state (Active, Dismissed, etc.)
+ type: string
+ - contextPath: AzureSecurityCenter.Alert.ID
+ description: Alert ID
+ type: string
+ - arguments:
+ - default: false
+ description: Resource group name
+ isArray: false
+ name: resource_group_name
+ required: true
+ secret: false
+ - default: true
+ defaultValue: current
+ description: Name of the Advanced Threat Detection setting, default is 'current'.
+ isArray: false
+ name: setting_name
+ required: false
+ secret: false
+ - default: false
+ description: Storage name in your Azure account
+ isArray: false
+ name: storage_account
+ required: true
+ secret: false
+ - default: false
+    description: Whether to enable Advanced Threat Protection (true/false).
+ isArray: false
+ name: is_enabled
+ required: true
+ secret: false
+ description: Updates Advanced Threat Detection settings.
+ execution: false
+ name: azure-sc-update-atp
+ outputs:
+ - contextPath: AzureSecurityCenter.AdvancedThreatProtection.ID
+ description: Resource ID
+ type: string
+ - contextPath: AzureSecurityCenter.AdvancedThreatProtection.Name
+ description: Resource Name
+ type: string
+ - contextPath: AzureSecurityCenter.AdvancedThreatProtection.IsEnabled
+ description: Indicates whether Advanced Threat Protection is enabled
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the resource group.
+ isArray: false
+ name: resource_group_name
+ required: true
+ secret: false
+ - default: false
+ defaultValue: current
+ description: Name of Advanced Threat Detection setting, default setting's name
+ is 'current'.
+ isArray: false
+ name: setting_name
+ required: false
+ secret: false
+ - default: false
+ description: Name of a storage in your azure account.
+ isArray: false
+ name: storage_account
+ required: true
+ secret: false
+ description: Returns the Advanced Threat Protection setting.
+ execution: false
+ name: azure-sc-get-atp
+ outputs:
+ - contextPath: AzureSecurityCenter.AdvancedThreatProtection.ID
+ description: Resource ID
+ type: string
+ - contextPath: AzureSecurityCenter.AdvancedThreatProtection.Name
+ description: Resource name
+ type: string
+ - contextPath: AzureSecurityCenter.AdvancedThreatProtection.IsEnabled
+ description: Indicates whether Advanced Threat Protection is enabled
+ type: string
+ - arguments:
+ - default: true
+ defaultValue: default
+ description: Name of the auto provisioning setting, default setting's name is
+ 'default'
+ isArray: false
+ name: setting_name
+ required: true
+ secret: false
+ - default: false
+ description: Describes the type of security agent provisioning action to take
+ (On or Off)
+ isArray: false
+ name: auto_provision
+ required: true
+ secret: false
+ description: Updates a specific auto provisioning setting.
+ execution: false
+ name: azure-sc-update-aps
+ outputs:
+ - contextPath: AzureSecurityCenter.AutoProvisioningSetting.Name
+ description: Setting display name
+ type: string
+ - contextPath: AzureSecurityCenter.AutoProvisioningSetting.AutoProvision
+    description: The type of security agent provisioning action to take (On or
+      Off)
+ type: string
+ - contextPath: AzureSecurityCenter.AutoProvisioningSetting.ID
+ description: Setting resource ID
+ type: string
+ - arguments:
+ - default: false
+ defaultValue: default
+ description: Name of the auto provisioning setting
+ isArray: false
+ name: setting_name
+ required: true
+ secret: false
+ description: Returns details of a specific auto provisioning setting.
+ execution: false
+ name: azure-sc-get-aps
+ outputs:
+ - contextPath: AzureSecurityCenter.AutoProvisioningSetting.Name
+ description: Setting display name
+ type: string
+ - contextPath: AzureSecurityCenter.AutoProvisioningSetting.AutoProvision
+    description: The type of security agent provisioning action to take (On or
+      Off)
+ type: string
+ - contextPath: AzureSecurityCenter.AutoProvisioningSetting.ID
+    description: Setting resource ID
+ type: string
+ - description: Lists auto provisioning settings in the subscription.
+ execution: false
+ name: azure-sc-list-aps
+ outputs:
+ - contextPath: AzureSecurityCenter.AutoProvisioningSetting.Name
+ description: Setting display name
+ type: string
+ - contextPath: AzureSecurityCenter.AutoProvisioningSetting.AutoProvision
+    description: The type of security agent provisioning action to take (On or
+      Off)
+ type: string
+ - contextPath: AzureSecurityCenter.AutoProvisioningSetting.ID
+ description: Setting resource ID
+ type: string
+ - arguments:
+ - default: false
+ description: The location where Azure Security Center stores the data of the
+ subscription. Run the 'azure-sc-list-location' command to get the asc_location.
+ isArray: false
+ name: asc_location
+ required: false
+ secret: false
+ - default: false
+ description: The name of the resource group within the user's subscription.
+ The name is case insensitive.
+ isArray: false
+ name: resource_group_name
+ required: false
+ secret: false
+ description: Lists all policies for protecting resources using Just-in-Time access
+ control.
+ execution: false
+ name: azure-sc-list-jit
+ outputs:
+ - contextPath: AzureSecurityCenter.JITPolicy.Name
+    description: Policy display name
+ type: string
+ - contextPath: AzureSecurityCenter.JITPolicy.Rules
+ description: 'CSV list of access rules for Microsoft.Compute/virtualMachines
+ resource, in the format (VMName: allowPort1,...)'
+ type: string
+ - contextPath: AzureSecurityCenter.JITPolicy.Location
+ description: Location where the resource is stored
+ type: string
+ - contextPath: AzureSecurityCenter.JITPolicy.Kind
+ description: Policy resource type
+ type: string
+ - description: Lists all the storage accounts available under the subscription.
+ execution: false
+ name: azure-sc-list-storage
+ outputs:
+ - contextPath: AzureSecurityCenter.Storage.Name
+ description: Name of the storage account
+ type: string
+ - contextPath: AzureSecurityCenter.Storage.ResourceGroupName
+    description: Name of the attached resource group
+ type: string
+ - contextPath: AzureSecurityCenter.Storage.Location
+ description: The geo-location where the resource resides
+ type: string
+ isfetch: false
+ runonce: false
+ script: ''
+ type: python
+tests:
+- No tests
diff --git a/Integrations/AzureSecurityCenter/AzureSecurityCenter_Image.png b/Integrations/AzureSecurityCenter/AzureSecurityCenter_Image.png
new file mode 100644
index 000000000000..9164d824b4b5
Binary files /dev/null and b/Integrations/AzureSecurityCenter/AzureSecurityCenter_Image.png differ
diff --git a/Integrations/AzureSecurityCenter/AzureSecurityCenter_description.md b/Integrations/AzureSecurityCenter/AzureSecurityCenter_description.md
new file mode 100644
index 000000000000..fabf7966d701
--- /dev/null
+++ b/Integrations/AzureSecurityCenter/AzureSecurityCenter_description.md
@@ -0,0 +1,2 @@
+To grant access to Microsoft Security Center, an admin needs to approve Demisto using an admin consent flow. To start the admin consent flow, click [here](https://demistobot.demisto.com/azuresc-sub).
+ After authorizing the Demisto app, you'll receive a tenant ID, which you should enter in the integration instance settings.
\ No newline at end of file
diff --git a/Integrations/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.py b/Integrations/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.py
new file mode 100644
index 000000000000..fdac0508edf0
--- /dev/null
+++ b/Integrations/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.py
@@ -0,0 +1,448 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import json
+import requests
+from typing import List, Dict
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+USERNAME = demisto.params().get('credentials', {}).get('identifier')
+PASSWORD = demisto.params().get('credentials', {}).get('password')
+API_KEY = demisto.params().get('key')
+SYSTEM_NAME = demisto.params().get('system_name')
+# Remove trailing slash to prevent wrong URL path to service
+SERVER = demisto.params()['url'][:-1] \
+ if (demisto.params()['url'] and demisto.params()['url'].endswith('/')) else demisto.params()['url']
+# Should we use SSL
+USE_SSL = not demisto.params().get('insecure', False)
+# Service base URL
+BASE_URL = SERVER + '/BeyondTrust/api/public/v3'
+# Headers to be sent in requests
+HEADERS = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json',
+}
+
+SESSION = requests.session()
+ERR_DICT = {
+ '4031': 'User does not have permission.',
+ '4034': 'Request is not yet approved.',
+    '4091': 'Conflicting request exists. This user or another user has already requested a password for the '
+            'specified account.'
+}
+
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method: str, suffix_url: str, data=None):
+ """
+ A wrapper for requests lib to send our requests and handle requests
+ and responses better
+
+ Parameters
+ ----------
+ method : str
+ HTTP method, e.g. 'GET', 'POST' ... etc.
+ suffix_url : str
+ API endpoint.
+ data: str
+ Data to be sent in a 'POST' request.
+
+ Returns
+ -------
+ Response from having made the request.
+ """
+ url = BASE_URL + suffix_url
+ try:
+ res = SESSION.request(
+ method,
+ url,
+ verify=USE_SSL,
+ data=data, # type: ignore
+ headers=HEADERS
+ )
+ except requests.exceptions.SSLError:
+ ssl_error = 'Could not connect to BeyondTrust: Could not verify certificate.'
+ return return_error(ssl_error)
+ except (requests.exceptions.ConnectionError, requests.exceptions.Timeout,
+ requests.exceptions.TooManyRedirects, requests.exceptions.RequestException) as e:
+ connection_error = f'Could not connect to BeyondTrust: {e}'
+ return return_error(connection_error)
+
+ # Handle error responses gracefully
+ if res.status_code not in {200, 201, 204}:
+ txt = res.text
+ if txt in ERR_DICT:
+ txt = ERR_DICT[txt]
+        elif str(res.status_code) in ERR_DICT:
+            # ERR_DICT is keyed by status-code strings
+            txt = ERR_DICT[str(res.status_code)]
+        elif res.status_code == 401:
+            txt = 'Wrong credentials.'
+        return_error(f'Error in API call to BeyondTrust Integration [{res.status_code}] - {txt}')
+ try:
+ return res.json()
+ except ValueError:
+ return None
+
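+# Illustrative usage: http_request('GET', '/managedaccounts') performs an
+# authenticated GET against BASE_URL + '/managedaccounts' over the shared
+# SESSION, whose auth header is set once by signin().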
+
+def signin():
+ """
+ Starts a session in BeyondTrust
+ """
+ suffix_url = '/Auth/SignAppin'
+ header = {'Authorization': f'PS-Auth key={API_KEY}; runas={USERNAME}; pwd=[{PASSWORD}];'}
+ SESSION.headers.update(header)
+ http_request('POST', suffix_url)
+
+
+def signout():
+ """
+ Ends a session
+ """
+
+ suffix_url = '/auth/signout'
+ http_request('POST', suffix_url)
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def get_managed_accounts_request():
+ """
+ Request for all managed accounts
+ """
+ suffix_url = '/managedaccounts'
+ response = http_request('GET', suffix_url)
+
+ return response
+
+
+def get_managed_accounts():
+ """
+ Returns a list of Managed Accounts that can be requested by the current user.
+ """
+ data = []
+ headers = ['AccountName', 'AccountID', 'AssetName', 'AssetID', 'DomainName', 'LastChangeDate', 'NextChangeDate']
+ managed_accounts = get_managed_accounts_request()
+ for account in managed_accounts:
+ data.append({
+ 'LastChangeDate': account.get('LastChangeDate'),
+ 'NextChangeDate': account.get('NextChangeDate'),
+ 'AssetID': account.get('SystemId'),
+ 'AssetName': account.get('SystemName'),
+ 'DomainName': account.get('DomainName'),
+ 'AccountID': account.get('AccountId'),
+ 'AccountName': account.get('AccountName')
+
+ })
+
+ entry_context = {'BeyondTrust.Account(val.AccountID === obj.AccountID)': managed_accounts}
+
+ return_outputs(tableToMarkdown('BeyondTrust Managed Accounts', data, headers, removeNull=True), entry_context,
+ managed_accounts)
+
+
+def get_managed_systems_request() -> List[Dict]:
+ """
+ Request for all managed systems
+ """
+ suffix_url = '/managedsystems'
+ response = http_request('GET', suffix_url)
+
+ return response
+
+
+def get_managed_systems():
+ """
+ Returns a list of Managed Systems.
+ """
+ data = []
+ managed_systems = get_managed_systems_request()
+ for managed_system in managed_systems:
+ data.append({
+ 'ManagedAssetID': managed_system.get('ManagedSystemID'),
+ 'ChangeFrequencyDays': managed_system.get('ChangeFrequencyDays'),
+ 'AssetID': managed_system.get('AssetID'),
+ 'DatabaseID': managed_system.get('DatabaseID'),
+ 'DirectoryID': managed_system.get('DirectoryID'),
+ 'AssetName': managed_system.get('SystemName'),
+ 'PlatformID': managed_system.get('PlatformID'),
+ 'Port': managed_system.get('Port')
+ })
+
+ entry_context = {'BeyondTrust.System(val.ManagedAssetID === obj.ManagedAssetID)': managed_systems}
+
+ return_outputs(tableToMarkdown('BeyondTrust Managed Systems', data, removeNull=True), entry_context,
+ managed_systems)
+
+
+def create_release_request(data: str):
+ """
+ Request for credentials release
+ """
+ suffix_url = '/requests'
+ response = http_request('POST', suffix_url, data=data)
+
+ return response
+
+
+def create_release():
+ """
+ Creates a new release request.
+ Retrieves the credentials for an approved and active (not expired) credentials release request.
+
+ demisto parameter: (string) access_type
+    The type of access requested (View, RDP, SSH). Default is "View".
+
+ demisto parameter: (int) system_id
+ ID of the Managed System to request.
+
+ demisto parameter: (int) account_id
+ ID of the Managed Account to request.
+
+ demisto parameter: (int) duration_minutes
+ The request duration (in minutes).
+
+ demisto parameter: (string) reason
+ The reason for the request.
+
+ demisto parameter: (int) access_policy_schedule_id
+ The Schedule ID of an Access Policy to use for the request. If omitted, automatically selects the best schedule.
+
+    demisto parameter: (string) conflict_option
+ The conflict resolution option to use if an existing request is found for the same user,
+ system, and account ("reuse" or "renew").
+ """
+ access_type = demisto.args().get('access_type')
+ system_id = demisto.args().get('system_id')
+ account_id = demisto.args().get('account_id')
+ duration_minutes = demisto.args().get('duration_minutes')
+ reason = demisto.args().get('reason')
+ conflict_option = demisto.args().get('conflict_option')
+
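+    # SystemId, AccountId and DurationMinutes are the required fields of a release request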
+ data = {
+ 'SystemId': system_id,
+ 'AccountId': account_id,
+ 'DurationMinutes': duration_minutes
+ }
+
+ if access_type:
+ data['AccessType'] = access_type
+
+ if reason:
+ data['Reason'] = reason
+
+ if conflict_option:
+ data['ConflictOption'] = conflict_option
+
+    # the API expects a JSON body, so serialize the dict with json.dumps
+    request = create_release_request(json.dumps(data))
+ request_id = str(request)
+
+ credentials = get_credentials_request(request_id)
+
+ response = {
+ 'RequestID': request_id,
+ 'Password': credentials
+ }
+
+ entry_context = {'BeyondTrust.Request(val.AccountID === obj.AccountID)': createContext(response)}
+ return_outputs(tableToMarkdown('The new release was created successfully.', response), entry_context, response)
+
+
+def get_credentials_request(request_id: str):
+ """
+ Request for specific credentials
+ """
+
+ suffix_url = '/credentials/' + request_id
+ response = http_request('GET', suffix_url)
+
+ return response
+
+
+def get_credentials():
+ """
+ Retrieves the credentials for an approved and active (not expired) credentials release request.
+
+ demisto parameter: (int) request_id
+ ID of the Request for which to retrieve the credentials
+ """
+
+ request_id = demisto.args().get('request_id')
+ request = str(request_id)
+ response = get_credentials_request(request)
+
+    demisto.results('The credentials for the BeyondTrust request: ' + str(response))
+
+
+def check_in_credentials_request(request_id: str, data: dict):
+ """
+ Request for check-in credentials
+
+ """
+ suffix_url = f'/Requests/{request_id}/Checkin'
+ response = http_request('PUT', suffix_url, data=json.dumps(data))
+
+ return response
+
+
+def check_in_credentials():
+ """
+ Checks-in/releases a request before it has expired.
+
+ demisto parameter: (int) request_id
+ ID of the request to release.
+
+ demisto parameter: (string) reason
+ A reason or comment why the request is being released.
+
+ """
+ request_id = demisto.args().get('request_id')
+    reason = demisto.args().get('reason')
+
+ data = {'Reason': reason if reason else ''}
+
+ check_in_credentials_request(request_id, data)
+
+ demisto.results('The release was successfully checked-in/released')
+
+
+def change_credentials_request(account_id: str, data: dict):
+ """
+ Request to change credentials
+ """
+ suffix_url = f'/ManagedAccounts/{account_id}/Credentials'
+ response = http_request('PUT', suffix_url, data=json.dumps(data))
+
+ return response
+
+
+def change_credentials():
+ """
+ Updates the credentials for a Managed Account, optionally applying the change to the Managed System.
+
+ demisto parameter: (int) account_id
+ ID of the account for which to set the credentials.
+
+ demisto parameter: (string) password
+ The new password to set. If not given, generates a new, random password.
+
+ demisto parameter: (string) public_key
+ The new public key to set on the host. This is required if PrivateKey is given and updateSystem=true.
+
+ demisto parameter: (string) private_key
+ The private key to set (provide Passphrase if encrypted).
+
+ demisto parameter: (string) pass_phrase
+ The passphrase to use for an encrypted private key.
+
+ demisto parameter: (bool) update_system
+ Whether to update the credentials on the referenced system.
+
+ """
+ account_id = demisto.args().get('account_id')
+ password = demisto.args().get('password')
+ public_key = demisto.args().get('public_key')
+ private_key = demisto.args().get('private_key')
+ pass_phrase = demisto.args().get('pass_phrase')
+    # demisto.args() values are strings, so normalize the boolean-like argument
+    update_system = demisto.args().get('update_system') == 'true'
+
+ data = {
+ 'AccountId': account_id
+ }
+
+ if password:
+ data['Password'] = password
+
+    if private_key:
+        if update_system and not public_key:
+            return_error('Missing public key: public_key is required when private_key is given and update_system is true.')
+        data['PrivateKey'] = private_key
+        if public_key:
+            data['PublicKey'] = public_key
+
+ if pass_phrase:
+ data['Passphrase'] = pass_phrase
+
+ change_credentials_request(account_id, data)
+
+ demisto.results('The password has been changed')
+
+
+def fetch_credentials():
+ """
+ Returns: Account's credentials
+ """
+ credentials = []
+ identifier = demisto.args().get('identifier')
+ duration_minutes = 1
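+    # request the shortest release window; the credential is retrieved immediately below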
+ account_info = get_managed_accounts_request()
+
+ for account in account_info:
+ account_name = account.get('AccountName')
+ system_name = account.get('SystemName')
+ if SYSTEM_NAME and system_name != SYSTEM_NAME:
+ continue
+ item = {
+ 'SystemId': account.get('SystemId'),
+ 'AccountId': account.get('AccountId'),
+ 'DurationMinutes': duration_minutes
+ }
+
+        release_id = create_release_request(json.dumps(item))
+
+ password = get_credentials_request(str(release_id))
+
+ credentials.append({
+ 'user': account_name,
+ 'password': password,
+ 'name': system_name
+ })
+
+ if identifier:
+ credentials = list(filter(lambda c: c.get('name', '') == identifier, credentials))
+
+ demisto.credentials(credentials)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('Command being called is %s' % (demisto.command()))
+
+try:
+ handle_proxy()
+ signin()
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ get_managed_accounts_request()
+ demisto.results('ok')
+ elif demisto.command() == 'beyondtrust-get-managed-accounts':
+ get_managed_accounts()
+ elif demisto.command() == 'beyondtrust-get-managed-systems':
+ get_managed_systems()
+ elif demisto.command() == 'beyondtrust-create-release-request':
+ create_release()
+ elif demisto.command() == 'beyondtrust-get-credentials':
+ get_credentials()
+ elif demisto.command() == 'beyondtrust-check-in-credentials':
+ check_in_credentials()
+ elif demisto.command() == 'beyondtrust-change-credentials':
+ change_credentials()
+ elif demisto.command() == 'fetch-credentials':
+ fetch_credentials()
+
+# Log exceptions
+except Exception as e:
+ LOG(str(e))
+ LOG.print_log()
+ raise
+finally:
+ signout()
diff --git a/Integrations/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.yml b/Integrations/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.yml
new file mode 100644
index 000000000000..79b87b14f193
--- /dev/null
+++ b/Integrations/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe.yml
@@ -0,0 +1,330 @@
+category: Authentication
+commonfields:
+ id: BeyondTrust Password Safe
+ version: -1
+configuration:
+- display: Server URL (e.g., https://192.168.0.1)
+ name: url
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- display: API Key
+ name: key
+ required: true
+ type: 4
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch credentials
+ name: isFetchCredentials
+ required: false
+ type: 8
+- display: System Name (optional for fetch credentials)
+ name: system_name
+ required: false
+ type: 0
+description: Unified password and session management for seamless accountability and
+ control over privileged accounts.
+display: BeyondTrust Password Safe
+name: BeyondTrust Password Safe
+script:
+ commands:
+ - deprecated: false
+ description: Returns a list of managed accounts that the current user has permissions
+ to request.
+ execution: false
+ name: beyondtrust-get-managed-accounts
+ outputs:
+ - contextPath: BeyondTrust.Account.PlatformID
+ description: ID of the managed system platform.
+ type: Number
+ - contextPath: BeyondTrust.Account.SystemID
+ description: ID of the managed system.
+ type: Number
+ - contextPath: BeyondTrust.Account.SystemName
+ description: Name of the managed system.
+ type: String
+ - contextPath: BeyondTrust.Account.DomainName
+    description: Domain name of the managed account.
+    type: String
+ - contextPath: BeyondTrust.Account.AccountName
+ description: Name of the managed account.
+ type: String
+ - contextPath: BeyondTrust.Account.InstanceName
+ description: Database instance name of a database-type managed system.
+ type: String
+  - contextPath: BeyondTrust.Account.DefaultReleaseDuration
+ description: Default release duration.
+ type: Number
+ - contextPath: BeyondTrust.Account.MaximumReleaseDuration
+ description: Maximum release duration.
+ type: Number
+ - contextPath: BeyondTrust.Account.LastChangeDate
+ description: The date and time of the last password change.
+ type: Date
+  - contextPath: BeyondTrust.Account.NextChangeDate
+ description: The date and time of the next scheduled password change.
+ type: Date
+ - contextPath: BeyondTrust.Account.IsChanging
+ description: True if the account credentials are in the process of changing,
+ otherwise false.
+ type: Boolean
+ - contextPath: BeyondTrust.Account.IsISAAccess
+ description: True if the account is for Information Systems Administrator (ISA)
+ access, otherwise false.
+ type: Boolean
+ - contextPath: BeyondTrust.Account.AccountID
+ description: ID of the managed account.
+ type: Number
+ - deprecated: false
+ description: Returns a list of managed systems.
+ execution: false
+ name: beyondtrust-get-managed-systems
+ outputs:
+ - contextPath: BeyondTrust.System.Port
+ description: The port used to connect to the host. If null and the related Platform.PortFlag
+ is true, Password Safe uses Platform.DefaultPort for communication.
+ type: Number
+ - contextPath: BeyondTrust.System.Timeout
+ description: Connection timeout – Length of time in seconds before a slow or
+ unresponsive connection to the system fails.
+ type: String
+ - contextPath: BeyondTrust.System.ResetPasswordOnMismatchFlag
+ description: True to queue a password change when scheduled password test fails,
+ otherwise false.
+ type: Boolean
+ - contextPath: BeyondTrust.System.ChangeFrequencyDays
+    description: When ChangeFrequencyType is "xdays", the frequency with which the
+ password changes (between 1-90 days).
+ type: Number
+ - contextPath: BeyondTrust.System.ISAReleaseDuration
+ description: Default Information Systems Administrator (ISA) release duration.
+ type: Number
+ - contextPath: BeyondTrust.System.FunctionalAccountID
+ description: ID of the functional account used for local Managed Account password
+ changes.
+ type: Number
+ - contextPath: BeyondTrust.System.ChangeFrequencyType
+ description: 'The change frequency for scheduled password changes: "first"–
+ Changes are scheduled for the first day of the month; "last"– Changes are
+ scheduled for the last day of the month; "xdays"– Changes are scheduled every
+ "x" days (see ChangeFrequencyDays)'
+ type: String
+ - contextPath: BeyondTrust.System.DirectoryID
+ description: ID of the directory. Is set if the Managed System is a Directory.
+ type: Number
+ - contextPath: BeyondTrust.System.ManagedAssetID
+ description: ID of the Managed System.
+ type: Number
+ - contextPath: BeyondTrust.System.AssetID
+ description: ID of the asset. Is set if the Managed System is an Asset or a
+ Database.
+ type: Number
+ - contextPath: BeyondTrust.System.PlatformID
+ description: ID of the Managed System Platform.
+ type: Number
+ - contextPath: BeyondTrust.System.ElevationCommand
+ description: Elevation command to use (sudo, pbrun, or pmrun).
+ type: String
+ - contextPath: BeyondTrust.System.CheckPasswordFlag
+ description: True to enable password testing, otherwise false.
+ type: Boolean
+ - contextPath: BeyondTrust.System.CloudID
+ description: ID of the Cloud System. Is set if the Managed System is a Cloud
+ System.
+ type: Number
+ - contextPath: BeyondTrust.System.DSSKeyRuleID
+ description: ID of the default DSS Key Rule assigned to Managed Accounts that
+ were created under this Managed System.
+ type: Number
+ - contextPath: BeyondTrust.System.PasswordRuleID
+ description: ID of the default Password Rule assigned to Managed Accounts that
+ were created under this Managed System.
+ type: Number
+ - contextPath: BeyondTrust.System.NetBiosName
+ description: Domain NetBIOS name. Setting this value will allow Password Safe
+ to fall back to the NetBIOS name, if needed.
+ type: String
+ - contextPath: BeyondTrust.System.DatabaseID
+ description: ID of the database. Is set if the Managed System is a Database.
+ type: Number
+ - contextPath: BeyondTrust.System.MaxReleaseDuration
+ description: Default maximum release duration.
+ type: Number
+ - contextPath: BeyondTrust.System.ChangePasswordAfterAnyReleaseFlag
+ description: True to change passwords on release of a request, otherwise false.
+ type: Boolean
+ - contextPath: BeyondTrust.System.SystemName
+ description: Name of the related entity (Asset, Directory, Database, or Cloud).
+ type: String
+ - contextPath: BeyondTrust.System.ReleaseDuration
+ description: Default release duration.
+ type: Number
+ - contextPath: BeyondTrust.System.ContactEmail
+ description: Email address of the user that manages the system.
+ type: String
+ - contextPath: BeyondTrust.System.Description
+ description: The description of the system.
+ type: String
+ - contextPath: BeyondTrust.System.ChangeTime
+ description: Time (UTC) that password changes are scheduled to occur.
+ type: String
+ - contextPath: BeyondTrust.System.AutoManagementFlag
+ description: True if password auto-management is enabled, otherwise false.
+ type: Boolean
+ - contextPath: BeyondTrust.System.LoginAccountID
+ description: ID of the Functional Account used for SSH session logins.
+ type: Number
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+    description: The type of access requested (View, RDP, SSH). Default is "View".
+ isArray: false
+ name: access_type
+ predefined:
+ - View
+ - RDP
+ - SSH
+ required: false
+ secret: false
+ - default: false
+    description: ID of the Managed System to request. Get the ID by running the
+      beyondtrust-get-managed-accounts command.
+ isArray: false
+ name: system_id
+ required: true
+ secret: false
+ - default: false
+    description: ID of the Managed Account to request. Get the ID by running the
+      beyondtrust-get-managed-accounts command.
+ isArray: false
+ name: account_id
+ required: true
+ secret: false
+ - default: false
+ description: The request duration (in minutes).
+ isArray: false
+ name: duration_minutes
+ required: true
+ secret: false
+ - default: false
+ description: The reason for the request.
+ isArray: false
+ name: reason
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The conflict resolution option to use if an existing request is
+      found for the same user, system, and account ("reuse" or "renew").
+ isArray: false
+ name: conflict_option
+ predefined:
+ - reuse
+ - renew
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new credentials release request.
+ execution: false
+ name: beyondtrust-create-release-request
+ outputs:
+ - contextPath: BeyondTrust.Request.Credentials
+ description: The credentials for the requested ID.
+ type: String
+ - contextPath: BeyondTrust.Request.RequestID
+ description: The request ID.
+ type: Number
+ - arguments:
+ - default: false
+ description: ID of the request to release.
+ isArray: false
+ name: request_id
+ required: true
+ secret: false
+ - default: false
+ description: A reason or comment why the request is being released.
+ isArray: false
+ name: reason
+ required: false
+ secret: false
+ deprecated: false
+ description: Checks-in/releases a request before it expires.
+ execution: false
+ name: beyondtrust-check-in-credentials
+ - arguments:
+ - default: false
+    description: ID of the Request for which to retrieve the credentials.
+ isArray: false
+ name: request_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the credentials for an approved and active (not expired)
+ credentials release request.
+ execution: false
+ name: beyondtrust-get-credentials
+ - arguments:
+ - default: false
+ description: ID of the account for which to set the credentials.
+ isArray: false
+ name: account_id
+ required: true
+ secret: false
+ - default: false
+ description: The new password to set. If not given, generates a new, random
+ password.
+ isArray: false
+ name: password
+ required: false
+ secret: false
+ - default: false
+ description: The new public key to set on the host. This is required if PrivateKey
+ is given and updateSystem=true.
+ isArray: false
+ name: public_key
+ required: false
+ secret: false
+ - default: false
+ description: The private key to set (provide Passphrase if encrypted).
+ isArray: false
+ name: private_key
+ required: false
+ secret: false
+ - default: false
+ description: The passphrase to use for an encrypted private key.
+ isArray: false
+ name: pass_phrase
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: Whether to update the credentials on the referenced system.
+ isArray: false
+ name: update_system
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Updates the credentials for a Managed Account, optionally applying
+ the change to the Managed System.
+ execution: false
+ name: beyondtrust-change-credentials
+ dockerimage: demisto/python3:3.7.3.286
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- BeyondTrust-Test
diff --git a/Integrations/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe_description.md b/Integrations/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe_description.md
new file mode 100644
index 000000000000..29cf7068fee4
--- /dev/null
+++ b/Integrations/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe_description.md
@@ -0,0 +1 @@
+To configure an integration instance, you need your BeyondTrust API key. The API key is generated after you configure an API Registration. For detailed instructions, see the [BeyondTrust Password Safe Admin Guide](https://www.beyondtrust.com/docs/password-safe/documents/6-9/ps-admin-6-9-0.pdf).
diff --git a/Integrations/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe_image.png b/Integrations/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe_image.png
new file mode 100644
index 000000000000..663c0e4ac239
Binary files /dev/null and b/Integrations/BeyondTrust_Password_Safe/BeyondTrust_Password_Safe_image.png differ
diff --git a/Integrations/BitDam/BidDam_description.md b/Integrations/BitDam/BidDam_description.md
new file mode 100644
index 000000000000..b3da3f5315e2
--- /dev/null
+++ b/Integrations/BitDam/BidDam_description.md
@@ -0,0 +1,6 @@
+## Generate an API Token
+This integration requires an API token.
+
+1. Log in to your BitDam Dashboard.
+2. Navigate to your profile and click the **API token** tab.
+3. Click **Generate API Token**.
diff --git a/Integrations/BitDam/BitDam.py b/Integrations/BitDam/BitDam.py
new file mode 100644
index 000000000000..4afe315c7c91
--- /dev/null
+++ b/Integrations/BitDam/BitDam.py
@@ -0,0 +1,229 @@
+import demistomock as demisto
+from CommonServerPython import *
+'''IMPORTS'''
+import json
+import requests
+import base64
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+'''INTEGRATION PARAMS'''
+API_TOKEN = demisto.params().get('apitoken')
+URL_BASE = demisto.params().get('url')
+USE_PROXY = demisto.params().get('proxy', False)
+VERIFY_CERT = not demisto.params().get('insecure', False)
+
+'''CONSTANTS'''
+READ_BINARY_MODE = 'rb'
+SLASH = '/'
+SCAN_FILE_URL = 'direct/scan/file/'
+GET_FILE_VERDICT_URL = 'direct/verdict/?hash={}'
+TOKEN_PREFIX = 'Bearer' # guardrails-disable-line
+RESPONSE_CODE_OK = 200
+STATUS_IN_PROGRESS = 'IN_PROGRESS'
+STATUS_DONE = 'DONE'
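+# Static bearer-token header sent with every API call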
+AUTH_HEADERS = {
+ 'Authorization': "{} {}".format(TOKEN_PREFIX, API_TOKEN)
+}
+
+VERDICT_SCANNING = 'Scanning'
+VERDICT_MALICIOUS = 'Malicious'
+VERDICT_APPROVED = 'Approved'
+VERDICT_ERROR = 'Error'
+VERDICT_BENIGN = 'Benign'
+VERDICT_TIMEOUT = 'Timeout'
+SCAN_ONGOING = 'Still scanning...'
+
+BITDAM_COMMAND_PREFIX = 'bitdam'
+DBOTSCORE_UNKNOWN = 0
+DBOTSCORE_CLEAN = 1
+DBOTSCORE_MALICIOUS = 3
+
+'''HANDLE PROXY'''
+handle_proxy()
+
+
+'''HELPER FUNCTIONS'''
+
+
+def get_file_bytes(entry_id):
+    get_file_path_res = demisto.getFilePath(entry_id)
+    file_path = get_file_path_res["path"]
+    with open(file_path, READ_BINARY_MODE) as fopen:
+        file_bytes = fopen.read()
+    return file_bytes
+
+
+def get_url_base_with_trailing_slash():
+ '''
+    Returns the integration's base URL parameter, making sure it ends with a trailing slash
+ '''
+ url_base = URL_BASE
+ return url_base if url_base.endswith(SLASH) else url_base + SLASH
+
+
+def build_json_response(content, context, human_readable):
+ return {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': content,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(human_readable, content),
+ 'EntryContext': context
+ }
+
+
+def get_file_name(entry_id):
+ get_file_path_res = demisto.getFilePath(entry_id)
+ return get_file_path_res["name"]
+
+
+def verdict_to_dbotscore(verdict):
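+    # Map a BitDam verdict to a DBot score: Approved -> 1 (good), Malicious -> 3 (bad), anything else -> 0 (unknown)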
+ if VERDICT_APPROVED == verdict:
+ return DBOTSCORE_CLEAN
+ elif VERDICT_MALICIOUS == verdict:
+ return DBOTSCORE_MALICIOUS
+ elif VERDICT_SCANNING == verdict:
+ return DBOTSCORE_UNKNOWN
+ else:
+ return DBOTSCORE_UNKNOWN
+
+
+'''API_IMPL'''
+
+
+def scan_file():
+ response = scan_file_command()
+ returned_sha1 = parse_scan_file_response(response)
+    # Build the Demisto response
+ response_content = {'SHA1': returned_sha1}
+ response_context = {'BitDam': {'FileScan': {'SHA1': returned_sha1}}}
+ return build_json_response(response_content, response_context, "File was submitted successfully")
+
+
+def scan_file_command():
+ # Get data to build the request
+ entry_id = demisto.args().get('entryId')
+ file_name = get_file_name(entry_id)
+ file_bytes = get_file_bytes(entry_id)
+    # base64-encode the raw file bytes exactly once for the JSON payload
+    json_data = {'file_name': file_name,
+                 'file_data_base64': base64.b64encode(file_bytes).decode('utf-8')}
+ raw_json = json.dumps(json_data, ensure_ascii=False)
+ url = "{}{}".format(get_url_base_with_trailing_slash(), SCAN_FILE_URL)
+
+ # Send the HTTP request
+    response = requests.post(url, data=raw_json, headers=AUTH_HEADERS, verify=VERIFY_CERT)
+ return response
+
+
+def parse_scan_file_response(response):
+ # Parse response
+ if RESPONSE_CODE_OK != response.status_code:
+ raise Exception("Scan file failed. Response code -{}, Data- '{}'".format(str(response.status_code), response.content))
+ response_json = json.loads(response.content)
+ if 'sha1' not in response_json:
+ raise Exception(
+ "Scan file failed. Bad response json - {}".format(response.content))
+ returned_sha1 = response_json['sha1']
+ return returned_sha1
+
+
+def get_file_verdict():
+ identifier_value = demisto.args().get('idValue')
+ response = get_file_verdict_command(identifier_value)
+ verdict, status = parse_get_file_verdict_response(response)
+ response_content = {'STATUS': status,
+ 'VERDICT': verdict,
+ 'ID': identifier_value}
+ context = {}
+ context['BitDam.Analysis(val.ID && val.ID == obj.ID)'] = {
+ 'Status': status,
+ 'Verdict': verdict,
+ 'ID': identifier_value
+ }
+
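+    # for a malicious verdict, also populate the standard File and DBotScore context paths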
+ if VERDICT_MALICIOUS == verdict:
+ context[outputPaths['file']] = {'SHA1': identifier_value}
+ context[outputPaths['file']]['Malicious'] = {
+ 'Vendor': 'BitDam',
+ 'Description': 'Process whitelist inconsistency by bitdam-get-file-verdict',
+ 'Name': identifier_value
+ }
+
+ dbotscore = verdict_to_dbotscore(verdict)
+ if dbotscore:
+ context[outputPaths['dbotscore']] = {
+ 'Indicator': identifier_value,
+ 'Type': 'File',
+ 'Vendor': 'BitDam',
+ 'Score': dbotscore
+ }
+ response_context = context
+ return build_json_response(response_content, response_context,
+ "Get file verdict was performed successfully")
+
+
+def parse_get_file_verdict_response(response):
+ # Parse results
+ if RESPONSE_CODE_OK != response.status_code:
+ raise Exception("Get file verdict failed. Response code -{}, Data- '{}'".format(str(response.status_code),
+ response.content))
+ response_json = json.loads(response.content)
+ if 'scan_data' not in response_json or 'verdict' not in response_json['scan_data']:
+ raise Exception("Get file verdict failed. Unknown response schema. Data- '{}'".format(response.content))
+
+ verdict = response_json['scan_data']['verdict']
+ if verdict == SCAN_ONGOING or verdict == VERDICT_SCANNING:
+ # Still in progress
+ verdict = VERDICT_SCANNING
+ status = STATUS_IN_PROGRESS
+ else:
+ status = STATUS_DONE
+
+ return verdict, status
+
+
+def get_file_verdict_command(identifier_value):
+ # Get data to build the request
+ scan_file_relative_url_formatted = GET_FILE_VERDICT_URL.format(identifier_value)
+
+ url = "{}{}".format(get_url_base_with_trailing_slash(), scan_file_relative_url_formatted)
+ # Send the request
+    response = requests.get(url, headers=AUTH_HEADERS, verify=VERIFY_CERT)
+ return response
+
+
+def upload_test_file_to_scan():
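+    # upload a tiny benign text file ('ZGVtaXN0bw==' is base64 for 'demisto') to verify the URL and token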
+ d = {
+ "file_name": "demisto.txt",
+ "file_data_base64": 'ZGVtaXN0bw=='
+ }
+ url = "{}{}".format(get_url_base_with_trailing_slash(), SCAN_FILE_URL)
+    response = requests.post(url, headers=AUTH_HEADERS, json=d, verify=VERIFY_CERT)
+ return response
+
+
+def test_module():
+ response = upload_test_file_to_scan()
+ if RESPONSE_CODE_OK == response.status_code:
+ return True
+ raise Exception("Status code - {}, Error- '{}'".format(str(response.status_code),
+ response.content))
+
+
+'''COMMAND_CLASSIFIER'''
+try:
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ if test_module():
+ demisto.results('ok')
+ sys.exit(0)
+ elif demisto.command() == 'bitdam-upload-file':
+ demisto.results(scan_file())
+ elif demisto.command() == 'bitdam-get-verdict':
+ demisto.results(get_file_verdict())
+except Exception as e:
+ LOG(e)
+ return_error("Error: {}".format(str(e)))
diff --git a/Integrations/BitDam/BitDam.yml b/Integrations/BitDam/BitDam.yml
new file mode 100644
index 000000000000..fa91d1d034df
--- /dev/null
+++ b/Integrations/BitDam/BitDam.yml
@@ -0,0 +1,87 @@
+commonfields:
+ id: BitDam
+ version: -1
+name: BitDam
+display: BitDam
+category: Email Gateway
+description: BitDam secure email gateway protects against advanced content-borne threats
+  with the most accurate prevention of known and unknown threats, at their source.
+configuration:
+- display: BitDam API URL
+ name: url
+ defaultvalue: https://app.bitdam.com/api/v1.0
+ type: 0
+ required: true
+- display: API Token
+ name: apitoken
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ subtype: python3
+ commands:
+ - name: bitdam-upload-file
+ arguments:
+ - name: entryId
+ required: true
+    description: The file's entry ID from the War Room. Supported file types are doc, dot, docx,
+ docm, dotx, dotm, pdf, rtf, xls, xlt, xlsx, xlsm, xltx, xltm, xlsb, xlam,
+ csv, ppt, pptx, pptm, potx, potm, ppam, ppsx, ppsm and pps.
+ outputs:
+ - contextPath: BitDam.FileScan.SHA1
+ description: The SHA1 hash of the file.
+ type: string
+ description: Uploads a file sample to the BitDam service.
+ - name: bitdam-get-verdict
+ arguments:
+ - name: idValue
+ required: true
+    description: The value of the file's unique identifier (for example, the file's
+      SHA1).
+ outputs:
+ - contextPath: BitDam.Analysis.Status
+ description: The status of the analysis. Can be "DONE" or "IN_PROGRESS".
+ type: string
+ - contextPath: BitDam.Analysis.Verdict
+ description: The final verdict of the analysis.
+ type: string
+ - contextPath: BitDam.Analysis.ID
+ description: The unique identifier of the analysis.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The DBot score.
+ type: number
+ - contextPath: DBotScore.Type
+      description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: The DBot score vendor.
+ type: string
+ - contextPath: File.Malicious.Name
+ description: The name of the file.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the reason that the vendor made the decision.
+ type: string
+ description: Returns the verdict for a scanned file.
+ runonce: false
+tests:
+- Detonate File - BitDam Test
diff --git a/Integrations/BitDam/BitDam_image.png b/Integrations/BitDam/BitDam_image.png
new file mode 100644
index 000000000000..9e71fb585f39
Binary files /dev/null and b/Integrations/BitDam/BitDam_image.png differ
diff --git a/Integrations/BitDam/CHANGELOG.md b/Integrations/BitDam/CHANGELOG.md
new file mode 100644
index 000000000000..e69132731304
--- /dev/null
+++ b/Integrations/BitDam/CHANGELOG.md
@@ -0,0 +1,8 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+- Verdicts were changed and new verdicts were added (external PR)
+- Changed the verdict handling logic in the get_file_verdict command (external PR)
+- Fixed the test module (external PR)
+- Modified test playbook according to new verdicts
diff --git a/Integrations/BitDam/Pipfile b/Integrations/BitDam/Pipfile
new file mode 100644
index 000000000000..9ed479d31633
--- /dev/null
+++ b/Integrations/BitDam/Pipfile
@@ -0,0 +1,23 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+flake8 = "*"
+
+[packages]
+certifi = "==2017.11.5"
+chardet = "==3.0.4"
+idna = "==2.6"
+olefile = "==0.44"
+requests = "==2.18.4"
+urllib3 = "==1.22"
+PyYAML = "==3.12"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/BitDam/Pipfile.lock b/Integrations/BitDam/Pipfile.lock
new file mode 100644
index 000000000000..5ce943c6690e
--- /dev/null
+++ b/Integrations/BitDam/Pipfile.lock
@@ -0,0 +1,422 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "6e86eca8a4a0771ea3f6302e96f7afb9a7fe1f342e02d00c2443c36b86f1af28"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694",
+ "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
+ ],
+ "index": "pypi",
+ "version": "==2017.11.5"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "olefile": {
+ "hashes": [
+ "sha256:61f2ca0cd0aa77279eb943c07f607438edf374096b66332fae1ee64a6f0f73ad"
+ ],
+ "index": "pypi",
+ "version": "==0.44"
+ },
+ "pyyaml": {
+ "hashes": [
+ "sha256:16b20e970597e051997d90dc2cddc713a2876c47e3d92d59ee198700c5427736",
+ "sha256:3262c96a1ca437e7e4763e2843746588a965426550f3797a79fca9c6199c431f",
+ "sha256:592766c6303207a20efc445587778322d7f73b161bd994f227adaa341ba212ab",
+ "sha256:5ac82e411044fb129bae5cfbeb3ba626acb2af31a8d17d175004b70862a741a7",
+ "sha256:827dc04b8fa7d07c44de11fabbc888e627fa8293b695e0f99cb544fdfa1bf0d1",
+ "sha256:bc6bced57f826ca7cb5125a10b23fd0f2fff3b7c4701d64c439a300ce665fff8",
+ "sha256:c01b880ec30b5a6e6aa67b09a2fe3fb30473008c85cd6a67359a1b15ed6d83a4",
+ "sha256:e863072cdf4c72eebf179342c94e6989c67185842d9997960b3e69290b2fa269"
+ ],
+ "index": "pypi",
+ "version": "==3.12"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694",
+ "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
+ ],
+ "index": "pypi",
+ "version": "==2017.11.5"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:296a9c0d0607f689f2a262d4ca3fa2b22146ac0acb07fd281125c86dee3bcf50",
+ "sha256:5e4c0d9de6ffc76f625eda1f5e28cec0700aed7cdacfe5070964df002ac02fec"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==4.0.1"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548",
+ "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696"
+ ],
+ "index": "pypi",
+ "version": "==3.7.8"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "functools32": {
+ "hashes": [
+ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0",
+ "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.3.post2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16",
+ "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.3.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:0c505102757e7fa28b9f0958d8bc81301159dea16e2649858c92edc158b78a83",
+ "sha256:9a9f75ce32e78170905888acbf2376a81d3f21ecb3bb4867050413411d3ca7a9"
+ ],
+ "markers": "python_version < '3.8'",
+ "version": "==0.21"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:02b260c8deb80db09325b99edf62ae344ce9bc64d68b7a634410b8e9a568edbf",
+ "sha256:18f9c401083a4ba6e162355873f906315332ea7035803d0fd8166051e3d402e3",
+ "sha256:1f2c6209a8917c525c1e2b55a716135ca4658a3042b5122d4e3413a4030c26ce",
+ "sha256:2f06d97f0ca0f414f6b707c974aaf8829c2292c1c497642f63824119d770226f",
+ "sha256:616c94f8176808f4018b39f9638080ed86f96b55370b5a9463b2ee5c926f6c5f",
+ "sha256:63b91e30ef47ef68a30f0c3c278fbfe9822319c15f34b7538a829515b84ca2a0",
+ "sha256:77b454f03860b844f758c5d5c6e5f18d27de899a3db367f4af06bec2e6013a8e",
+ "sha256:83fe27ba321e4cfac466178606147d3c0aa18e8087507caec78ed5a966a64905",
+ "sha256:84742532d39f72df959d237912344d8a1764c2d03fe58beba96a87bfa11a76d8",
+ "sha256:874ebf3caaf55a020aeb08acead813baf5a305927a71ce88c9377970fe7ad3c2",
+ "sha256:9f5caf2c7436d44f3cec97c2fa7791f8a675170badbfa86e1992ca1b84c37009",
+ "sha256:a0c8758d01fcdfe7ae8e4b4017b13552efa7f1197dd7358dc9da0576f9d0328a",
+ "sha256:a4def978d9d28cda2d960c279318d46b327632686d82b4917516c36d4c274512",
+ "sha256:ad4f4be843dace866af5fc142509e9b9817ca0c59342fdb176ab6ad552c927f5",
+ "sha256:ae33dd198f772f714420c5ab698ff05ff900150486c648d29951e9c70694338e",
+ "sha256:b4a2b782b8a8c5522ad35c93e04d60e2ba7f7dcb9271ec8e8c3e08239be6c7b4",
+ "sha256:c462eb33f6abca3b34cdedbe84d761f31a60b814e173b98ede3c81bb48967c4f",
+ "sha256:fd135b8d35dfdcdb984828c84d695937e58cc5f49e1c854eb311c4d6aa03f4f1"
+ ],
+ "version": "==1.4.2"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
+ "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
+ ],
+ "version": "==19.1"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version == '3.4.*' or python_version < '3'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6",
+ "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34"
+ ],
+ "version": "==0.13.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42",
+ "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300"
+ ],
+ "index": "pypi",
+ "version": "==1.9.5"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:8fc39199bdda3d9d025d3b1f4eb99a192c20828030ea7c9a0d2840721de7d347",
+ "sha256:d100a02770f665f5dcf7e3f08202db29857fee6d15f34c942be0a511f39814f0"
+ ],
+ "index": "pypi",
+ "version": "==4.6.5"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:510df890afe08d36eca5bb16b4aa6308a6f85e3159ad3013bac8b9de7bd5a010",
+ "sha256:88d3402dd8b3c69a9e4f9d3a73ad11b15920c6efd36bc27bf1f701cf4a8e4646"
+ ],
+ "index": "pypi",
+ "version": "==1.7.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typing": {
+ "hashes": [
+ "sha256:91dfe6f3f706ee8cc32d38edbbf304e9b7583fb37108fef38229617f8b3eba23",
+ "sha256:c8cabb5ab8945cd2f54917be357d134db9cc1eb039e59d1606dc1e60cb1d9d36",
+ "sha256:f38d83c5a7a7086543a0f649564d661859c5146a85775ab90c0d2f93ffaa9714"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==3.7.4.1"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
+ "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
+ ],
+ "version": "==0.6.0"
+ }
+ }
+}
diff --git a/Integrations/BluecatAddressManager/BluecatAddressManager.py b/Integrations/BluecatAddressManager/BluecatAddressManager.py
new file mode 100644
index 000000000000..3e8b4184c2a6
--- /dev/null
+++ b/Integrations/BluecatAddressManager/BluecatAddressManager.py
@@ -0,0 +1,443 @@
+import demistomock as demisto
+from CommonServerPython import *
+
+''' IMPORTS '''
+
+import requests
+import ipaddress
+
+
+# error class for token errors
+class TokenException(Exception):
+ pass
+
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+TOKEN = demisto.params().get('token')
+# Remove trailing slash to prevent wrong URL path to service
+SERVER = demisto.params().get('url')[:-1] \
+ if (demisto.params().get('url') and demisto.params().get('url').endswith('/')) else demisto.params().get('url')
+# Should we use SSL
+USE_SSL = not demisto.params().get('insecure', False)
+# Service base URL
+BASE_URL = f'{SERVER}/Services/REST/v1'
+
+# Headers to be sent in requests
+HEADERS = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
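+# Cached tokens older than this many minutes are treated as expired and re-fetched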
+TOKEN_LIFE_TIME_MINUTES = 5
+USER_CONF = demisto.params().get('conf_name')
+USERNAME = demisto.params().get('credentials').get('identifier')
+PASSWORD = demisto.params().get('credentials').get('password')
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, params=None, data=None, headers=HEADERS, safe=False):
+ """
+ A wrapper for requests lib to send our requests and handle requests and responses better.
+
+ :type method: ``str``
+ :param method: HTTP method for the request.
+
+ :type url_suffix: ``str``
+ :param url_suffix: The suffix of the URL (endpoint)
+
+ :type params: ``dict``
+ :param params: The URL params to be passed.
+
+ :type data: ``str``
+ :param data: The body data of the request.
+
+ :type headers: ``dict``
+ :param headers: Request headers
+
+ :type safe: ``bool``
+ :param safe: If set to true will return None in case of http error
+
+ :return: Returns the http request response json
+ :rtype: ``dict``
+ """
+ headers['Authorization'] = get_token()
+ url = BASE_URL + url_suffix
+ try:
+ res = requests.request(method, url, verify=USE_SSL, params=params, data=data, headers=headers)
+ # Try to create a new token
+ if res.status_code == 401:
+ headers['Authorization'] = get_token(new_token=True)
+ res = requests.request(method, url, verify=USE_SSL, params=params, data=data, headers=headers)
+ except requests.exceptions.RequestException:
+ return_error('Error in connection to the server. Please make sure you entered the URL correctly.')
+ # Handle error responses gracefully
+ if res.status_code not in {200, 201, 202}:
+        result_msg = None
+        try:
+            result_msg = res.json()
+        except ValueError:
+            # the error body is not JSON; fall back to the HTTP reason phrase
+            pass
+        reason = result_msg if result_msg else res.reason
+        err_msg = f'Error in API call. code: {res.status_code}; reason: {reason}'
+        if safe:
+            return None
+        return_error(err_msg)
+ return res.json()
+
+
+def get_token(new_token=False):
+ """
+ Retrieves the token from the server if it's expired and updates the global HEADERS to include it
+
+ :param new_token: If set to True will generate a new token regardless of time passed
+
+ :rtype: ``str``
+ :return: Token
+ """
+ now = datetime.now()
+ ctx = demisto.getIntegrationContext()
+ if ctx and not new_token:
+ passed_minutes = get_passed_minutes(now, datetime.fromtimestamp(ctx.get('time')))
+ if passed_minutes >= TOKEN_LIFE_TIME_MINUTES:
+ # token expired
+ auth_token = get_token_request()
+ demisto.setIntegrationContext({'auth_token': auth_token, 'time': date_to_timestamp(now) / 1000})
+ else:
+ # token hasn't expired
+ auth_token = ctx.get('auth_token')
+ else:
+ # generating new token
+ auth_token = get_token_request()
+ demisto.setIntegrationContext({'auth_token': auth_token, 'time': date_to_timestamp(now) / 1000})
+ return auth_token
+
+
+def get_configuration():
+ """
+ Gets the chosen configuration to run queries on
+
+ :return: User configuration id, or the first configuration id if no user configuration provided
+ """
+ user_conf = USER_CONF
+ params = {
+ 'type': 'Configuration',
+ 'start': 0,
+ 'count': 100
+ }
+ confs = http_request('GET', '/getEntities', params)
+ if not confs:
+ return_error('No configurations could be fetched from the system')
+ if user_conf:
+ for conf in confs:
+ if conf.get('name') == user_conf:
+ return conf.get('id')
+
+ return confs[0].get('id')
+
+
+def get_passed_minutes(start_time, end_time):
+ """
+ Returns the time passed in minutes
+ :param start_time: Start time in datetime
+ :param end_time: End time in datetime
+ :return: The passed minutes in int
+ """
+ time_delta = start_time - end_time
+    return time_delta.total_seconds() / 60
+
+
+def properties_to_camelized_dict(properties):
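+    # Address Manager returns entity properties as a single 'key1=val1|key2=val2|' string;
+    # split it into a dict with upper-camelized keys for context output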
+ properties = properties.split('|')
+ properties_dict = {}
+ for _property in properties:
+ if _property:
+            key_val_pair = _property.split('=', 1)
+ # camelize the key
+ key = key_val_pair[0][0].upper() + key_val_pair[0][1:]
+ properties_dict[key] = key_val_pair[1]
+ return properties_dict
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def get_token_request():
+ url_args = {
+ 'username': USERNAME,
+ 'password': PASSWORD
+ }
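+    # a successful login appears to return a string along the lines of
+    # "Session Token-> BAMAuthToken: <token> <- for User : <user>"; the slicing below
+    # strips the fixed 16-character prefix and everything from the ' <-' delimiter on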
+ start_idx = 16
+ end_delim = ' <-'
+ url = BASE_URL + '/login'
+ res = requests.request('GET', url, verify=USE_SSL, params=url_args)
+ if res.status_code != 200:
+ raise TokenException('Error: Failed to create a new token, please check your credentials')
+ res_json = res.json()
+ end_idx = res_json.index(end_delim)
+ return res_json[start_idx:end_idx]
+
+
+def test_module():
+ """
+ Performs basic get request to get item samples
+ """
+ get_token(new_token=True)
+ demisto.results('ok')
+
+
+def query_ip_command():
+ ip = demisto.getArg('ip')
+ try:
+ if isinstance(ipaddress.ip_address(ip), ipaddress.IPv6Address):
+ ip_type = 'IPv6'
+ base_ip_raw_res = query_ipv6(ip)
+ else:
+ ip_type = 'IPv4'
+ base_ip_raw_res = query_ipv4(ip)
+
+        # an ID of 0 is the root entity and CONF is the configuration itself, so the IP was not found
+ if base_ip_raw_res.get('id') in (None, 0, CONF):
+ return_outputs(f'IP: {ip} was not found.', {}, base_ip_raw_res)
+ else:
+ base_ip_parents = get_entity_parents(base_ip_raw_res.get('id'))
+ ip_object = {
+ 'ID': base_ip_raw_res.get('id'),
+ 'Name': base_ip_raw_res.get('name'),
+ 'Parents': base_ip_parents,
+ 'Type': ip_type
+ }
+ ip_object.update(properties_to_camelized_dict(base_ip_raw_res.get('properties')))
+ ec = {
+ 'BlueCat.AddressManager.IP(obj.ID === val.ID)': ip_object,
+ 'IP(val.Address === obj.Address)': {'Address': ip}
+ }
+ hr = create_human_readable_ip(ip_object, ip)
+ return_outputs(hr, ec, base_ip_raw_res)
+
+    except ValueError:
+        # ipaddress.ip_address raises a plain ValueError for malformed addresses
+ return_error(f'Invalid IP: {ip}')
+
+
+def query_ipv4(ip):
+ params = {
+ 'containerId': CONF,
+ 'address': ip
+ }
+ return http_request('GET', '/getIP4Address', params=params)
+
+
+def query_ipv6(ip):
+ params = {
+ 'containerId': CONF,
+ 'address': ip
+ }
+ return http_request('GET', '/getIP6Address', params=params)
+
+
+def get_entity_parents(base_id):
+ base_ip_parents = []
+ entity_parent = get_entity_parent(entity_id=base_id)
+    # climb the parent chain until reaching the root entity (ID 0) or the configuration itself (CONF)
+ while entity_parent.get('id') not in (None, 0, CONF):
+ parent_obj = {
+ 'ID': entity_parent.get('id'),
+ 'Type': entity_parent.get('type'),
+ 'Name': entity_parent.get('name'),
+ }
+ parent_obj.update(properties_to_camelized_dict(entity_parent.get('properties')))
+ base_ip_parents.append(parent_obj)
+ entity_parent = get_entity_parent(entity_id=entity_parent.get('id'))
+
+ return base_ip_parents
+
+
+def get_entity_parent(entity_id):
+ params = {
+ 'entityId': entity_id
+ }
+ return http_request('GET', '/getParent', params=params)
+
+
+def create_human_readable_ip(ip_object, ip_value):
+ ip_object_cpy = dict(ip_object)
+ reversed_parents = list(reversed(ip_object_cpy['Parents']))
+ ip_object_cpy.pop('Parents')
+ hr = tblToMd(f'{ip_value} IP Result:', ip_object_cpy, headerTransform=pascalToSpace)
+ hr += tblToMd('Parents Details:', reversed_parents, headerTransform=pascalToSpace)
+ return hr
+
+
+def get_range_by_ip_command():
+ ip = demisto.getArg('ip')
+ try:
+        # ipaddress.ip_address raises ValueError for malformed input, so any value
+        # that passes is a valid IPv4 or IPv6 address
+        ipaddress.ip_address(ip)
+        range_raw_res = get_range_by_ip(ip)
+
+ if range_raw_res.get('id') in (None, 0, CONF):
+ return_outputs(f'IP range was not found for {ip}.', {}, range_raw_res)
+ else:
+ base_ip_parents = get_entity_parents(range_raw_res.get('id'))
+
+ range_object = {
+ 'ID': range_raw_res.get('id'),
+ 'Name': range_raw_res.get('name'),
+ 'Parents': base_ip_parents,
+ 'Type': range_raw_res.get('type')
+ }
+
+ range_object.update(properties_to_camelized_dict(range_raw_res.get('properties')))
+ ec = {'BlueCat.AddressManager.Range(obj.ID === val.ID)': range_object}
+ hr = create_human_readable_range(range_object, ip)
+ return_outputs(hr, ec, range_raw_res)
+
+    except ValueError:
+ return_error(f'Invalid IP: {ip}')
+
+
+def get_range_by_ip(ip):
+ params = {
+ 'containerId': CONF,
+ 'type': '',
+ 'address': ip
+ }
+ return http_request('GET', '/getIPRangedByIP', params=params)
+
+
+def create_human_readable_range(range_object, ip_value):
+ range_object_cpy = dict(range_object)
+ reversed_parents = list(reversed(range_object_cpy['Parents']))
+ range_object_cpy.pop('Parents')
+ hr = tblToMd(f'{ip_value} Range Result:', range_object_cpy, headerTransform=pascalToSpace)
+ hr += tblToMd('Parents Details:', reversed_parents, headerTransform=pascalToSpace)
+ return hr
+
+
+def get_response_policies_command():
+ start = demisto.getArg('start')
+ count = demisto.getArg('count')
+ raw_response_policies = get_response_policies(start, count)
+ response_policies, hr = create_response_policies_result(raw_response_policies)
+ return_outputs(hr, response_policies, raw_response_policies)
+
+
+def get_response_policies(start, count):
+ params = {
+ 'parentId': CONF,
+ 'type': 'ResponsePolicy',
+ 'start': start,
+ 'count': count
+ }
+ return http_request('GET', '/getEntities', params=params)
+
+
+def create_response_policies_result(raw_response_policies):
+ response_policies = []
+ if raw_response_policies:
+ hr = '## Response Policies:\n'
+ for response_policy in raw_response_policies:
+ response_policy_obj = {
+ 'ID': response_policy.get('id'),
+ 'Name': response_policy.get('name'),
+ 'Type': response_policy.get('type')
+ }
+ response_policy_obj.update(properties_to_camelized_dict(response_policy.get('properties')))
+ hr += tblToMd(response_policy_obj['Name'], response_policy_obj)
+ response_policies.append(response_policy_obj)
+ return {'BlueCat.AddressManager.ResponsePolicies(val.ID === obj.ID)': response_policies}, hr
+ return {}, 'Could not find any response policy'
+
+
+def add_domain_response_policy_command():
+ policy_id = demisto.getArg('policy_id')
+ domain = demisto.getArg('domain')
+ raw_response = add_domain_response_policy(policy_id, domain)
+ error_msg = f'Failed to add {domain} to response policy {policy_id}, ' \
+ f'possibly the domain already exists in the response policy.'
+ if raw_response:
+ return_outputs(f'Successfully added {domain} to response policy {policy_id}', {}, raw_response)
+ else:
+ return_outputs(error_msg, {}, raw_response)
+
+
+def add_domain_response_policy(policy_id, domain):
+ params = {
+ 'policyId': policy_id,
+ 'itemName': domain
+ }
+ return http_request('POST', '/addResponsePolicyItem', params=params)
+
+
+def remove_domain_response_policy_command():
+ policy_id = demisto.getArg('policy_id')
+ domain = demisto.getArg('domain')
+ raw_response = remove_domain_response_policy(policy_id, domain)
+ error_msg = f'Failed to remove {domain} from response policy {policy_id}, ' \
+ f'possibly the domain doesn\'t exist in the response policy.'
+ if raw_response:
+ return_outputs(f'Successfully removed {domain} from response policy {policy_id}', {}, raw_response)
+ else:
+ return_outputs(error_msg, {}, raw_response)
+
+
+def remove_domain_response_policy(policy_id, domain):
+ params = {
+ 'policyId': policy_id,
+ 'itemName': domain
+ }
+ return http_request('DELETE', '/deleteResponsePolicyItem', params=params)
+
+
+def search_response_policy_by_domain_command():
+ domain = demisto.getArg('domain')
+ raw_response_policies = search_response_policy_by_domain(domain)
+ response_policies, hr = create_response_policies_result(raw_response_policies)
+ return_outputs(hr, response_policies, raw_response_policies)
+
+
+def search_response_policy_by_domain(domain):
+ params = {
+ 'configurationId': CONF,
+ 'itemName': domain
+ }
+ return http_request('GET', '/findResponsePoliciesWithItem', params=params)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+try:
+ CONF = get_configuration()
+except TokenException as e:
+ return_error(str(e))
+
+
+def main():
+ handle_proxy()
+ command = demisto.command()
+ LOG(f'Command being called is {command}')
+
+ try:
+ if command == 'test-module':
+ test_module()
+ elif command == 'bluecat-am-query-ip':
+ query_ip_command()
+ elif command == 'bluecat-am-get-range-by-ip':
+ get_range_by_ip_command()
+ elif command == 'bluecat-am-get-response-policies':
+ get_response_policies_command()
+ elif command == 'bluecat-am-search-response-policies-by-domain':
+ search_response_policy_by_domain_command()
+ elif command == 'bluecat-am-response-policy-add-domain':
+ add_domain_response_policy_command()
+ elif command == 'bluecat-am-response-policy-remove-domain':
+ remove_domain_response_policy_command()
+
+ # Log exceptions
+ except Exception as e:
+ return_error(str(e))
+
+
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/BluecatAddressManager/BluecatAddressManager.yml b/Integrations/BluecatAddressManager/BluecatAddressManager.yml
new file mode 100644
index 000000000000..758ef441a4bf
--- /dev/null
+++ b/Integrations/BluecatAddressManager/BluecatAddressManager.yml
@@ -0,0 +1,217 @@
+category: Authentication
+commonfields:
+ id: BluecatAddressManager
+ version: -1
+configuration:
+- defaultvalue: https://192.168.0.1
+ display: Server URL (e.g., https://192.168.0.1)
+ name: url
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- display: Trust any certificate (not secure)
+  name: insecure
+  defaultvalue: ""
+  required: false
+  type: 8
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ required: false
+ type: 8
+- display: Configuration Name
+ name: conf_name
+ required: false
+ type: 0
+description: Enriches IP addresses with BlueCat Address Manager data and manages DNS response policies.
+display: BluecatAddressManager
+name: BluecatAddressManager
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: The IP to get data about
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+  description: Enriches an IP address with data about the IP networks and blocks
+    it belongs to, linked IP addresses, MAC addresses, and so on.
+ execution: false
+ name: bluecat-am-query-ip
+ outputs:
+ - contextPath: BlueCat.AddressManager.IP.ID
+ description: The address manager ID of the IP address.
+ type: Number
+ - contextPath: BlueCat.AddressManager.IP.Name
+ description: Name of the IP address.
+ type: String
+ - contextPath: BlueCat.AddressManager.IP.MacAddress
+ description: Corresponding MAC address.
+ type: String
+ - contextPath: BlueCat.AddressManager.IP.Parents.ID
+ description: ID of the parent IP address.
+ type: String
+ - contextPath: BlueCat.AddressManager.IP.Parents.Type
+ description: Type of the parent IP address.
+ type: String
+ - contextPath: BlueCat.AddressManager.IP.Parents.Name
+ description: Name of the parent IP address.
+ type: String
+ - contextPath: BlueCat.AddressManager.IP.Parents.CIDR
+    description: CIDR notation (Classless Inter-Domain Routing) of the parent.
+ type: String
+ - contextPath: BlueCat.AddressManager.IP.Type
+ description: Type of IP address.
+ type: String
+ - contextPath: IP.Address
+ description: Address of IP.
+ type: String
+ - contextPath: BlueCat.AddressManager.IP.Parents.Prefix
+ description: Prefix of the IP address.
+ type: String
+ - arguments:
+ - default: false
+ defaultValue: '0'
+ description: Start index from which to get the response policies. Default is
+ 0.
+ isArray: false
+ name: start
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '30'
+ description: Maximum number of response policies to return.
+ isArray: false
+ name: count
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns all response policies.
+ execution: false
+ name: bluecat-am-get-response-policies
+ outputs:
+ - contextPath: BlueCat.AddressManager.ResponsePolicies.ID
+ description: ID of the response policy.
+ type: Number
+ - contextPath: BlueCat.AddressManager.ResponsePolicies.Name
+ description: Name of the response policy.
+ type: String
+ - contextPath: BlueCat.AddressManager.ResponsePolicies.Ttl
+ description: Time to live (TTL) of the response policy.
+ type: Unknown
+ - contextPath: BlueCat.AddressManager.ResponsePolicies.Type
+ description: Type of the response policy (BLACKLIST, BLACKHOLE, WHITELIST, or
+ REDIRECT).
+ type: String
+ - contextPath: BlueCat.AddressManager.ResponsePolicies.RedirectTarget
+ description: Target of redirect, in case of REDIRECT policy type.
+ type: String
+ - arguments:
+ - default: false
+ description: Domain name by which to search.
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Searches all response policies in which the given domain is included.
+ execution: false
+ name: bluecat-am-search-response-policies-by-domain
+ outputs:
+ - contextPath: BlueCat.AddressManager.ResponsePolicies.ID
+ description: ID of the response policy.
+ type: Number
+ - contextPath: BlueCat.AddressManager.ResponsePolicies.Name
+ description: Name of the response policy.
+ type: String
+ - contextPath: BlueCat.AddressManager.ResponsePolicies.Ttl
+    description: Time to live (TTL) of the response policy.
+ type: Unknown
+ - contextPath: BlueCat.AddressManager.ResponsePolicies.Type
+    description: Type of the response policy (BLACKLIST, BLACKHOLE, WHITELIST,
+      or REDIRECT).
+ type: String
+ - contextPath: BlueCat.AddressManager.ResponsePolicies.RedirectTarget
+ description: Target of redirect, in case of REDIRECT policy type.
+ type: String
+ - arguments:
+ - default: true
+ description: ID of the response policy to edit.
+ isArray: false
+ name: policy_id
+ required: true
+ secret: false
+ - default: false
+ description: Domain to add to the response policy.
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds a domain to the given response policy.
+ execution: false
+ name: bluecat-am-response-policy-add-domain
+ - arguments:
+ - default: true
+ description: ID of the response policy to edit.
+ isArray: false
+ name: policy_id
+ required: true
+ secret: false
+ - default: false
+ description: Domain to remove from the response policy.
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes a domain from the given response policy.
+ execution: false
+ name: bluecat-am-response-policy-remove-domain
+ - arguments:
+ - default: false
+ description: The IP address for which to get the range.
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the IPv4 Block containing the specified IPv4 address.
+ execution: false
+ name: bluecat-am-get-range-by-ip
+ outputs:
+ - contextPath: BlueCat.AddressManager.Range.ID
+ description: The address manager ID of the Range.
+ type: String
+ - contextPath: BlueCat.AddressManager.Range.Name
+ description: Name of the Range.
+ type: String
+ - contextPath: BlueCat.AddressManager.Range.Type
+ description: Type of the Range.
+ type: String
+ - contextPath: BlueCat.AddressManager.Range.Parents.ID
+ description: ID of the parent Range.
+ type: String
+ - contextPath: BlueCat.AddressManager.Range.Parents.Type
+ description: Type of the parent Range.
+ type: String
+ - contextPath: BlueCat.AddressManager.Range.Parents.Name
+ description: Name of the parent Range.
+ type: String
+ - contextPath: BlueCat.AddressManager.Range.Parents.CIDR
+    description: CIDR notation (Classless Inter-Domain Routing) of the parent.
+ type: String
+ dockerimage: demisto/python3:3.7.3.286
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- Bluecat Address Manager test
diff --git a/Integrations/BluecatAddressManager/BluecatAddressManager_description.md b/Integrations/BluecatAddressManager/BluecatAddressManager_description.md
new file mode 100644
index 000000000000..ea6ac3bb838a
--- /dev/null
+++ b/Integrations/BluecatAddressManager/BluecatAddressManager_description.md
@@ -0,0 +1,23 @@
+When you configure an instance of the Bluecat Address Manager integration in Demisto, you need to provide an API user account.
+
+## Generate an API user account
+
+1. Using the Address Manager web interface, log in to Address Manager as an administrator.
+2. On the Administration page, click **Users and Groups**.
+3. In the **Users** section, click **New**.
+4. Enter a name in the **Username** field.
+5. In the **Authentication** section, type and confirm the API user password in the **Password** and **Confirm Password** fields. If external authenticators are available, an **Other** checkbox and a list of authenticators appear in the **Authentication** section. To use an external authenticator for the API user, select the **Other** checkbox, and then select an authenticator from the list.
+6. In the **Extra Information** section, set the following parameters for the API user.
+ - E-mail Address (required)
+ - Phone number (optional)
+7. In the **User Access** section, define the user type, security, and history privileges, and access type.
+ - Type of User: select either Non-Administrator or Administrator. Non-Administrator users have access only to DNS and IPAM management functions. Administrator users have unlimited access to all Address Manager functions.
+ - Security Privilege: select a security privilege type from the drop-down list. This field is available only for Non-Administrator users with GUI, API, or GUI and API access.
+ - History Privilege: select a history privilege type from the drop-down list. This field is available only for Non-Administrator users with GUI, or GUI and API access.
+ - Access Type: select the type of access; GUI, API, or GUI and API. GUI users can access Address Manager only through the Address Manager web interface. API users can access Address Manager only through the API. GUI and API users can access Address Manager either through the Address Manager web interface or the API.
+8. (Optional) To assign the user to one or more existing user groups, in the **Assign to Group** section, enter the name of a user group, select a name from the list, and then click **Add**.
+9. (Optional) In the **Change Control** section, add comments to describe the changes. Although this step is optional by default, it may be configured to be required.
+10. Click **Add**.
+
+About Integration Parameters:
+* **Configuration Name:** If you have more than one configuration set up, use this parameter to specify the configuration the instance should run on. If no value is given, the integration uses the first configuration it finds.
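+For example, if your Address Manager hosts two configurations, say `Production` and `Lab` (hypothetical names), setting **Configuration Name** to `Lab` pins the instance to that configuration instead of the first one the API returns.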
diff --git a/Integrations/BluecatAddressManager/BluecatAddressManager_image.png b/Integrations/BluecatAddressManager/BluecatAddressManager_image.png
new file mode 100644
index 000000000000..a8a5b459dcf5
Binary files /dev/null and b/Integrations/BluecatAddressManager/BluecatAddressManager_image.png differ
diff --git a/Integrations/BluecatAddressManager/CHANGELOG.md b/Integrations/BluecatAddressManager/CHANGELOG.md
new file mode 100644
index 000000000000..bb7c71c756a3
--- /dev/null
+++ b/Integrations/BluecatAddressManager/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+ - Added the ***bluecat-am-get-range-by-ip*** command.
+ - Improved handling of cases in which an error is returned from querying a non-existing IP address.
diff --git a/Integrations/C2sec/C2sec.py b/Integrations/C2sec/C2sec.py
new file mode 100644
index 000000000000..19edc7102cda
--- /dev/null
+++ b/Integrations/C2sec/C2sec.py
@@ -0,0 +1,207 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import requests
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+API_KEY = demisto.params()['apikey']
+HEADERS = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
+
+DOMAIN = demisto.params()['domainName']
+# Normalize the endpoint URL: every request below appends a path that starts
+# with '/', so strip any trailing slash rather than appending one.
+ENDPOINTURL = demisto.params()['endpointURL'].rstrip('/')
+
+USE_SSL = not demisto.params().get('unsecure', False)
+
+
+def add_domain(domain, newscan):
+ if newscan == 'true':
+ newscan = 1
+ else:
+ newscan = 0
+
+ params = {
+ 'apikey': API_KEY,
+ 'domain': domain,
+ 'newscan': newscan
+ }
+ call = requests.get(ENDPOINTURL + '/iriskaddcompany', headers=HEADERS, verify=USE_SSL, params=params)
+
+ if call.status_code == requests.codes.ok:
+ result_dictionary = {
+ 'result': call.json()['result'],
+ 'Name': domain
+ }
+
+ md = tableToMarkdown('Add domain Result', result_dictionary)
+ return {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': call.json(),
+ 'HumanReadable': md,
+ 'EntryContext': {
+ "C2sec.Domain(val.Name && val.Name == obj.Name)": result_dictionary
+ }
+ }
+ else:
+ return 'Error searching domain - status code: [%d] - reason: %s' % (call.status_code, call.text)
+
+
+def get_scan_status(workitemid):
+ params = {
+ 'apikey': API_KEY,
+ 'workitemid': workitemid
+ }
+ call = requests.get(ENDPOINTURL + '/iriskqueryapistatus', headers=HEADERS, verify=USE_SSL, params=params)
+ if call.status_code == requests.codes.ok:
+ resp = call.json()
+ resp['apistatus']['workitemid'] = workitemid
+ md = tableToMarkdown('Get scan Result', resp['apistatus'], removeNull=True)
+ return {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': resp,
+ 'HumanReadable': md,
+ 'EntryContext': {
+ "C2sec.Domain.Scan(val.workitemid && val.workitemid == obj.workitemid)": resp['apistatus']
+ }
+ }
+ else:
+ return 'Error getting scan results [%d] - reason: %s' % (call.status_code, call.text)
+
+
+def get_domain_issues(domain, severity=None):
+ params = {
+ 'apikey': API_KEY,
+ 'domain': domain
+ }
+ call = requests.get(ENDPOINTURL + '/iRiskQueryIssues', headers=HEADERS, verify=USE_SSL, params=params)
+ if call.status_code == requests.codes.ok:
+ contexts = []
+ for issue in call.json()['issueList']:
+ if severity and severity != issue['severity']:
+ continue
+
+ context = {
+ 'ID': issue['id'],
+ 'Issue': issue['issue'],
+ 'Severity': issue['severity'],
+ 'Component': issue['component'],
+ 'ComponentDisplay': issue['componentDisplay'],
+ 'Details': issue['detail'],
+ 'Asset': issue['asset'],
+ 'Rec': issue['rec'],
+ }
+ contexts.append(context)
+
+ md = tableToMarkdown('Get domain Issues Result', contexts,
+ ['ID', 'Issue', 'Severity', 'Component', 'ComponentDisplay', 'Details', 'Asset', 'Rec'])
+ return {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': call.json(),
+ 'HumanReadable': md,
+ 'EntryContext': {
+ "C2sec.Domain(val.Name && val.Name == obj.Name)": {
+ 'Name': domain,
+ 'Issue': contexts
+ }
+ }
+ }
+ else:
+ return 'Error getting issues [%d] - reason: %s' % (call.status_code, call.text)
+
+
+def rescan_domain(domain):
+ params = {
+ 'apikey': API_KEY,
+ 'domain': domain
+ }
+ call = requests.get(ENDPOINTURL + '/iRiskRescanCompany', headers=HEADERS, verify=USE_SSL, params=params)
+ if call.status_code == requests.codes.ok:
+ md = tableToMarkdown('Rescan domain Results', call.json())
+ context = call.json()
+ context['domain'] = domain
+
+ return {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': call.json(),
+ 'HumanReadable': md,
+ 'EntryContext': {
+ "C2sec.Domain.Scan(val.workitemid && val.workitemid == obj.workitemid)": context
+ }
+ }
+ else:
+ return 'Error rescanning the domain [%d] - reason: %s' % (call.status_code, call.text)
+
+
+def get_scan_results(domain, component):
+ params = {
+ 'apikey': API_KEY,
+ 'domain': domain,
+ 'component': component
+ }
+ call = requests.get(ENDPOINTURL + '/iRiskQueryComponentData', headers=HEADERS, verify=USE_SSL, params=params)
+
+ if call.status_code == requests.codes.ok:
+ resp = call.json()
+
+ resp['Domain'] = domain
+ md = tableToMarkdown('Get Scan Result', resp, ['result', 'Domain', 'component'],
+ metadata="The results can be found in the context")
+
+ return {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': call.json(),
+ 'HumanReadable': md,
+ 'EntryContext': {
+ "C2sec.Domain.{}(val.Domain && val.Domain == obj.Domain)".format(component): resp
+ }
+ }
+ else:
+ return_error('Error getting the scan results [{}] - reason: {}'.format(call.status_code, call.text))
+
+
+LOG('Command being called is {}'.format(demisto.command()))
+try:
+ handle_proxy()
+ # The command demisto.command() holds the command sent from the user.
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test = add_domain(demisto.params()['domainName'], newscan='false')
+        # add_domain returns an entry dict on success and an error string on
+        # failure, so a dict here means the API call went through.
+        if isinstance(test, dict):
+ demisto.results("ok")
+ else:
+ demisto.results(test)
+ elif demisto.command() == 'irisk-add-domain':
+ result = add_domain(demisto.args().get('domain', DOMAIN), demisto.args()['newscan'])
+ demisto.results(result)
+ elif demisto.command() == 'irisk-get-scan-status':
+ result = get_scan_status(demisto.args()['id'])
+ demisto.results(result)
+ elif demisto.command() == 'irisk-rescan-domain':
+ result = rescan_domain(demisto.args().get('domain', DOMAIN))
+ demisto.results(result)
+ elif demisto.command() == 'irisk-get-domain-issues':
+ result = get_domain_issues(demisto.args().get('domain', DOMAIN), demisto.args().get('severity'))
+ demisto.results(result)
+ elif demisto.command() == 'irisk-get-scan-results':
+ domain = demisto.args().get('domain', DOMAIN)
+ result = get_scan_results(domain, demisto.args()['component'])
+ demisto.results(result)
+
+# Log exceptions
+except Exception as e:
+ return_error(str(e))
diff --git a/Integrations/C2sec/C2sec.yml b/Integrations/C2sec/C2sec.yml
new file mode 100644
index 000000000000..8021deca5c13
--- /dev/null
+++ b/Integrations/C2sec/C2sec.yml
@@ -0,0 +1,194 @@
+commonfields:
+ id: C2sec irisk
+ version: -1
+name: C2sec irisk
+display: C2sec irisk
+fromversion: 5.0.0
+category: Data Enrichment & Threat Intelligence
+description: Understand Your Cyber Exposure as Easy as a Google Search
+configuration:
+- display: API URL (e.g. https://api.c2sec.com/api)
+ name: endpointURL
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: API Key
+ name: apikey
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Default domain name
+ name: domainName
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Trust any certificate (not secure)
+ name: unsecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+script:
+ script: '-'
+ type: python
+ commands:
+ - name: irisk-add-domain
+ arguments:
+ - name: domain
+      description: Domain to add to the portfolio. If empty, the default domain will be used.
+ - name: newscan
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Flag to indicate whether a new scan is always initiated for the specified domain.
+ defaultValue: "true"
+ outputs:
+ - contextPath: C2Sec.Domain.result
+ description: Result status of adding the new company.
+ - contextPath: C2Sec.Domain.Name
+ description: The name of the searched domain.
+    description: Adds a domain to the portfolio.
+ - name: irisk-get-scan-status
+ arguments:
+ - name: id
+ required: true
+      description: The work item ID (workitemid) of the scan for which to check the status.
+ outputs:
+ - contextPath: C2sec.Domain.Scan.domain
+ description: The name of the scanned domain.
+ type: string
+ - contextPath: C2sec.Domain.Scan.workitemid
+ description: The ID of the current scan.
+ type: number
+ - contextPath: C2sec.Domain.Scan.completeTime
+ description: The time that the scan was completed.
+ type: date
+ - contextPath: C2sec.Domain.Scan.creationTime
+ description: The time that the scan was initiated.
+ type: date
+ - contextPath: C2sec.Domain.Scan.status
+ description: The status of the current scan ("processing"/"completed").
+ type: number
+ description: Queries the status of a scan.
+ - name: irisk-rescan-domain
+ arguments:
+ - name: domain
+ description: Domain to re-scan. If empty, the default domain will be used.
+ outputs:
+ - contextPath: C2sec.Domain.Scan.domain
+ description: The name of the scanned domain.
+ type: string
+ - contextPath: C2sec.Domain.Scan.workitemid
+ description: Scan ID.
+ type: number
+ - contextPath: C2sec.Domain.Scan.result
+ description: The scan result status.
+ type: string
+ description: Initiates a re-scan for a domain within a portfolio.
+ - name: irisk-get-domain-issues
+ arguments:
+ - name: domain
+      description: The domain to query. If empty, the default domain will be used.
+ - name: severity
+      description: Filters query results by issue severity.
+ outputs:
+ - contextPath: C2sec.Domain.Name
+ description: The name of the domain against which it was checked.
+ type: string
+ - contextPath: C2sec.Domain.Issue.ID
+ description: Issue ID.
+ type: string
+ - contextPath: C2sec.Domain.Issue.Asset
+ description: Asset associated with the issues. For example, IP addresses, website URLs, and so on.
+ type: string
+ - contextPath: C2sec.Domain.Issue.Component
+ description: The component used in the issue.
+ type: string
+ - contextPath: C2sec.Domain.Issue.ComponentDisplay
+ description: The display name of the component being used.
+ type: string
+ - contextPath: C2sec.Domain.Issue.Details
+ description: The details for the issue.
+ type: string
+ - contextPath: C2sec.Domain.Issue.Issue
+ description: The name of the issue.
+ type: string
+ - contextPath: C2sec.Domain.Issue.Severity
+ description: The severity of the issue.
+ type: string
+ description: Returns the issues located under the specified domain.
+ - name: irisk-get-scan-results
+ arguments:
+ - name: domain
+      required: false
+      description: The domain to query. If empty, the default domain will be used.
+ - name: component
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - credential
+ - network
+ - application
+ description: The component to query.
+ outputs:
+ - contextPath: C2sec.Domain.application.result
+ description: Query status.
+ type: string
+ - contextPath: C2sec.Domain.application.Domain
+ description: The domain name being queried.
+ type: string
+ - contextPath: C2sec.Domain.application.data.appdetail
+ description: Details about the application being checked.
+ type: string
+ - contextPath: C2sec.Domain.application.data.info
+ description: Information regarding the data being processed.
+ type: string
+ - contextPath: C2sec.Domain.application.data.website
+ description: Website address being processed.
+ type: string
+ - contextPath: C2sec.Domain.credential.result
+ description: Query status.
+ type: string
+ - contextPath: C2sec.Domain.credential.Domain
+ description: The domain name being queried.
+ type: string
+ - contextPath: C2sec.Domain.credential.data.user
+ description: User name.
+ type: string
+ - contextPath: C2sec.Domain.credential.data.pw
+ description: User password.
+ type: string
+ - contextPath: C2sec.Domain.network.result
+ description: Query status.
+ type: string
+ - contextPath: C2sec.Domain.network.Domain
+ description: The domain name being queried.
+ type: string
+ - contextPath: C2sec.Domain.network.data.firewall
+ description: Firewall status.
+ type: string
+ - contextPath: C2sec.Domain.network.data.port
+ description: Port number.
+ type: string
+ - contextPath: C2sec.Domain.network.data.IP
+ description: IP address.
+ type: string
+ - contextPath: C2sec.Domain.network.data.Name
+ description: Name of the user.
+ type: string
+ - contextPath: C2sec.Domain.network.data.service
+ description: Name of the service being used.
+ type: string
+ - contextPath: C2sec.Domain.network.data.protocol
+ description: Name of the protocol being used.
+ type: string
+ - contextPath: C2sec.Domain.network.data.state
+ description: State of the network application ("open" or "closed").
+ type: string
+    description: Queries data for a specific component for companies in the portfolio.
+ runonce: false
diff --git a/Integrations/C2sec/C2sec_description.md b/Integrations/C2sec/C2sec_description.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Integrations/C2sec/C2sec_image.png b/Integrations/C2sec/C2sec_image.png
new file mode 100644
index 000000000000..65daca513c83
Binary files /dev/null and b/Integrations/C2sec/C2sec_image.png differ
diff --git a/Integrations/C2sec/CHANGELOG.md b/Integrations/C2sec/CHANGELOG.md
new file mode 100644
index 000000000000..c1be4c35c3f1
--- /dev/null
+++ b/Integrations/C2sec/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.8.2] - 2019-08-22
+#### New Integration
+Understand Your Cyber Exposure as Easy as a Google Search
\ No newline at end of file
diff --git a/Integrations/CIRCL/CIRCL.py b/Integrations/CIRCL/CIRCL.py
new file mode 100644
index 000000000000..b9c896a2704a
--- /dev/null
+++ b/Integrations/CIRCL/CIRCL.py
@@ -0,0 +1,275 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import requests
+import json
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+BASE_URL = demisto.getParam('url')
+USERNAME = demisto.getParam('credentials')['identifier']
+PASSWORD = demisto.getParam('credentials')['password']
+AUTH = (USERNAME, PASSWORD)
+USE_SSL = not demisto.params().get('insecure', False)
+IS_USING_PROXY = bool(demisto.params().get('proxy'))
+LAST_TIME_KEY = 'time_last'
+
+
+def http_request(method, url):
+ response = requests.request(
+ method,
+ url,
+ auth=AUTH,
+ verify=USE_SSL
+ )
+
+ if response.status_code != 200:
+ return_error('Error in API call: [%d] - %s' % (response.status_code, response.reason))
+
+ return response
+
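+# Note: return_error() terminates execution (it raises SystemExit), so
+# http_request only ever returns on a 200 response.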
+
+def validate_sha1(sha1):
+ if len(sha1) != 40:
+ return_error('Invalid SHA-1, expected 40 characters: %s' % (sha1))
+
+
+def validate_ip_or_cidr(ip):
+ regex = r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'
+
+ match = re.search(regex, ip)
+
+ if match is None:
+ return_error('Invalid IP or CIDR: %s' % (ip))
+
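+# Accepted formats (illustrative): '8.8.8.8' or '192.168.0.0/24'; the optional
+# suffix is a prefix length between 0 and 32.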
+
+def timestamp_to_string(timestamp):
+ if timestamp is None:
+ return None
+
+ return datetime.utcfromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
+
+
+def dns_get_command(url):
+ response = http_dns_get(url)
+
+    results = [json.loads(line) for line in response.text.splitlines()]
+ results = merge_by_rdata(results)
+
+ records = []
+
+ for result in results:
+ records.append(create_dns_record_context(result))
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': response.text,
+ 'HumanReadable': tableToMarkdown("CIRCL Dns - " + url, records),
+ 'EntryContext': {
+ 'CIRCLdns.Query(val.Value===obj.Value)': {
+ 'Value': url,
+ 'Record': records,
+ }
+ }
+ })
+
+
+def http_dns_get(url):
+ query_url = BASE_URL + '/pdns/query/' + url
+
+ return http_request('GET', query_url)
+
+
+def merge_by_rdata(results):
+    """
+    The results may contain several records with the same 'rdata' but different
+    (not interesting) other properties. This function merges such records and
+    keeps the latest "last seen" time.
+    """
+    results_map = {}  # type: dict
+
+ for e in results:
+ key = e['rdata']
+ other = results_map.get(key)
+
+ if other is not None and other[LAST_TIME_KEY] > e[LAST_TIME_KEY]:
+ e = other
+
+ results_map[key] = e
+
+ return list(results_map.values())
+
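+# Illustrative example (hypothetical records): entries sharing an rdata value
+# collapse into the one with the latest 'time_last':
+#   merge_by_rdata([{'rdata': '1.2.3.4', 'time_last': 100},
+#                   {'rdata': '1.2.3.4', 'time_last': 200}])
+#   -> [{'rdata': '1.2.3.4', 'time_last': 200}]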
+
+def create_dns_record_context(record):
+ last_time = timestamp_to_string(record[LAST_TIME_KEY])
+
+ return {
+ 'Data': record['rdata'],
+ 'LastTime': last_time,
+ }
+
+
+def list_certificates(queryValue):
+    validate_ip_or_cidr(queryValue)
+ response = http_list_certificates(queryValue)
+
+ data = response.json()
+ records = []
+
+ for ip, ip_data in data.items():
+ records.append(create_ip_context(ip, ip_data))
+
+ result = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': data,
+ 'HumanReadable': tableToMarkdown('List certificates for ' + queryValue, records),
+ 'EntryContext': {
+ 'CIRCLssl.IPAddress(val.Value===obj.Value)': records
+ }
+ }
+
+ demisto.results(result)
+
+
+def http_list_certificates(queryValue):
+ query_url = BASE_URL + '/v2pssl/query/' + queryValue
+
+ return http_request('GET', query_url)
+
+
+def create_ip_context(ip, ipData):
+ certificates = []
+
+ for sha1 in ipData['certificates']:
+ subjects = ipData['subjects'].get(sha1, {}).get('values', [])
+ certificates.append(create_list_certificate_context(sha1, subjects))
+
+ return {
+ 'Value': ip,
+ 'Certificate': certificates
+ }
+
+
+def create_list_certificate_context(sha1, subjects):
+ return {
+ 'SHA1': sha1,
+ 'Subjects': subjects
+ }
+
+
+def list_certificate_seen_ips(sha1, limit):
+ validate_sha1(sha1)
+ response = http_list_certificate_seen_ips(sha1)
+
+ data = response.json()
+ certificate = create_certificate_seen_ips_context(sha1, data, limit)
+
+ result = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': data,
+ 'HumanReadable': 'Hits: ' + str(certificate['Hits']),
+ 'EntryContext': {
+ 'CIRCLssl.Certificate(val.SHA1===obj.SHA1)': certificate,
+ }
+ }
+
+ demisto.results(result)
+
+
+def http_list_certificate_seen_ips(sha1):
+ query_url = BASE_URL + '/v2pssl/cquery/' + sha1
+
+ return http_request('GET', query_url)
+
+
+def create_certificate_seen_ips_context(sha1, data, limit):
+ return {
+ 'SHA1': sha1,
+ 'Hits': data['hits'],
+ 'IPAddress': data['seen'][:limit],
+ }
+
+
+def get_certificate_details(sha1):
+ validate_sha1(sha1)
+ response = http_get_certificate_details(sha1)
+
+ data = response.json()
+ certificate = create_certificate_details(sha1, data)
+
+ result = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': data,
+ 'HumanReadable': tableToMarkdown("CIRCL ssl certificate - " + sha1, certificate),
+ 'EntryContext': {
+ 'CIRCLssl.Certificate(val.SHA1===obj.SHA1)': certificate,
+ }
+ }
+
+ demisto.results(result)
+
+
+def http_get_certificate_details(sha1):
+ query_url = BASE_URL + '/v2pssl/cfetch/' + sha1
+
+ return http_request('GET', query_url)
+
+
+def create_certificate_details(sha1, data):
+ info = data['info']
+ usage = ''
+ distribution = ''
+
+ extension = info.get('extension', {})
+
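+    # When both extensions are present, extendedKeyUsage wins, because the
+    # second get() overwrites the value taken from keyUsage.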
+ usage = extension.get('keyUsage', usage)
+ usage = extension.get('extendedKeyUsage', usage)
+ distribution = extension.get('crlDistributionPoints', distribution)
+ times_seen = data.get('icsi', {}).get('times_seen')
+
+ return {
+ 'SHA1': sha1,
+ 'Usage': usage,
+ 'Distribution': distribution,
+ 'Issuer': info['issuer'],
+ 'Time': info['not_before'],
+ 'Subject': info['subject'],
+ 'Key': info['key'],
+ 'Pem': data['pem'],
+ 'Seen': times_seen,
+ }
+
+
+''' EXECUTION CODE '''
+
+LOG('command is %s' % (demisto.command(), ))
+try:
+ command = demisto.command()
+ args = demisto.args()
+ handle_proxy()
+
+ if command == 'test-module':
+ result = http_dns_get('test.com')
+ demisto.results('ok')
+
+ elif command == 'circl-dns-get':
+ dns_get_command(args.get('queryValue'))
+
+ elif command == 'circl-ssl-list-certificates':
+ list_certificates(args.get('queryValue'))
+
+ elif command == 'circl-ssl-query-certificate':
+ limit = int(args.get('limitResults', 100))
+ sha1 = args.get('certificate')
+
+ list_certificate_seen_ips(sha1, limit)
+
+ elif command == 'circl-ssl-get-certificate':
+ get_certificate_details(args.get('certificate'))
+
+except Exception as e:
+ return_error(str(e))
diff --git a/Integrations/CIRCL/CIRCL.yml b/Integrations/CIRCL/CIRCL.yml
new file mode 100644
index 000000000000..09c71b65aa4c
--- /dev/null
+++ b/Integrations/CIRCL/CIRCL.yml
@@ -0,0 +1,145 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: CIRCL
+ version: -1
+configuration:
+- defaultvalue: https://www.circl.lu
+ display: Server URL (e.g. https://www.circl.lu)
+ name: url
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (unsecure)
+ name: insecure
+ required: false
+ type: 8
+description: |-
+ CIRCL Passive DNS is a database storing historical DNS records from various resources.
+ CIRCL Passive SSL is a database storing historical X.509 certificates seen per IP address. The Passive SSL historical data is indexed per IP address.
+display: CIRCL
+name: CIRCL
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: IP address, hostname, or domain name
+ isArray: false
+ name: queryValue
+ required: true
+ secret: false
+ deprecated: false
+ description: Get DNS records for your query value from CIRCL's Passive DNS.
+ execution: false
+ name: circl-dns-get
+ outputs:
+ - contextPath: CIRCLdns.Query.Value
+ description: Query Value
+ type: string
+ - contextPath: CIRCLdns.Query.Record.Data
+ description: DNS Record or IP Address
+ type: string
+ - contextPath: CIRCLdns.Query.Record.LastTime
+ description: DNS record last recorded time
+ type: date
+ - arguments:
+ - default: false
+ description: IP address or CIDR block
+ isArray: false
+ name: queryValue
+ required: true
+ secret: false
+ deprecated: false
+  description: Queries an IP address or CIDR block (/32 up to /23) for SSL certificate
+    history.
+ execution: false
+ name: circl-ssl-list-certificates
+ outputs:
+ - contextPath: CIRCLssl.IPAddress.Value
+ description: IP address
+ type: string
+ - contextPath: CIRCLssl.IPAddress.Certificate.SHA1
+    description: The SHA-1 fingerprint of the certificate.
+ type: string
+ - contextPath: CIRCLssl.IPAddress.Certificate.Subjects
+ description: Certificate subjects
+ type: string
+ - arguments:
+ - default: false
+ description: SHA-1 fingerprint of a certificate
+ isArray: false
+ name: certificate
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '100'
+    description: Limits the number of results (a large number can cause browser slowdowns).
+ isArray: false
+ name: limitResults
+ required: false
+ secret: false
+ deprecated: false
+  description: Queries a certificate value to get all associated IP addresses.
+ execution: false
+ name: circl-ssl-query-certificate
+ outputs:
+ - contextPath: CIRCLssl.Certificate.SHA1
+    description: The SHA-1 fingerprint of the certificate.
+ type: string
+ - contextPath: CIRCLssl.Certificate.Hits
+ description: Number of hits for the certificate (number of associated addresses)
+ type: number
+ - contextPath: CIRCLssl.Certificate.IPAddress
+    description: IP address associated with the certificate.
+ type: string
+ - arguments:
+ - default: false
+ description: SHA-1 fingerprint of a certificate
+ isArray: false
+ name: certificate
+ required: true
+ secret: false
+ deprecated: false
+ description: Get the raw certificate and related information.
+ execution: false
+ name: circl-ssl-get-certificate
+ outputs:
+ - contextPath: CIRCLssl.Certificate.SHA1
+ description: The SHA-1 fingerprint of the certificate
+ type: string
+ - contextPath: CIRCLssl.Certificate.Usage
+ description: Extended key usage
+ type: string
+ - contextPath: CIRCLssl.Certificate.Distribution
+ description: CRL distribution points
+ type: string
+ - contextPath: CIRCLssl.Certificate.Issuer
+ description: Certificate issuer
+ type: string
+ - contextPath: CIRCLssl.Certificate.Time
+    description: Certificate issued time (not_before).
+ type: date
+ - contextPath: CIRCLssl.Certificate.Subject
+ description: Certificate subject
+ type: string
+ - contextPath: CIRCLssl.Certificate.Key
+ description: Certificate public key
+ type: string
+ - contextPath: CIRCLssl.Certificate.Pem
+ description: Certificate in PEM format
+ type: string
+ - contextPath: CIRCLssl.Certificate.Seen
+ description: Number of times the certificate was seen
+ type: number
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- CirclIntegrationTest
diff --git a/Integrations/CIRCL/CIRCL_image.png b/Integrations/CIRCL/CIRCL_image.png
new file mode 100644
index 000000000000..858e2344c760
Binary files /dev/null and b/Integrations/CIRCL/CIRCL_image.png differ
diff --git a/Integrations/CarbonBlackProtect/CHANGELOG.md b/Integrations/CarbonBlackProtect/CHANGELOG.md
new file mode 100644
index 000000000000..76e629736527
--- /dev/null
+++ b/Integrations/CarbonBlackProtect/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+Fixed an issue with the ***fetch-incidents*** command where users received an error when there were no incidents to fetch.
diff --git a/Integrations/CarbonBlackProtect/CarbonBlackProtect.py b/Integrations/CarbonBlackProtect/CarbonBlackProtect.py
new file mode 100644
index 000000000000..c5c29d74801a
--- /dev/null
+++ b/Integrations/CarbonBlackProtect/CarbonBlackProtect.py
@@ -0,0 +1,2191 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+
+import json
+import requests
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+TOKEN = demisto.params().get('token')
+# Remove trailing slash to prevent wrong URL path to service
+SERVER = demisto.params()['url'][:-1] if (demisto.params().get('url') and demisto.params()['url'].endswith('/')) \
+ else demisto.params().get('url')
+BASE_URL = f'{SERVER}/api/bit9platform/v1'
+# Should we use SSL
+USE_SSL = not demisto.params().get('insecure', False)
+FETCH_TIME = demisto.params().get('fetch_time', '3 days')
+CB_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
+CB_NO_MS_TIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
+INCIDENTS_PER_FETCH = int(demisto.params().get('max_incidents_per_fetch', 15))
+# Headers to be sent in requests
+HEADERS = {
+ 'X-Auth-Token': TOKEN,
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
+
+
+''' HUMAN READABLE HEADERS '''
+
+
+APPROVAL_REQUEST_HEADERS = [
+ 'ID',
+ 'FileName',
+ 'Status',
+ 'DateCreated',
+ 'Platform'
+]
+
+
+COMPUTER_HEADERS = [
+ 'ID',
+ 'Hostname',
+ 'IPAddress',
+ 'MACAddress',
+ 'OSVersion',
+ 'Processor',
+ 'Processors',
+ 'Model'
+]
+
+
+CONNECTOR_HEADERS = [
+ 'ID',
+ 'AnalysisName',
+ 'Enabled',
+ 'AnalysisEnabled',
+ 'AnalysisTargets',
+ 'CanAnalyze',
+ 'ConnectorVersion'
+]
+
+
+EVENT_HEADERS = [
+ 'ID',
+ 'Type',
+ 'SubType',
+ 'Severity',
+ 'Description'
+]
+
+
+FILE_ANALYSIS_HEADERS = [
+ 'ID',
+ 'PathName',
+ 'Priority',
+ 'FileCatalogId',
+ 'ComputerID',
+ 'DateCreated',
+ 'DateModified',
+ 'CreatedBy'
+]
+
+
+FILE_CATALOG_HEADERS = [
+ 'ID',
+ 'Name',
+ 'Type',
+ 'Extension',
+ 'Path',
+    'Size',
+ 'ProductName',
+ 'Publisher',
+ 'Company'
+]
+
+
+FILE_INSTANCE_HEADERS = [
+ 'ID',
+ 'Name',
+ 'Path',
+ 'ComputerID',
+ 'CatalogID'
+]
+
+
+FILE_RULE_HEADERS = [
+ 'ID',
+ 'Name',
+ 'Description',
+ 'PolicyIDs',
+ 'FileState',
+ 'CatalogID',
+ 'Hash',
+ 'ReportOnly'
+]
+
+
+FILE_UPLOAD_HEADERS = [
+ 'ID',
+ 'PathName',
+ 'UploadPath',
+ 'UploadStatus',
+ 'DateCreated',
+ 'DateModified',
+ 'UploadedFileSize',
+ 'ComputerId',
+ 'Priority',
+ 'CreatedBy'
+]
+
+POLICY_HEADERS = [
+ 'ID',
+ 'Name',
+ 'PackageName',
+ 'Description',
+ 'EnforcementLevel'
+]
+
+PUBLISHER_HEADERS = [
+ 'ID',
+ 'Name',
+ 'Description',
+ 'Reputation',
+ 'State',
+ 'SignedCertificatesCount',
+ 'SignedFilesCount'
+]
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, params=None, data=None, headers=HEADERS, safe=False, parse_json=True):
+ """
+ A wrapper for requests lib to send our requests and handle requests and responses better.
+
+ :type method: ``str``
+ :param method: HTTP method for the request.
+
+ :type url_suffix: ``str``
+ :param url_suffix: The suffix of the URL (endpoint)
+
+ :type params: ``dict``
+ :param params: The URL params to be passed.
+
+ :type data: ``dict``
+ :param data: The body data of the request.
+
+ :type headers: ``dict``
+ :param headers: Request headers
+
+ :type safe: ``bool``
+ :param safe: If set to true will return None in case of error
+
+ :return: Returns the http request response json
+ :rtype: ``dict`` or ``str``
+ """
+ url = BASE_URL + url_suffix
+ try:
+ res = requests.request(
+ method,
+ url,
+ verify=USE_SSL,
+ params=params,
+ json=data,
+ headers=headers,
+ )
+ except requests.exceptions.RequestException as e:
+ LOG(str(e))
+ return_error('Error in connection to the server. Please make sure you entered the URL correctly.')
+ # Handle error responses gracefully
+ if res.status_code not in {200, 201}:
+ if safe:
+ return None
+ elif res.status_code == 401:
+ reason = 'Unauthorized. Please check your API token'
+ else:
+ try:
+ reason = res.json()
+ except ValueError:
+ reason = res.reason
+ return_error(f'Error in API call status code: {res.status_code}, reason: {reason}')
+ if parse_json:
+ return res.json()
+ return res.content
+
+
+def remove_prefix(prefix, full_str):
+ """
+ Removes prefix from beginning of full_str if found
+ :param prefix: Prefix to remove from full_str
+ :param full_str: String to have its prefix removed
+ :return: full_str without the provided prefix
+ """
+ if full_str.startswith(prefix):
+ return full_str[len(prefix):]
+ return full_str
+
+
+def event_severity_to_dbot_score(severity):
+ """
+    Converts a severity int to its DBot score representation.
+ Event severity. Can be one of:
+ 2 = Critical -> 3
+ 3 = Error -> 0
+ 4 = Warning -> 2
+ 5 = Notice -> 2
+ 6 = Info -> 0
+ 7 = Debug -> 0
+
+ :type severity: ``int``
+ :param severity: Int representation of a severity
+
+ :return: DBot score representation of the severity
+ :rtype ``int``
+ """
+ severity = int(severity)
+ if severity == 2:
+ return 3
+ elif severity in (4, 5):
+ return 2
+ return 0
+
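+# Quick sanity check (illustrative): severity 2 (Critical) -> 3, severities
+# 4 and 5 (Warning/Notice) -> 2, and everything else -> 0.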
+
+def cbp_date_to_timestamp(date):
+ """
+ Converts a date in carbon black's format to timestamp
+ :param date: Date string in cbp date format
+ :return: Timestamp of the given date
+ """
+ try:
+ ts = date_to_timestamp(date, date_format=CB_TIME_FORMAT)
+ except ValueError:
+ ts = date_to_timestamp(date, date_format=CB_NO_MS_TIME_FORMAT)
+ return ts
+
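+# Both server date formats parse (illustrative values):
+#   cbp_date_to_timestamp('2019-10-03T12:00:00.123Z')  # with milliseconds
+#   cbp_date_to_timestamp('2019-10-03T12:00:00Z')      # without milliseconds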
+
+def event_to_incident(event):
+ """
+ Creates an incident of a detection.
+
+ :type event: ``dict``
+ :param event: Single event object
+
+ :return: Incident representation of an event
+ :rtype ``dict``
+ """
+ incident = {
+ 'name': event.get('description'),
+ 'occurred': str(event.get('timestamp')),
+ 'rawJSON': json.dumps(event),
+ 'severity': event_severity_to_dbot_score(event.get('severity'))
+ }
+ return incident
+
+
+def remove_keys_with_empty_value(dict_with_params):
+ """
+ Removes from dict keys with empty values
+ :param dict_with_params: dict to remove empty keys from
+ :return: dict without any empty fields
+ """
+ return {k: v for k, v in dict_with_params.items() if v}
+
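+# Note (behavioral caveat): falsy values such as 0, False, and '' are dropped
+# as well, so a flag that must be sent explicitly as false cannot pass through
+# this helper.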
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+    Performs a basic GET request to verify connectivity and authentication.
+ """
+ http_request('GET', '/computer?limit=-1')
+
+
+def search_file_catalog_command():
+ """
+ Searches for file catalog
+ :return: EntryObject of the file catalog
+ """
+ args = demisto.args()
+ raw_catalogs = search_file_catalog(args.get('query'), args.get('limit'), args.get('offset'), args.get('sort'),
+ args.get('group'), args.get('fileName'), args.get('fileType'),
+ args.get('computerId'), args.get('threat'), args.get('fileState'),
+ args.get('hash'))
+ headers = args.get('headers', FILE_CATALOG_HEADERS)
+ catalogs = []
+ for catalog in raw_catalogs:
+ catalogs.append({
+ 'Size': catalog.get('fileSize'),
+ 'Path': catalog.get('pathName'),
+ 'SHA1': catalog.get('sha1'),
+ 'SHA256': catalog.get('sha256'),
+ 'MD5': catalog.get('md5'),
+ 'Name': catalog.get('fileName'),
+ 'Type': catalog.get('fileType'),
+ 'ProductName': catalog.get('productName'),
+ 'ID': catalog.get('id'),
+ 'Publisher': catalog.get('publisher'),
+ 'Company': catalog.get('company'),
+ 'Extension': catalog.get('fileExtension')
+ })
+ hr_title = "CarbonBlack Protect File Catalog Search"
+ hr = tableToMarkdown(hr_title, catalogs, headers, removeNull=True, headerTransform=pascalToSpace)
+ catalogs = {'File(val.SHA1 === obj.SHA1)': catalogs} if catalogs else None
+ return_outputs(hr, catalogs, raw_catalogs)
+
+
+@logger
+def search_file_catalog(q=None, limit=None, offset=None, sort=None, group=None, file_name=None, file_type=None,
+ computer_id=None, threat=None, file_state=None, hash_value=None):
+ """
+ Sends the request for file catalog, and returns the result json
+ :param q: Query to be executed
+ :param limit: Limit on the amount of results to be fetched
+ :param offset: Offset of the catalogs to be fetched
+ :param sort: Sort argument for request
+ :param group: Group argument for request
+ :param file_name: Name of the file under which this unique hash was first seen
+ :param file_type: Type of the file
+ :param computer_id: Id of computer where this file was first seen
+ :param threat: Threat of this file
+ :param file_state: File state of this hash
+ :param hash_value: Hash of the file
+ :return: File catalog response json
+ """
+
+ url_params = {
+ "limit": limit,
+ "offset": offset,
+ "sort": sort,
+ "group": group,
+ "q": q.split('&') if q else [] # handle multi condition queries in the following formats: a&b
+ }
+ if file_name:
+ url_params['q'].append(f'fileName:{file_name}')
+ if file_type:
+ url_params['q'].append(f'fileType:{file_type}')
+ if computer_id:
+ url_params['q'].append(f'computerId:{computer_id}')
+ if threat:
+ url_params['q'].append(f'threat:{file_catalog_threat_to_int(threat)}')
+ if file_state:
+ url_params['q'].append(f'fileState:{file_catalog_file_state_to_int(file_state)}')
+ if hash_value:
+ hash_type = get_hash_type(hash_value)
+ if hash_type != 'Unknown':
+ url_params['q'].append(f'{hash_type}:{hash_value}')
+
+ return http_request('GET', '/fileCatalog', params=url_params)
+
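+# Query example (hypothetical values): conditions can be combined in a single
+# 'q' string with '&', and the named filters append further conditions, e.g.
+#   search_file_catalog(q='fileType:exe&threat:100', file_name='calc.exe')
+# sends q=['fileType:exe', 'threat:100', 'fileName:calc.exe'] to /fileCatalog.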
+
+@logger
+def file_catalog_threat_to_int(threat):
+ """
+ Threat of this file. Can be one of:
+ -1=Unknown
+ 0=Clean
+ 50=Potential risk
+ 100=Malicious
+ :param threat:
+ :return:
+ """
+ threat_dict = {
+ 'Unknown': -1,
+ 'Clean': 0,
+ 'Potential risk': 50,
+ 'Malicious': 100
+ }
+ return threat_dict.get(threat, threat)
+
+
+@logger
+def file_catalog_file_state_to_int(file_state):
+ """
+ File state of this hash. Can be one of:
+ 1=Unapproved
+ 2=Approved
+ 3=Banned
+ 4=Approved by Policy
+ 5=Banned by Policy
+ :param file_state: String value of file state
+ :return:
+ """
+ file_state_dict = {
+ 'Unapproved': 1,
+ 'Approved': 2,
+ 'Banned': 3,
+ 'Approved by Policy': 4,
+ 'Banned by Policy': 5
+ }
+ return file_state_dict.get(file_state, file_state)
+
+
+def search_computer_command():
+ """
+    Searches for computers
+ :return: EntryObject of the computer
+ """
+ args = demisto.args()
+ raw_computers = search_computer(args.get('query'), args.get('limit'), args.get('offset'), args.get('sort'),
+ args.get('group'), args.get('name'), args.get('ipAddress'), args.get('macAddress'))
+ headers = args.get('headers', COMPUTER_HEADERS)
+ computers = []
+ for computer in raw_computers:
+ computers.append({
+ 'Memory': computer.get('memorySize'),
+ 'Processors': computer.get('processorCount'),
+ 'Processor': computer.get('processorModel'),
+ 'OS': computer.get('osShortName'),
+ 'OSVersion': computer.get('osName'),
+ 'MACAddress': computer.get('macAddress'),
+ 'Model': computer.get('machineModel'),
+ 'IPAddress': computer.get('ipAddress'),
+ 'Hostname': computer.get('name'),
+ 'ID': computer.get('id')
+ })
+ hr_title = "CarbonBlack Protect Computer Search"
+ hr = tableToMarkdown(hr_title, computers, headers, removeNull=True, headerTransform=pascalToSpace)
+ computers = {'Endpoint(val.ID === obj.ID)': computers} if computers else None
+ return_outputs(hr, computers, raw_computers)
+
+
+@logger
+def search_computer(q=None, limit=None, offset=None, sort=None, group=None, name=None, ip_address=None, mac=None):
+ """
+    Sends the request for computers, and returns the result json
+ :param q: Query to be executed
+ :param limit: Limit on the amount of results to be fetched
+ :param offset: Offset of the computers to be fetched
+ :param sort: Sort argument for request
+ :param group: Group argument for request
+ :param name: Computer name
+ :param ip_address: Last known IP address of this computer
+ :param mac: MAC address of adapter used to connect to the CB Protection Server
+ :return: Computer response json
+ """
+ url_params = {
+ "limit": limit,
+ "offset": offset,
+ "sort": sort,
+ "group": group,
+ "q": q.split('&') if q else [] # handle multi condition queries in the following formats: a&b
+ }
+ if name:
+ url_params['q'].append(f'name:{name}')
+ if ip_address:
+ url_params['q'].append(f'ipAddress:{ip_address}')
+ if mac:
+ url_params['q'].append(f'macAddress:{mac}')
+
+ return http_request('GET', '/Computer', params=url_params)
+
+
+def update_computer_command():
+ """
+ Updates computer
+ :return: EntryObject of the computer
+ """
+ args = demisto.args()
+ raw_computers = update_computer(
+ args.get('id'),
+ args.get('name'),
+ args.get('computerTag'),
+ args.get('description'),
+ args.get('policyId'),
+ args.get('automaticPolicy'),
+ args.get('localApproval'),
+ args.get('refreshFlags'),
+ args.get('prioritized'),
+ args.get('debugLevel'),
+ args.get('kernelDebugLevel'),
+ args.get('debugFlags'),
+ args.get('debugDuration'),
+ args.get('cCLevel'),
+ args.get('cCFlags'),
+ args.get('forceUpgrade'),
+ args.get('template'),
+ )
+ computers = {
+ 'Memory': raw_computers.get('memorySize'),
+ 'Processors': raw_computers.get('processorCount'),
+ 'Processor': raw_computers.get('processorModel'),
+ 'OS': raw_computers.get('osShortName'),
+ 'OSVersion': raw_computers.get('osName'),
+ 'MACAddress': raw_computers.get('macAddress'),
+ 'Model': raw_computers.get('machineModel'),
+ 'IPAddress': raw_computers.get('ipAddress'),
+ 'Hostname': raw_computers.get('name'),
+ 'ID': raw_computers.get('id')
+ }
+ hr = tableToMarkdown('CarbonBlack Protect computer updated successfully', computers, COMPUTER_HEADERS)
+ return_outputs(hr, {'Endpoint(val.ID === obj.ID)': computers}, raw_computers)
+
+
+@logger
+def update_computer(id, name, computer_tag, description, policy_id, automatic_policy, local_approval, refresh_flags,
+ prioritized, debug_level, kernel_debug_level, debug_flags, debug_duration, cclevel, ccflags,
+ force_upgrade, template):
+ """
+ Update computer
+
+ :param id: id of computer
+ :param name: name of computer
+ :param computer_tag: computer tag of computer
+ :param description: description of computer
+ :param policy_id: policy id of the computer
+ :param automatic_policy: automatic policy flag
+ :param local_approval: local approval flag
+ :param refresh_flags: refresh flags
+ :param prioritized: Is prioritized
+ :param debug_level: debug level of computer
+ :param kernel_debug_level: kernel debug level of computer
+ :param debug_flags: debug flags
+ :param debug_duration: debug duration of computer
+ :param cclevel: cache consistency check level set for agent
+ :param ccflags: cache consistency check flags set for agent
+ :param force_upgrade: True if upgrade is forced for this computer
+ :param template: True if computer is a template
+ :return: Result json of the request
+ """
+ body_params = {
+ 'id': id,
+ 'name': name,
+ 'computerTag': computer_tag,
+ 'description': description,
+ 'policyId': policy_id,
+ 'automaticPolicy': automatic_policy,
+ 'localApproval': local_approval,
+ 'refreshFlags': refresh_flags,
+ 'prioritized': prioritized,
+ 'debugLevel': debug_level,
+ 'kernelDebugLevel': kernel_debug_level,
+ 'debugFlags': debug_flags,
+ 'debugDuration': debug_duration,
+ 'cCLevel': cclevel,
+ 'cCFlags': ccflags,
+ 'forceUpgrade': force_upgrade,
+ 'template': template,
+ }
+ body_params = remove_keys_with_empty_value(body_params)
+
+ return http_request('POST', '/computer', data=body_params)
+
+
+def get_computer_command():
+ """
+ Gets the requested computer
+    :return: EntryObject of the computer
+ """
+ args = demisto.args()
+ id = args.get('id')
+ raw_computer = get_computer(id)
+ computer = {
+ 'Memory': raw_computer.get('memorySize'),
+ 'Processors': raw_computer.get('processorCount'),
+ 'Processor': raw_computer.get('processorModel'),
+ 'OS': raw_computer.get('osShortName'),
+ 'OSVersion': raw_computer.get('osName'),
+ 'MACAddress': raw_computer.get('macAddress'),
+ 'Model': raw_computer.get('machineModel'),
+ 'IPAddress': raw_computer.get('ipAddress'),
+ 'Hostname': raw_computer.get('name'),
+ 'ID': raw_computer.get('id')
+ }
+ headers = args.get('headers', COMPUTER_HEADERS)
+ hr_title = f'CarbonBlack Protect Computer Get for {id}'
+ hr = tableToMarkdown(hr_title, computer, headers, removeNull=True, headerTransform=pascalToSpace)
+ entry_context_computer = {'Endpoint(val.ID === obj.ID)': computer} if computer else None
+ return_outputs(hr, entry_context_computer, raw_computer)
+
+
+@logger
+def get_computer(id):
+ """
+ Sends get computer request
+ :param id: Computer ID
+ :return: Result json of the request
+ """
+ url = f'/Computer/{id}'
+ return http_request('GET', url)
+
+
+def search_file_instance_command():
+ """
+ Searches for file instance
+ :return: EntryObject of the file instance
+ """
+ args = demisto.args()
+ raw_files = search_file_instance(args.get('query'), args.get('limit'), args.get('offset'), args.get('sort'),
+ args.get('group'), args.get('computerId'), args.get('fileName'))
+ headers = args.get('headers', FILE_INSTANCE_HEADERS)
+ files = []
+ if raw_files:
+ for file in raw_files:
+ files.append({
+ 'CatalogID': file.get('fileCatalogId'),
+ 'ComputerID': file.get('computerId'),
+ 'ID': file.get('id'),
+ 'Name': file.get('fileName'),
+ 'Path': file.get('pathName')
+ })
+ hr_title = "CarbonBlack Protect File Instance Search"
+ hr = tableToMarkdown(hr_title, files, headers, removeNull=True, headerTransform=pascalToSpace)
+ files = {'CBP.FileInstance(val.ID === obj.ID)': files} if files else None
+ return_outputs(hr, files, raw_files)
+
+
+@logger
+def search_file_instance(q=None, limit=None, offset=None, sort=None, group=None, computer_id=None, file_name=None):
+ """
+ Sends the request for file instance, and returns the result json
+ :param q: Query to be executed
+ :param limit: Limit on the amount of results to be fetched
+ :param offset: Offset of the file instances to be fetched
+ :param sort: Sort argument for request
+ :param group: Group argument for request
+ :param computer_id: Id of computer associated with this fileInstance
+ :param file_name: Name of the file on the agent
+ """
+ url_params = {
+ "limit": limit,
+ "offset": offset,
+ "sort": sort,
+ "group": group,
+ "q": q.split('&') if q else [] # handle multi condition queries in the following formats: a&b
+ }
+ if computer_id:
+ url_params['q'].append(f'computerId:{computer_id}')
+ if file_name:
+ url_params['q'].append(f'fileName:{file_name}')
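+ # Example (hypothetical values): computer_id=15 and file_name='notepad.exe' yield
+ # url_params['q'] == ['computerId:15', 'fileName:notepad.exe']; assuming http_request
+ # forwards params to requests, the list is encoded as repeated q parameters in the URL.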
+
+ return http_request('GET', '/fileInstance', params=url_params)
+
+
+def search_event_command():
+ """
+ Searches for events
+ :return: EntryObject of the events
+ """
+ args = demisto.args()
+ raw_events = search_event(args.get('query'), args.get('limit'), args.get('offset'), args.get('sort'),
+ args.get('group'), args.get('type'), args.get('computerId'), args.get('ipAddress'),
+ args.get('fileName'), args.get('severity'), args.get('userName'),
+ args.get('fileCatalogId'))
+ hr_events = []
+ events = []
+ if raw_events:
+ for event in raw_events:
+ event_json = {
+ 'FilePath': event.get('pathName'),
+ 'Param1': event.get('param1'),
+ 'Param2': event.get('param2'),
+ 'Param3': event.get('param3'),
+ 'SubTypeName': event.get('subtypeName'),
+ 'ComputerName': event.get('computerName'),
+ 'FileName': event.get('fileName'),
+ 'RuleName': event.get('ruleName'),
+ 'ProcessFileCatalogID': event.get('processFileCatalogId'),
+ 'StringID': event.get('stringId'),
+ 'IPAddress': event.get('ipAddress'),
+ 'PolicyID': event.get('policyId'),
+ 'Timestamp': event.get('timestamp'),
+ 'Username': event.get('userName'),
+ 'ComputerID': event.get('computerId'),
+ 'ProcessFileName': event.get('processFileName'),
+ 'IndicatorName': event.get('indicatorName'),
+ 'SubType': event.get('subtype'),
+ 'Type': event.get('type'),
+ 'ID': event.get('id'),
+ 'Description': event.get('description'),
+ 'Severity': event.get('severity'),
+ 'CommandLine': event.get('commandLine'),
+ 'ProcessPathName': event.get('processPathName')
+ }
+ events.append(event_json)
+ hr_event_json = dict(event_json)
+ hr_event_json['Type'] = event_type_to_string(hr_event_json['Type'])
+ hr_event_json['Severity'] = event_severity_to_string(hr_event_json['Severity'])
+ hr_events.append(hr_event_json)
+ headers = args.get('headers', EVENT_HEADERS)
+ hr_title = "CarbonBlack Protect Event Search"
+ hr = tableToMarkdown(hr_title, hr_events, headers, removeNull=True, headerTransform=pascalToSpace)
+ events = {'CBP.Event(val.ID === obj.ID)': events} if events else None
+ return_outputs(hr, events, raw_events)
+
+
+@logger
+def search_event(q=None, limit=None, offset=None, sort=None, group=None, e_type=None, computer_id=None, ip_address=None,
+ file_name=None, severity=None, user_name=None, file_catalog_id=None):
+ """
+ Sends the request for events, and returns the result json
+ :param q: Query to be executed
+ :param limit: Limit on the amount of results to be fetched
+ :param offset: Offset of the events to be fetched
+ :param sort: Sort argument for request
+ :param group: Group argument for request
+ :param e_type: Event type
+ :param computer_id: Id of computer associated with this event
+ :param ip_address: IP address associated with this event
+ :param file_name: Name of the file associated with this event
+ :param severity: Event severity
+ :param user_name: User name associated with this event
+ :param file_catalog_id: Id of fileCatalog entry associated with this event
+ """
+ url_params = {
+ "limit": limit,
+ "offset": offset,
+ "sort": sort,
+ "group": group,
+ "q": q.split('&') if q else [] # handle multi condition queries in the following formats: a&b
+ }
+ if e_type:
+ url_params['q'].append(f'type:{event_type_to_int(e_type)}')
+ if computer_id:
+ url_params['q'].append(f'computerId:{computer_id}')
+ if ip_address:
+ url_params['q'].append(f'ipAddress:{ip_address}')
+ if file_name:
+ url_params['q'].append(f'fileName:{file_name}')
+ if severity:
+ url_params['q'].append(f'severity:{event_severity_to_int(severity)}')
+ if user_name:
+ url_params['q'].append(f'userName:{user_name}')
+ if file_catalog_id:
+ url_params['q'].append(f'fileCatalogId:{file_catalog_id}')
+
+ return http_request('GET', '/event', params=url_params)
+
+
+@logger
+def event_type_to_int(e_type):
+ """
+ Returns type of the event in int format
+ :param e_type: event type in string or int format
+ :return: type of the event in int format
+ """
+ type_dict = {
+ 'Server Management': 0,
+ 'Session Management': 1,
+ 'Computer Management': 2,
+ 'Policy Management': 3,
+ 'Policy Enforcement': 4,
+ 'Discovery': 5,
+ 'General Management': 6,
+ 'Internal Events': 8
+ }
+ return type_dict.get(e_type, e_type)
+
+
+@logger
+def event_severity_to_int(severity):
+ """
+ Return severity value in int
+ :param severity: severity in string or int
+ :return: severity value in int
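+ Unmapped values fall through unchanged, e.g.:
+ >>> event_severity_to_int('Warning')
+ 4
+ >>> event_severity_to_int(4)
+ 4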
+ """
+ severity_dict = {
+ 'Critical': 2,
+ 'Error': 3,
+ 'Warning': 4,
+ 'Notice': 5,
+ 'Info': 6,
+ 'Debug': 7
+ }
+ return severity_dict.get(severity, severity)
+
+
+@logger
+def event_type_to_string(e_type):
+ """
+ Returns event type as string
+ :param e_type: Event type in int
+ :return: event type as string
+ """
+ type_dict = {
+ 0: 'Server Management',
+ 1: 'Session Management',
+ 2: 'Computer Management',
+ 3: 'Policy Management',
+ 4: 'Policy Enforcement',
+ 5: 'Discovery',
+ 6: 'General Management',
+ 8: 'Internal Events'
+ }
+ return type_dict.get(e_type, e_type)
+
+
+@logger
+def event_severity_to_string(severity):
+ """
+ Returns event severity as string
+ :param severity: Severity of the event
+ :return: event severity as string
+ """
+ severity_dict = {
+ 2: 'Critical',
+ 3: 'Error',
+ 4: 'Warning',
+ 5: 'Notice',
+ 6: 'Info',
+ 7: 'Debug'
+ }
+ return severity_dict.get(severity, severity)
+
+
+def search_approval_request_command():
+ """
+ Searches for approval requests
+ :return: EntryObject of the approval requests
+ """
+ args = demisto.args()
+ raw_approval_requests = search_approval(args.get('query'), args.get('limit'), args.get('offset'),
+ args.get('sort'), args.get('group'))
+ hr_approval_requests = []
+ approval_requests = []
+ if raw_approval_requests:
+ for approval_request in raw_approval_requests:
+ approval_request_output = {
+ 'ID': approval_request.get('id'),
+ 'Resolution': approval_request.get('resolution'),
+ 'Status': approval_request.get('status'),
+ 'ResolutionComments': approval_request.get('resolutionComments'),
+ 'FileCatalogID': approval_request.get('fileCatalogId'),
+ 'ComputerID': approval_request.get('computerId'),
+ 'ComputerName': approval_request.get('computerName'),
+ 'DateCreated': approval_request.get('dateCreated'),
+ 'CreatedBy': approval_request.get('createdBy'),
+ 'EnforcementLevel': approval_request.get('enforcementLevel'),
+ 'RequestorEmail': approval_request.get('requestorEmail'),
+ 'Priority': approval_request.get('priority'),
+ 'FileName': approval_request.get('fileName'),
+ 'PathName': approval_request.get('pathName'),
+ 'Process': approval_request.get('process'),
+ 'Platform': approval_request.get('platform')
+ }
+ approval_requests.append(approval_request_output)
+ # handle human readable output
+ hr_approval_request = dict(approval_request_output)
+ hr_approval_request['Resolution'] = approval_request_resolution_to_string(hr_approval_request['Resolution'])
+ hr_approval_request['Status'] = approval_request_status_to_string(hr_approval_request['Status'])
+ hr_approval_requests.append(hr_approval_request)
+ headers = args.get('headers', APPROVAL_REQUEST_HEADERS)
+ hr_title = "CarbonBlack Protect Approval Request Search"
+ hr = tableToMarkdown(hr_title, hr_approval_requests, headers, removeNull=True, headerTransform=pascalToSpace)
+ approval_requests = {'CBP.ApprovalRequest(val.ID === obj.ID)': approval_requests} if approval_requests else None
+ return_outputs(hr, approval_requests, raw_approval_requests)
+
+
+@logger
+def search_approval(q=None, limit=None, offset=None, sort=None, group=None):
+ """
+ Sends the request for approval request, and returns the result json
+ :param q: Query to be executed
+ :param limit: Limit on the amount of results to be fetched
+ :param offset: Offset of the approval requests to be fetched
+ :param sort: Sort argument for request
+ :param group: Group argument for request
+ """
+ url_params = {
+ "limit": limit,
+ "offset": offset,
+ "sort": sort,
+ "group": group
+ }
+ if q:
+ # handle multi-condition queries in the format: a&b
+ q = q.split('&')
+ url_params['q'] = q
+
+ return http_request('GET', '/approvalRequest', params=url_params)
+
+
+@logger
+def approval_request_resolution_to_string(resolution):
+ """
+ Converts resolution as integer to string
+ Based on https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#approvalrequest
+ :param resolution: int that describes resolution [0-7]
+ :return: string representation of the resolution (fallback: returns resolution)
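+ e.g. approval_request_resolution_to_string(2) == 'Resolved - Approved'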
+ """
+ resolution_dict = {
+ 0: 'Not Resolved',
+ 1: 'Rejected',
+ 2: 'Resolved - Approved',
+ 3: 'Resolved - Rule Change',
+ 4: 'Resolved - Installer',
+ 5: 'Resolved - Updated',
+ 6: 'Resolved - Publisher',
+ 7: 'Resolved - Other'
+ }
+ return resolution_dict.get(resolution, resolution)
+
+
+@logger
+def approval_request_status_to_string(status):
+ """
+ Converts status as integer to string
+ Based on https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#approvalrequest
+ :param status: int that describes status [1-4]
+ :return: string representation of the status (fallback: returns status)
+ """
+ status_dict = {
+ 1: 'New',
+ 2: 'Open',
+ 3: 'Closed',
+ 4: 'Escalated'
+ }
+ return status_dict.get(status, status)
+
+
+def search_file_rule_command():
+ """
+ Searches for file rules
+ :return: EntryObject of the file rules
+ """
+ args = demisto.args()
+ raw_file_rules = search_file_rule(args.get('query'), args.get('limit'), args.get('offset'), args.get('sort'),
+ args.get('group'), args.get('fileCatalogId'), args.get('name'),
+ args.get('fileState'), args.get('sourceType'), args.get('hash'),
+ args.get('fileName'))
+ hr_file_rules = []
+ file_rules = []
+ if raw_file_rules:
+ for file_rule in raw_file_rules:
+ file_rule_output = {
+ 'ID': file_rule.get('id'),
+ 'CatalogID': file_rule.get('fileCatalogId'),
+ 'Description': file_rule.get('description'),
+ 'FileState': file_rule.get('fileState'),
+ 'Hash': file_rule.get('hash'),
+ 'Name': file_rule.get('name'),
+ 'PolicyIDs': file_rule.get('policyIds'),
+ 'ReportOnly': file_rule.get('reportOnly')
+ }
+ file_rules.append(file_rule_output)
+ hr_file_rule_output = dict(file_rule_output)
+ hr_file_rule_output['FileState'] = file_rule_file_state_to_string(hr_file_rule_output['FileState'])
+ hr_file_rules.append(hr_file_rule_output)
+ headers = args.get('headers', FILE_RULE_HEADERS)
+ hr_title = "CarbonBlack Protect File Rule Search"
+ hr = tableToMarkdown(hr_title, hr_file_rules, headers, removeNull=True, headerTransform=pascalToSpace)
+ file_rules = {'CBP.FileRule(val.ID === obj.ID)': file_rules} if file_rules else None
+ return_outputs(hr, file_rules, raw_file_rules)
+
+
+@logger
+def search_file_rule(q=None, limit=None, offset=None, sort=None, group=None, file_catalog_id=None, name=None,
+ file_state=None, source_type=None, hash_value=None, file_name=None):
+ """
+ Sends the request for file rule, and returns the result json
+ :param q: Query to be executed
+ :param limit: Limit on the amount of results to be fetched
+ :param offset: Offset of the file rules to be fetched
+ :param sort: Sort argument for request
+ :param group: Group argument for request
+ :param file_catalog_id: Id of fileCatalog entry associated with this fileRule
+ :param name: Name of this rule
+ :param file_state: File state for this rule
+ :param source_type: Mechanism that created this rule
+ :param hash_value: Hash associated with this rule
+ :param file_name: File name associated with this rule
+ """
+ url_params = {
+ "limit": limit,
+ "offset": offset,
+ "sort": sort,
+ "group": group,
+ "q": q.split('&') if q else [] # handle multi condition queries in the following formats: a&b
+ }
+ if file_catalog_id:
+ url_params['q'].append(f'fileCatalogId:{file_catalog_id}')
+ if name:
+ url_params['q'].append(f'name:{name}')
+ if file_state:
+ url_params['q'].append(f'fileState:{file_rule_file_state_to_int(file_state)}')
+ if source_type:
+ url_params['q'].append(f'sourceType:{file_rule_source_type_to_int(source_type)}')
+ if hash_value:
+ url_params['q'].append(f'hash:{hash_value}')
+ if file_name:
+ url_params['q'].append(f'fileName:{file_name}')
+
+ return http_request('GET', '/fileRule', params=url_params)
+
+
+@logger
+def file_rule_file_state_to_int(file_state):
+ """
+ Returns file rule file state in int format
+ :param file_state: File state of a file rule
+ :return: file rule file state in int format
+ """
+ state_dict = {
+ 'Unapproved': 1,
+ 'Approved': 2,
+ 'Banned': 3
+ }
+ return state_dict.get(file_state, file_state)
+
+
+@logger
+def file_rule_source_type_to_int(e_type):
+ """
+ Returns the source type of a file rule in int format
+ :param e_type: source type in string or int format
+ :return: source type in int format
+ """
+ type_dict = {
+ 'Manual': 1,
+ 'Trusted Directory': 2,
+ 'Reputation': 3,
+ 'Imported': 4,
+ 'External (API)': 5,
+ 'Event Rule': 6,
+ 'Application Template': 7,
+ 'Unified Management': 8
+ }
+ return type_dict.get(e_type, e_type)
+
+
+@logger
+def file_rule_file_state_to_string(state):
+ """
+ Converts state as integer to string
+ Based on https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#filerule
+ :param state: int that describes state [1-3]
+ :return: string representation of the state (fallback: returns state)
+ """
+ file_state_dict = {
+ 1: 'Unapproved',
+ 2: 'Approved',
+ 3: 'Banned'
+ }
+ return file_state_dict.get(state, state)
+
+
+def get_file_rule_command():
+ """
+ Gets the requested file rule
+ :return: EntryObject of the file rule
+ """
+ args = demisto.args()
+ id = args.get('id')
+ raw_file_rule = get_file_rule(id)
+ file_rule = {
+ 'ID': raw_file_rule.get('id'),
+ 'CatalogID': raw_file_rule.get('fileCatalogId'),
+ 'Description': raw_file_rule.get('description'),
+ 'FileState': raw_file_rule.get('fileState'),
+ 'Hash': raw_file_rule.get('hash'),
+ 'Name': raw_file_rule.get('name'),
+ 'PolicyIDs': raw_file_rule.get('policyIds'),
+ 'ReportOnly': raw_file_rule.get('reportOnly')
+ }
+ hr_file_rule = dict(file_rule)
+ hr_file_rule['FileState'] = file_rule_file_state_to_string(hr_file_rule['FileState'])
+ headers = args.get('headers', FILE_RULE_HEADERS)
+ hr_title = f'CarbonBlack Protect File Rule Get for {id}'
+ hr = tableToMarkdown(hr_title, hr_file_rule, headers, removeNull=True, headerTransform=pascalToSpace)
+ entry_context_file_rule = {'CBP.FileRule(val.ID === obj.ID)': file_rule} if file_rule else None
+ return_outputs(hr, entry_context_file_rule, raw_file_rule)
+
+
+@logger
+def get_file_rule(id):
+ """
+ Sends get file rule request
+ :param id: File rule ID
+ :return: Result json of the request
+ """
+ url = f'/fileRule/{id}'
+ return http_request('GET', url)
+
+
+def delete_file_rule_command():
+ """
+ Deletes the requested file rule
+ :return: Result message of the deletion
+ """
+ args = demisto.args()
+ id = args.get('id')
+ delete_file_rule(id)
+ hr = f"File Result {id} deleted successfully"
+ demisto.results(hr)
+
+
+@logger
+def delete_file_rule(id):
+ """
+ Sends delete file rule request
+ :param id: File rule ID
+ :return: Result of the request
+ """
+ url = BASE_URL + f'/fileRule/{id}'
+ res = requests.request(
+ 'DELETE',
+ url,
+ verify=USE_SSL,
+ headers=HEADERS
+ )
+ return res
+
+
+def update_file_rule_command():
+ """
+ Creates or updates a file rule
+ :return: Entry object of the created or updated file rule
+ """
+ args = demisto.args()
+ raw_file_rule = update_file_rule(
+ args.get('hash'),
+ args.get('fileState'),
+ args.get('id'),
+ args.get('fileCatalogId'),
+ args.get('name'),
+ args.get('description'),
+ args.get('reportOnly'),
+ args.get('reputationApprovalsEnabled'),
+ args.get('forceInstaller'),
+ args.get('forceNotInstaller'),
+ args.get('policyIds'),
+ args.get('platformFlags'),
+ )
+ file_rule = {
+ 'ID': raw_file_rule.get('id'),
+ 'CatalogID': raw_file_rule.get('fileCatalogId'),
+ 'Description': raw_file_rule.get('description'),
+ 'FileState': raw_file_rule.get('fileState'),
+ 'Hash': raw_file_rule.get('hash'),
+ 'Name': raw_file_rule.get('name'),
+ 'PolicyIDs': raw_file_rule.get('policyIds'),
+ 'ReportOnly': raw_file_rule.get('reportOnly')
+ }
+ hr_file_rule = dict(file_rule)
+ hr_file_rule['FileState'] = file_rule_file_state_to_string(hr_file_rule['FileState'])
+ hr = tableToMarkdown('CarbonBlack Protect File Rule Updated successfully', hr_file_rule, FILE_RULE_HEADERS,
+ removeNull=True, headerTransform=pascalToSpace)
+ return_outputs(hr, {'CBP.FileRule(val.ID === obj.ID)': file_rule}, raw_file_rule)
+
+
+@logger
+def update_file_rule(hash, file_state, id, file_catalog_id, name, description, report_only,
+ reputation_approvals_enabled, force_installer, force_not_installer, policy_ids, platform_flags):
+ """
+ Update file rule
+ :param hash: hash of file rule
+ :param file_state: File state of this hash
+ :param id: id of the file rule
+ :param file_catalog_id: file catalog id
+ :param name: name of the file rule
+ :param description: description
+ :param report_only: True if this has a report-only ban
+ :param reputation_approvals_enabled: True if reputation approvals are enabled for this file
+ :param force_installer: True if this file is forced to act as installer
+ :param force_not_installer: True if this file is forced to act as 'not installer'
+ :param policy_ids: List of IDs of policies where this rule applies.
+ :param platform_flags: Set of platform flags where this file rule will be valid
+ :return: Result json of the request
+ """
+ body_params = {
+ 'hash': hash,
+ 'fileState': file_state,
+ 'id': id,
+ 'fileCatalogId': file_catalog_id,
+ 'name': name,
+ 'description': description,
+ 'reportOnly': report_only,
+ 'reputationApprovalsEnabled': reputation_approvals_enabled,
+ 'forceInstaller': force_installer,
+ 'forceNotInstaller': force_not_installer,
+ 'policyIds': policy_ids,
+ 'platformFlags': platform_flags
+ }
+ body_params = remove_keys_with_empty_value(body_params)
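+ # Example (hypothetical values): calling with only hash='abc' and file_state=3 posts
+ # {'hash': 'abc', 'fileState': 3}; remove_keys_with_empty_value is assumed to drop the
+ # unset (empty) keys, so the update only touches the supplied fields.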
+
+ return http_request('POST', '/fileRule', data=body_params)
+
+
+def search_policy_command():
+ """
+ Searches for policy
+ :return: EntryObject of the policies
+ """
+ args = demisto.args()
+ raw_policy = search_policy(args.get('query'), args.get('limit'), args.get('offset'),
+ args.get('sort'), args.get('group'),
+ args.get('enforcementLevel'), args.get('disconnectedEnforcementLevel'))
+ hr_policies = []
+ policies = []
+ if raw_policy:
+ for policy in raw_policy:
+ policy_ec = {
+ 'ReadOnly': policy.get('readOnly'),
+ 'EnforcementLevel': policy.get('enforcementLevel'),
+ 'ReputationEnabled': policy.get('reputationEnabled'),
+ 'AtEnforcementComputers': policy.get('atEnforcementComputers'),
+ 'Automatic': policy.get('automatic'),
+ 'Name': policy.get('name'),
+ 'FileTrackingEnabled': policy.get('fileTrackingEnabled'),
+ 'ConnectedComputers': policy.get('connectedComputers'),
+ 'PackageName': policy.get('packageName'),
+ 'AllowAgentUpgrades': policy.get('allowAgentUpgrades'),
+ 'TotalComputers': policy.get('totalComputers'),
+ 'LoadAgentInSafeMode': policy.get('loadAgentInSafeMode'),
+ 'AutomaticApprovalsOnTransition': policy.get('automaticApprovalsOnTransition'),
+ 'ID': policy.get('id'),
+ 'Description': policy.get('description'),
+ 'DisconnectedEnforcementLevel': policy.get('disconnectedEnforcementLevel')
+ }
+ policies.append(policy_ec)
+ hr_policy = dict(policy_ec)
+ hr_policy['EnforcementLevel'] = policy_enforcement_lvl_to_string(hr_policy['EnforcementLevel'])
+ hr_policies.append(hr_policy)
+ headers = args.get('headers', POLICY_HEADERS)
+ hr_title = "CarbonBlack Protect Policy Search"
+ hr = tableToMarkdown(hr_title, hr_policies, headers, removeNull=True, headerTransform=pascalToSpace)
+ policies = {'CBP.Policy(val.ID === obj.ID)': policies} if policies else None
+ return_outputs(hr, policies, raw_policy)
+
+
+@logger
+def search_policy(q=None, limit=None, offset=None, sort=None, group=None, enf_lvl=None, dc_enf_lvl=None):
+ """
+ Sends the request for search policy, and returns the result json
+ :param q: Query to be executed
+ :param limit: Limit on the amount of results to be fetched
+ :param offset: Offset of the policies to be fetched
+ :param sort: Sort argument for request
+ :param group: Group argument for request
+ :param enf_lvl: Target enforcement level
+ :param dc_enf_lvl: Target enforcement level for disconnected computers
+ """
+ url_params = {
+ "limit": limit,
+ "offset": offset,
+ "sort": sort,
+ "group": group,
+ "q": q.split('&') if q else [] # handle multi condition queries in the following formats: a&b
+ }
+ if enf_lvl:
+ url_params['q'].append(f'enforcementLevel:{policy_enforcement_lvl_to_int(enf_lvl)}')
+ if dc_enf_lvl:
+ url_params['q'].append(f'disconnectedEnforcementLevel:{policy_enforcement_lvl_to_int(dc_enf_lvl)}')
+
+ demisto.debug(f'policy search URL params: {url_params}')
+
+ return http_request('GET', '/policy', params=url_params)
+
+
+@logger
+def policy_enforcement_lvl_to_int(enf_lvl):
+ """
+ Returns enforcement level in int
+ :param enf_lvl: enforcement level string
+ :return: enforcement level in int
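+ e.g. policy_enforcement_lvl_to_int('Low (Monitor Unapproved)') == 40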
+ """
+ enf_dict = {
+ 'High (Block Unapproved)': 20,
+ 'Medium (Prompt Unapproved)': 30,
+ 'Low (Monitor Unapproved)': 40,
+ 'None (Visibility)': 60,
+ 'None (Disabled)': 80
+ }
+ return enf_dict.get(enf_lvl, enf_lvl)
+
+
+@logger
+def policy_enforcement_lvl_to_string(enf_lvl):
+ """
+ Returns enforcement level in string
+ :param enf_lvl: enforcement level int
+ :return: enforcement level string
+ """
+ enf_dict = {
+ 20: 'High (Block Unapproved)',
+ 30: 'Medium (Prompt Unapproved)',
+ 40: 'Low (Monitor Unapproved)',
+ 60: 'None (Visibility)',
+ 80: 'None (Disabled)'
+ }
+
+ return enf_dict.get(enf_lvl, enf_lvl)
+
+
+def search_server_config_command():
+ """
+ Searches for server config
+ :return: EntryObject of the server configurations
+ """
+ args = demisto.args()
+ raw_server_configs = search_server_config(args.get('query'), args.get('limit'), args.get('offset'),
+ args.get('sort'), args.get('group'))
+ server_configs = []
+ if raw_server_configs:
+ for server_config in raw_server_configs:
+ server_configs.append({
+ 'ID': server_config.get('id'),
+ 'Value': server_config.get('value'),
+ 'Name': server_config.get('name')
+ })
+ headers = args.get('headers')
+ hr_title = "CarbonBlack Protect Server Config Search"
+ hr = tableToMarkdown(hr_title, server_configs, headers, removeNull=True, headerTransform=pascalToSpace)
+ server_configs = {'CBP.ServerConfig(val.ID === obj.ID)': server_configs} if server_configs else None
+ return_outputs(hr, server_configs, raw_server_configs)
+
+
+@logger
+def search_server_config(q=None, limit=None, offset=None, sort=None, group=None):
+ """
+ Sends the request for server config, and returns the result json
+ :param q: Query to be executed
+ :param limit: Limit on the amount of results to be fetched
+ :param offset: Offset of the server configurations to be fetched
+ :param sort: Sort argument for request
+ :param group: Group argument for request
+ """
+ url_params = {
+ "limit": limit,
+ "offset": offset,
+ "sort": sort,
+ "group": group
+ }
+ if q:
+ # handle multi-condition queries in the format: a&b
+ q = q.split('&')
+ url_params['q'] = q
+
+ return http_request('GET', '/serverConfig', params=url_params)
+
+
+@logger
+def publisher_state_to_string(state):
+ """
+ Converts state as integer to string
+ Based on https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#publisher
+ :param state: int that describes state [1-5]
+ :return: string representation of the state (fallback: returns state)
+ """
+ publisher_state_dict = {
+ 1: 'Unapproved',
+ 2: 'Approved',
+ 3: 'Banned',
+ 4: 'Approved By Policy',
+ 5: 'Banned By Policy'
+ }
+ return publisher_state_dict.get(state, state)
+
+
+@logger
+def publisher_reputation_to_string(reputation):
+ """
+ Converts reputation as integer to string
+ Based on https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#publisher
+ :param reputation: int that describes reputation [0-3]
+ :return: string representation of the reputation (fallback: returns reputation)
+ """
+ publisher_reputation_dict = {
+ 0: 'Not trusted (Unknown)',
+ 1: 'Low',
+ 2: 'Medium',
+ 3: 'High'
+ }
+ return publisher_reputation_dict.get(reputation, reputation)
+
+
+def search_publisher_command():
+ """
+ Searches for publisher
+ :return: EntryObject of the publishers
+ """
+ args = demisto.args()
+ raw_publishers = search_publisher(args.get('query'), args.get('limit'), args.get('offset'), args.get('sort'),
+ args.get('group'), args.get('name'), args.get('publisherReputation'),
+ args.get('publisherState'))
+ hr_publishers = []
+ publishers = []
+ if raw_publishers:
+ for publisher in raw_publishers:
+ publisher_output = {
+ 'Description': publisher.get('description'),
+ 'ID': publisher.get('id'),
+ 'Name': publisher.get('name'),
+ 'Reputation': publisher.get('publisherReputation'),
+ 'SignedCertificatesCount': publisher.get('signedCertificateCount'),
+ 'SignedFilesCount': publisher.get('signedFilesCount'),
+ 'State': publisher.get('publisherState')
+ }
+ publishers.append(publisher_output)
+ hr_publisher_output = dict(publisher_output)
+ hr_publisher_output['State'] = publisher_state_to_string(hr_publisher_output['State'])
+ hr_publisher_output['Reputation'] = publisher_reputation_to_string(hr_publisher_output['Reputation'])
+ hr_publishers.append(hr_publisher_output)
+ headers = args.get('headers', PUBLISHER_HEADERS)
+ hr_title = "CarbonBlack Protect Publisher Search"
+ hr = tableToMarkdown(hr_title, hr_publishers, headers, removeNull=True, headerTransform=pascalToSpace)
+ publishers = {'CBP.Publisher(val.ID === obj.ID)': publishers} if publishers else None
+ return_outputs(hr, publishers, raw_publishers)
+
+
+@logger
+def search_publisher(q=None, limit=None, offset=None, sort=None, group=None, name=None, reputation=None, state=None):
+ """
+ Sends the request for publisher, and returns the result json
+ :param q: Query to be executed
+ :param limit: Limit on the amount of results to be fetched
+ :param offset: Offset of the publishers to be fetched
+ :param sort: Sort argument for request
+ :param group: Group argument for request
+ :param name: Subject name of leaf certificate for this publisher
+ :param reputation: Reputation of this publisher
+ :param state: State for this publisher
+ """
+ url_params = {
+ "limit": limit,
+ "offset": offset,
+ "sort": sort,
+ "group": group,
+ "q": q.split('&') if q else [] # handle multi condition queries in the following formats: a&b
+ }
+ if name:
+ url_params['q'].append(f'name:{name}')
+ if reputation:
+ url_params['q'].append(f'publisherReputation:{publisher_reputation_to_int(reputation)}')
+ if state:
+ url_params['q'].append(f'publisherState:{publisher_state_to_int(state)}')
+
+ return http_request('GET', '/publisher', params=url_params)
+
+
+@logger
+def publisher_reputation_to_int(reputation):
+ """
+ Returns the publisher reputation as an int
+ :param reputation: reputation of the publisher
+ :return: publisher reputation as an int
+ """
+ reputation_dict = {
+ 'Not trusted (Unknown)': 0,
+ 'Low': 1,
+ 'Medium': 2,
+ 'High': 3
+ }
+ return reputation_dict.get(reputation, reputation)
+
+
+@logger
+def publisher_state_to_int(state):
+ """
+ Returns the publisher state as an int
+ :param state: state of the publisher
+ :return: publisher state as an int (fallback: returns state)
+ """
+ state_dict = {
+ 'Unapproved': 1,
+ 'Approved': 2,
+ 'Banned': 3,
+ 'Approved By Policy': 4,
+ 'Banned By Policy': 5
+ }
+ return state_dict.get(state, state)
+
+
+def get_file_analysis_command():
+ """
+ Gets the requested file analysis
+ :return: EntryObject of the file analysis
+ """
+ args = demisto.args()
+ id = args.get('id')
+ raw_file_analysis = get_file_analysis(id)
+ # if got here, then get_file_analysis returned a result, so we can assume it'll have a valid fileCatalogId
+ raw_file_rule = search_file_catalog(q=f"id:{raw_file_analysis.get('fileCatalogId')}")[0]
+ cbp_ec_key = 'CBP.FileAnalysis(val.ID === obj.ID)'
+ ec = create_file_analysis_result(raw_file_analysis, raw_file_rule, cbp_ec_key)
+ hr_title = f'CarbonBlack Protect Get File Analysis for {id}'
+ hr = tableToMarkdown(hr_title, ec[cbp_ec_key], FILE_ANALYSIS_HEADERS,
+ removeNull=True, headerTransform=pascalToSpace)
+ return_outputs(hr, ec, raw_file_analysis)
+
+
+@logger
+def get_file_analysis(id):
+ """
+ Sends get file analysis
+ :param id: File analysis ID
+ :return: Result json of the request
+ """
+ url = f'/fileAnalysis/{id}'
+ return http_request('GET', url)
+
+
+@logger
+def create_file_analysis_result(raw_file_analysis, raw_file_rule, cbp_ec_key):
+ result = {
+ cbp_ec_key: {
+ 'Priority': raw_file_analysis.get('priority'),
+ 'FileName': raw_file_analysis.get('fileName'),
+ 'PathName': raw_file_analysis.get('pathName'),
+ 'ComputerId': raw_file_analysis.get('computerId'),
+ 'DateModified': raw_file_analysis.get('dateModified'),
+ 'ID': raw_file_analysis.get('id'),
+ 'FileCatalogId': raw_file_analysis.get('fileCatalogId'),
+ 'DateCreated': raw_file_analysis.get('dateCreated'),
+ 'CreatedBy': raw_file_analysis.get('createdBy')
+ },
+ outputPaths['file']: {
+ 'FileCatalogId': raw_file_analysis.get('fileCatalogId'),
+ 'Name': raw_file_analysis.get('fileName'),
+ 'PathName': raw_file_analysis.get('pathName'),
+ 'SHA1': raw_file_rule.get('sha1'),
+ 'SHA256': raw_file_rule.get('sha256'),
+ 'MD5': raw_file_rule.get('md5')
+ },
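+ # DBot score scale: 0=Unknown, 1=Good, 2=Suspicious, 3=Bad. CB Protection's
+ # analysisResult uses the same 0-3 scale (see file_analysis_result_to_int),
+ # so the raw value is reused directly as the score.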
+ 'DBotScore': [
+ {
+ 'Indicator': raw_file_rule.get('md5'),
+ 'Type': 'hash',
+ 'Vendor': 'Carbon Black Protection',
+ 'Score': raw_file_analysis.get('analysisResult', 0) # CB scoring is the same as DBot scoring
+ },
+ {
+ 'Indicator': raw_file_rule.get('sha1'),
+ 'Type': 'hash',
+ 'Vendor': 'Carbon Black Protection',
+ 'Score': raw_file_analysis.get('analysisResult', 0) # CB scoring is the same as DBot scoring
+ },
+ {
+ 'Indicator': raw_file_rule.get('sha256'),
+ 'Type': 'hash',
+ 'Vendor': 'Carbon Black Protection',
+ 'Score': raw_file_analysis.get('analysisResult', 0) # CB scoring is the same as DBot scoring
+ }
+ ]
+ }
+ # analysisResult == 3 -> Malicious
+ if int(raw_file_analysis.get('analysisResult', 0)) == 3:
+ result[outputPaths['file']].update({ # type: ignore
+ 'Malicious': {
+ 'Vendor': 'Carbon Black Protection',
+ 'Description': 'Carbon Black Protection found this file to be malicious.'
+ }
+ })
+ return result
+
+
+def update_file_analysis_command():
+ """
+ Creates or updates a file analysis
+ :return: Entry object of the created file analysis
+ """
+ args = demisto.args()
+ raw_file_analysis = update_file_analysis(
+ args.get('fileCatalogId'),
+ args.get('connectorId'),
+ args.get('computerId'),
+ args.get('priority'),
+ args.get('analysisStatus'),
+ args.get('analysisTarget'),
+ args.get('id')
+ )
+ file_analysis = {
+ 'Priority': raw_file_analysis.get('priority'),
+ 'FileName': raw_file_analysis.get('fileName'),
+ 'PathName': raw_file_analysis.get('pathName'),
+ 'ComputerId': raw_file_analysis.get('computerId'),
+ 'DateModified': raw_file_analysis.get('dateModified'),
+ 'ID': raw_file_analysis.get('id'),
+ 'FileCatalogId': raw_file_analysis.get('fileCatalogId'),
+ 'DateCreated': raw_file_analysis.get('dateCreated'),
+ 'CreatedBy': raw_file_analysis.get('createdBy')
+ }
+ hr = tableToMarkdown('CarbonBlack Protect File Analysis Created successfully', file_analysis, FILE_ANALYSIS_HEADERS)
+ return_outputs(hr, {'CBP.FileAnalysis(val.ID === obj.ID)': file_analysis}, raw_file_analysis)
+
+
+@logger
+def update_file_analysis(file_catalog_id, connector_id, computer_id, priority, analysis_status, analysis_target, id):
+ """
+ Update file analysis
+ :param file_catalog_id: catalog id
+ :param connector_id: connector id
+ :param computer_id: computer id
+ :param priority: priority of the file analysis
+ :param analysis_status: status of the analysis
+ :param analysis_target: target of the analysis
+ :param id: id of the file analysis
+ :return: Result json of the request
+ """
+ body_params = {
+ 'fileCatalogId': file_catalog_id,
+ 'connectorId': connector_id,
+ 'computerId': computer_id,
+ 'priority': priority,
+ 'analysisStatus': analysis_status,
+ 'analysisTarget': analysis_target,
+ 'id': id
+ }
+ body_params = remove_keys_with_empty_value(body_params)
+
+ return http_request('POST', '/fileAnalysis', data=body_params)
+
+
+def update_file_upload_command():
+ """
+ Creates or updates a file upload
+ :return: Entry object of the created file upload
+ """
+ args = demisto.args()
+ raw_file_upload = update_file_upload(
+ args.get('fileCatalogId'),
+ args.get('computerId'),
+ args.get('priority'),
+ args.get('uploadStatus'),
+ args.get('id')
+ )
+ file_upload = {
+ 'Priority': raw_file_upload.get('priority'),
+ 'FileName': raw_file_upload.get('fileName'),
+ 'UploadPath': raw_file_upload.get('uploadPath'),
+ 'ComputerId': raw_file_upload.get('computerId'),
+ 'DateModified': raw_file_upload.get('dateModified'),
+ 'ID': raw_file_upload.get('id'),
+ 'FileCatalogId': raw_file_upload.get('fileCatalogId'),
+ 'DateCreated': raw_file_upload.get('dateCreated'),
+ 'CreatedBy': raw_file_upload.get('createdBy'),
+ 'PathName': raw_file_upload.get('pathName'),
+ 'UploadStatus': raw_file_upload.get('uploadStatus'),
+ 'UploadedFileSize': raw_file_upload.get('uploadedFileSize'),
+ }
+ hr_file_upload = dict(file_upload)
+ hr_file_upload['UploadStatus'] = file_upload_status_to_string(hr_file_upload['UploadStatus'])
+ hr = tableToMarkdown('CarbonBlack Protect File Upload Created successfully', hr_file_upload, FILE_UPLOAD_HEADERS)
+ return_outputs(hr, {'CBP.FileUpload(val.ID === obj.ID)': file_upload}, raw_file_upload)
+
+
+@logger
+def update_file_upload(file_catalog_id, computer_id, priority, upload_status, id):
+ """
+ Update file upload
+ :param file_catalog_id: catalog id
+ :param computer_id: computer id
+ :param priority: priority of file upload
+ :param upload_status: upload status of the file upload
+ :param id: id of file upload
+ :return: Result json of the request
+ """
+ body_params = {
+ 'fileCatalogId': file_catalog_id,
+ 'computerId': computer_id,
+ 'priority': priority,
+ 'uploadStatus': upload_status,
+ 'id': id
+ }
+ body_params = remove_keys_with_empty_value(body_params)
+
+ return http_request('POST', '/fileUpload', data=body_params)
+
+
+@logger
+def file_upload_status_to_string(status):
+ """
+ Converts status as integer to string
+ Based on https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#fileupload
+ :param status: int that describes the status [0-6]
+ :return: string representation of the status (fallback: returns status)
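+ e.g. file_upload_status_to_string(3) == 'Completed'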
+ """
+ file_status_dict = {
+ 0: 'Queued',
+ 1: 'Initiated',
+ 2: 'Uploading',
+ 3: 'Completed',
+ 4: 'Error',
+ 5: 'Cancelled',
+ 6: 'Deleted'
+ }
+ return file_status_dict.get(status, status)
+
+
+def download_file_upload_command():
+ """
+ Downloads file upload
+ :return: File result of file upload
+ """
+ id = demisto.args().get('id')
+ file_upload = get_file_upload(id)
+ raw_res = download_file_upload(id)
+ demisto.results(fileResult(file_upload.get('fileName', 'cb_uploaded_file'), raw_res))
+
+
+@logger
+def download_file_upload(id):
+ """
+ Downloads file upload from server
+ :param id: ID of the requested file upload
+ :return: File upload binary file
+ """
+ url = f'/fileUpload/{id}'
+ params = {
+ 'downloadFile': 'true'
+ }
+ return http_request('GET', url, params=params, parse_json=False)
+
+
+def search_file_upload_command():
+ """
+ Searches for file upload
+ :return: EntryObject of the file upload
+ """
+ args = demisto.args()
+ raw_file_uploads = search_file_upload(args.get('query'), args.get('limit'), args.get('offset'), args.get('sort'),
+ args.get('group'), args.get('computerId'), args.get('fileCatalogId'),
+ args.get('fileName'), args.get('uploadStatus'))
+ hr_file_uploads = []
+ file_uploads = []
+ if raw_file_uploads:
+ for file_upload in raw_file_uploads:
+ file_upload_output = {
+ 'Priority': file_upload.get('priority'),
+ 'FileName': file_upload.get('fileName'),
+ 'UploadPath': file_upload.get('uploadPath'),
+ 'ComputerId': file_upload.get('computerId'),
+ 'DateModified': file_upload.get('dateModified'),
+ 'ID': file_upload.get('id'),
+ 'FileCatalogId': file_upload.get('fileCatalogId'),
+ 'DateCreated': file_upload.get('dateCreated'),
+ 'CreatedBy': file_upload.get('createdBy'),
+ 'PathName': file_upload.get('pathName'),
+ 'UploadStatus': file_upload.get('uploadStatus'),
+ 'UploadedFileSize': file_upload.get('uploadedFileSize'),
+ }
+ file_uploads.append(file_upload_output)
+ hr_file_upload = dict(file_upload_output)
+ hr_file_upload['UploadStatus'] = file_upload_status_to_string(hr_file_upload['UploadStatus'])
+ hr_file_uploads.append(hr_file_upload)
+ headers = args.get('headers', FILE_UPLOAD_HEADERS)
+ hr_title = "CarbonBlack Protect File Upload Search"
+ hr = tableToMarkdown(hr_title, hr_file_uploads, headers, removeNull=True, headerTransform=pascalToSpace)
+ file_uploads = {'CBP.FileUpload(val.ID === obj.ID)': file_uploads} if file_uploads else None
+ return_outputs(hr, file_uploads, raw_file_uploads)
+
+
+@logger
+def search_file_upload(q=None, limit=None, offset=None, sort=None, group=None, computer_id=None, file_catalog_id=None,
+ file_name=None, upload_status=None):
+ """
+ Sends the request for file upload, and returns the result json
+ :param q: Query to be executed
+ :param limit: Limit on the amount of results to be fetched
+ :param offset: Offset of the file uploads to be fetched
+ :param sort: Sort argument for request
+ :param group: Group argument for request
+ :param computer_id: Id of computer entry associated with this analysis
+ :param file_catalog_id: Id of fileCatalog entry associated with this upload
+ :param file_name: Name of the file where file exists on the endpoint
+ :param upload_status: Status of upload
+ """
+ url_params = {
+ "limit": limit,
+ "offset": offset,
+ "sort": sort,
+ "group": group,
+ "q": q.split('&') if q else [] # handle multi condition queries in the following formats: a&b
+ }
+ if computer_id:
+ url_params['q'].append(f'computerId:{computer_id}')
+ if file_catalog_id:
+ url_params['q'].append(f'fileCatalogId:{file_catalog_id}')
+ if file_name:
+ url_params['q'].append(f'fileName:{file_name}')
+ if upload_status:
+ url_params['q'].append(f'uploadStatus:{file_upload_status_to_int(upload_status)}')
+
+ return http_request('GET', '/fileUpload', params=url_params)
+
+
+@logger
+def file_upload_status_to_int(upload_status):
+ """
+ Returns the upload status in int
+ :param upload_status: Upload status string
+ """
+ status_dict = {
+ 'Queued': 0,
+ 'Initiated': 1,
+ 'Uploading': 2,
+ 'Completed': 3,
+ 'Error': 4,
+ 'Cancelled': 5,
+ 'Deleted': 6
+ }
+
+ return status_dict.get(upload_status, upload_status)
+
+
+def search_file_analysis_command():
+ """
+ Searches for file analysis
+ :return: EntryObject of the file analysis
+ """
+ args = demisto.args()
+ raw_file_analysis = search_file_analysis(args.get('query'), args.get('limit'), args.get('offset'), args.get('sort'),
+ args.get('group'), args.get('fileCatalogId'), args.get('connectorId'),
+ args.get('fileName'), args.get('analysisStatus'),
+ args.get('analysisResult'))
+ file_analysis = []
+ if raw_file_analysis:
+ for analysis in raw_file_analysis:
+ file_analysis.append({
+ 'Priority': analysis.get('priority'),
+ 'FileName': analysis.get('fileName'),
+ 'PathName': analysis.get('pathName'),
+ 'ComputerId': analysis.get('computerId'),
+ 'DateModified': analysis.get('dateModified'),
+ 'ID': analysis.get('id'),
+ 'FileCatalogId': analysis.get('fileCatalogId'),
+ 'DateCreated': analysis.get('dateCreated'),
+ 'CreatedBy': analysis.get('createdBy')
+ })
+ headers = args.get('headers', FILE_ANALYSIS_HEADERS)
+ hr_title = "CarbonBlack Protect File Analysis Search"
+ hr = tableToMarkdown(hr_title, file_analysis, headers, removeNull=True, headerTransform=pascalToSpace)
+ file_analysis = {'CBP.FileAnalysis(val.ID === obj.ID)': file_analysis} if file_analysis else None
+ return_outputs(hr, file_analysis, raw_file_analysis)
+
+
+@logger
+def search_file_analysis(q=None, limit=None, offset=None, sort=None, group=None, file_catalog_id=None,
+ connector_id=None, file_name=None, status=None, result=None):
+ """
+ Sends the request for file analysis, and returns the result json
+ :param q: Query to be executed
+ :param limit: Limit on the amount of results to be fetched
+ :param offset: Offset of the file analysis to be fetched
+ :param sort: Sort argument for request
+ :param group: Group argument for request
+ :param file_catalog_id: Id of fileCatalog entry associated with this analysis
+ :param connector_id: Id of connector associated with this analysis
+ :param file_name: Name of the file where file exists on the endpoint
+ :param status: Status of analysis
+ :param result: Result of the analysis
+ """
+ url_params = {
+ "limit": limit,
+ "offset": offset,
+ "sort": sort,
+ "group": group,
+ "q": q.split('&') if q else [] # handle multi condition queries in the following formats: a&b
+ }
+ if file_catalog_id:
+ url_params['q'].append(f'fileCatalogId:{file_catalog_id}')
+ if connector_id:
+ url_params['q'].append(f'connectorId:{connector_id}')
+ if file_name:
+ url_params['q'].append(f'fileName:{file_name}')
+ if status:
+ url_params['q'].append(f'analysisStatus:{file_analysis_status_to_int(status)}')
+ if result:
+ url_params['q'].append(f'analysisResult:{file_analysis_result_to_int(result)}')
+
+ return http_request('GET', '/fileAnalysis', params=url_params)
+
+
+@logger
+def file_analysis_status_to_int(status):
+ status_dict = {
+ 'scheduled': 0,
+ 'submitted (file is sent for analysis)': 1,
+ 'processed (file is processed but results are not available yet)': 2,
+ 'analyzed (file is processed and results are available)': 3,
+ 'error': 4,
+ 'cancelled': 5
+ }
+ return status_dict.get(status, status)
+
+
+@logger
+def file_analysis_result_to_int(result):
+ result_dict = {
+ 'Not yet available': 0,
+ 'File is clean': 1,
+ 'File is a potential threat': 2,
+ 'File is malicious': 3
+ }
+ return result_dict.get(result, result)
+
+
+def get_file_upload_command():
+ """
+ Gets the requested file upload
+ :return: EntryObject of the file upload
+ """
+ args = demisto.args()
+ id = args.get('id')
+ raw_file_upload = get_file_upload(id)
+ file_upload = {
+ 'Priority': raw_file_upload.get('priority'),
+ 'FileName': raw_file_upload.get('fileName'),
+ 'UploadPath': raw_file_upload.get('uploadPath'),
+ 'ComputerId': raw_file_upload.get('computerId'),
+ 'DateModified': raw_file_upload.get('dateModified'),
+ 'ID': raw_file_upload.get('id'),
+ 'FileCatalogId': raw_file_upload.get('fileCatalogId'),
+ 'DateCreated': raw_file_upload.get('dateCreated'),
+ 'CreatedBy': raw_file_upload.get('createdBy'),
+ 'PathName': raw_file_upload.get('pathName'),
+ 'UploadStatus': raw_file_upload.get('uploadStatus'),
+ 'UploadedFileSize': raw_file_upload.get('uploadedFileSize'),
+ }
+ headers = args.get('headers', FILE_UPLOAD_HEADERS)
+ hr_file_upload = dict(file_upload)
+ hr_file_upload['UploadStatus'] = file_upload_status_to_string(hr_file_upload['UploadStatus'])
+ hr_title = f'CarbonBlack Protect File Upload Get for {id}'
+ hr = tableToMarkdown(hr_title, hr_file_upload, headers, removeNull=True, headerTransform=pascalToSpace)
+ entry_context_file_upload = {'CBP.FileUpload(val.ID === obj.ID)': file_upload} if file_upload else None
+ return_outputs(hr, entry_context_file_upload, raw_file_upload)
+
+
+@logger
+def get_file_upload(id):
+ """
+ Sends get file upload request
+ :param id: File upload ID
+ :return: Result json of the request
+ """
+ url = f'/fileUpload/{id}'
+ return http_request('GET', url)
+
+
+def get_connector_command():
+ """
+ Gets the requested connector
+ :return: EntryObject of the connector
+ """
+ args = demisto.args()
+ id = args.get('id')
+ raw_connector = get_connector(id)
+ connector = {
+ 'AnalysisEnabled': raw_connector.get('analysisEnabled'),
+ 'AnalysisName': raw_connector.get('analysisName'),
+ 'AnalysisTargets': raw_connector.get('analysisTargets'),
+ 'CanAnalyze': raw_connector.get('canAnalyze'),
+ 'ConnectorVersion': raw_connector.get('connectorVersion'),
+ 'Enabled': raw_connector.get('enabled'),
+ 'ID': raw_connector.get('id')
+ }
+ headers = args.get('headers', CONNECTOR_HEADERS)
+ hr_title = f'CarbonBlack Protect Connector Get for {id}'
+ hr = tableToMarkdown(hr_title, connector, headers, removeNull=True, headerTransform=pascalToSpace)
+ entry_context_connector = {'CBP.Connector(val.ID === obj.ID)': connector} if connector else None
+ return_outputs(hr, entry_context_connector, raw_connector)
+
+
+@logger
+def get_connector(id):
+ """
+ Sends get connector request
+ :param id: Connector ID
+ :return: Result json of the request
+ """
+ url = f'/connector/{id}'
+ return http_request('GET', url)
+
+
+def search_connector_command():
+ """
+ Searches for connectors
+ :return: EntryObject of the connectors
+ """
+ args = demisto.args()
+ raw_connectors = search_connector(args.get('query'), args.get('limit'), args.get('offset'),
+ args.get('sort'), args.get('group'))
+ connectors = []
+ if raw_connectors:
+ for connector in raw_connectors:
+ connectors.append({
+ 'AnalysisEnabled': connector.get('analysisEnabled'),
+ 'AnalysisName': connector.get('analysisName'),
+ 'AnalysisTargets': connector.get('analysisTargets'),
+ 'CanAnalyze': connector.get('canAnalyze'),
+ 'ConnectorVersion': connector.get('connectorVersion'),
+ 'Enabled': connector.get('enabled'),
+ 'ID': connector.get('id')
+ })
+ headers = args.get('headers', CONNECTOR_HEADERS)
+ hr_title = "CarbonBlack Protect Connector Search"
+ hr = tableToMarkdown(hr_title, connectors, headers, removeNull=True, headerTransform=pascalToSpace)
+ connectors = {'CBP.Connector(val.ID === obj.ID)': connectors} if connectors else None
+ return_outputs(hr, connectors, raw_connectors)
+
+
+@logger
+def search_connector(q=None, limit=None, offset=None, sort=None, group=None):
+ """
+ Sends the request for connectors, and returns the result json
+ :param q: Query to be executed
+ :param limit: Limit on the amount of results to be fetched
+ :param offset: Offset of the connectors to be fetched
+ :param sort: Sort argument for request
+ :param group: Group argument for request
+ """
+ url_params = {
+ "limit": limit,
+ "offset": offset,
+ "sort": sort,
+ "group": group
+ }
+ if q:
+ # handle multi-condition queries in the format: a&b
+ q = q.split('&')
+ url_params['q'] = q
+
+ return http_request('GET', '/connector', params=url_params)
+
+
+def resolve_approval_request_command():
+ """
+ Updates an existing approval request
+ :return: EntryObject of the approval request
+ """
+ args = demisto.args()
+ raw_res = resolve_approval_request(
+ args.get('id'),
+ args.get('resolution'),
+ args.get('requestorEmail'),
+ args.get('resolutionComments'),
+ args.get('status')
+ )
+ approval_request = {
+ 'ID': raw_res.get('id'),
+ 'Resolution': raw_res.get('resolution'),
+ 'Status': raw_res.get('status'),
+ 'ResolutionComments': raw_res.get('resolutionComments')
+ }
+ hr_approval_request = dict(approval_request)
+ hr_approval_request['Status'] = approval_request_status_to_string(hr_approval_request['Status'])
+ hr_approval_request['Resolution'] = approval_request_resolution_to_string(hr_approval_request['Resolution'])
+ hr = tableToMarkdown('CarbonBlack Protect Approval Request Updated successfully', hr_approval_request)
+ return_outputs(hr, {'CBP.ApprovalRequest(val.ID === obj.ID)': approval_request}, raw_res)
+
+
+@logger
+def resolve_approval_request(id, resolution, requestor_email=None, res_comments=None, status=None):
+ """
+ Resolves an approval request
+ :param id: approval request id
+ :param resolution: approval request resolution
+ :param requestor_email: approval request requestor email
+ :param res_comments: approval request resolution comments
+ :param status: approval request status
+ :return: Result json of the request
+ """
+ body_params = {
+ 'id': id,
+ 'resolution': resolution,
+ 'requestorEmail': requestor_email,
+ 'resolutionComments': res_comments,
+ 'status': status
+ }
+ body_params = remove_keys_with_empty_value(body_params)
+ return http_request('POST', '/approvalRequest', data=body_params)
+
+
+def fetch_incidents():
+ """
+ Fetches incidents using the events API
+ :return: Fetched events in incident format
+ """
+ last_run = demisto.getLastRun()
+ # Get the last fetch time, if exists
+ last_fetch = last_run.get('first_event_time')
+
+ # Handle first time fetch, fetch incidents retroactively
+ if last_fetch is None:
+ last_fetch, _ = parse_date_range(FETCH_TIME, date_format=CB_TIME_FORMAT)
+ last_fetch_timestamp = cbp_date_to_timestamp(last_fetch)
+ query = f"timestamp>{last_fetch}"
+ user_query = demisto.params().get('fetch_query')
+ if user_query:
+ # Add user's query to default query
+ query = f'{query}&{user_query}'
+ events = search_event(q=query, limit=INCIDENTS_PER_FETCH)
+ incidents = []
+ if events:
+ for event in events:
+ incident = event_to_incident(event)
+ incident_date = incident['occurred']
+ incident_date_timestamp = cbp_date_to_timestamp(incident_date)
+ # Update last run and add incident if the incident is newer than last fetch
+ if incident_date_timestamp > last_fetch_timestamp:
+ last_fetch = incident_date
+ incidents.append(incident)
+ demisto.setLastRun({'first_event_time': last_fetch})
+ return incidents
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+
+# main added for unit tests
+def main():
+ handle_proxy()
+ command = demisto.command()
+ LOG(f'Command being called is {command}')
+ # should raise error in case of issue
+ if command == 'fetch-incidents':
+ demisto.incidents(fetch_incidents())
+ else:
+ try:
+ if command == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+ demisto.results('ok')
+ elif command == 'cbp-fileCatalog-search':
+ search_file_catalog_command()
+ elif command == 'cbp-computer-search':
+ search_computer_command()
+ elif command == 'cbp-computer-update':
+ update_computer_command()
+ elif command == 'cbp-fileInstance-search':
+ search_file_instance_command()
+ elif command == 'cbp-event-search':
+ search_event_command()
+ elif command == 'cbp-approvalRequest-search':
+ search_approval_request_command()
+ elif command == 'cbp-fileRule-search':
+ search_file_rule_command()
+ elif command == 'cbp-fileRule-get':
+ get_file_rule_command()
+ elif command == 'cbp-fileRule-delete':
+ delete_file_rule_command()
+ elif command in ('cbp-fileRule-update', 'cbp-fileRule-createOrUpdate'):
+ update_file_rule_command()
+ elif command == 'cbp-policy-search':
+ search_policy_command()
+ elif command == 'cbp-serverConfig-search':
+ search_server_config_command()
+ elif command == 'cbp-publisher-search':
+ search_publisher_command()
+ elif command == 'cbp-fileAnalysis-search':
+ search_file_analysis_command()
+ elif command == 'cbp-fileAnalysis-get':
+ get_file_analysis_command()
+ elif command == 'cbp-fileAnalysis-createOrUpdate':
+ update_file_analysis_command()
+ elif command == 'cbp-fileUpload-createOrUpdate':
+ update_file_upload_command()
+ elif command == 'cbp-fileUpload-download':
+ download_file_upload_command()
+ elif command == 'cbp-fileUpload-search':
+ search_file_upload_command()
+ elif command == 'cbp-fileUpload-get':
+ get_file_upload_command()
+ elif command == 'cbp-computer-get':
+ get_computer_command()
+ elif command == 'cbp-connector-get':
+ get_connector_command()
+ elif command == 'cbp-connector-search':
+ search_connector_command()
+ elif command == 'cbp-approvalRequest-resolve':
+ resolve_approval_request_command()
+ else:
+ return_error(f"Command {command} is not supported.")
+ # Log exceptions
+ except Exception as e:
+ return_error(str(e))
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/CarbonBlackProtect/CarbonBlackProtect.yml b/Integrations/CarbonBlackProtect/CarbonBlackProtect.yml
new file mode 100644
index 000000000000..7ebc706a2642
--- /dev/null
+++ b/Integrations/CarbonBlackProtect/CarbonBlackProtect.yml
@@ -0,0 +1,2530 @@
+category: Endpoint
+commonfields:
+ id: CarbonBlackProtectionV2
+ version: -1
+configuration:
+- defaultvalue: https://192.168.0.1
+ display: Server URL (e.g. https://192.168.0.1)
+ name: url
+ required: true
+ type: 0
+- display: API Token
+ name: token
+ required: true
+ type: 4
+- defaultvalue: 3 days
+ display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days)
+ name: fetch_time
+ required: false
+ type: 0
+- defaultvalue: '15'
+ display: Max incidents per fetch
+ name: incidents_per_fetch
+ required: false
+ type: 0
+- display: Fetch query
+ name: fetch_query
+ required: false
+ type: 0
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+description: Carbon Black Enterprise Protection is a next-generation endpoint threat
+ prevention solution to deliver a portfolio of protection policies, real-time visibility
+ across environments, and comprehensive compliance rule sets in a single platform.
+display: Carbon Black Enterprise Protection V2
+name: CarbonBlackProtectionV2
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: 'Grouping is optional and can be defined with a single attribute:
+ &group=xyz. There can be only one grouping field. Example: group=osShortName'
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: '(Int) Maximum number of results to retrieve. If not specified:
+ First 1000 results will be returned. If set to -1: Only result count will
+ be returned, without actual results. Offset parameter is ignored in this case.
+ If set to 0: All results will be returned. Offset parameter is ignored in
+ this case. Note that some result sets could be very large, resulting in query
+ timeout. Therefore, unless you know that the query will not return more than 1000
+ results, it is recommended to retrieve data in chunks using offset and limit.'
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Offset in data set
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ description: 'A condition contains three parts: name, operator, and value. Name
+ is any valid field in the object that is being queried. Operator (: LIKE,
+ ! NOT LIKE, < Less than, > Greater than, + logical AND, - logical OR, | separating
+ values) is any of valid operators (see below). All operators consist of a
+ single character. Value is compared with operator and depends on field type.
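+ For example, fileName:notepad.exe (a hypothetical value) matches entries whose file
+ name is like notepad.exe.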
+ See more: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#searching'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: 'Sorting is optional and can be defined with a single attribute:
+ &sort=xyz [ASC|DESC]. There can be only one sorting field. Default sort order
+ (if omitted) is ASC. xyz is field name from the result set.'
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - default: false
+ description: Name of the file under which this unique hash was first seen
+ isArray: false
+ name: fileName
+ required: false
+ secret: false
+ - default: false
+ description: Type of the file
+ isArray: false
+ name: fileType
+ required: false
+ secret: false
+ - default: false
+ description: Id of computer where this file was first seen. You can get this
+ by executing the cbp-computer-search command
+ isArray: false
+ name: computerId
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ Threat of this file. Can be one of:
+ -1=Unknown
+ 0=Clean
+ 50=Potential risk
+ 100=Malicious
+ isArray: false
+ name: threat
+ predefined:
+ - Unknown
+ - Clean
+ - Potential risk
+ - Malicious
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ File state of this hash. Can be one of:
+ 1=Unapproved
+ 2=Approved
+ 3=Banned
+ 4=Approved by Policy
+ 5=Banned by Policy
+ isArray: false
+ name: fileState
+ predefined:
+ - Unapproved
+ - Approved
+ - Banned
+ - Approved by Policy
+      - Banned by Policy
+ required: false
+ secret: false
+ - default: false
+ description: Hash of the file
+ isArray: false
+ name: hash
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Search for file catalogs. See more: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#filecatalog'
+ execution: false
+ name: cbp-fileCatalog-search
+ outputs:
+ - contextPath: File.Size
+ description: Size of the file.
+ type: Unknown
+ - contextPath: File.Path
+ description: Path on the found hostname.
+ type: String
+ - contextPath: File.Name
+ description: Name of the file.
+ type: String
+ - contextPath: File.Type
+ description: File type.
+ type: String
+ - contextPath: File.ProductName
+ description: The name of the product to which this file belongs.
+ type: String
+ - contextPath: File.ID
+ description: Unique fileCatalog ID.
+ type: String
+ - contextPath: File.Publisher
+ description: The publisher of the file.
+ type: String
+ - contextPath: File.Company
+ description: The company for the product.
+ type: String
+ - contextPath: File.Extension
+ description: Extension of the file.
+ type: String
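+  # A minimal usage sketch for the search syntax described above (argument values
+  # are illustrative, not taken from a real environment). Conditions use the
+  # single-character operators, and large result sets are paged with offset/limit
+  # from the XSOAR CLI:
+  #   !cbp-fileCatalog-search query="fileName:notepad.exe" limit=100
+  #   !cbp-fileCatalog-search fileType="Application" limit=1000 offset=1000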
+ - arguments:
+ - default: false
+      description: 'A condition contains three parts: name, operator, and value.
+        Name is any valid field in the object that is being queried. Operator is
+        one of the valid single-character operators: ":" (LIKE), "!" (NOT LIKE),
+        "<" (less than), ">" (greater than), "+" (logical AND), "-" (logical OR),
+        "|" (separates values). Value is compared using the operator and depends
+        on the field type. See more: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#searching'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Offset in data set.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ description: 'Grouping is optional and can be defined with a single attribute:
+ &group=xyz. There can be only one grouping field, for example: group=osShortName'
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+      description: 'Sorting is optional and can be defined with a single attribute:
+        &sort=xyz [ASC|DESC], where xyz is the field name from the result set. There
+        can be only one sorting field. Default sort order (if omitted) is ascending
+        (ASC).'
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - default: false
+ description: Maximum number of results to retrieve (Int). If not specified,
+ the first 1000 results will be returned. If set to "-1", only the result count
+ will be returned, without actual results, and the Offset parameter is ignored.
+ If set to "0", all results will be returned, and the Offset parameter is ignored.
+ Some result sets might be very large, resulting in query timeout. Therefore,
+ unless you know that query will not return more than 1000 results, it is recommended
+ to retrieve data in chunks using offset and limit.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: Computer name
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ description: Last known IP address of this computer
+ isArray: false
+ name: ipAddress
+ required: false
+ secret: false
+ - default: false
+ description: MAC address of adapter used to connect to the CB Protection Server
+ isArray: false
+ name: macAddress
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Search for computers. For more information, see the Carbon Black
+ documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#computer'
+ execution: false
+ name: cbp-computer-search
+ outputs:
+ - contextPath: Endpoint.OS
+ description: The short OS name running on the endpoint.
+ type: String
+ - contextPath: Memory
+ description: Amount of memory for the endpoint.
+ type: Number
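+  # A hypothetical lookup by name pattern or last known IP address (values are
+  # illustrative):
+  #   !cbp-computer-search name="WIN-SRV*" limit=50
+  #   !cbp-computer-search ipAddress="10.1.2.3"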
+ - arguments:
+ - default: true
+ description: (Int) Unique computer ID.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+      description: (String) Computer name. Can be changed only if the computer is
+        a template.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ description: (String) Custom computer tag.
+ isArray: false
+ name: computerTag
+ required: false
+ secret: false
+ - default: false
+ description: (String) Description of this computer.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: (Int) New ID of the policy for this computer. PolicyId is ignored
+ if either automaticPolicy is "True" or localApproval is "True".
+ isArray: false
+ name: policyId
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) "True" if this policy is assigned automatically through
+ AD. If localApproval is "True", this argument must be "False".
+ isArray: false
+ name: automaticPolicy
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) "True" if this computer is currently in local approval
+ mode. If automaticPolicy is "True", this argument must be "False".
+ isArray: false
+ name: localApproval
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+      description: |-
+        (Int) Change refresh flags for this agent. Can be a combination of:
+        0x01=Complete resync of agent NAB and installer table is requested
+        0x02=Rescan of programs installed on the computer is requested
+        0x20=Tell agent to refresh config list
+        0x40=Force this agent to re-register with a new cookie
+        0x200=Trigger agent reboot
+        0x1000=Tell agent to refresh config list from the file
+        0x4000=Boost the priority of this agent over all others permanently (until it is de-prioritized)
+ isArray: false
+ name: refreshFlags
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) Set to "True" to prioritize this computer.
+ isArray: false
+ name: prioritized
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Current debug level of the agent. Range is from 0 (none)
+ to 8 (verbose). This value can be changed only if the "changeDiagnostics"
+ request parameter is set to "True".
+ isArray: false
+ name: debugLevel
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Current kernel debug level of the agent. Range is from 0
+ (none) to 5 (verbose). This value can be changed only if the "changeDiagnostics"
+ request parameter is set to "True".
+ isArray: false
+ name: kernelDebugLevel
+ required: false
+ secret: false
+ - default: false
+      description: |-
+        (Int) Debug flags. Can be 0 or a combination of:
+        0x01=Upload debug files now
+        0x10=Enable full memory dumps
+        0x20=Copy agent cache
+        0x40=Delete debug files
+        0x80=Upload agent cache
+        0x200=Save verbose debug info and counters to the cache when copied/uploaded
+        0x400=Generate and upload an analysis.bt9 file that contains various constraint violation analysis information
+        0x800=Run a health check and send results to the server
+        This value can be changed only if the "changeDiagnostics" request parameter is set to "True".
+ isArray: false
+ name: debugFlags
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Debug duration in minutes. This value can be changed only
+ if the "changeDiagnostics" request parameter is set to "True".
+ isArray: false
+ name: debugDuration
+ required: false
+ secret: false
+ - default: false
+      description: |-
+        (Int) Cache consistency check level set for the agent. Can be one of:
+        0=None
+        1=Quick verification
+        2=Rescan known files
+        3=Full scan for new files
+        This value can be changed only if the "changeDiagnostics" request parameter is set to "True".
+ isArray: false
+ name: cCLevel
+ required: false
+ secret: false
+ - default: false
+      description: |-
+        (Int) Cache consistency check flags set for the agent. Can be 0 or a combination of:
+        0x0001=Whether this is just a test run or not
+        0x0002=Should the state of invalid files be preserved
+        0x0004=Should new files found be locally approved or not
+        0x0008=Should we re-evaluate whether a file's certificate information is still valid or not
+        0x0010=Whether the check was scheduled or not
+        0x0020=Whether the agent should run constraint checks to test for invalid results
+        0x0040=Whether we are only searching for new script types as a result of a change to what "IsScript" means
+        0x0080=Whether we are doing a level 3 check for initialization
+        0x0100=This cache check is to remediate CR# 18041
+        0x0200=Force the re-evaluation of the IsCrawlable state and archive type
+ isArray: false
+ name: cCFlags
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) Set to "True" to force an upgrade for this computer.
+ isArray: false
+ name: forceUpgrade
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) "True" if the computer is a VDI template. This value
+ can be changed only if the "changeTemplate" request parameter is set to "True".
+ isArray: false
+ name: template
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ (Int) Mode of template cleanup. Can be one of:
+ 1=Manual (from console)
+ 2=Automatic, by time (specified by templateCloneCleanupTime and templateCloneCleanupTimeScale)
+ 3=Automatic, when new computer with the same name comes online
+ 4=Automatic, as soon as computer goes offline
+ This value can be changed only if the "changeTemplate" request parameter is set to "True".
+ isArray: false
+ name: templateCloneCleanupMode
+ predefined:
+ - '1'
+ - '2'
+ - '3'
+ - '4'
+ required: false
+ secret: false
+ - default: false
+ description: (Int) If the templateCloneCleanupMode value is "2", this is the
+ time before clone is cleaned up. Time unit is specified in templateCloneCleanupTimeScale.
+ This value can be changed only if the "changeTemplate" request parameter is
+ set to "True".
+ isArray: false
+ name: templateCloneCleanupTime
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ (Int) Time unit of the template cleanup. Can be one of:
+ 1=Hours
+ 2=Days
+ 3=Weeks
+ This value can be changed only if the "changeTemplate" request parameter is set to "True".
+ isArray: false
+ name: templateCloneCleanupTimeScale
+ predefined:
+ - '1'
+ - '2'
+ - '3'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) If "True", clones of this template will track only new
+ and modified files. This value can be changed only if the "changeTemplate"
+ request parameter is set to "True".
+ isArray: false
+ name: templateTrackModsOnly
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: (Boolean) If "True", debug and CC properties of the computer will
+ be updated from the object sent in the body the request. This action requires
+ "Change advanced options" permission. Default is "False".
+ isArray: false
+ name: changeDiagnostics
+ required: false
+ secret: false
+ - default: false
+      description: (Boolean) If "True", template settings will be updated from the
+        object sent in the body of the request. This action requires the "Change
+        advanced options" permission. Default is "False".
+ isArray: false
+ name: changeTemplate
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) Deletes a computer entry. Default is "False".
+ isArray: false
+ name: delete
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) Reset the CLI password for this computer. This action
+ requires "Change advanced options" permission.
+ isArray: false
+ name: resetCLIPassword
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Updates computer objects. Note that some computer properties can
+ be changed only if specific boolean parameters are set. For more information,
+ see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#computer'
+ execution: false
+ name: cbp-computer-update
+ outputs:
+ - contextPath: Endpoint.Processors
+ description: The number of processors.
+ type: Number
+ - contextPath: Endpoint.OS
+ description: The short OS name running on the endpoint.
+ type: String
+ - contextPath: Endpoint.MACAddress
+ description: MAC address of the endpoint.
+ type: String
+ - contextPath: Endpoint.Model
+ description: The machine model, if available.
+ type: String
+ - contextPath: Endpoint.IPAddress
+ description: IP address of the endpoint.
+ type: String
+ - contextPath: Endpoint.Processor
+ description: Model of the processor.
+ type: String
+ - contextPath: Endpoint.Hostname
+ description: Hostname of the endpoint.
+ type: String
+ - contextPath: Endpoint.OSVersion
+ description: The full OS name running on the endpoint.
+ type: String
+ - contextPath: Endpoint.ID
+      description: The unique ID within the tool retrieving the endpoint.
+ type: String
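+  # The gating parameters above matter: debugLevel, kernelDebugLevel, debugFlags,
+  # debugDuration, cCLevel, and cCFlags apply only when changeDiagnostics=True,
+  # and the template* fields only when changeTemplate=True. A hypothetical
+  # invocation (values illustrative):
+  #   !cbp-computer-update id=123 debugLevel=4 debugDuration=30 changeDiagnostics=True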
+ - arguments:
+ - default: true
+ description: (Int) Unique computer ID.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: 'Returns information for a computer. For more information, see the
+ Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#computer'
+ execution: false
+ name: cbp-computer-get
+ outputs:
+ - contextPath: Endpoint.Processors
+ description: The number of processors.
+ type: Number
+ - contextPath: Endpoint.OS
+ description: The short OS name running on the endpoint.
+ type: String
+ - contextPath: Endpoint.MACAddress
+ description: MAC address of the endpoint.
+ type: String
+ - contextPath: Endpoint.Model
+ description: The machine model, if available.
+ type: String
+ - contextPath: Endpoint.IPAddress
+ description: IP address of the endpoint.
+ type: String
+ - contextPath: Endpoint.Processor
+ description: Model of the processor.
+ type: String
+ - contextPath: Endpoint.Hostname
+ description: Hostname of the endpoint.
+ type: String
+ - contextPath: Endpoint.OSVersion
+ description: The full OS name running on the endpoint.
+ type: String
+ - contextPath: Endpoint.ID
+      description: The unique ID within the tool retrieving the endpoint.
+ type: String
+ - arguments:
+ - default: false
+      description: 'A condition contains three parts: name, operator, and value.
+        Name is any valid field in the object that is being queried. Operator is
+        one of the valid single-character operators: ":" (LIKE), "!" (NOT LIKE),
+        "<" (less than), ">" (greater than), "+" (logical AND), "-" (logical OR),
+        "|" (separates values). Value is compared using the operator and depends
+        on the field type. For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#searching'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+      description: (Int) Maximum number of results to retrieve. If not specified, the
+ first 1000 results will be returned. If set to "-1", only the result count
+ will be returned, without actual results, and the "offset" parameter is ignored.
+ If set to "0", all results will be returned, and the "offset" parameter is
+ ignored. Note that some result sets might be very large, resulting in query
+ timeout. Therefore, unless you know that query will not return more than 1000
+ results, it is recommended to retrieve data in chunks using offset and limit.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Offset in the data set.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ description: 'Grouping is optional and can be defined with a single attribute:
+ &group=xyz. There can be only one grouping field, for example: group=osShortName'
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: 'Sorting is optional and can be defined with a single attribute,
+ where xyz is the field name from the result set: &sort=xyz [ASC|DESC]. There
+ can be only one sorting field. Default sort order is ascending (ASC). '
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - default: false
+      description: ID of the computer associated with this fileInstance.
+ isArray: false
+ name: computerId
+ required: false
+ secret: false
+ - default: false
+ description: Name of the file on the agent
+ isArray: false
+ name: fileName
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Search for file instances. For more information, see the Carbon
+ Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#fileinstance'
+ execution: false
+ name: cbp-fileInstance-search
+ outputs:
+ - contextPath: CBP.FileInstance.CatalogID
+ description: The file ID in the file catalog.
+ type: String
+ - contextPath: CBP.FileInstance.ComputerID
+ description: The computer ID on which the file was found.
+ type: String
+ - contextPath: CBP.FileInstance.ID
+ description: CBP internal ID of the file instance.
+ type: String
+ - contextPath: CBP.FileInstance.Name
+ description: Name of the file.
+ type: String
+ - contextPath: CBP.FileInstance.Path
+ description: Path on the found hostname.
+ type: String
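+  # A hypothetical chained lookup (IDs illustrative): resolve the computer first,
+  # then list instances of a file on that endpoint:
+  #   !cbp-computer-search name="WIN-SRV01"
+  #   !cbp-fileInstance-search computerId=123 fileName="chrome.exe"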
+ - arguments:
+ - default: false
+      description: 'A condition contains three parts: name, operator, and value.
+        Name is any valid field in the object that is being queried. Operator is
+        one of the valid single-character operators: ":" (LIKE), "!" (NOT LIKE),
+        "<" (less than), ">" (greater than), "+" (logical AND), "-" (logical OR),
+        "|" (separates values). Value is compared using the operator and depends
+        on the field type. For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#searching'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Maximum number of results to retrieve. If not specified,
+ the first 1000 results will be returned. If set to "-1", only the result count
+ will be returned, without actual results, and the "offset" parameter is ignored.
+ If set to "0", all results will be returned, and the "offset" parameter is
+ ignored. Note that some result sets might be very large, resulting in query
+ timeout. Therefore, unless you know that query will not return more than 1000
+ results, it is recommended to retrieve data in chunks using offset and limit.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Offset in the data set.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ description: 'Grouping is optional and can be defined with a single attribute:
+ &group=xyz. There can be only one grouping field, for example: group=osShortName'
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: 'Sorting is optional and can be defined with a single attribute,
+ where xyz is field name from the result set: &sort=xyz [ASC|DESC]. There can
+ be only one sorting field. Default sort order is ascending (ASC). '
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ Event type. Can be one of:
+ 0 = Server Management
+ 1 = Session Management
+ 2 = Computer Management
+ 3 = Policy Management
+ 4 = Policy Enforcement
+ 5 = Discovery
+ 6 = General Management
+ 8 = Internal Events
+ isArray: false
+ name: type
+ predefined:
+ - Server Management
+ - Session Management
+ - Computer Management
+ - Policy Management
+ - Policy Enforcement
+ - Discovery
+ - General Management
+ - Internal Events
+ required: false
+ secret: false
+ - default: false
+      description: ID of the computer associated with this event. You can get this
+        ID by running the cbp-computer-search command.
+ isArray: false
+ name: computerId
+ required: false
+ secret: false
+ - default: false
+ description: IP address associated with this event
+ isArray: false
+ name: ipAddress
+ required: false
+ secret: false
+ - default: false
+ description: Name of the file associated with this event
+ isArray: false
+ name: fileName
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ Event severity. Can be one of:
+ 2 = Critical
+ 3 = Error
+ 4 = Warning
+ 5 = Notice
+ 6 = Info
+ 7 = Debug
+ isArray: false
+ name: severity
+ predefined:
+ - Critical
+ - Error
+ - Warning
+ - Notice
+ - Info
+ - Debug
+ required: false
+ secret: false
+ - default: false
+ description: User name associated with this event
+ isArray: false
+ name: userName
+ required: false
+ secret: false
+ - default: false
+      description: ID of the fileCatalog entry associated with this event. Can be
+        null if the file hasn't been seen on any endpoints yet. You can get this
+        ID by running the cbp-fileCatalog-search command.
+ isArray: false
+ name: fileCatalogId
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Search for events. For more information, see the Carbon Black documentation:
+ https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#event'
+ execution: false
+ name: cbp-event-search
+ outputs:
+ - contextPath: CBP.Event.FilePath
+ description: File path of the event.
+ type: String
+ - contextPath: CBP.Event.Param1
+ description: First event parameter.
+ type: String
+ - contextPath: CBP.Event.Param2
+ description: Second event parameter.
+ type: String
+ - contextPath: CBP.Event.Param3
+ description: Third event parameter.
+ type: String
+ - contextPath: CBP.Event.SubTypeName
+ description: Name of the subtype.
+ type: String
+ - contextPath: CBP.Event.ComputerName
+ description: Name of the computer related to the event.
+ type: String
+ - contextPath: CBP.Event.FileName
+ description: Name of the file related to the event.
+ type: String
+ - contextPath: CBP.Event.RuleName
+ description: Name of the rule related to the event.
+ type: String
+ - contextPath: CBP.Event.ProcessFileCatalogID
+      description: ID of the process file catalog entry.
+ type: String
+ - contextPath: CBP.Event.StringID
+ description: ID of the event string.
+ type: String
+ - contextPath: CBP.Event.IPAddress
+ description: IP address of the event.
+ type: String
+ - contextPath: CBP.Event.PolicyID
+ description: Policy ID of the event.
+ type: String
+ - contextPath: CBP.Event.Timestamp
+ description: Timestamp of the event.
+ type: Date
+ - contextPath: CBP.Event.Username
+ description: Username related to the event.
+ type: String
+ - contextPath: CBP.Event.ComputerID
+ description: ID of the event computer.
+ type: String
+ - contextPath: CBP.Event.ProcessFileName
+ description: File name of the process.
+ type: String
+ - contextPath: CBP.Event.FileCatalogID
+ description: ID of the file catalog.
+ type: String
+ - contextPath: CBP.Event.IndicatorName
+ description: Indicator name of the event.
+ type: String
+ - contextPath: CBP.Event.SubType
+ description: ID of the subtype.
+ type: Number
+ - contextPath: CBP.Event.Type
+ description: Type of the event.
+ type: Number
+ - contextPath: CBP.Event.ID
+ description: ID of the event.
+ type: Number
+ - contextPath: CBP.Event.Description
+ description: Description of the event.
+ type: String
+ - contextPath: CBP.Event.Severity
+ description: Severity of the event.
+ type: String
+ - contextPath: CBP.Event.CommandLine
+ description: Command line executed in the event.
+ type: String
+ - contextPath: CBP.Event.ProcessPathName
+ description: Path name of the process.
+ type: String
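+  # A hypothetical query (values illustrative): recent policy-enforcement events
+  # of Warning severity, newest first:
+  #   !cbp-event-search type="Policy Enforcement" severity="Warning" sort="timestamp DESC" limit=50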
+ - arguments:
+ - default: false
+      description: 'A condition contains three parts: name, operator, and value.
+        Name is any valid field in the object that is being queried. Operator is
+        one of the valid single-character operators: ":" (LIKE), "!" (NOT LIKE),
+        "<" (less than), ">" (greater than), "+" (logical AND), "-" (logical OR),
+        "|" (separates values). Value is compared using the operator and depends
+        on the field type. For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#searching'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Maximum number of results to retrieve. If not specified,
+ the first 1000 results will be returned. If set to "-1", only the result count
+ will be returned, without actual results, and the "offset" parameter is ignored.
+ If set to "0", all results will be returned, and the "offset" parameter is
+ ignored. Note that some result sets might be very large, resulting in query
+ timeout. Therefore, unless you know that query will not return more than 1000
+ results, it is recommended to retrieve data in chunks using offset and limit.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Offset in the data set.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ description: 'Grouping is optional and can be defined with a single attribute:
+ &group=xyz. There can be only one grouping field. Example: group=osShortName'
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: 'Sorting is optional and can be defined with a single attribute,
+ where xyz is field name from the result set: &sort=xyz [ASC|DESC]. There can
+ be only one sorting field. Default sort order is ascending (ASC). '
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Search for approval requests. For more information, see the Carbon
+ Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#approvalrequest'
+ execution: false
+ name: cbp-approvalRequest-search
+ outputs:
+ - contextPath: CBP.ApprovalRequest.ID
+ description: ID of the approval request.
+ type: Number
+ - contextPath: CBP.ApprovalRequest.ResolutionComments
+ description: Comments added by the request resolver.
+ type: String
+ - contextPath: CBP.ApprovalRequest.Resolution
+ description: 'Resolution of the request. Can be one of: 0=Not Resolved, 1=Rejected,
+ 2=Resolved - Approved, 3=Resolved - Rule Change, 4=Resolved - Installer, 5=Resolved
+ - Updater, 6=Resolved - Publisher, 7=Resolved - Other.'
+ type: Number
+ - contextPath: CBP.ApprovalRequest.Status
+ description: 'Request status. Can be one of: 1=New, 2=Open, 3=Closed, 4=Escalated.'
+ type: Number
+ - contextPath: CBP.ApprovalRequest.FileCatalogID
+ description: ID of the fileCatalog entry associated with file for this event.
+ type: Number
+ - contextPath: CBP.ApprovalRequest.ComputerID
+ description: ID of the computer entry associated with this analysis.
+ type: Number
+ - contextPath: CBP.ApprovalRequest.ComputerName
+ description: Name of the computer associated with this event.
+ type: String
+ - contextPath: CBP.ApprovalRequest.DateCreated
+      description: Date/time when the approval request was created (UTC).
+ type: Date
+ - contextPath: CBP.ApprovalRequest.CreatedBy
+      description: User that created this approval request.
+ type: String
+ - contextPath: CBP.ApprovalRequest.EnforcementLevel
+ description: 'Enforcement level of the agent at the time of the request. Can
+ be one of: 20=High (Block Unapproved), 30=Medium (Prompt Unapproved), 40=Low
+ (Monitor Unapproved), 60=None (Visibility), 80=None (Disabled).'
+ type: Number
+ - contextPath: CBP.ApprovalRequest.RequestorEmail
+ description: Email address of the user that created this request.
+ type: String
+ - contextPath: CBP.ApprovalRequest.Priority
+ description: 'Priority of this request. Can be one of: 0=High, 1=Medium, 2=Low.'
+ type: Number
+ - contextPath: CBP.ApprovalRequest.FileName
+ description: Name of the file on the agent.
+ type: String
+ - contextPath: CBP.ApprovalRequest.PathName
+ description: Path of the file on the agent.
+ type: String
+ - contextPath: CBP.ApprovalRequest.Process
+ description: Process that attempted to execute the file on the agent (the full
+ process path).
+ type: String
+ - contextPath: CBP.ApprovalRequest.Platform
+ description: Platform of this approval request.
+ type: String
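+  # A hypothetical query (condition illustrative): open approval requests only,
+  # using the query syntax described above (status 2=Open):
+  #   !cbp-approvalRequest-search query="status:2" limit=100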
+ - arguments:
+ - default: false
+      description: 'A condition contains three parts: name, operator, and value.
+        Name is any valid field in the object that is being queried. Operator is
+        one of the valid single-character operators: ":" (LIKE), "!" (NOT LIKE),
+        "<" (less than), ">" (greater than), "+" (logical AND), "-" (logical OR),
+        "|" (separates values). Value is compared using the operator and depends
+        on the field type. For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#searching'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Maximum number of results to retrieve. If not specified,
+ the first 1000 results will be returned. If set to "-1", only the result count
+ will be returned, without actual results, and the "offset" parameter is ignored.
+ If set to "0", all results will be returned, and the offset parameter is ignored.
+ Note that some result sets might be very large, resulting in query timeout.
+ Therefore, unless you know that query will not return more than 1000 results,
+ it is recommended to retrieve data in chunks using offset and limit.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Offset in the data set.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ description: 'Grouping is optional and can be defined with a single attribute:
+ &group=xyz. There can be only one grouping field, for example: group=osShortName.'
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: 'Sorting is optional and can be defined with a single attribute
+ where xyz is field name from the result set: &sort=xyz [ASC|DESC]. There can
+ be only one sorting field. Default sort order is ascending (ASC). '
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - default: false
+      description: ID of the fileCatalog entry associated with this fileRule. Can
+        be null if the file hasn't been seen on any endpoints yet. You can get this
+        ID by running the cbp-fileCatalog-search command.
+ isArray: false
+ name: fileCatalogId
+ required: false
+ secret: false
+ - default: false
+ description: Name of this rule
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ File state for this rule. Can be one of:
+ 1=Unapproved
+ 2=Approved
+ 3=Banned
+ isArray: false
+ name: fileState
+ predefined:
+ - Unapproved
+ - Approved
+ - Banned
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: "Mechanism that created this rule. Can be one of: \n1 = Manual\n\
+ 2 = Trusted Directory\n3 = Reputation\n4 = Imported\n5 = External (API)\n\
+ 6 = Event Rule\n7 = Application Template\n8 = Unified Management"
+ isArray: false
+ name: sourceType
+ predefined:
+ - Manual
+ - Trusted Directory
+ - Reputation
+ - Imported
+ - External (API)
+ - Event Rule
+ - Application Template
+ - Unified Management
+ required: false
+ secret: false
+ - default: false
+      description: Hash associated with this rule. Note that the hash will be available
+        only if the rule was created using an MD5 or SHA-1 hash. If the rule was
+        created using a fileCatalogId or a SHA-256 hash that exists in the catalog,
+        this field will be empty.
+ isArray: false
+ name: hash
+ required: false
+ secret: false
+ - default: false
+      description: File name associated with this rule. Note that the file name will
+        be available only if the rule was created using a file name. If the rule
+        was created using a fileCatalogId or hash, this field will be empty.
+ isArray: false
+ name: fileName
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Search for file rules. For more information, see the Carbon Black
+ documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#filerule'
+ execution: false
+ name: cbp-fileRule-search
+ outputs:
+ - contextPath: CBP.FileRule.CatalogID
+ description: The file catalog ID for the rule.
+ type: String
+ - contextPath: CBP.FileRule.Description
+ description: Description of the rule.
+ type: String
+ - contextPath: CBP.FileRule.FileState
+ description: The file state for the rule.
+ type: String
+ - contextPath: CBP.FileRule.Hash
+ description: Hash for the rule.
+ type: String
+ - contextPath: CBP.FileRule.ID
+ description: ID of the rule.
+ type: String
+ - contextPath: CBP.FileRule.Name
+ description: Name of the rule.
+ type: String
+ - contextPath: CBP.FileRule.PolicyIDs
+ description: Policies of which this rule is a part.
+ type: String
+ - contextPath: CBP.FileRule.ReportOnly
+ description: Whether this rule is "reporting only, or also "enforcing".
+ type: String
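+  # Hypothetical examples (values illustrative): look up a rule by hash, or list
+  # manually created ban rules:
+  #   !cbp-fileRule-search hash="d41d8cd98f00b204e9800998ecf8427e"
+  #   !cbp-fileRule-search fileState="Banned" sourceType="Manual"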
+ - arguments:
+ - default: true
+ description: (Int) Unique ID of the file rule.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: 'Gets the file rule. For more information, see the Carbon Black documentation:
+ https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#filerule'
+ execution: false
+ name: cbp-fileRule-get
+ outputs:
+ - contextPath: CBP.FileRule.CatalogID
+ description: The file catalog ID for the rule.
+ type: String
+ - contextPath: CBP.FileRule.Description
+ description: Description of the rule.
+ type: String
+ - contextPath: CBP.FileRule.FileState
+ description: The file state for the rule.
+ type: String
+ - contextPath: CBP.FileRule.Hash
+ description: Hash for the rule.
+ type: String
+ - contextPath: CBP.FileRule.ID
+ description: ID of the rule.
+ type: String
+ - contextPath: CBP.FileRule.Name
+ description: Name of the rule.
+ type: String
+ - contextPath: CBP.FileRule.PolicyIDs
+ description: Policies of which this rule is a part.
+ type: String
+ - contextPath: CBP.FileRule.ReportOnly
+ description: Whether this rule is "reporting only, or also "enforcing".
+ type: String
+ - arguments:
+ - default: true
+      description: (Int) Unique ID of this fileRule.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: 'Deletes the file rule. For more information, see the Carbon Black
+ documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#filerule'
+ execution: false
+ name: cbp-fileRule-delete
+ - arguments:
+ - default: false
+      description: 'A condition contains three parts: name, operator, and value.
+        Name is any valid field in the object that is being queried. Operator is
+        one of the valid single-character operators: ":" (LIKE), "!" (NOT LIKE),
+        "<" (less than), ">" (greater than), "+" (logical AND), "-" (logical OR),
+        "|" (separates values). Value is compared using the operator and depends
+        on the field type. For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#searching'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Maximum number of results to retrieve. If not specified,
+ the first 1000 results will be returned. If set to "-1", only the result count
+ will be returned, without actual results, and the "offset" parameter is ignored.
+ If set to "0", all results will be returned, and the "offset" parameter is
+ ignored. Note that some result sets might be very large, resulting in query
+ timeout. Therefore, unless you know that query will not return more than 1000
+ results, it is recommended to retrieve data in chunks using offset and limit.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Offset in the data set.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ description: 'Grouping is optional and can be defined with a single attribute:
+ &group=xyz. There can be only one grouping field, for example: group=osShortName.'
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: 'Sorting is optional and can be defined with a single attribute
+ where xyz is field name from the result set: &sort=xyz [ASC|DESC]. There can
+ be only one sorting field. Default sort order is ascending (ASC). '
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ Target enforcement level. Can be one of:
+ 20=High (Block Unapproved)
+ 30=Medium (Prompt Unapproved)
+ 40=Low (Monitor Unapproved)
+ 60=None (Visibility)
+ 80=None (Disabled)
+ isArray: false
+ name: enforcementLevel
+ predefined:
+ - High (Block Unapproved)
+ - Medium (Prompt Unapproved)
+ - Low (Monitor Unapproved)
+ - None (Visibility)
+ - None (Disabled)
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ Target enforcement level for disconnected computers. Can be one of:
+ 20=High (Block Unapproved)
+ 30=Medium (Prompt Unapproved)
+ 40=Low (Monitor Unapproved)
+ 60=None (Visibility)
+ 80=None (Disabled)
+ isArray: false
+ name: disconnectedEnforcementLevel
+ predefined:
+ - High (Block Unapproved)
+ - Medium (Prompt Unapproved)
+ - Low (Monitor Unapproved)
+ - None (Visibility)
+ - None (Disabled)
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Search for policies. For more information, see the Carbon Black
+ documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#policy'
+ execution: false
+ name: cbp-policy-search
+ outputs:
+ - contextPath: CBP.Policy.ReadOnly
+ description: Whether the policy "read-only".
+ type: Boolean
+ - contextPath: CBP.Policy.EnforcementLevel
+ description: The level of enforcement of the policy.
+ type: String
+ - contextPath: CBP.Policy.ReputationEnabled
+ description: Whether the reputation for the policy is enabled.
+ type: Boolean
+ - contextPath: CBP.Policy.AtEnforcementComputers
+ description: Number of enforced computers.
+ type: Number
+ - contextPath: CBP.Policy.Automatic
+ description: Whether the policy is automatic.
+ type: Boolean
+ - contextPath: CBP.Policy.Name
+ description: Name of the policy.
+ type: String
+ - contextPath: CBP.Policy.FileTrackingEnabled
+      description: Whether file tracking is enabled for the policy.
+ type: Boolean
+ - contextPath: CBP.Policy.ConnectedComputers
+ description: Number of connected computers associated with the policy.
+ type: Number
+ - contextPath: CBP.Policy.PackageName
+ description: Package name of the policy.
+ type: String
+ - contextPath: CBP.Policy.AllowAgentUpgrades
+ description: Whether the policy allows agent upgrades.
+ type: Boolean
+ - contextPath: CBP.Policy.TotalComputers
+ description: Number of computers associated with the policy.
+ type: Number
+ - contextPath: CBP.Policy.LoadAgentInSafeMode
+ description: Whether the agent should load in safe mode.
+ type: Boolean
+ - contextPath: CBP.Policy.AutomaticApprovalsOnTransition
+      description: Whether files are automatically approved on transition.
+ type: String
+ - contextPath: CBP.Policy.ID
+ description: CBP internal ID of the policy.
+ type: String
+ - contextPath: CBP.Policy.Description
+ description: Description of the policy.
+ type: String
+ - contextPath: CBP.Policy.DisconnectedEnforcementLevel
+ description: The level of enforcement of the policy when disconnected.
+ type: String
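+  # A hypothetical query (value illustrative): policies that block unapproved
+  # files on connected agents:
+  #   !cbp-policy-search enforcementLevel="High (Block Unapproved)" limit=20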
+ - arguments:
+ - default: false
+      description: 'A condition contains three parts: name, operator, and value.
+        Name is any valid field in the object that is being queried. Operator is
+        one of the valid single-character operators: ":" (LIKE), "!" (NOT LIKE),
+        "<" (less than), ">" (greater than), "+" (logical AND), "-" (logical OR),
+        "|" (separates values). Value is compared using the operator and depends
+        on the field type. For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#searching.'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Maximum number of results to retrieve. If not specified,
+ the first 1000 results will be returned. If set to "-1", only the result count
+ will be returned, without actual results, and the "offset" parameter is ignored.
+ If set to "0", all results will be returned, and the "offset" parameter is
+ ignored. Note that some result sets might be very large, resulting in query
+ timeout. Therefore, unless you know that query will not return more than 1000
+ results, it is recommended to retrieve data in chunks using offset and limit.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Offset in the data set.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ description: 'Grouping is optional and can be defined with a single attribute:
+ &group=xyz. There can be only one grouping field, for example: group=osShortName.'
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: 'Sorting is optional and can be defined with a single attribute
+ where xyz is field name from the result set: &sort=xyz [ASC|DESC]. There can
+ be only one sorting field. Default sort order is ascending (ASC). '
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Search in server configurations. For more information, see the Carbon
+ Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#serverconfig.'
+ execution: false
+ name: cbp-serverConfig-search
+ outputs:
+ - contextPath: CBP.ServerConfig.ID
+ description: CBP internal ID of the server configuration.
+ type: String
+ - contextPath: CBP.ServerConfig.Name
+ description: Name of the server configuration.
+ type: String
+ - contextPath: CBP.ServerConfig.Value
+ description: Value of the server configuration.
+ type: String
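+  # A hypothetical query (condition illustrative): look up a single server
+  # property by name with the query syntax described above:
+  #   !cbp-serverConfig-search query="name:ServerVersion" limit=10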
+ - arguments:
+ - default: false
+      description: 'A condition contains three parts: name, operator, and value.
+        Name is any valid field in the object that is being queried. Operator is
+        one of the valid single-character operators: ":" (LIKE), "!" (NOT LIKE),
+        "<" (less than), ">" (greater than), "+" (logical AND), "-" (logical OR),
+        "|" (separates values). Value is compared using the operator and depends
+        on the field type. For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#searching.'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Maximum number of results to retrieve. If not specified,
+ the first 1000 results will be returned. If set to "-1", only the result count
+ will be returned, without actual results, and the "offset" parameter is ignored.
+ If set to "0", all results will be returned, and the "offset" parameter is
+ ignored. Note that some result sets might be very large, resulting in query
+ timeout. Therefore, unless you know that query will not return more than 1000
+ results, it is recommended to retrieve data in chunks using offset and limit.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Offset in the data set.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ description: 'Grouping is optional and can be defined with a single attribute:
+ &group=xyz. There can be only one grouping field, for example: group=osShortName.'
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: 'Sorting is optional and can be defined with a single attribute
+ where xyz is field name from the result set: &sort=xyz [ASC|DESC]. There can
+ be only one sorting field. Default sort order is ascending (ASC). '
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - default: false
+      description: Subject name of the leaf certificate for this publisher.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ Reputation of this publisher. Can be one of:
+ 0=Not trusted (Unknown)
+ 1=Low
+ 2=Medium
+ 3=High
+ isArray: false
+ name: publisherReputation
+ predefined:
+ - Not trusted (Unknown)
+ - Low
+ - Medium
+ - High
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ State for this publisher. Can be one of:
+ 1=Unapproved
+ 2=Approved
+ 3=Banned
+ 4=Approved By Policy
+ 5=Banned By Policy
+ isArray: false
+ name: publisherState
+ predefined:
+ - Unapproved
+ - Approved
+ - Banned
+ - Approved By Policy
+ - Banned By Policy
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Search for publishers. For more information, see the Carbon Black
+ documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#publisher.'
+ execution: false
+ name: cbp-publisher-search
+ outputs:
+ - contextPath: CBP.Publisher.Description
+ description: Description of the publisher.
+ type: String
+ - contextPath: CBP.Publisher.ID
+ description: CBP internal ID of the publisher.
+ type: String
+ - contextPath: CBP.Publisher.Name
+ description: Name of the publisher.
+ type: String
+ - contextPath: CBP.Publisher.Reputation
+ description: Reputation of the publisher.
+ type: String
+ - contextPath: CBP.Publisher.SignedCertificatesCount
+ description: Number of certificates from the publisher.
+ type: Number
+ - contextPath: CBP.Publisher.SignedFilesCount
+ description: Number of signed files from publisher.
+ type: Number
+ - contextPath: CBP.Publisher.State
+ description: The state of the publisher.
+ type: String
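+  # A hypothetical query (values illustrative): approved publishers with a high
+  # reputation:
+  #   !cbp-publisher-search publisherState="Approved" publisherReputation="High"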
+ - arguments:
+ - default: true
+ description: (Int) Unique fileAnalysis ID.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+    description: Returns a fileAnalysis object by its unique ID.
+ execution: false
+ name: cbp-fileAnalysis-get
+ outputs:
+ - contextPath: CBP.FileAnalysis.Priority
+ description: File analysis priority. Valid range is [-2, 2], where 2 is highest
+ priority. Default priority is "0".
+ type: Number
+ - contextPath: CBP.FileAnalysis.PathName
+ description: Path of the file on the endpoint.
+ type: String
+ - contextPath: CBP.FileAnalysis.ComputerId
+ description: ID of the computer entry associated with this analysis.
+ type: String
+ - contextPath: CBP.FileAnalysis.DateModified
+ description: Date/time when the fileAnalysis request was last modified (UTC).
+ type: Date
+ - contextPath: CBP.FileAnalysis.ID
+ description: Unique fileAnalysis ID.
+ type: String
+ - contextPath: CBP.FileAnalysis.FileCatalogId
+ description: ID of the fileCatalog entry associated with this analysis.
+ type: String
+ - contextPath: CBP.FileAnalysis.DateCreated
+ description: Date/time when the fileAnalysis request was created (UTC).
+ type: Date
+ - contextPath: CBP.FileAnalysis.CreatedBy
+ description: User that requested the analysis.
+ type: String
+ - contextPath: File.FileCatalogId
+      description: ID of the fileCatalog entry associated with this analysis.
+ type: String
+ - contextPath: CBP.FileAnalysis.FileName
+ description: Name of the file on the endpoint.
+ type: String
+ - contextPath: File.Malicious
+ description: Vendor and description of the malicious file.
+ type: String
+ - contextPath: File.PathName
+ description: Path of the file on the endpoint.
+ type: Unknown
+ - contextPath: File.Name
+      description: 'Full file name, for example: "data.xls".'
+ type: String
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file.
+ type: String
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file.
+ type: String
+ - contextPath: File.MD5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: The DBot score vendor.
+ type: string
+ - contextPath: DBotScore.Score
+      description: The DBot score.
+ type: number
+ - arguments:
+ - default: false
+ description: (Int) ID of the fileCatalog entry for which analysis is requested.
+ This value can be fetched via cbp-fileCatalog-search command.
+ isArray: false
+ name: fileCatalogId
+ required: true
+ secret: false
+ - default: false
+ description: (Int) ID of the target connector for the analysis. This value can
+ be fetched via cbp-connector-search command.
+ isArray: false
+ name: connectorId
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: (Int) ID of the computer from which to upload the file. If "0",
+ the system will identify the best computer from which to get the file. This
+ value can be fetched via cbp-computer-search command.
+ isArray: false
+ name: computerId
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: '(Int) The analysis priority (valid range: -2, 2), where "2" is
+ highest priority. Default priority is "0".'
+ isArray: false
+ name: priority
+ predefined:
+ - '-2'
+ - '-1'
+ - '0'
+ - '1'
+ - '2'
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Status of the analysis. The status of an analysis that is
+ in progress can be changed to "5" (Cancelled).
+ isArray: false
+ name: analysisStatus
+ required: false
+ secret: false
+ - default: false
+      description: (String) Target of the analysis. Must be one of the possible analysisTarget
+        options defined for the given connector object, or empty for connectors without
+        defined analysisTargets.
+ isArray: false
+ name: analysisTarget
+ required: false
+ secret: false
+ - default: false
+ description: If specified, will try to update the file analysis with this ID.
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates or updates a file analysis request.
+ execution: false
+ name: cbp-fileAnalysis-createOrUpdate
+ outputs:
+ - contextPath: CBP.FileAnalysis.Priority
+      description: 'File analysis priority (valid range: -2 to 2), where "2" is the
+        highest priority. Default priority is "0".'
+ type: Number
+ - contextPath: CBP.FileAnalysis.PathName
+ description: Path of the file where the file exists on the endpoint.
+ type: String
+ - contextPath: CBP.FileAnalysis.ComputerID
+ description: ID of the computer entry associated with this analysis.
+ type: String
+ - contextPath: CBP.FileAnalysis.DateModified
+ description: Date/time when the fileAnalysis request was last modified (UTC).
+ type: Date
+ - contextPath: CBP.FileAnalysis.FileCatalogId
+ description: ID of the fileCatalog entry associated with this analysis.
+ type: String
+ - contextPath: CBP.FileAnalysis.DateCreated
+ description: Date/time when the fileAnalysis request was created (UTC).
+ type: Date
+ - contextPath: CBP.FileAnalysis.ID
+ description: Unique fileAnalysis ID.
+ type: String
+ - contextPath: CBP.FileAnalysis.CreatedBy
+ description: User that requested the analysis.
+ type: String
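+  # A hypothetical end-to-end flow (IDs illustrative): resolve the catalog entry,
+  # submit it for analysis, then poll the request until analysisStatus reaches
+  # "analyzed":
+  #   !cbp-fileCatalog-search fileName="suspicious.exe" limit=1
+  #   !cbp-fileAnalysis-createOrUpdate fileCatalogId=456 connectorId=2 priority=2
+  #   !cbp-fileAnalysis-get id=789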
+ - arguments:
+ - default: false
+      description: 'A condition contains three parts: name, operator, and value.
+        Name is any valid field in the object that is being queried. Operator is
+        one of the valid single-character operators: ":" (LIKE), "!" (NOT LIKE),
+        "<" (less than), ">" (greater than), "+" (logical AND), "-" (logical OR),
+        "|" (separates values). Value is compared using the operator and depends
+        on the field type. For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#searching'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Maximum number of results to retrieve. If not specified,
+ the first 1000 results will be returned. If set to "-1", only the result count
+ will be returned, without actual results, and the offset parameter is ignored.
+ If set to "0", all results will be returned, and the offset parameter is ignored.
+ Note that some result sets might be very large, resulting in query timeout.
+ Therefore, unless you know that query will not return more than 1000 results,
+ it is recommended to retrieve data in chunks using offset and limit.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Offset in the data set.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ description: 'Grouping is optional and can be defined with a single attribute:
+ &group=xyz. There can be only one grouping field, for example: group=osShortName.'
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: 'Sorting is optional and can be defined with a single attribute:
+ &sort=xyz [ASC|DESC], where xyz is the field name from the result set. There
+ can be only one sorting field. Default sort order is ascending (ASC). '
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - default: false
+      description: ID of the fileCatalog entry associated with this analysis. You
+        can get this ID by running the cbp-fileCatalog-search command.
+ isArray: false
+ name: fileCatalogId
+ required: false
+ secret: false
+ - default: false
+      description: ID of the connector associated with this analysis. You can get
+        this ID by running the cbp-connector-search command.
+ isArray: false
+ name: connectorId
+ required: false
+ secret: false
+ - default: false
+      description: Name of the file where the file exists on the endpoint.
+ isArray: false
+ name: fileName
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ Status of analysis. Can be one of:
+ 0 = scheduled
+ 1 = submitted (file is sent for analysis)
+ 2 = processed (file is processed but results are not available yet)
+ 3 = analyzed (file is processed and results are available)
+ 4 = error
+ 5 = cancelled
+ isArray: false
+ name: analysisStatus
+ predefined:
+ - scheduled
+ - submitted (file is sent for analysis)
+ - processed (file is processed but results are not available yet)
+ - analyzed (file is processed and results are available)
+ - error
+ - cancelled
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ Result of the analysis. Can be one of:
+ 0 = Not yet available
+ 1 = File is clean
+ 2 = File is a potential threat
+ 3 = File is malicious
+ isArray: false
+ name: analysisResult
+ predefined:
+ - Not yet available
+ - File is clean
+ - File is a potential threat
+ - File is malicious
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns objects that match the specified criteria.
+ execution: false
+ name: cbp-fileAnalysis-search
+ outputs:
+ - contextPath: CBP.FileAnalysis.Priority
+      description: 'File analysis priority (valid range: -2 to 2), where "2" is the
+        highest priority. Default priority is "0".'
+ type: Number
+ - contextPath: CBP.FileAnalysis.PathName
+ description: Path of the file where the file exists on the endpoint.
+ type: String
+ - contextPath: CBP.FileAnalysis.ComputerID
+ description: ID of the computer entry associated with this analysis.
+ type: String
+ - contextPath: CBP.FileAnalysis.DateModified
+ description: Date/time when the fileAnalysis request was last modified (UTC).
+ type: Date
+ - contextPath: CBP.FileAnalysis.FileCatalogId
+ description: ID of the fileCatalog entry associated with this analysis.
+ type: String
+ - contextPath: CBP.FileAnalysis.DateCreated
+ description: Date/time when the fileAnalysis request was created (UTC).
+ type: Date
+ - contextPath: CBP.FileAnalysis.ID
+ description: Unique fileAnalysis ID.
+ type: String
+ - contextPath: CBP.FileAnalysis.CreatedBy
+ description: User that requested this analysis.
+ type: String
+ - arguments:
+ - default: true
+ description: (Int) Unique ID of this fileUpload.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the object instance of this class.
+ execution: false
+ name: cbp-fileUpload-get
+ outputs:
+ - contextPath: CBP.FileUpload.Priority
+ description: 'File analysis priority (valid range: -2 to 2), where "2" is the
+ highest priority. Default priority is "0".'
+ type: Number
+ - contextPath: CBP.FileUpload.FileName
+ description: Name of the file where the file exists on the endpoint.
+ type: String
+ - contextPath: CBP.FileUpload.UploadPath
+ description: Local upload path for the file on the server (can be a shared network
+ path). Note that the file is compressed in a ZIP archive.
+ type: String
+ - contextPath: CBP.FileUpload.ComputerId
+ description: ID of the computer entry associated with this upload.
+ type: String
+ - contextPath: CBP.FileUpload.DateModified
+ description: Date/time when the fileUpload request was last modified (UTC).
+ type: Date
+ - contextPath: CBP.FileUpload.ID
+ description: Unique fileUpload ID.
+ type: String
+ - contextPath: CBP.FileUpload.FileCatalogId
+ description: ID of the fileCatalog entry associated with this upload.
+ type: String
+ - contextPath: CBP.FileUpload.DateCreated
+ description: Date/time when the fileUpload request was created (UTC).
+ type: Date
+ - contextPath: CBP.FileUpload.PathName
+ description: Path of the file where the file exists on the endpoint.
+ type: String
+ - contextPath: CBP.FileUpload.UploadStatus
+ description: 'Status of the upload (valid range: 0-6).'
+ type: Number
+ - contextPath: CBP.FileUpload.UploadedFileSize
+ description: Size of the uploaded file. The file size will be 0 unless the uploadStatus
+ is "3" (Completed).
+ type: Number
+ - contextPath: CBP.FileUpload.CreatedBy
+ description: User that requested the analysis.
+ type: String
+ - arguments:
+ - default: true
+ description: (Int) Unique ID of the fileUpload.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the object instance of this class.
+ execution: false
+ name: cbp-fileUpload-download
+ - arguments:
+ - default: false
+ description: (Int) ID of the fileCatalog entry for the file to upload. This
+ value can be fetched via the cbp-fileCatalog-search command.
+ isArray: false
+ name: fileCatalogId
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: (Int) ID of the computer entry associated with this analysis. This
+ value can be fetched via the cbp-computer-search command.
+ isArray: false
+ name: computerId
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'File analysis priority (valid range: -2 to 2), where "2" is the
+ highest priority. Default priority is "0".'
+ isArray: false
+ name: priority
+ predefined:
+ - '-2'
+ - '-1'
+ - '0'
+ - '1'
+ - '2'
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Status of the upload. The status of "upload in progress" can be
+ changed to "5" (Cancelled). Any upload can be changed to "6" (Deleted).
+ isArray: false
+ name: uploadStatus
+ required: false
+ secret: false
+ - default: false
+ description: ID of the file upload to update. If omitted, a new file upload
+ will be created.
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates or updates a file upload request.
+ execution: false
+ name: cbp-fileUpload-createOrUpdate
+ outputs:
+ - contextPath: CBP.FileUpload.Priority
+ description: 'File analysis priority (valid range: -2 to 2), where "2" is the
+ highest priority. Default priority is "0".'
+ type: Number
+ - contextPath: CBP.FileUpload.CreatedByUserId
+ description: ID of the user that requested the analysis.
+ type: String
+ - contextPath: CBP.FileUpload.UploadPath
+ description: Local upload path for this file on the server (can be a shared
+ network path). Note that the file is compressed in a ZIP archive.
+ type: String
+ - contextPath: CBP.FileUpload.FileName
+ description: Name of the file where the file exists on the endpoint.
+ type: String
+ - contextPath: CBP.FileUpload.PathName
+ description: Path of the file where the file exists on the endpoint.
+ type: String
+ - contextPath: CBP.FileUpload.UploadStatus
+ description: 'Status of the upload (valid range: 0-6).'
+ type: Number
+ - contextPath: CBP.FileUpload.ComputerID
+ description: ID of the computer entry associated with this upload.
+ type: String
+ - contextPath: CBP.FileUpload.DateModified
+ description: Date/time when the fileUpload request was last modified (UTC).
+ type: Date
+ - contextPath: CBP.FileUpload.FileCatalogId
+ description: ID of the fileCatalog entry associated with this upload.
+ type: String
+ - contextPath: CBP.FileUpload.DateCreated
+ description: Date/time when the fileUpload request was created (UTC).
+ type: Date
+ - contextPath: CBP.FileUpload.ID
+ description: Unique fileUpload ID.
+ type: String
+ - contextPath: CBP.FileUpload.UploadedFileSize
+ description: Size of uploaded file. The file size will be 0 unless the uploadStatus
+ is "3" (Completed).
+ type: Number
+ - arguments:
+ - default: false
+ description: 'A condition contains three parts: name, operator, and value. Name
+ is any valid field in the object being queried. Operator is any of the valid
+ single-character operators: ":" (LIKE), "!" (NOT LIKE), "<" (less than), ">"
+ (greater than), "+" (logical AND), "-" (logical OR), "|" (separates values).
+ Value is compared using the operator and depends on the field type.
+ For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#searching'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Maximum number of results to retrieve. If not specified,
+ the first 1000 results will be returned. If set to "-1", only the result count
+ will be returned, without actual results, and the offset parameter is ignored.
+ If set to "0", all results will be returned, and the offset parameter is ignored.
+ Note that some result sets might be very large, resulting in query timeout.
+ Therefore, unless you know that the query will not return more than 1000 results,
+ it is recommended to retrieve data in chunks using offset and limit.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Offset in the data set.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ description: 'Grouping is optional and can be defined with a single attribute:
+ &group=xyz. There can be only one grouping field, for example: group=osShortName.'
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: 'Sorting is optional and can be defined with a single attribute:
+ &sort=xyz [ASC|DESC], where xyz is the field name from the result set. There
+ can be only one sorting field. Default sort order is ascending (ASC).'
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - default: false
+ description: ID of the computer entry associated with this analysis. This
+ value can be fetched via the cbp-computer-search command.
+ isArray: false
+ name: computerId
+ required: false
+ secret: false
+ - default: false
+ description: ID of the fileCatalog entry associated with this upload. This
+ value can be fetched via the cbp-fileCatalog-search command.
+ isArray: false
+ name: fileCatalogId
+ required: false
+ secret: false
+ - default: false
+ description: Name of the file where the file exists on the endpoint.
+ isArray: false
+ name: fileName
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ Status of upload. Can be one of:
+ 0 = Queued
+ 1 = Initiated
+ 2 = Uploading
+ 3 = Completed
+ 4 = Error
+ 5 = Cancelled
+ 6 = Deleted
+ isArray: false
+ name: uploadStatus
+ predefined:
+ - Queued
+ - Initiated
+ - Uploading
+ - Completed
+ - Error
+ - Cancelled
+ - Deleted
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns objects that match the specified criteria.
+ execution: false
+ name: cbp-fileUpload-search
+ outputs:
+ - contextPath: CBP.FileUpload.Priority
+ description: 'File analysis priority (valid range: -2 to 2), where "2" is the
+ highest priority. Default priority is "0".'
+ type: Number
+ - contextPath: CBP.FileUpload.CreatedByUserId
+ description: ID of the user that requested the analysis.
+ type: String
+ - contextPath: CBP.FileUpload.UploadPath
+ description: Local upload path for this file on the server (can be a shared
+ network path). Note that the file is compressed in a ZIP archive.
+ type: String
+ - contextPath: CBP.FileUpload.FileName
+ description: Name of the file where the file exists on the endpoint.
+ type: String
+ - contextPath: CBP.FileUpload.PathName
+ description: Path of the file where the file exists on the endpoint.
+ type: String
+ - contextPath: CBP.FileUpload.UploadStatus
+ description: 'Status of upload (valid range: 0-6).'
+ type: Number
+ - contextPath: CBP.FileUpload.ComputerID
+ description: ID of the computer entry associated with this upload.
+ type: String
+ - contextPath: CBP.FileUpload.DateModified
+ description: Date/time when the fileUpload request was last modified (UTC).
+ type: Date
+ - contextPath: CBP.FileUpload.FileCatalogId
+ description: ID of the fileCatalog entry associated with this upload.
+ type: String
+ - contextPath: CBP.FileUpload.DateCreated
+ description: Date/time when the fileUpload request was created (UTC).
+ type: Date
+ - contextPath: CBP.FileUpload.ID
+ description: Unique fileUpload ID.
+ type: String
+ - contextPath: CBP.FileUpload.UploadedFileSize
+ description: Size of the uploaded file. The file size will be 0 unless the uploadStatus
+ is "3" (Completed).
+ type: Number
+ - arguments:
+ - default: true
+ description: (Int) Unique connector ID.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the object instance of this class.
+ execution: false
+ name: cbp-connector-get
+ outputs:
+ - contextPath: CBP.Connector.AnalysisEnabled
+ description: '"True" if the analysis component of this connector is enabled.
+ "False" if the analysis component of this connector is disabled.'
+ type: Boolean
+ - contextPath: CBP.Connector.AnalysisName
+ description: Name for the analysis component of the connector (can be same as
+ the name field).
+ type: String
+ - contextPath: CBP.Connector.AnalysisTargets
+ description: Array of possible analysis targets. Analysis targets are required
+ when creating a new fileAnalysis. They usually represent different OS and configurations
+ and are available only for some internal connectors.
+ type: String
+ - contextPath: CBP.Connector.CanAnalyze
+ description: '"True" if this connector can analyze files. "False" if this connector
+ cannot analyze files.'
+ type: Boolean
+ - contextPath: CBP.Connector.ConnectorVersion
+ description: Version of this connector.
+ type: String
+ - contextPath: CBP.Connector.Enabled
+ description: '"True" if the connector is enabled. "False" if the connector is
+ disabled.'
+ type: Boolean
+ - contextPath: CBP.Connector.ID
+ description: Unique connector ID.
+ type: String
+ - arguments:
+ - default: false
+ description: 'A condition contains three parts: name, operator, and value. Name
+ is any valid field in the object being queried. Operator is any of the valid
+ single-character operators: ":" (LIKE), "!" (NOT LIKE), "<" (less than), ">"
+ (greater than), "+" (logical AND), "-" (logical OR), "|" (separates values).
+ Value is compared using the operator and depends on the field type.
+ For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#searching.'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Maximum number of results to retrieve. If not specified,
+ the first 1000 results will be returned. If set to "-1", only the result count
+ will be returned, without actual results, and the offset parameter is ignored.
+ If set to "0", all results will be returned, and the offset parameter is ignored.
+ Note that some result sets might be very large, resulting in query timeout.
+ Therefore, unless you know that the query will not return more than 1000 results,
+ it is recommended to retrieve data in chunks using offset and limit.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: (Int) Offset in the data set.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ description: 'Grouping is optional and can be defined with a single attribute:
+ &group=xyz. There can be only one grouping field, for example: group=osShortName.'
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: 'Sorting is optional and can be defined with a single attribute
+ where xyz is the field name from the result set: &sort=xyz [ASC|DESC]. There
+ can be only one sorting field. Default sort order is ascending (ASC).'
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns objects that match the specified criteria.
+ execution: false
+ name: cbp-connector-search
+ outputs:
+ - contextPath: CBP.Connector.AnalysisEnabled
+ description: '"True" if the analysis component of this connector is enabled.
+ "False" if the analysis component of this connector is disabled.'
+ type: Boolean
+ - contextPath: CBP.Connector.AnalysisName
+ description: Name for the analysis component of the connector (can be same as
+ the name field).
+ type: String
+ - contextPath: CBP.Connector.AnalysisTargets
+ description: Array of possible analysis targets. Analysis targets are required
+ when creating a new fileAnalysis. They usually represent different OS and configurations
+ and are available only for some internal connectors.
+ type: String
+ - contextPath: CBP.Connector.CanAnalyze
+ description: '"True" if this connector can analyze files. "False" if this connector
+ cannot analyze files.'
+ type: Boolean
+ - contextPath: CBP.Connector.ConnectorVersion
+ description: Version of this connector.
+ type: String
+ - contextPath: CBP.Connector.Enabled
+ description: '"True" if the connector is enabled. "False" if the connector is
+ disabled.'
+ type: Boolean
+ - contextPath: CBP.Connector.ID
+ description: Unique connector ID.
+ type: String
+ - arguments:
+ - default: true
+ description: ID of the approval request to update.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: |-
+ Resolution of the request. Resolution can be changed for open requests or
+ closed requests only. It can be one of:
+ 0=Not Resolved
+ 1=Rejected
+ 2=Resolved - Approved
+ 3=Resolved - Rule Change
+ 4=Resolved - Installer
+ 5=Resolved - Updater
+ 6=Resolved - Publisher
+ 7=Resolved - Other
+ isArray: false
+ name: resolution
+ predefined:
+ - Rejected
+ - Resolved - Approved
+ - Resolved - Rule Change
+ - Resolved - Installer
+ - Resolved - Updater
+ - Resolved - Publisher
+ - Resolved - Other
+ required: true
+ secret: false
+ - default: false
+ description: Email address of the user that created this request.
+ isArray: false
+ name: requestorEmail
+ required: false
+ secret: false
+ - default: false
+ description: Comments added by the user that resolved the request.
+ isArray: false
+ name: resolutionComments
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'Request status. Can be one of: 1=New, 2=Open, 3=Closed, 4=Escalated.
+ Prohibited transitions are from any status back to 0 or 1.'
+ isArray: false
+ name: status
+ predefined:
+ - New
+ - Open
+ - Closed
+ - Escalated
+ required: false
+ secret: false
+ deprecated: false
+ description: Resolves a file approval request.
+ execution: false
+ name: cbp-approvalRequest-resolve
+ outputs:
+ - contextPath: CBP.ApprovalRequest.ID
+ description: ID of the approval request.
+ type: Number
+ - contextPath: CBP.ApprovalRequest.ResolutionComments
+ description: Comments added by the user that resolved the request.
+ type: String
+ - contextPath: CBP.ApprovalRequest.Resolution
+ description: 'Resolution of the request. Can be one of: 0=Not Resolved, 1=Rejected,
+ 2=Resolved - Approved, 3=Resolved - Rule Change, 4=Resolved - Installer, 5=Resolved
+ - Updater, 6=Resolved - Publisher, 7=Resolved - Other.'
+ type: Number
+ - contextPath: CBP.ApprovalRequest.Status
+ description: 'Request status. Can be one of: 1=New, 2=Open, 3=Closed, 4=Escalated.'
+ type: Number
+ - arguments:
+ - default: false
+ description: (String) Hash associated with this rule. This parameter is not
+ required if fileCatalogId is supplied.
+ isArray: false
+ name: hash
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: '(Int) File state for this rule. Can be one of: 1=Unapproved, 2=Approved,
+ 3=Banned.'
+ isArray: false
+ name: fileState
+ predefined:
+ - '1'
+ - '2'
+ - '3'
+ required: true
+ secret: false
+ - default: false
+ description: (Int) Unique ID of this fileRule.
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ - default: false
+ description: (Int) ID of the fileCatalog entry associated with this fileRule.
+ Can be "0" if creating or modifying the rule based on the hash or file name.
+ isArray: false
+ name: fileCatalogId
+ required: false
+ secret: false
+ - default: false
+ description: (String) Name of this rule.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ description: (String) Description of this rule.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: '(Boolean) If "true", creates a report-only ban. Note: fileState
+ has to be set to "1" (unapproved) before this flag can be set.'
+ isArray: false
+ name: reportOnly
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) "True" if reputation approvals are enabled for this file.
+ "False" if reputation approvals are disabled for this file.
+ isArray: false
+ name: reputationApprovalsEnabled
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) "True" if this file is forced to act as installer, even
+ if the product detected it as ‘not installer’.
+ isArray: false
+ name: forceInstaller
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) "True" if this file is forced to act as ‘not installer’,
+ even if the product detected it as installer.
+ isArray: false
+ name: forceNotInstaller
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: (String) List of IDs of policies to which this rule applies. Set
+ to "0" if this is a global rule.
+ isArray: false
+ name: policyIds
+ required: false
+ secret: false
+ - default: false
+ description: '(Int) Set of platform flags where this file rule will be valid.
+ Combination of: 1 = Windows, 2 = Mac, 4 = Linux.'
+ isArray: false
+ name: platformFlags
+ required: false
+ secret: false
+ - default: false
+ description: Headers to display in the returned table.
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: true
+ description: 'Creates or updates a file rule. For more information, see the Carbon Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#filerule'
+ execution: false
+ name: cbp-fileRule-update
+ outputs:
+ - contextPath: CBP.FileRule.CatalogID
+ description: The file catalog ID for the rule.
+ type: String
+ - contextPath: CBP.FileRule.Description
+ description: The rule description.
+ type: String
+ - contextPath: CBP.FileRule.FileState
+ description: The file state for the rule.
+ type: String
+ - contextPath: CBP.FileRule.Hash
+ description: The hash for the rule.
+ type: String
+ - contextPath: CBP.FileRule.ID
+ description: The rule ID.
+ type: String
+ - contextPath: CBP.FileRule.Name
+ description: The rule name.
+ type: String
+ - contextPath: CBP.FileRule.PolicyIDs
+ description: The policies this rule belongs to.
+ type: String
+ - contextPath: CBP.FileRule.ReportOnly
+ description: Whether this rule "reporting only" or is also "enforcing".
+ type: String
+ - arguments:
+ - default: false
+ description: (String) Hash associated with this rule. This parameter is not
+ required if the fileCatalogId is supplied.
+ isArray: false
+ name: hash
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: '(Int) File state for this rule. Can be one of: 1=Unapproved, 2=Approved,
+ 3=Banned.'
+ isArray: false
+ name: fileState
+ predefined:
+ - '1'
+ - '2'
+ - '3'
+ required: true
+ secret: false
+ - default: false
+ description: (Int) Unique ID of this fileRule.
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ - default: false
+ description: (Int) ID of the fileCatalog entry associated with this fileRule.
+ Can be "0" if creating or modifying the rule based on the hash or file name.
+ This value can be fetched via cbp-fileCatalog-search command.
+ isArray: false
+ name: fileCatalogId
+ required: false
+ secret: false
+ - default: false
+ description: (String) Name of this rule.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ description: (String) Description of this rule.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: '(Boolean) Set to "true" to create a report-only ban. Note: fileState
+ has to be set to "1" (unapproved) before this flag can be set.'
+ isArray: false
+ name: reportOnly
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) "True" if reputation approvals are enabled for this file.
+ "False" if reputation approvals are disabled for this file.
+ isArray: false
+ name: reputationApprovalsEnabled
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) "True" if this file is forced to act as installer, even
+ if the product detected it as ‘not installer’.
+ isArray: false
+ name: forceInstaller
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: (Boolean) "True" if this file is forced to act as ‘not installer’,
+ even if the product detected it as installer.
+ isArray: false
+ name: forceNotInstaller
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: (String) List of IDs of policies to which this rule applies. Set
+ to "0" if this is a global rule.
+ isArray: false
+ name: policyIds
+ required: false
+ secret: false
+ - default: false
+ description: '(Int) Set of platform flags where this file rule will be valid.
+ Combination of: 1 = Windows, 2 = Mac, 4 = Linux.'
+ isArray: false
+ name: platformFlags
+ required: false
+ secret: false
+ - default: false
+ description: Headers to display in the returned table.
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Creates or updates a file rule. For more information, see the Carbon
+ Black documentation: https://developer.carbonblack.com/reference/enterprise-protection/8.0/rest-api/#filerule'
+ execution: false
+ name: cbp-fileRule-createOrUpdate
+ outputs:
+ - contextPath: CBP.FileRule.CatalogID
+ description: The file catalog ID for the rule.
+ type: String
+ - contextPath: CBP.FileRule.Description
+ description: The rule description.
+ type: String
+ - contextPath: CBP.FileRule.FileState
+ description: The file state for the rule.
+ type: String
+ - contextPath: CBP.FileRule.Hash
+ description: The hash for the rule.
+ type: String
+ - contextPath: CBP.FileRule.ID
+ description: The rule ID.
+ type: String
+ - contextPath: CBP.FileRule.Name
+ description: The rule name.
+ type: String
+ - contextPath: CBP.FileRule.PolicyIDs
+ description: The policies this rule belongs to.
+ type: String
+ - contextPath: CBP.FileRule.ReportOnly
+ description: Is this rule "reporting only" or is it also "enforcing".
+ type: String
+ dockerimage: demisto/python3:3.7.3.221
+ subtype: python3
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- Carbon Black Enterprise Protection V2 Test
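The query condition syntax used by the cbp-*-search commands above is compact, so a short illustration may help. This is a sketch only: the field names and the war-room invocation below are illustrative examples following the documented name/operator/value form, not values taken from this patch; consult the linked Carbon Black searching docs for the authoritative field list.

```python
# A minimal sketch of composing search conditions for the cbp-*-search
# commands, assuming the documented single-character operators.
conditions = [
    'fileName:*.exe',          # ':'  LIKE         - file names matching *.exe
    'analysisStatus!4',        # '!'  NOT LIKE     - exclude errored analyses
    'dateCreated>2019-01-01',  # '>'  greater than - created after a date
]

# A hypothetical war-room invocation using one such condition:
#   !cbp-fileAnalysis-search query="fileName:*.exe" limit="100" offset="0"
```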
diff --git a/Integrations/CarbonBlackProtect/CarbonBlackProtect_description.md b/Integrations/CarbonBlackProtect/CarbonBlackProtect_description.md
new file mode 100644
index 000000000000..7650e80da5f8
--- /dev/null
+++ b/Integrations/CarbonBlackProtect/CarbonBlackProtect_description.md
@@ -0,0 +1,2 @@
+To find an API key corresponding to a particular Carbon Black user account, log in to the console as that user, then click the username in the upper right -> Profile info.
+Then, click the "API Token" button on the left-hand side to reveal the API token for the logged-in user. If there is no API token displayed, click the "Reset" button to create a new one.
\ No newline at end of file
diff --git a/Integrations/CarbonBlackProtect/CarbonBlackProtect_image.png b/Integrations/CarbonBlackProtect/CarbonBlackProtect_image.png
new file mode 100644
index 000000000000..f46f0a808a3a
Binary files /dev/null and b/Integrations/CarbonBlackProtect/CarbonBlackProtect_image.png differ
diff --git a/Integrations/CarbonBlackProtect/CarbonBlackProtect_test.py b/Integrations/CarbonBlackProtect/CarbonBlackProtect_test.py
new file mode 100644
index 000000000000..310515e4cd02
--- /dev/null
+++ b/Integrations/CarbonBlackProtect/CarbonBlackProtect_test.py
@@ -0,0 +1,69 @@
+from CommonServerPython import *
+
+
+def test_remove_prefix():
+ from CarbonBlackProtect import remove_prefix
+ prefix = "test_prefix"
+
+ str_with_prefix = '{}a'.format(prefix)
+ expected_response = 'a'
+ assert remove_prefix(prefix, str_with_prefix) == expected_response
+
+ str_without_prefix = 'b{}'.format(prefix)
+ expected_response = str_without_prefix
+ assert remove_prefix(prefix, str_without_prefix) == expected_response
+
+ str_with_two_prefixes = '{prefix}{prefix}c'.format(prefix=prefix)
+ expected_response = '{}c'.format(prefix)
+ assert remove_prefix(prefix, str_with_two_prefixes) == expected_response
+
+
+def test_event_severity_to_dbot_score():
+ malicious_scores = (2,)
+ warning_scores = (4, 5)
+ unknown_scores = (3, '6', 7)
+
+ assert_score(malicious_scores, 3)
+ assert_score(warning_scores, 2)
+ assert_score(unknown_scores, 0)
+
+
+def test_cbp_date_to_timestamp():
+ from CarbonBlackProtect import cbp_date_to_timestamp
+
+ cbp_time_with_milis = '2019-04-19T15:20:42.000000Z'
+ expected_ts = date_to_timestamp(cbp_time_with_milis, date_format='%Y-%m-%dT%H:%M:%S.%fZ')
+ assert cbp_date_to_timestamp(cbp_time_with_milis) == expected_ts
+
+ cbp_time_without_milis = '2019-04-19T15:20:42Z'
+ expected_ts = date_to_timestamp(cbp_time_without_milis, date_format='%Y-%m-%dT%H:%M:%SZ')
+ assert cbp_date_to_timestamp(cbp_time_without_milis) == expected_ts
+
+ try:
+ non_cbp_time = '20-04-2019T15:20:42'
+ cbp_date_to_timestamp(non_cbp_time)
+ raise AssertionError('cbp_date_to_timestamp should fail when passing non-cbp format dates')
+ except ValueError:
+ # if got here, then the right error was passed, so no further checks are required
+ pass
+
+
+def test_remove_keys_with_empty_value():
+ from CarbonBlackProtect import remove_keys_with_empty_value
+
+ base_dict = {
+ 'first': 1,
+ 'second': 2
+ }
+ assert remove_keys_with_empty_value(base_dict) == base_dict
+
+ dict_with_empty_value = dict(base_dict)
+ dict_with_empty_value['third'] = None
+ dict_with_empty_value['fourth'] = ''
+ assert remove_keys_with_empty_value(dict_with_empty_value) == base_dict
+
+
+def assert_score(severity_tuple, expected_output):
+ from CarbonBlackProtect import event_severity_to_dbot_score
+ for severity in severity_tuple:
+ assert event_severity_to_dbot_score(severity) == expected_output
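For readers skimming the tests, this is roughly the contract that test_remove_prefix pins down. Below is a minimal sketch of such a helper; the actual implementation lives in CarbonBlackProtect.py (not shown in this section) and may differ in detail.

```python
def remove_prefix(prefix, full_str):
    """Strip a single leading occurrence of prefix from full_str.

    Matches the assertions above: only a prefix at the very start of the
    string is removed, and only one occurrence of it.
    """
    if full_str.startswith(prefix):
        return full_str[len(prefix):]
    return full_str
```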
diff --git a/Integrations/CarbonBlackProtect/Pipfile b/Integrations/CarbonBlackProtect/Pipfile
new file mode 100644
index 000000000000..41c7519a7a9f
--- /dev/null
+++ b/Integrations/CarbonBlackProtect/Pipfile
@@ -0,0 +1,14 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/CarbonBlackProtect/Pipfile.lock b/Integrations/CarbonBlackProtect/Pipfile.lock
new file mode 100644
index 000000000000..643d94a1edbe
--- /dev/null
+++ b/Integrations/CarbonBlackProtect/Pipfile.lock
@@ -0,0 +1,174 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "030517bfcc68d7e2f82fb5831e88abe2f6540ec99eefed71048ae95c58697218"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:18c796c2cd35eb1a1d3f012a214a542790a1aed95e29768bdcb9f2197eccbd0b",
+ "sha256:96151fca2c6e736503981896495d344781b60d18bfda78dc11b290c6125ebdb6"
+ ],
+ "version": "==4.3.15"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33",
+ "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39",
+ "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019",
+ "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088",
+ "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b",
+ "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e",
+ "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6",
+ "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b",
+ "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5",
+ "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff",
+ "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd",
+ "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7",
+ "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff",
+ "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d",
+ "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2",
+ "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35",
+ "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4",
+ "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514",
+ "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252",
+ "sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109",
+ "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f",
+ "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c",
+ "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92",
+ "sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577",
+ "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d",
+ "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d",
+ "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f",
+ "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a",
+ "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"
+ ],
+ "version": "==1.3.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:0125e8f60e9e031347105eb1682cef932f5e97d7b9a1a28d9bf00c22a5daef40",
+ "sha256:590044e3942351a1bdb1de960b739ff4ce277960f2425ad4509446dbace8d9d1"
+ ],
+ "markers": "python_version > '2.7'",
+ "version": "==6.0.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f",
+ "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746"
+ ],
+ "version": "==0.9.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:592eaa2c33fae68c7d75aacf042efc9f77b27c08a6224a4f59beab8d9a420523",
+ "sha256:ad3ad5c450284819ecde191a654c09b0ec72257a2c711b9633d677c71c9850c4"
+ ],
+ "index": "pypi",
+ "version": "==4.3.1"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:4d0d06d173eecf172703219a71dbd4ade0e13904e6bbce1ce660e2e0dc78b5c4",
+ "sha256:bfdf02789e3d197bd682a758cae0a4a18706566395fbe2803badcd1335e0173e"
+ ],
+ "index": "pypi",
+ "version": "==1.10.1"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:035a54ede6ce1380599b2ce57844c6554666522e376bd111eb940fbc7c3dad23",
+ "sha256:037c35f2741ce3a9ac0d55abfcd119133cbd821fffa4461397718287092d9d15",
+ "sha256:049feae7e9f180b64efacbdc36b3af64a00393a47be22fa9cb6794e68d4e73d3",
+ "sha256:19228f7940beafc1ba21a6e8e070e0b0bfd1457902a3a81709762b8b9039b88d",
+ "sha256:2ea681e91e3550a30c2265d2916f40a5f5d89b59469a20f3bad7d07adee0f7a6",
+ "sha256:3a6b0a78af298d82323660df5497bcea0f0a4a25a0b003afd0ce5af049bd1f60",
+ "sha256:5385da8f3b801014504df0852bf83524599df890387a3c2b17b7caa3d78b1773",
+ "sha256:606d8afa07eef77280c2bf84335e24390055b478392e1975f96286d99d0cb424",
+ "sha256:69245b5b23bbf7fb242c9f8f08493e9ecd7711f063259aefffaeb90595d62287",
+ "sha256:6f6d839ab09830d59b7fa8fb6917023d8cb5498ee1f1dbd82d37db78eb76bc99",
+ "sha256:730888475f5ac0e37c1de4bd05eeb799fdb742697867f524dc8a4cd74bcecc23",
+ "sha256:9819b5162ffc121b9e334923c685b0d0826154e41dfe70b2ede2ce29034c71d8",
+ "sha256:9e60ef9426efab601dd9aa120e4ff560f4461cf8442e9c0a2b92548d52800699",
+ "sha256:af5fbdde0690c7da68e841d7fc2632345d570768ea7406a9434446d7b33b0ee1",
+ "sha256:b64efdbdf3bbb1377562c179f167f3bf301251411eb5ac77dec6b7d32bcda463",
+ "sha256:bac5f444c118aeb456fac1b0b5d14c6a71ea2a42069b09c176f75e9bd4c186f6",
+ "sha256:bda9068aafb73859491e13b99b682bd299c1b5fd50644d697533775828a28ee0",
+ "sha256:d659517ca116e6750101a1326107d3479028c5191f0ecee3c7203c50f5b915b0",
+ "sha256:eddd3fb1f3e0f82e5915a899285a39ee34ce18fd25d89582bc89fc9fb16cd2c6"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.3.1"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/Censys/CHANGELOG.md b/Integrations/Censys/CHANGELOG.md
new file mode 100644
index 000000000000..1ae648ede9fc
--- /dev/null
+++ b/Integrations/Censys/CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+ - Added an error message when a query does not return results. Previously, an error was returned.
+ - Added proxy support.
+
+## [19.8.2] - 2019-08-22
+ -
diff --git a/Integrations/Censys/Censys.py b/Integrations/Censys/Censys.py
new file mode 100644
index 000000000000..5c11b9b451bd
--- /dev/null
+++ b/Integrations/Censys/Censys.py
@@ -0,0 +1,79 @@
+import demistomock as demisto
+from CommonServerPython import *
+
+''' IMPORTS '''
+import requests
+
+''' GLOBAL VARIABLES '''
+API_URL = demisto.params()['url']
+API_ID = demisto.params()['apiid']
+API_SECRET = demisto.params()['secret']
+USE_SSL = not demisto.params().get('insecure', False)
+
+
+def test_module():
+ url_suffix = "view/ipv4/8.8.8.8"
+ res = send_request('GET', url_suffix)
+ if res is not None:
+ demisto.results('ok')
+
+
+def send_request(method, url_suffix, data=None):
+ res = requests.request(method, API_URL + url_suffix,
+ auth=(API_ID, API_SECRET),
+ data=data, verify=USE_SSL)
+ if res is not None:
+ data = json.loads(res.text)
+ if res.status_code == 404:
+ return None
+ elif res.status_code >= 400:
+ return_error("Received an error - status code [{0}], "
+ "error message: {1}".format(res.status_code, data["error"].title()))
+ return data
+ else:
+ return None
+
+
+def censys_view_command():
+ args = demisto.args()
+ query = args.get('query')
+ index = args.get('index')
+
+ url_suffix = 'view/{0}/{1}'.format(index, query)
+ raw = send_request('GET', url_suffix)
+ if raw:
+ demisto.results(raw)
+ else:
+ demisto.results("No view results for {0}.".format(query))
+
+
+def censys_search_command():
+ args = demisto.args()
+ query = args.get('query')
+ index = args.get('index')
+
+ url_suffix = 'search/{0}'.format(index)
+ data = {
+ "query": query,
+ "page": 1
+ }
+ raw = send_request('POST', url_suffix, json.dumps(data))
+ readable = tableToMarkdown("Search results for {0} in {1}".format(query, index), raw["results"])
+ return_outputs(readable, raw)
+
+
+''' EXECUTION CODE '''
+command = demisto.command()
+LOG('command is {0}'.format(command))
+try:
+ handle_proxy()
+ if command == 'test-module':
+ test_module()
+ elif command == 'cen-view':
+ censys_view_command()
+ elif command == 'cen-search':
+ censys_search_command()
+
+except Exception as ex:
+ LOG(ex)
+ return_error(str(ex))
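For orientation, the view request that test_module issues above is equivalent to the following standalone call. This is a sketch for illustration only: the API ID and secret are placeholders, and the real integration reads them (and the base URL) from its parameters.

```python
import requests

# Placeholder credentials; substitute your own Censys API ID and secret.
API_ID, API_SECRET = 'my-api-id', 'my-api-secret'

res = requests.get('https://censys.io/api/v1/view/ipv4/8.8.8.8',
                   auth=(API_ID, API_SECRET))
res.raise_for_status()
print(res.json())
```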
diff --git a/Integrations/Censys/Censys.yml b/Integrations/Censys/Censys.yml
new file mode 100644
index 000000000000..2e26baf805c8
--- /dev/null
+++ b/Integrations/Censys/Censys.yml
@@ -0,0 +1,86 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: Censys
+ version: -1
+configuration:
+- defaultvalue: https://censys.io/api/v1/
+ display: Server URL
+ name: url
+ required: true
+ type: 0
+- display: Censys API ID
+ name: apiid
+ required: true
+ type: 0
+- display: Censys API Secret
+ name: secret
+ required: true
+ type: 4
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Censys is a search engine that allows computer scientists to ask questions
+ about the devices and networks that compose the Internet. Driven by Internet-wide
+ scanning, Censys lets researchers find specific hosts and create aggregate reports
+ on how devices, websites, and certificates are configured and deployed.
+display: Censys
+name: Censys
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: IP address for which to perform a query.
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The index from which to retrieve data. Can be "ipv4", "websites", or "certificates".
+ isArray: false
+ name: index
+ predefined:
+ - ipv4
+ - websites
+ - certificates
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns detailed information for an IP address within the specified index.
+ execution: false
+ name: cen-view
+ - arguments:
+ - default: false
+ description: The attribute for which you are searching (JSON format).
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The index on which to perform a query.
+ isArray: false
+ name: index
+ predefined:
+ - ipv4
+ - websites
+ - certificates
+ required: true
+ secret: false
+ deprecated: false
+ description: Searches for an attribute within the specified index.
+ execution: false
+ name: cen-search
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+system: true
+tests:
+- No test
diff --git a/Integrations/Censys/Censys_description.md b/Integrations/Censys/Censys_description.md
new file mode 100644
index 000000000000..c2e96dc89f75
--- /dev/null
+++ b/Integrations/Censys/Censys_description.md
@@ -0,0 +1 @@
+Two commands covering aspects of the Censys.io API are available: Search (cen-search) and View (cen-view). Cen-view is the primary interface; automations should parse the output of cen-view to display the relevant data. Cen-search requires a more advanced search syntax and should be used primarily in pivoting and hypersearch scripts. More information on the search syntax can be found at: https://censys.io/api/v1/docs/search
\ No newline at end of file
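As a usage sketch, cen-search posts a JSON body of the form built by censys_search_command in Censys.py above. The query string here is an illustrative example written in the linked search syntax, not a required value, and the credentials are placeholders.

```python
import json
import requests

# Placeholder credentials for illustration only.
API_ID, API_SECRET = 'my-api-id', 'my-api-secret'

# Roughly what `!cen-search index=ipv4 query="80.http.get.headers.server: Apache"`
# sends under the hood.
body = {'query': '80.http.get.headers.server: Apache', 'page': 1}
res = requests.post('https://censys.io/api/v1/search/ipv4',
                    auth=(API_ID, API_SECRET), data=json.dumps(body))
res.raise_for_status()
print(res.json().get('results'))
```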
diff --git a/Integrations/Censys/Censys_image.png b/Integrations/Censys/Censys_image.png
new file mode 100644
index 000000000000..15400b0c0b34
Binary files /dev/null and b/Integrations/Censys/Censys_image.png differ
diff --git a/Integrations/CheckPhish/CheckPhish.py b/Integrations/CheckPhish/CheckPhish.py
new file mode 100644
index 000000000000..8d5546bd504a
--- /dev/null
+++ b/Integrations/CheckPhish/CheckPhish.py
@@ -0,0 +1,232 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import json
+import requests
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+BASE_URL = demisto.params()['url']
+API_KEY = demisto.params().get('token')
+GOOD_DISP = argToList(demisto.params().get('good_disp'))
+SUSP_DISP = argToList(demisto.params().get('susp_disp'))
+BAD_DISP = argToList(demisto.params().get('bad_disp'))
+USE_SSL = not demisto.params().get('insecure', False)
+
+HEADERS = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
+
+STATUS_SUFFIX = '/status'
+CLEAN_STATUS = 'clean'
+PENDING_STATUS = 'PENDING'
+DONE_STATUS = 'DONE'
+
+DEFAULT_GOOD_DISP = {
+ 'clean'
+}
+
+DEFAULT_SUSP_DISP = {
+ 'drug_spam',
+ 'gambling',
+ 'hacked_website',
+ 'streaming',
+ 'suspicious'
+}
+
+DEFAULT_BAD_DISP = {
+ 'cryptojacking',
+ 'phish',
+ 'likely_phish',
+ 'scam'
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url, params=None, data=None):
+ res = requests.request(
+ method,
+ url,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ headers=HEADERS
+ )
+
+ if res.status_code not in {200}:
+ return_error('Error in API call to CheckPhish [%d] - %s' % (res.status_code, res.reason))
+
+ try:
+ return res.json()
+
+ except ValueError as err:
+ return_error('Failed to parse response from service, received the following error:\n{}'.format(str(err)))
+
+
+def unite_dispositions():
+ for disp in GOOD_DISP:
+ DEFAULT_GOOD_DISP.add(disp)
+
+ for disp in SUSP_DISP:
+ DEFAULT_SUSP_DISP.add(disp)
+
+ for disp in BAD_DISP:
+ DEFAULT_BAD_DISP.add(disp)
+
+
+def get_dbot_score(disposition):
+ if disposition in DEFAULT_BAD_DISP:
+ return 3
+ if disposition in DEFAULT_SUSP_DISP:
+ return 2
+ if disposition in DEFAULT_GOOD_DISP:
+ return 1
+ return 0
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ query = {
+ 'apiKey': API_KEY,
+ 'urlInfo': {'url': 'https://www.google.com'}
+ }
+ res = http_request('POST', BASE_URL, data=json.dumps(query))
+ if res and 'message' not in res:
+ return 'ok'
+
+ return res['message'] # the message field contains the error message
+
+
+def submit_to_checkphish(url):
+ """ Submit a URL for analysis in CheckPhish
+
+ Args:
+ url(str): URL to be sent to CheckPhish for analysis
+
+ Returns:
+ (str). jobID retrieved from CheckPhish for the URL
+
+ """
+ # Prepend a scheme only when one is missing; a bare substring check
+ # would wrongly skip URLs that merely contain "http" elsewhere.
+ if not url.startswith(('http://', 'https://')):
+ url = 'http://' + url
+
+ if re.match(urlRegex, url):
+ query = {
+ 'apiKey': API_KEY,
+ 'urlInfo': {'url': url},
+ 'scanType': 'full'
+ }
+ res = http_request('POST', BASE_URL, data=json.dumps(query))
+
+ return res['jobID']
+
+ else:
+ return_error(url + ' is not a valid url')
+
+
+def is_job_ready_checkphish(jobID):
+ query = {
+ 'apiKey': API_KEY,
+ 'jobID': jobID
+ }
+ res = http_request('POST', BASE_URL + STATUS_SUFFIX, data=json.dumps(query))
+
+ if res and res['status'] == DONE_STATUS:
+ return True
+
+ return False
+
+
+def get_result_checkphish(jobID):
+ query = {
+ 'apiKey': API_KEY,
+ 'jobID': jobID
+ }
+ res = http_request('POST', BASE_URL + STATUS_SUFFIX, data=json.dumps(query))
+
+ if res and 'errorMessage' not in res:
+ result = {
+ 'url': res['url'],
+ 'jobID': jobID,
+ 'status': res['status'],
+ 'disposition': res['disposition'],
+ 'brand': res['brand']
+ }
+
+ url_dict = {
+ 'Data': result['url']
+ }
+
+ if result['disposition'] != CLEAN_STATUS:
+ url_dict['Malicious'] = {
+ 'Vendor': 'CheckPhish',
+ 'Description': 'Targets ' + result['brand']
+ }
+
+ dbot_score = {
+ 'Type': 'url',
+ 'Vendor': 'CheckPhish',
+ 'Indicator': result['url'],
+ 'Score': get_dbot_score(result['disposition'])
+ }
+
+ context = {
+ 'CheckPhish.' + outputPaths['url']: result,
+ outputPaths['url']: url_dict,
+ 'DBotScore': dbot_score
+ }
+
+ human_readable = tableToMarkdown('CheckPhish reputation for ' + result['url'],
+ result,
+ ['url', 'disposition', 'brand', 'status', 'jobID'])
+
+ return_outputs(human_readable, context, res)
+
+ else:
+ return_error('Error getting job status')
+
+
+def checkphish_check_urls():
+ urls = argToList(demisto.args().get('url'))
+ job_ids = []
+
+ for url in urls:
+ submit = submit_to_checkphish(url)
+ if submit:
+ job_ids.append(submit)
+
+ while len(job_ids):
+ for job_id in job_ids[:]:
+ if is_job_ready_checkphish(job_id):
+ get_result_checkphish(job_id)
+ job_ids.remove(job_id)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('Command being called is %s' % (demisto.command()))
+handle_proxy()
+unite_dispositions()
+
+try:
+ if demisto.command() == 'test-module':
+ demisto.results(test_module())
+
+ elif demisto.command() == 'CheckPhish-check-urls':
+ checkphish_check_urls()
+
+# Log exceptions
+except Exception as e:
+ LOG(str(e))
+ LOG.print_log()
+ raise
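One design note on checkphish_check_urls above: it polls job status in a tight loop with no pause between rounds. A gentler variant is sketched below; the five-second interval is an assumption for illustration, not behavior shipped in this patch, and the sketch reuses the module's is_job_ready_checkphish and get_result_checkphish helpers.

```python
import time

def poll_jobs_politely(job_ids, poll_interval=5):
    # Same submit-then-poll pattern as checkphish_check_urls, but sleeps
    # between polling rounds to avoid hammering the status endpoint.
    while job_ids:
        for job_id in job_ids[:]:
            if is_job_ready_checkphish(job_id):
                get_result_checkphish(job_id)
                job_ids.remove(job_id)
        if job_ids:
            time.sleep(poll_interval)  # assumed interval, tune as needed
```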
diff --git a/Integrations/CheckPhish/CheckPhish.yml b/Integrations/CheckPhish/CheckPhish.yml
new file mode 100644
index 000000000000..021b88d510f5
--- /dev/null
+++ b/Integrations/CheckPhish/CheckPhish.yml
@@ -0,0 +1,127 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: CheckPhish
+ version: -1
+configuration:
+- defaultvalue: https://developers.redmarlin.ai/api/neo/scan
+ display: CheckPhish API URL
+ name: url
+ required: false
+ type: 0
+- display: API Token
+ name: token
+ required: true
+ type: 4
+- display: 'Good dispositions (CheckPhish labels for non-phishing URLs). Default
+ is "clean".'
+ name: good_disp
+ options:
+ - adult
+ - cryptojacking
+ - drug_spam
+ - gambling
+ - hacked_website
+ - likely_phish
+ - phish
+ - scam
+ - streaming
+ - suspicious
+ required: false
+ type: 16
+- display: 'Suspicious dispositions (CheckPhish labels for suspicious URLs). Defaults
+ are "drug_spam", "gambling", "hacked_website", "streaming", and "suspicious".'
+ name: susp_disp
+ options:
+ - adult
+ - cryptojacking
+ - drug_spam
+ - gambling
+ - hacked_website
+ - likely_phish
+ - phish
+ - scam
+ - streaming
+ - suspicious
+ required: false
+ type: 16
+- display: 'Bad dispositions (CheckPhish labels for phishing URLs). Defaults are
+ "cryptojacking", "phish", "likely_phish", "scam".'
+ name: bad_disp
+ options:
+ - adult
+ - cryptojacking
+ - drug_spam
+ - gambling
+ - hacked_website
+ - likely_phish
+ - phish
+ - scam
+ - streaming
+ - suspicious
+ required: false
+ type: 16
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Check any URL to detect suspicious behavior.
+display: CheckPhish
+name: CheckPhish
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: A CSV list of URLs to check.
+ isArray: true
+ name: url
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks URLs against the CheckPhish database and returns the results.
+ execution: false
+ name: CheckPhish-check-urls
+ outputs:
+ - contextPath: CheckPhish.URL.url
+ description: URL that was submitted.
+ type: String
+ - contextPath: CheckPhish.URL.status
+ description: CheckPhish job status of the URL.
+ type: String
+ - contextPath: CheckPhish.URL.jobID
+ description: CheckPhish jobID that was assigned to the URL when it was submitted.
+ type: String
+ - contextPath: CheckPhish.URL.disposition
+ description: The CheckPhish category (disposition) of the URL.
+ type: String
+ - contextPath: CheckPhish.URL.brand
+ description: The brand (attack target) targeted by the URL.
+ type: String
+ - contextPath: DBotScore.Score
+ description: DBot score.
+ type: Number
+ - contextPath: DBotScore.Type
+ description: Indicator type that was tested.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor that provided the DBot score.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: Indicator that CheckPhish tested.
+ type: String
+ - contextPath: URL.Data
+ description: URL that was submitted.
+ type: String
+ - contextPath: URL.Malicious.Vendor
+ description: CheckPhish.
+ type: String
+ - contextPath: URL.Malicious.Description
+ description: The brand (attack target) targeted by the URL.
+ type: String
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
diff --git a/Integrations/CheckPhish/CheckPhish_description.md b/Integrations/CheckPhish/CheckPhish_description.md
new file mode 100644
index 000000000000..90ab59712dbe
--- /dev/null
+++ b/Integrations/CheckPhish/CheckPhish_description.md
@@ -0,0 +1,24 @@
+## Supported Dispositions
+CheckPhish classifies URLs by the following dispositions (categories).
+- Zero-day phishing
+- Tech support scams
+- Gift card scams
+- Survey scams
+- Adult websites
+- Drug pharmacy (Drug Spam) websites
+- Illegal/rogue streaming sites
+- Gambling websites
+- Hacked Websites
+- Cryptojacking/cryptomining
+
+## Sending URLs to Check
+There is no limit to the number of URLs you can send in each call; the effective limit is determined by your API privileges. Separate multiple URLs with commas,
+for example: www.demisto.com,www.google.com,www.youtube.com
+
+## Modify Severity Levels
+You can modify the severity levels of any disposition received from CheckPhish. We recommend the following default parameters:
+- Good = clean
+- Suspicious = drug_spam, gambling, hacked_website, streaming, suspicious
+- Bad = cryptojacking, phish, likely_phish, scam
+
+**Note**: The worst category in which a label is included will be the effective one.
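The "worst category wins" note mirrors the lookup order in get_dbot_score in CheckPhish.py above (bad is checked before suspicious, suspicious before good). A toy illustration, assuming a hypothetical configuration that lists "streaming" as both suspicious and good:

```python
# Hypothetical overlapping configuration for illustration only.
bad, susp, good = {'phish'}, {'streaming'}, {'streaming', 'clean'}

def score(disposition):
    # Checked worst-first, so the harsher category wins on overlap.
    if disposition in bad:
        return 3
    if disposition in susp:
        return 2
    if disposition in good:
        return 1
    return 0

assert score('streaming') == 2  # suspicious outranks good
```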
diff --git a/Integrations/CheckPhish/CheckPhish_image.png b/Integrations/CheckPhish/CheckPhish_image.png
new file mode 100644
index 000000000000..56e1e126a079
Binary files /dev/null and b/Integrations/CheckPhish/CheckPhish_image.png differ
diff --git a/Integrations/Cherwell/CHANGELOG.md b/Integrations/Cherwell/CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/Integrations/Cherwell/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/Integrations/Cherwell/Cherwell.py b/Integrations/Cherwell/Cherwell.py
new file mode 100644
index 000000000000..fc078f69b896
--- /dev/null
+++ b/Integrations/Cherwell/Cherwell.py
@@ -0,0 +1,1022 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import json
+import requests
+import traceback
+from datetime import datetime, timedelta
+import os
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+PARAMS = demisto.params()
+FETCHES_INCIDENTS = PARAMS.get('isFetch')
+FETCH_TIME = PARAMS.get('fetch_time')
+FETCH_ATTACHMENTS = PARAMS.get('fetch_attachments')
+OBJECTS_TO_FETCH = PARAMS.get('objects_to_fetch').split(',')
+MAX_RESULT = PARAMS.get('max_results')
+USERNAME = PARAMS.get('credentials').get('identifier')
+PASSWORD = PARAMS.get('credentials').get('password')
+# Remove trailing slash to prevent wrong URL path to service
+SERVER = PARAMS['url'][:-1] if (PARAMS['url'] and PARAMS['url'].endswith('/')) else PARAMS['url']
+SECURED = not PARAMS.get('insecure')
+CLIENT_ID = PARAMS.get('client_id')
+QUERY_STRING = PARAMS.get('query_string')
+DATE_FORMAT = '%m/%d/%Y %I:%M:%S %p'
+# Service base URL
+BASE_URL = SERVER + '/CherwellAPI/'
+
+HTTP_CODES = {
+ 'unauthorized': 401,
+ 'internal_server_error': 500,
+ 'success': 200
+}
+
+HEADERS = {
+ 'Content-Type': "application/json",
+ 'Accept': "application/json"
+}
+
+QUERY_OPERATORS = ['eq', 'gt', 'lt', 'contains', 'startwith']
+
+#######################################################################################################################
+
+
+''' HELPER FUNCTIONS '''
+
+
+def parse_response(response, error_operation, file_content=False, is_fetch=False):
+ try:
+ response.raise_for_status()
+ if not response.content:
+ return
+ if file_content:
+ return response.content
+ else:
+ return response.json()
+ except requests.exceptions.HTTPError:
+ try:
+ res_json = response.json()
+ err_msg = res_json.get('errorMessage') or res_json.get('error_description') or res_json.get('Message')
+ except Exception:
+ err_msg = response.content.decode('utf-8')
+ raise_or_return_error(error_operation + ": " + str(err_msg), is_fetch)
+ except Exception as error:
+ raise_or_return_error(f'Could not parse response {error}', is_fetch)
+
+
+def cherwell_dict_parser(key, value, item_list):
+ new_dict = {}
+ for item in item_list:
+ field_key = item.get(key)
+ new_dict[field_key] = item.get(value)
+
+ return new_dict
+
+
+def parse_fields_from_business_object(field_list):
+ new_business_obj = cherwell_dict_parser('name', 'value', field_list)
+
+ return new_business_obj
+
+
+def parse_fields_from_business_object_list(response):
+ object_list = []
+ if not response.get('businessObjects'):
+ return []
+ for business_obj in response.get('businessObjects'):
+ new_business_obj = parse_fields_from_business_object(business_obj.get('fields'))
+ new_business_obj['BusinessObjectId'] = business_obj.get('busObId')
+ new_business_obj['PublicId'] = business_obj.get('busObPublicId')
+ new_business_obj['RecordId'] = business_obj.get('busObRecId')
+ object_list.append(new_business_obj)
+
+ return object_list
+
+
+def build_fields_for_business_object(data_dict, ids_dict):
+ fields = []
+ for key, value in data_dict.items():
+ new_field = {
+ "dirty": "true",
+ "fieldId": ids_dict.get(key),
+ "name": key,
+ "value": value
+ }
+ fields.append(new_field)
+ return fields
+
+
+def http_request(method, url, payload, token=None, custom_headers=None, is_fetch=False):
+ headers = build_headers(token, custom_headers)
+ try:
+ response = requests.request(method, url, data=payload, headers=headers, verify=SECURED)
+ except requests.exceptions.ConnectionError as e:
+ err_message = f'Error connecting to server. Check your URL/Proxy/Certificate settings: {e}'
+ raise_or_return_error(err_message, is_fetch)
+ return response
+
+
+def request_new_access_token(using_refresh):
+ url = BASE_URL + "token"
+ refresh_token = demisto.getIntegrationContext().get('refresh_token')
+
+ if using_refresh:
+ payload = f'client_id={CLIENT_ID}&grant_type=refresh_token&refresh_token={refresh_token}'
+ else:
+ payload = f'client_id={CLIENT_ID}&grant_type=password&username={USERNAME}&password={PASSWORD}'
+
+ headers = {
+ 'Accept': "application/json",
+ 'Content-Type': "application/x-www-form-urlencoded",
+ }
+
+ response = http_request('POST', url, payload, custom_headers=headers)
+ return response
+
+
+def get_new_access_token(is_fetch=False):
+ response = request_new_access_token(True)
+ if not response.status_code == HTTP_CODES['success']:
+ response = request_new_access_token(False)
+ res_json = parse_response(response,
+ "Could not get token. Check your credentials (user/password/client id) and try again",
+ is_fetch=is_fetch)
+ token_expiration_time = int(date_to_timestamp(res_json.get('.expires'), '%a, %d %b %Y %H:%M:%S GMT'))
+ demisto.setIntegrationContext({
+ 'refresh_token': res_json.get('refresh_token'),
+ 'token_expiration_time': token_expiration_time,
+ 'access_token': res_json.get('access_token')
+ })
+ return res_json.get('access_token')
+
+
+def get_access_token(new_token, is_fetch=False):
+ integration_context = demisto.getIntegrationContext()
+ token_expiration_time = integration_context.get('token_expiration_time')
+ current_time = date_to_timestamp(datetime.utcnow())
+ if new_token or not token_expiration_time or token_expiration_time < current_time:
+ token = get_new_access_token(is_fetch=is_fetch)
+ return token
+ else:
+ return integration_context.get('access_token')
+
+
+def build_headers(token, headers=None):
+    headers = dict(headers) if headers else dict(HEADERS)  # copy so the shared HEADERS dict is not mutated
+    headers['Authorization'] = f'Bearer {token}'
+ return headers
+
+
+def make_request(method, url, payload=None, headers=None, is_fetch=False):
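+    """Send an authenticated request, refreshing the access token on a 401 response.
+
+    A cached token is used while it is still valid; if the server answers 401 (unauthorized),
+    a new token is requested and the request is retried once.
+    """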
+ token = get_access_token(False, is_fetch=is_fetch)
+ response = http_request(method, url, payload, token, custom_headers=headers, is_fetch=is_fetch)
+ if response.status_code == HTTP_CODES['unauthorized']:
+ token = get_access_token(True, is_fetch=is_fetch)
+ response = http_request(method, url, payload, token, custom_headers=headers, is_fetch=is_fetch)
+ return response
+
+
+def get_business_object_summary_by_name(name, is_fetch):
+ url = BASE_URL + f'api/V1/getbusinessobjectsummary/busobname/{name}'
+ response = make_request('GET', url, is_fetch=is_fetch)
+ return parse_response(response, "Could not get business object summary", is_fetch=is_fetch)
+
+
+def resolve_business_object_id_by_name(name, is_fetch=False):
+ res = get_business_object_summary_by_name(name, is_fetch)
+ if not res:
+ err_message = f'Could not retrieve "{name}" business object id. Make sure "{name}" is a valid business object.'
+ raise_or_return_error(err_message, is_fetch)
+ return res[0].get('busObId')
+
+
+def save_business_object(payload):
+ url = BASE_URL + "api/V1/savebusinessobject"
+ response = make_request("POST", url, json.dumps(payload))
+ return parse_response(response, "Could not save business object")
+
+
+def get_business_object_record(business_object_id, object_id, id_type):
+ id_type_str = 'publicid' if id_type == 'public_id' else 'busobrecid'
+ url = BASE_URL + f'api/V1/getbusinessobject/busobid/{business_object_id}/{id_type_str}/{object_id}'
+ response = make_request("GET", url)
+ return parse_response(response, "Could not get business objects")
+
+
+def delete_business_object_record(business_object_id, object_id, id_type):
+ id_type_str = 'publicid' if id_type == 'public_id' else 'busobrecid'
+ url = BASE_URL + f'api/V1/deletebusinessobject/busobid/{business_object_id}/{id_type_str}/{object_id}'
+ response = make_request("DELETE", url)
+ return parse_response(response, "Could not delete business object")
+
+
+def get_search_results(payload, is_fetch=False):
+ url = BASE_URL + "api/V1/getsearchresults"
+ response = make_request("POST", url, json.dumps(payload))
+ return parse_response(response, "Could not search for business objects", is_fetch=is_fetch)
+
+
+def get_business_object_template(business_object_id, include_all=True, field_names=None, fields_ids=None,
+ is_fetch=False):
+ url = BASE_URL + "api/V1/getbusinessobjecttemplate"
+ payload = {
+ "busObId": business_object_id,
+ "includeAll": include_all
+ }
+
+ if field_names:
+ payload['fieldNames'] = field_names
+ if fields_ids:
+ payload['fieldIds'] = fields_ids
+ response = make_request("POST", url, json.dumps(payload), is_fetch=is_fetch)
+ return parse_response(response, "Could not get business object template", is_fetch=is_fetch)
+
+
+def build_business_object_json(simple_json, business_object_id, object_id=None, id_type=None):
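+    """Build the full save-business-object payload for a flat {field name: value} dict.
+
+    Field IDs are resolved from the business object template. Illustrative shape of the result:
+    {'busObId': '<busObId>', 'fields': [...], 'busObPublicId': '<public id>'}; the ID key is
+    'busObRecId' when id_type is 'record_id', and is omitted when object_id is not given.
+    """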
+ business_object_ids_dict = get_key_value_dict_from_template('name', 'fieldId', business_object_id)
+ fields_for_business_object = build_fields_for_business_object(simple_json, business_object_ids_dict)
+ business_object_json = {
+ 'busObId': business_object_id,
+ "fields": fields_for_business_object
+ }
+ if object_id:
+ id_key = 'busObPublicId' if id_type == 'public_id' else 'busObRecId'
+ business_object_json[id_key] = object_id
+ return business_object_json
+
+
+def create_business_object(name, data_json):
+ business_object_id = resolve_business_object_id_by_name(name)
+ business_object_json = build_business_object_json(data_json, business_object_id)
+ return save_business_object(business_object_json)
+
+
+def update_business_object(name, data_json, object_id, id_type):
+ business_object_id = resolve_business_object_id_by_name(name)
+ business_object_json = build_business_object_json(data_json, business_object_id, object_id, id_type)
+ return save_business_object(business_object_json)
+
+
+def get_business_object(name, object_id, id_type):
+ business_object_id = resolve_business_object_id_by_name(name)
+ results = get_business_object_record(business_object_id, object_id, id_type)
+ parsed_business_object = parse_fields_from_business_object(results.get('fields'))
+ parsed_business_object['PublicId'] = results.get('busObPublicId')
+ parsed_business_object['RecordId'] = results.get('busObRecId')
+ return parsed_business_object, results
+
+
+def delete_business_object(name, object_id, id_type):
+ business_object_id = resolve_business_object_id_by_name(name)
+ return delete_business_object_record(business_object_id, object_id, id_type)
+
+
+def download_attachment_from_business_object(attachment, is_fetch):
+ attachment_id = attachment.get('attachmentId')
+ business_object_id = attachment.get('busObId')
+ business_record_id = attachment.get('busObRecId')
+ url = BASE_URL + f'api/V1/getbusinessobjectattachment' \
+ f'/attachmentid/{attachment_id}/busobid/{business_object_id}/busobrecid/{business_record_id}'
+ response = make_request('GET', url, is_fetch=is_fetch)
+ return parse_response(response, f'Unable to get content of attachment {attachment_id}', file_content=True,
+ is_fetch=is_fetch)
+
+
+def get_attachments_content(attachments_to_download, is_fetch):
+ attachments = []
+ for attachment in attachments_to_download:
+ new_attachment = {
+ 'FileName': attachment.get('displayText'),
+ 'CreatedAt': attachment.get('created'),
+ 'Content': download_attachment_from_business_object(attachment, is_fetch=is_fetch)
+ }
+ attachments.append(new_attachment)
+ return attachments
+
+
+def get_attachments_details(id_type, object_id, object_type_name, object_type_id, file_type, attachment_type,
+                            is_fetch=False):
+ id_type_str = 'publicid' if id_type == 'public_id' else 'busobrecid'
+ business_object_type_str = 'busobid' if object_type_id else 'busobname'
+ object_type = object_type_id if object_type_id else object_type_name
+ url = BASE_URL + f'api/V1/getbusinessobjectattachments/' \
+ f'{business_object_type_str}/{object_type}/' \
+ f'{id_type_str}/{object_id}' \
+        f'/type/{file_type}' \
+ f'/attachmenttype/{attachment_type}'
+ response = make_request('GET', url, is_fetch=is_fetch)
+ return parse_response(response, f'Unable to get attachments for {object_type} {object_id}', is_fetch=is_fetch)
+
+
+def download_attachments(id_type, object_id, business_object_type_name=None, business_object_type_id=None,
+ is_fetch=False):
+    file_type = 'File'
+    attachment_type = 'Imported'
+    result = get_attachments_details(id_type, object_id, business_object_type_name, business_object_type_id,
+                                     file_type, attachment_type, is_fetch=is_fetch)
+ attachments_to_download = result.get('attachments')
+ if not attachments_to_download:
+ return
+ return get_attachments_content(attachments_to_download, is_fetch=is_fetch)
+
+
+def get_attachments_info(id_type, object_id, attachment_type, business_object_type_name=None,
+ business_object_type_id=None):
+    file_type = 'File'
+    result = get_attachments_details(id_type, object_id, business_object_type_name, business_object_type_id,
+                                     file_type, attachment_type)
+ attachments = result.get('attachments')
+ attachments_info = [{
+        'AttachmentFileId': attachment.get('attachmentFileId'),
+ 'FileName': attachment.get('displayText'),
+ 'AttachmentId': attachment.get('attachmentId'),
+ 'BusinessObjectType': business_object_type_name,
+ f'BusinessObject{string_to_context_key(id_type)}': object_id
+
+ } for attachment in attachments]
+ return attachments_info, result
+
+
+def attachment_results(attachments):
+ for attachment in attachments:
+ attachment_content = attachment.get('Content')
+ attachment_name = attachment.get('FileName')
+ demisto.results(fileResult(attachment_name, attachment_content))
+ return
+
+
+def run_query_on_business_objects(bus_id, filter_query, max_results, is_fetch):
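+    """Run a filtered search over a business object via the getsearchresults endpoint.
+
+    Illustrative payload (IDs are hypothetical): {'busObId': '<busObId>', 'includeAllFields': True,
+    'filters': [{'fieldId': '<fieldId>', 'operator': 'gt', 'value': '...'}], 'pageSize': 30};
+    pageSize is sent only when max_results is given.
+    """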
+ payload = {
+ 'busObId': bus_id,
+ 'includeAllFields': True,
+ 'filters': filter_query
+ }
+ if max_results:
+ payload['pageSize'] = max_results
+ return get_search_results(payload, is_fetch=is_fetch)
+
+
+def get_key_value_dict_from_template(key, val, business_object_id, is_fetch=False):
+ template_dict = get_business_object_template(business_object_id, is_fetch=is_fetch)
+ return cherwell_dict_parser(key, val, template_dict.get('fields'))
+
+
+def get_all_incidents(objects_names, last_created_time, max_results, query_string, real_fetch):
+ all_incidents: list = []
+ for business_object_name in objects_names:
+ business_object_id = resolve_business_object_id_by_name(business_object_name, is_fetch=real_fetch)
+ query_list = [['CreatedDateTime', 'gt', last_created_time]]
+ if query_string:
+ additional_query_list = validate_query_for_fetch_incidents(objects_names, query_string, real_fetch)
+ query_list += additional_query_list
+ incidents, _ = query_business_object(query_list, business_object_id, max_results, is_fetch=real_fetch)
+ all_incidents += incidents
+    # Parse the dates so the ordering is chronological rather than lexicographic
+    sorted_incidents = sorted(all_incidents,
+                              key=lambda incident: datetime.strptime(incident.get('CreatedDateTime'), DATE_FORMAT))
+ return sorted_incidents[:max_results]
+
+
+def object_to_incident(obj):
+ attachments_list = []
+ attachments = obj.get('Attachments')
+ if attachments:
+ obj.pop('Attachments')
+ for attachment in attachments:
+ file_name = attachment.get('FileName')
+ attachment_file = fileResult(file_name, attachment.get('Content'))
+ attachments_list.append({
+ 'path': attachment_file.get('FileID'),
+ 'name': file_name
+ })
+ item = {
+        'name': f'Record ID: {obj.get("RecID")}',
+ 'attachment': attachments_list,
+ 'rawJSON': json.dumps(obj)
+ }
+
+ return createContext(item, removeNull=True)
+
+
+def save_incidents(objects_to_save):
+ final_incidents = []
+ for obj in objects_to_save:
+ final_incidents.append(object_to_incident(obj))
+ demisto.incidents(final_incidents)
+ return
+
+
+def fetch_incidents_attachments(incidents, is_fetch):
+ for incident in incidents:
+ rec_id = incident.get('RecID')
+ business_object_id = incident.get('BusinessObjectId')
+ incident['Attachments'] = []
+ attachments = download_attachments('record_id', rec_id, business_object_type_id=business_object_id,
+ is_fetch=is_fetch)
+ if attachments:
+ for attachment in attachments:
+ new_attachment_obj = {
+ 'Content': attachment.get('Content'),
+ 'FileName': attachment.get('FileName')
+ }
+ incident['Attachments'].append(new_attachment_obj)
+ return incidents
+
+
+def validate_params_for_fetch(max_result, objects_to_fetch, real_fetch):
+ # Check that max result is positive integer
+ try:
+ max_result = int(max_result)
+        if max_result <= 0:
+            raise ValueError
+    except ValueError:
+        max_result_err_message = 'Max results to fetch must be a number greater than 0'
+ raise_or_return_error(max_result_err_message, real_fetch)
+ # Make sure that there are objects to fetch
+ if len(objects_to_fetch) == 0:
+ objects_to_fetch_err_message = 'No objects to fetch were given'
+ raise_or_return_error(objects_to_fetch_err_message, real_fetch)
+ return
+
+
+def fetch_incidents(objects_names, fetch_time, max_results, query_string, fetch_attachments, real_fetch=False):
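+    """Fetch business object records as incidents.
+
+    Determines the start time from the last run (or from the first-fetch window on the first run),
+    queries each configured object type for records created after that time, optionally downloads
+    their attachments, and, when real_fetch is True, sends the resulting incidents to the server.
+    """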
+ validate_params_for_fetch(max_results, objects_names, real_fetch)
+ max_results = int(max_results)
+ last_run = demisto.getLastRun()
+ last_objects_fetched = last_run.get('objects_names_to_fetch')
+ if 'last_created_time' in last_run and last_objects_fetched == objects_names:
+ last_created_time = last_run.get('last_created_time')
+ else:
+ try:
+ last_created_time, _ = parse_date_range(fetch_time, date_format=DATE_FORMAT, to_timestamp=False)
+ except ValueError:
+            error_message = f'First fetch timestamp should be of the form: <number> <time unit>, e.g., 12 hours, ' \
+                            f'7 days. Received: "{fetch_time}"'
+ raise_or_return_error(error_message, real_fetch)
+ incidents = get_all_incidents(objects_names, last_created_time, max_results, query_string, real_fetch)
+ if fetch_attachments:
+ incidents = fetch_incidents_attachments(incidents, real_fetch)
+ if real_fetch:
+ save_incidents(incidents)
+ return incidents
+
+
+def upload_business_object_attachment(file_name, file_size, file_content, object_type_name, id_type, object_id):
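+    """Upload an attachment to a business object in a single chunk (offset 0, total size = file size).
+
+    Returns the parsed API response, which the caller uses as the new attachment ID.
+    """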
+ id_type_str = 'publicid' if id_type == 'public_id' else 'busobrecid'
+    url = BASE_URL + 'api/V1/uploadbusinessobjectattachment/' \
+ f'filename/{file_name}/busobname/{object_type_name}/{id_type_str}/{object_id}/offset/0/totalsize/{file_size}'
+ payload = file_content
+    headers = dict(HEADERS)  # copy so the octet-stream content type does not leak into the shared HEADERS
+ headers['Content-Type'] = "application/octet-stream"
+ response = make_request('POST', url, payload, headers)
+ return parse_response(response, f'Could not upload attachment {file_name}')
+
+
+def upload_attachment(id_type, object_id, type_name, file_entry_id):
+ file_data = demisto.getFilePath(file_entry_id)
+ file_path = file_data.get('path')
+ file_name = file_data.get('name')
+ try:
+ file_size = os.path.getsize(file_path)
+ with open(file_path, 'rb') as f:
+ file_content = f.read()
+ attachment_id = upload_business_object_attachment(file_name, file_size, file_content, type_name, id_type,
+ object_id)
+ return attachment_id
+ except Exception as err:
+        return_error(f'Failed to read or upload file: {err}')
+
+
+def remove_attachment(id_type, object_id, type_name, attachment_id):
+ id_type_str = 'publicid' if id_type == 'public_id' else 'busobrecid'
+    url = BASE_URL + 'api/V1/removebusinessobjectattachment/' \
+ f'attachmentid/{attachment_id}/busobname/{type_name}/{id_type_str}/{object_id}'
+ response = make_request('DELETE', url)
+ parse_response(response, f'Could not remove attachment {attachment_id} from {type_name} {object_id}')
+ return
+
+
+def link_related_business_objects(action, parent_business_object_id, parent_business_object_record_id, relationship_id,
+ business_object_id, business_object_record_id):
+ url_action_str = 'linkrelatedbusinessobject' if action == 'link' else 'unlinkrelatedbusinessobject'
+ url = BASE_URL + f"api/V1/{url_action_str}/parentbusobid/{parent_business_object_id}" \
+ f"/parentbusobrecid/{parent_business_object_record_id}" \
+ f"/relationshipid/{relationship_id}" \
+ f"/busobid/{business_object_id}" \
+ f"/busobrecid/{business_object_record_id}"
+ http_method = 'GET' if action == 'link' else 'DELETE'
+ response = make_request(http_method, url)
+ parse_response(response, "Could not link business objects")
+ return
+
+
+def business_objects_relation_action(action, parent_type_name, parent_record_id, child_type_name, child_record_id,
+ relationship_id):
+ parent_business_object_id = resolve_business_object_id_by_name(parent_type_name)
+ child_business_object_id = resolve_business_object_id_by_name(child_type_name)
+ link_related_business_objects(action, parent_business_object_id, parent_record_id, relationship_id,
+ child_business_object_id, child_record_id)
+ return
+
+
+def validate_query_list(query_list, is_fetch):
+ for index, query in enumerate(query_list):
+        if len(query) != 3:
+ length_err_message = f'Cannot parse query, should be of the form: `[["FieldName","Operator","Value"],' \
+ f'["FieldName","Operator","Value"],...]`. Filter in index {index} is malformed: {query}'
+ raise_or_return_error(length_err_message, is_fetch)
+ if query[1] not in QUERY_OPERATORS:
+ operator_err_message = f'Operator should be one of the following: {", ".join(QUERY_OPERATORS)}. Filter in' \
+ f' index {index}, was: {query[1]}'
+ raise_or_return_error(operator_err_message, is_fetch)
+ return
+
+
+def validate_query_for_fetch_incidents(objects_names, query_string, real_fetch):
+ if not objects_names:
+        no_objects_err_message = 'No business object name was given.\nIn order to run an advanced query, fill ' \
+                                 'the integration parameter `Objects to fetch` with exactly one business object name.'
+ raise_or_return_error(no_objects_err_message, real_fetch)
+ if len(objects_names) > 1:
+ multiple_objects_error_message = f'Advanced query operation is supported for a single business object. ' \
+ f'{len(objects_names)} objects were given: {",".join(objects_names)}'
+ raise_or_return_error(multiple_objects_error_message, real_fetch)
+ return parse_string_query_to_list(query_string, real_fetch)
+
+
+def build_query_dict(query, field_ids_dict, is_fetch):
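+    """Translate one [field name, operator, value] filter into the API filter dict.
+
+    Illustrative example, assuming field_ids_dict maps 'Priority' to the hypothetical field ID '1234':
+    build_query_dict(['Priority', 'eq', '1'], {'Priority': '1234'}, False)
+    returns {'fieldId': '1234', 'operator': 'eq', 'value': '1'}.
+    """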
+ field_name = query[0]
+ operator = query[1]
+ value = query[2]
+    field_id = field_ids_dict.get(field_name)
+    if not field_id:
+        err_message = f'Field name: {field_name} does not exist in the given business object'
+        raise_or_return_error(err_message, is_fetch)
+    return {
+        'fieldId': field_id,
+ 'operator': operator,
+ 'value': value
+ }
+
+
+def build_query_dict_list(query_list, field_ids_dict, is_fetch):
+    query_dict_list = []
+    for query in query_list:
+        query_dict = build_query_dict(query, field_ids_dict, is_fetch)
+ query_dict_list.append(query_dict)
+ return query_dict_list
+
+
+def query_business_object(query_list, business_object_id, max_results, is_fetch=False):
+    field_ids_dict = get_key_value_dict_from_template('name', 'fieldId', business_object_id, is_fetch=is_fetch)
+    filters = build_query_dict_list(query_list, field_ids_dict, is_fetch=is_fetch)
+ query_result = run_query_on_business_objects(business_object_id, filters, max_results, is_fetch=is_fetch)
+ business_objects = parse_fields_from_business_object_list(query_result)
+ return business_objects, query_result
+
+
+def parse_string_query_to_list(query_string, is_fetch=False):
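+    """Parse and validate a JSON query string into a list of [field, operator, value] filters.
+
+    Illustrative example:
+    parse_string_query_to_list('[["Priority","eq","1"]]') returns [['Priority', 'eq', '1']].
+    """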
+ try:
+ query_list = json.loads(query_string)
+ except (ValueError, TypeError):
+ err_message = f'Cannot parse query, should be of the form: `[["FieldName","Operator","Value"],' \
+ f'["FieldName","Operator","Value"]]`.'
+ raise_or_return_error(err_message, is_fetch)
+ validate_query_list(query_list, is_fetch)
+ return query_list
+
+
+def query_business_object_string(business_object_name, query_string, max_results):
+ if max_results:
+ try:
+ int(max_results)
+ except ValueError:
+            return return_error('The `max_results` argument must be a number')
+ business_object_id = resolve_business_object_id_by_name(business_object_name)
+ query_filters_list = parse_string_query_to_list(query_string)
+ return query_business_object(query_filters_list, business_object_id, max_results)
+
+
+def get_field_info(type_name, field_property):
+    business_object_id = resolve_business_object_id_by_name(type_name)
+ template = get_business_object_template(business_object_id)
+ business_object_fields = template.get('fields')
+ field_to_return = None
+ for field in business_object_fields:
+ if field.get('displayName') == field_property or \
+ field.get('fieldId') == field_property or \
+ field.get('name') == field_property:
+ field_to_return = field
+ if field_to_return:
+ field_to_return = {
+ 'DisplayName': field_to_return.get('displayName'),
+ 'Name': field_to_return.get('name'),
+ 'FieldId': field_to_return.get('fieldId')
+ }
+ else:
+ return_error(f'Field with the value {field_property} was not found')
+ return field_to_return
+
+
+def cherwell_run_saved_search(association_id, scope, scope_owner, search_name):
+ search_payload = {
+ "Association": association_id,
+ "scope": scope,
+ "scopeOwner": scope_owner,
+ "searchName": search_name,
+ "includeAllFields": True,
+ }
+
+ results = get_search_results(search_payload)
+ return parse_fields_from_business_object_list(results)
+
+
+def cherwell_get_business_object_id(business_object_name):
+ business_object_id = resolve_business_object_id_by_name(business_object_name)
+ business_object_info = {
+ 'BusinessObjectId': business_object_id,
+ 'BusinessObjectName': business_object_name
+ }
+ return business_object_info
+
+
+def raise_or_return_error(msg, raise_flag):
+ """
+    Handles errors raised by functions that may run within the fetch-incidents flow.
+    If the error occurred as part of a fetch-incidents run, an exception is raised; otherwise a regular
+    error entry is returned.
+    This is needed because regular error entries are not handled correctly by the server during
+    fetch-incidents.
+    :param msg: error message to raise/return
+    :param raise_flag: if true, raise an exception; otherwise return an error entry
+ """
+ if raise_flag:
+ raise Exception(msg)
+ else:
+ return_error(msg)
+
+
+########################################################################################################################
+''' COMMANDS '''
+
+
+def test_command():
+ if FETCHES_INCIDENTS:
+ fetch_incidents(OBJECTS_TO_FETCH, FETCH_TIME, MAX_RESULT, QUERY_STRING, FETCH_ATTACHMENTS)
+ else:
+ get_access_token(True)
+ return
+
+
+def create_business_object_command():
+ args = demisto.args()
+ type_name = args.get('type')
+ data_json = json.loads(args.get('json'))
+ result = create_business_object(type_name, data_json)
+ ids = {
+ 'PublicId': result.get('busObPublicId'),
+ 'RecordId': result.get('busObRecId')
+ }
+ md = tableToMarkdown(f'New {type_name.capitalize()} was created', ids, headerTransform=pascalToSpace)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'Cherwell.BusinessObjects(val.RecordId == obj.RecordId)': ids
+ }
+ })
+
+
+def update_business_object_command():
+ args = demisto.args()
+ type_name = args.get('type')
+ data_json = json.loads(args.get('json'))
+ object_id = args.get('id_value')
+ id_type = args.get('id_type')
+ result = update_business_object(type_name, data_json, object_id, id_type)
+ ids = {
+ 'PublicId': result.get('busObPublicId'),
+ 'RecordId': result.get('busObRecId')
+ }
+ md = tableToMarkdown(f'{type_name.capitalize()} {object_id} was updated', ids, headerTransform=pascalToSpace)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'Cherwell.BusinessObjects(val.RecordId == obj.RecordId)': ids
+ }
+ })
+
+
+def get_business_object_command():
+ args = demisto.args()
+ type_name = args.get('type')
+ id_type = args.get('id_type')
+ object_id = args.get('id_value')
+ business_object, results = get_business_object(type_name, object_id, id_type)
+ md = tableToMarkdown(f'{type_name.capitalize()}: {object_id}', business_object,
+ headerTransform=pascalToSpace)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': results,
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'Cherwell.BusinessObjects(val.RecordId == obj.RecordId)': createContext(business_object)
+ }
+ })
+
+
+def delete_business_object_command():
+ args = demisto.args()
+ type_name = args.get('type')
+ id_type = args.get('id_type')
+ object_id = args.get('id_value')
+ results = delete_business_object(type_name, object_id, id_type)
+ md = f'### Record {object_id} of type {type_name} was deleted.'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': results,
+ 'HumanReadable': md
+ })
+
+
+def fetch_incidents_command():
+ objects_names_to_fetch = OBJECTS_TO_FETCH
+ fetch_attachments = FETCH_ATTACHMENTS
+ max_result = MAX_RESULT
+ fetch_time = FETCH_TIME
+ query_string = QUERY_STRING
+ incidents = fetch_incidents(objects_names_to_fetch, fetch_time, max_result, query_string, fetch_attachments,
+ real_fetch=True)
+ if incidents:
+ last_incident_created_time = incidents[-1].get('CreatedDateTime')
+ next_created_time_to_fetch = \
+ (datetime.strptime(last_incident_created_time, DATE_FORMAT) + timedelta(seconds=1)).strftime(DATE_FORMAT)
+ demisto.setLastRun({
+ 'last_created_time': next_created_time_to_fetch,
+ 'objects_names_to_fetch': objects_names_to_fetch
+ })
+ return
+
+
+def download_attachments_command():
+ args = demisto.args()
+ id_type = args.get('id_type')
+ object_id = args.get('id_value')
+ type_name = args.get('type')
+ attachments = download_attachments(id_type, object_id, business_object_type_name=type_name)
+ if not attachments:
+ return_error(f'No attachments were found for {type_name}:{object_id}')
+ attachment_results(attachments)
+ return
+
+
+def upload_attachment_command():
+ args = demisto.args()
+ id_type = args.get('id_type')
+ object_id = args.get('id_value')
+ type_name = args.get('type')
+ file_entry_id = args.get('file_entry_id')
+ attachment_id = upload_attachment(id_type, object_id, type_name, file_entry_id)
+ entry_context = {
+ 'AttachmentFileId': attachment_id,
+ 'BusinessObjectType': type_name,
+ string_to_context_key(id_type): object_id
+ }
+    md = f'### Attachment {attachment_id} was successfully attached to {type_name} {object_id}'
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': {'attachment_id': attachment_id},
+ 'EntryContext': {'Cherwell.UploadedAttachments(val.AttachmentId == obj.AttachmentId)': entry_context},
+ 'HumanReadable': md,
+ })
+
+
+def remove_attachment_command():
+ args = demisto.args()
+ id_type = args.get('id_type')
+ object_id = args.get('id_value')
+ type_name = args.get('type')
+ attachment_id = args.get('attachment_id')
+ remove_attachment(id_type, object_id, type_name, attachment_id)
+    md = f'### Attachment {attachment_id} was successfully removed from {type_name} {object_id}'
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': md,
+ 'HumanReadable': md,
+ })
+
+
+def get_attachments_info_command():
+ args = demisto.args()
+ id_type = args.get('id_type')
+ object_id = args.get('id_value')
+ type_name = args.get('type')
+ attachment_type = args.get('attachment_type')
+ attachments_info, raw_result = get_attachments_info(id_type, object_id, attachment_type,
+ business_object_type_name=type_name)
+ md = tableToMarkdown(f'{type_name.capitalize()} {object_id} attachments:', attachments_info,
+ headerTransform=pascalToSpace) if attachments_info \
+ else f'### {type_name.capitalize()} {object_id} has no attachments'
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': raw_result,
+ 'HumanReadable': md
+ }
+ if attachments_info:
+        entry['EntryContext'] = {'Cherwell.AttachmentsInfo': attachments_info}
+ demisto.results(entry)
+
+
+def link_business_objects_command():
+ args = demisto.args()
+ parent_type = args.get('parent_type')
+ parent_record_id = args.get('parent_record_id')
+ child_type = args.get('child_type')
+ child_record_id = args.get('child_record_id')
+ relationship_id = args.get('relationship_id')
+ business_objects_relation_action('link', parent_type, parent_record_id, child_type, child_record_id,
+ relationship_id)
+ message = \
+ f'{parent_type.capitalize()} {parent_record_id} and {child_type.capitalize()} {child_record_id} were linked'
+ md = f'### {message}'
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': message,
+ 'HumanReadable': md,
+ })
+
+
+def unlink_business_objects_command():
+ args = demisto.args()
+ parent_type = args.get('parent_type')
+ parent_record_id = args.get('parent_record_id')
+ child_type = args.get('child_type')
+ child_record_id = args.get('child_record_id')
+ relationship_id = args.get('relationship_id')
+ business_objects_relation_action('unlink', parent_type, parent_record_id, child_type, child_record_id,
+ relationship_id)
+ message = \
+ f'{parent_type.capitalize()} {parent_record_id} and {child_type.capitalize()} {child_record_id} were unlinked'
+ md = f'### {message}'
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': message,
+ 'HumanReadable': md,
+ })
+
+
+def query_business_object_command():
+ args = demisto.args()
+ type_name = args.get('type')
+ query_string = args.get('query')
+ max_results = args.get('max_results')
+ results, raw_response = query_business_object_string(type_name, query_string, max_results)
+ md = tableToMarkdown('Query Results', results, headerTransform=pascalToSpace)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': raw_response,
+ 'EntryContext': {'Cherwell.QueryResults': results},
+ 'HumanReadable': md,
+ })
+
+
+def get_field_info_command():
+ args = demisto.args()
+ type_name = args.get('type')
+ field_property = args.get('field_property')
+ results = get_field_info(type_name, field_property)
+ md = tableToMarkdown('Field info:', results, headerTransform=pascalToSpace)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': results,
+ 'EntryContext': {'Cherwell.FieldInfo(val.FieldId == obj.FieldId)': results},
+ 'HumanReadable': md
+ })
+
+
+def cherwell_run_saved_search_command():
+ args = demisto.args()
+ association_id = args.get('association_id')
+ scope = args.get('scope')
+ scope_owner = args.get('scope_owner')
+ search_name = args.get('search_name')
+ results = cherwell_run_saved_search(association_id, scope, scope_owner, search_name)
+ md = tableToMarkdown(f'{search_name} results:', results, headerTransform=pascalToSpace)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': results,
+ 'EntryContext': {'Cherwell.SearchOperation(val.RecordId == obj.RecordId)': results},
+ 'HumanReadable': md
+ })
+
+
+def cherwell_get_business_object_id_command():
+ args = demisto.args()
+ business_object_name = args.get('business_object_name')
+ result = cherwell_get_business_object_id(business_object_name)
+    md = tableToMarkdown('Business Object Info:', result, headerTransform=pascalToSpace)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': result,
+ 'EntryContext': {'Cherwell.BusinessObjectInfo(val.BusinessObjectId == obj.BusinessObjectId)': result},
+ 'HumanReadable': md
+ })
+
+
+#######################################################################################################################
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('Command being called is %s' % (demisto.command()))
+
+try:
+ handle_proxy()
+ if demisto.command() == 'test-module':
+ test_command()
+ demisto.results('ok')
+
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents_command()
+
+ elif demisto.command() == 'cherwell-create-business-object':
+ create_business_object_command()
+
+ elif demisto.command() == 'cherwell-update-business-object':
+ update_business_object_command()
+
+ elif demisto.command() == 'cherwell-get-business-object':
+ get_business_object_command()
+
+ elif demisto.command() == 'cherwell-delete-business-object':
+ delete_business_object_command()
+
+ elif demisto.command() == 'cherwell-download-attachments':
+ download_attachments_command()
+
+ elif demisto.command() == 'cherwell-get-attachments-info':
+ get_attachments_info_command()
+
+ elif demisto.command() == 'cherwell-upload-attachment':
+ upload_attachment_command()
+
+ elif demisto.command() == 'cherwell-remove-attachment':
+ remove_attachment_command()
+
+ elif demisto.command() == 'cherwell-link-business-objects':
+ link_business_objects_command()
+
+ elif demisto.command() == 'cherwell-unlink-business-objects':
+ unlink_business_objects_command()
+
+ elif demisto.command() == 'cherwell-query-business-object':
+ query_business_object_command()
+
+ elif demisto.command() == 'cherwell-get-field-info':
+ get_field_info_command()
+
+ elif demisto.command() == 'cherwell-run-saved-search':
+ cherwell_run_saved_search_command()
+
+ elif demisto.command() == 'cherwell-get-business-object-id':
+ cherwell_get_business_object_id_command()
+
+
+# Log exceptions
+except Exception as e:
+    if demisto.command() == 'fetch-incidents':
+        raise
+    message = f'Unexpected error: {e}, traceback: {traceback.format_exc()}'
+    LOG(message)
+ LOG.print_log()
+ return_error(message)
diff --git a/Integrations/Cherwell/Cherwell.yml b/Integrations/Cherwell/Cherwell.yml
new file mode 100644
index 000000000000..f4ed7dff367f
--- /dev/null
+++ b/Integrations/Cherwell/Cherwell.yml
@@ -0,0 +1,539 @@
+category: Case Management
+commonfields:
+ id: Cherwell
+ version: -1
+configuration:
+- display: 'URL (example: https://my.domain.com)'
+ name: url
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- display: Client ID
+ name: client_id
+ required: true
+ type: 0
+- defaultvalue: 'true'
+ display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: ""
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ required: false
+ type: 8
+- defaultvalue: 3 days
+  display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days)
+ name: fetch_time
+ required: false
+ type: 0
+- defaultvalue: incident
+  display: 'CSV list of objects to fetch (the default is incident), for example: incident,problem,service'
+ name: objects_to_fetch
+ required: false
+ type: 0
+- defaultvalue: '30'
+  display: Max results to fetch (default is 30)
+ name: max_results
+ required: false
+ type: 0
+- display: Advanced Query to fetch (see integration detailed instructions)
+ name: query_string
+ required: false
+ type: 0
+- display: Fetch attachments (include attachments in the fetch process)
+ name: fetch_attachments
+ required: false
+ type: 8
+- defaultvalue: ""
+ display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- defaultvalue: ""
+ display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+description: Cloud-based IT service management solution
+display: Cherwell
+name: Cherwell
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: ' Business object type, for example: "Incident". '
+ isArray: false
+ name: type
+ required: true
+ secret: false
+ - default: false
+ description: |-
+ Data JSON containing the relevant fields and their values, for example:
+ {"title": "some value"}).
+ isArray: false
+ name: json
+ required: true
+ secret: false
+ deprecated: false
+ description: Creates a business object.
+ execution: false
+ name: cherwell-create-business-object
+ outputs:
+ - contextPath: Cherwell.BusinessObjects.RecordId
+ description: 'Business object record ID. '
+ type: String
+ - contextPath: Cherwell.BusinessObjects.PublicId
+ description: 'Business object public ID. '
+ type: String
+ - arguments:
+ - default: false
+ description: ' Business object type, for example: "Incident". '
+ isArray: false
+ name: type
+ required: true
+ secret: false
+ - default: false
+ description: Data JSON containing the relevant fields and their values.
+ isArray: false
+ name: json
+ required: true
+ secret: false
+ - default: false
+ description: Public ID or record ID.
+ isArray: false
+ name: id_value
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Type of ID.
+ isArray: false
+ name: id_type
+ predefined:
+ - public_id
+ - record_id
+ required: true
+ secret: false
+ deprecated: false
+  description: Updates a business object with the specified fields.
+ execution: false
+ name: cherwell-update-business-object
+ outputs:
+ - contextPath: Cherwell.BusinessObjects.RecordId
+ description: 'Business object record ID. '
+ type: String
+ - contextPath: Cherwell.BusinessObjects.PublicId
+ description: 'Business object public ID. '
+    type: String
+ - arguments:
+ - default: false
+ description: ' Business object type, for example: "Incident". '
+ isArray: false
+ name: type
+ required: true
+ secret: false
+ - default: false
+ description: Public ID or record ID.
+ isArray: false
+ name: id_value
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Type of ID.
+ isArray: false
+ name: id_type
+ predefined:
+ - public_id
+ - record_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes a given business object.
+ execution: false
+ name: cherwell-delete-business-object
+ - arguments:
+ - default: false
+ description: ' Business object type, for example: "Incident". '
+ isArray: false
+ name: type
+ required: true
+ secret: false
+ - default: false
+ description: Public ID or record ID.
+ isArray: false
+ name: id_value
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Type of ID.
+ isArray: false
+ name: id_type
+ predefined:
+ - record_id
+ - public_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets a business object by an ID.
+ execution: false
+ name: cherwell-get-business-object
+ - arguments:
+ - default: false
+ description: ' Business object type, for example: "Incident". '
+ isArray: false
+ name: type
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Type of ID.
+ isArray: false
+ name: id_type
+ predefined:
+ - public_id
+ - record_id
+ required: true
+ secret: false
+ - default: false
+ description: Public ID or record ID.
+ isArray: false
+ name: id_value
+ required: true
+ secret: false
+ deprecated: false
+  description: Downloads imported attachments from a specified business object.
+ execution: false
+ name: cherwell-download-attachments
+ outputs:
+ - contextPath: File
+ description: File result entries.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: ' Business object type, for example: "Incident". '
+ isArray: false
+ name: type
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Type of ID.
+ isArray: false
+ name: id_type
+ predefined:
+ - record_id
+ - public_id
+ required: true
+ secret: false
+ - default: false
+ description: Public ID or record ID.
+ isArray: false
+ name: id_value
+ required: true
+ secret: false
+ - default: false
+ description: File entry ID.
+ isArray: false
+ name: file_entry_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Uploads an attachment to a specified business object.
+ execution: false
+ name: cherwell-upload-attachment
+ outputs:
+ - contextPath: Cherwell.UploadedAttachments.AttachmentFileId
+    description: AttachmentFileId, used to get information about the attachment.
+ type: String
+ - contextPath: Cherwell.UploadedAttachments.BusinessObjectType
+ description: ' Business object type, for example: "Incident". '
+ type: String
+ - contextPath: Cherwell.UploadedAttachments.PublicId
+ description: 'Public ID. '
+ type: String
+ - contextPath: Cherwell.UploadedAttachments.RecordId
+ description: Record ID.
+ type: String
+ - arguments:
+ - default: false
+ description: Parent business object type name.
+ isArray: false
+ name: parent_type
+ required: true
+ secret: false
+ - default: false
+ description: Parent business object record ID.
+ isArray: false
+ name: parent_record_id
+ required: true
+ secret: false
+ - default: false
+ description: Child business object type name.
+ isArray: false
+ name: child_type
+ required: true
+ secret: false
+ - default: false
+ description: Child business object record ID.
+ isArray: false
+ name: child_record_id
+ required: true
+ secret: false
+ - default: false
+ description: Relationship ID.
+ isArray: false
+ name: relationship_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Links business objects that are related.
+ execution: false
+ name: cherwell-link-business-objects
+ - arguments:
+ - default: false
+ description: Parent business object type name.
+ isArray: false
+ name: parent_type
+ required: true
+ secret: false
+ - default: false
+ description: Parent business object record ID.
+ isArray: false
+ name: parent_record_id
+ required: true
+ secret: false
+ - default: false
+ description: Child business object type name.
+ isArray: false
+ name: child_type
+ required: true
+ secret: false
+ - default: false
+ description: Child business object record ID.
+ isArray: false
+ name: child_record_id
+ required: true
+ secret: false
+ - default: false
+ description: Relationship ID.
+ isArray: false
+ name: relationship_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Unlinks business objects that are linked and related.
+ execution: false
+ name: cherwell-unlink-business-objects
+ - arguments:
+ - default: false
+ description: ' Business object type, for example: "Incident". '
+ isArray: false
+ name: type
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Type of ID.
+ isArray: false
+ name: id_type
+ predefined:
+ - record_id
+ - public_id
+ required: true
+ secret: false
+ - default: false
+ description: Public ID or record ID.
+ isArray: false
+ name: id_value
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Type of attachment.
+ isArray: false
+ name: attachment_type
+ predefined:
+ - linked
+ - imported
+ - url
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets information for business object attachments.
+ execution: false
+ name: cherwell-get-attachments-info
+ outputs:
+  - contextPath: Cherwell.AttachmentsInfo.AttachmentFileId
+    description: Attachment file ID.
+ type: String
+ - contextPath: Cherwell.AttachmentsInfo.FileName
+ description: File name.
+ type: String
+ - contextPath: Cherwell.AttachmentsInfo.AttachmentId
+ description: Attachment ID.
+ type: String
+ - contextPath: Cherwell.AttachmentsInfo.BusinessObjectType
+ description: ' Business object type, for example: "Incident". '
+ type: String
+ - contextPath: Cherwell.AttachmentsInfo.BusinessObjectPublicId
+ description: Business object public ID.
+ type: String
+ - contextPath: Cherwell.AttachmentsInfo.BusinessObjectRecordId
+ description: Business object record ID.
+ type: String
+ - arguments:
+ - default: false
+ description: ' Business object type, for example: "Incident". '
+ isArray: false
+ name: type
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Type of ID.
+ isArray: false
+ name: id_type
+ predefined:
+ - record_id
+ - public_id
+ required: true
+ secret: false
+ - default: false
+ description: Public ID or record ID.
+ isArray: false
+ name: id_value
+ required: true
+ secret: false
+ - default: false
+    description: Attachment ID to remove.
+ isArray: false
+ name: attachment_id
+ required: true
+ secret: false
+ deprecated: false
+  description: Removes the attachment from the specified business object.
+ execution: false
+ name: cherwell-remove-attachment
+ - arguments:
+ - default: false
+ description: ' Business object type, for example: "Incident". '
+ isArray: false
+ name: type
+ required: true
+ secret: false
+ - default: false
+ description: "The query to run. A CSV list of filters such that each filter\
+ \ is of the form: [\"field_name\",\"operator\",\"value\"] and operator is\
+ \ one of: 'eq'=equal, 'gt'=grater-than, 'lt'=less-than, 'contains', 'startwith'.\
+ \ Special characters should be escaped.\nExample: `[[\"CreatedDateTime\":\"\
+ gt\":\"4/10/2019 3:10:12 PM\"][\"Priority\",\"eq\",\"1\"]]`. \nNOTE: If multiple\
+ \ filters are received for the same field name, an 'OR' operation between\
+ \ the filters will be performed, if the field names are different an 'AND'\
+ \ operation will be performed."
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '30'
+ description: Maximum number of results to pull.
+ isArray: false
+ name: max_results
+ required: false
+ secret: false
+ deprecated: false
+ description: Runs advanced queries to search in a specified business object.
+ execution: false
+ name: cherwell-query-business-object
+ - arguments:
+ - default: false
+ description: ' Business object type, for example: "Incident". '
+ isArray: false
+ name: type
+ required: true
+ secret: false
+ - default: false
+    description: Field property to search by (Name, Display Name, or Field ID).
+ isArray: false
+ name: field_property
+ required: true
+ secret: false
+ deprecated: false
+  description: Gets information for a field, by one of its properties (Name, Display
+    Name, or ID).
+ execution: false
+ name: cherwell-get-field-info
+ outputs:
+ - contextPath: Cherwell.FieldInfo.DisplayName
+ description: Field display name (as it displays in the Cherwell UI).
+ type: String
+ - contextPath: Cherwell.FieldInfo.FieldId
+ description: Field ID.
+ type: String
+ - contextPath: Cherwell.FieldInfo.Name
+ description: The name to use when working with business object commands.
+ type: String
+ - arguments:
+ - default: false
+ description: Business object association ID for the saved search.
+ isArray: false
+ name: association_id
+ required: true
+ secret: false
+ - default: false
+    description: Scope name or ID for the saved search.
+ isArray: false
+ name: scope
+ required: true
+ secret: false
+ - default: false
+ description: Scope owner ID for the saved search. Use "(None)" when no scope
+ owner exists.
+ isArray: false
+ name: scope_owner
+ required: true
+ secret: false
+ - default: false
+ description: Name of the saved search.
+ isArray: false
+ name: search_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the results of a saved search.
+ execution: false
+ name: cherwell-run-saved-search
+ - arguments:
+ - default: false
+ description: Business object name.
+ isArray: false
+ name: business_object_name
+ required: true
+ secret: false
+ deprecated: false
+  description: Gets a business object ID by name.
+ execution: false
+ name: cherwell-get-business-object-id
+ outputs:
+ - contextPath: Cherwell.BusinessObjectInfo.BusinessObjectId
+ description: Business object ID.
+ type: String
+ - contextPath: Cherwell.BusinessObjectInfo.BusinessObjectName
+ description: Business object name.
+ type: String
+ dockerimage: demisto/python3:3.7.2.214
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- Cherwell - test
diff --git a/Integrations/Cherwell/Cherwell_description.md b/Integrations/Cherwell/Cherwell_description.md
new file mode 100644
index 000000000000..d049d4d12911
--- /dev/null
+++ b/Integrations/Cherwell/Cherwell_description.md
@@ -0,0 +1,13 @@
+Integration with Cherwell Service Management. You can create, read update, and delete business objects, together with
+attachments and relations operations.
+
+In order to create, query, get, update, delete, and link business objects, we recommend duplicating Cherwell example
+scripts and edit them using the instructions within each script.
+
+To use an advanced query when fetching incidents, add your query in the advanced query parameter.
+The query should be a CSV list of filters, such that each filter is of the form:
+`["FieldName","Operator","Value"]` and operator is one of: 'eq'=equal, 'gt'=grater-than, 'lt'=less-than, 'contains',
+'startwith'. nSpecial characters should be escaped. Example:
+`[["CreatedDateTime","gt","4/10/2019 3:10:12 PM"],["Priority","eq","1"]]`
+NOTE: If received multiple filters for the same field name, an 'OR' operation between the filters will be performed,
+if the field names are different an 'AND' operation will be performed.
\ No newline at end of file
diff --git a/Integrations/Cherwell/Cherwell_image.png b/Integrations/Cherwell/Cherwell_image.png
new file mode 100644
index 000000000000..0c3ef09a9672
Binary files /dev/null and b/Integrations/Cherwell/Cherwell_image.png differ
diff --git a/Integrations/Cisco-umbrella/CHANGELOG.md b/Integrations/Cisco-umbrella/CHANGELOG.md
new file mode 100644
index 000000000000..3c236b4fddba
--- /dev/null
+++ b/Integrations/Cisco-umbrella/CHANGELOG.md
@@ -0,0 +1,7 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+Added several context outputs to the following commands to support Demisto v5.0.
+ - ***domain***
+ - ***umbrella-get-whois-for-domain***
\ No newline at end of file
diff --git a/Integrations/Cisco-umbrella/Cisco-umbrella-desc.md b/Integrations/Cisco-umbrella/Cisco-umbrella-desc.md
new file mode 100644
index 000000000000..5325de1038ff
--- /dev/null
+++ b/Integrations/Cisco-umbrella/Cisco-umbrella-desc.md
@@ -0,0 +1,2 @@
+Cisco Investigate is part of the Cisco Umbrella package. When you log in to the Cisco Umbrella portal, you will need
+to obtain the API token for the Cisco Investigate feature.
\ No newline at end of file
diff --git a/Integrations/Cisco-umbrella/Cisco-umbrella.png b/Integrations/Cisco-umbrella/Cisco-umbrella.png
new file mode 100644
index 000000000000..94689dd002af
Binary files /dev/null and b/Integrations/Cisco-umbrella/Cisco-umbrella.png differ
diff --git a/Integrations/Cisco-umbrella/Cisco-umbrella.py b/Integrations/Cisco-umbrella/Cisco-umbrella.py
new file mode 100644
index 000000000000..f31953f0cb70
--- /dev/null
+++ b/Integrations/Cisco-umbrella/Cisco-umbrella.py
@@ -0,0 +1,1855 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import sys
+import requests
+import json
+import time
+import re
+import urllib
+from urlparse import urlparse
+from distutils.util import strtobool
+from datetime import datetime, timedelta
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+API_TOKEN = demisto.params()['APIToken']
+BASE_URL = demisto.params()['baseURL']
+USE_SSL = not demisto.params().get('insecure', False)
+DEFAULT_HEADERS = {
+ 'Authorization': 'Bearer {}'.format(API_TOKEN),
+ 'Accept': 'application/json'
+}
+MALICIOUS_THRESHOLD = int(demisto.params().get('dboscore_threshold', -100))
+
+''' MAPS '''
+
+# This object describes the result of the HTTP request made by the getDomainSecurity function.
+# Each field has a Name and Info, and optionally a ContextKey.
+SECURITY_RESULT_INFO = {
+ 'dga_score': {'Name': 'DGA', 'ContextKey': 'DGA',
+ 'Info': 'Domain Generation Algorithm. This score is generated based on the likeliness of the domain '
+ 'name being generated by an algorithm rather than a human. This algorithm is designed to '
+ 'identify domains which have been created using an automated randomization strategy, '
+ 'which is a common evasion technique in malware kits or botnets. This score ranges from '
+ '-100 (suspicious) to 0 (benign)'},
+ 'perplexity': {'Name': 'Perplexity',
+ 'Info': 'A second score on the likeliness of the name to be algorithmically generated, on a scale '
+ 'from 0 to 1. This score is to be used in conjunction with DGA'},
+ 'entropy': {'Name': 'Entropy',
+ 'Info': 'The number of bits required to encode the domain name, as a score. This score is to be used '
+ 'in conjunction with DGA and Perplexity'},
+ 'securerank2': {'Name': 'SecureRank',
+ 'Info': 'Suspicious rank for a domain that reviews based on the lookup behavior of client IP for '
+ 'the domain. Securerank is designed to identify hostnames requested by known infected '
+ 'clients but never requested by clean clients, assuming these domains are more likely to '
+ 'be bad. Scores returned range from -100 (suspicious) to 100 (benign)'},
+ 'pagerank': {'Name': 'PageRank', 'Info': 'Popularity according to Google\'s pagerank algorithm'},
+ 'asn_score': {'Name': 'ASN Score', 'ContextKey': 'ASNScore',
+ 'Info': 'ASN reputation score, ranges from -100 to 0 with -100 being very suspicious'},
+ 'prefix_score': {'Name': 'Prefix Score', 'ContextKey': 'PrefixScore',
+ 'Info': 'Prefix ranks domains given their IP prefixes (an IP prefix is the first three octets in '
+ 'an IP address) and the reputation score of these prefixes. Ranges from -100 to 0, '
+ '-100 being very suspicious'},
+ 'rip_score': {'Name': 'RIP Score', 'ContextKey': 'RIPScore',
+ 'Info': 'RIP ranks domains given their IP addresses and the reputation score of these IP addresses. '
+ 'Ranges from -100 to 0, -100 being very suspicious'},
+ 'popularity': {'Name': 'Popularity',
+ 'Info': 'The number of unique client IPs visiting this site, relative to the all requests to all '
+ 'sites. A score of how many different client/unique IPs go to this domain compared to '
+ 'others'},
+ 'geoscore': {'Name': 'GeoScore',
+ 'Info': 'A score that represents how far the different physical locations serving this name are from '
+ 'each other'},
+ 'ks_test': {'Name': 'Kolmogorov-Smirnov Test', 'ContextKey': 'KolmogorovSmirnovTest',
+ 'Info': "Kolmogorov-Smirnov test on geodiversity. 0 means that the client traffic matches what is "
+ "expected for this TLD"},
+ 'attack': {'Name': 'Attack Name', 'ContextKey': 'AttackName',
+ 'Info': 'The name of any known attacks associated with this domain. Returns blank if no known threat '
+ 'associated with domain'},
+ 'threat_type': {'Name': 'Threat Type', 'ContextKey': 'ThreatType',
+ 'Info': 'The type of the known attack, such as botnet or APT. Returns blank if no known threat '
+ 'associated with domain'}
+}
+
+# Used to describe the result of the getDomainDNSHistory function
+IP_DNS_FEATURE_INFO = {
+ 'rr_count': 'Number of records of that type mapping to the given IP',
+ 'ld2_count': 'Number of 2-level names mapping to the given IP',
+ 'ld3_count': 'Number of 3-level names mapping to the given IP',
+ 'ld2_1_count': 'Number of 2-level names, without the TLD, mapping to the given IP',
+ 'ld2_2_count': 'Number of 3-level names, without the TLD, mapping to a given IP',
+ 'div_ld2': 'ld2_count divided by the number of records',
+ 'div_ld3': 'ld3_count divided by the number of records',
+ 'div_ld2_1': 'ld2_1_count divided by the number of records',
+ 'div_ld2_2': 'ld2_2_count divided by the number of record'
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def extract_domain_name(url):
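+    """Strip the scheme and path from a URL, e.g. 'https://example.com/path' -> 'example.com'."""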
+ return url.split("//")[-1].split("/")[0]
+
+
+def http_request(api_endpoint, params_dict=None, method='GET', data_list=None):
+ req_params = {} # type: dict
+ # request does not accept lists, only string/dict
+ if data_list and isinstance(data_list, list):
+ data_list = json.dumps(data_list)
+ if params_dict:
+ req_params.update(params_dict)
+ url = BASE_URL + api_endpoint
+ LOG('running %s request with url=%s\tparams=%s\tdata=%s' % (method, url, json.dumps(req_params), data_list))
+ try:
+ res = requests.request(
+ method,
+ url,
+ verify=USE_SSL,
+ params=req_params,
+ headers=DEFAULT_HEADERS,
+ data=data_list
+ )
+
+ res.raise_for_status()
+ return res.json()
+
+    except Exception as e:
+ LOG(e)
+ raise
+
+
+def format_string_to_table_header_format(string):
+ # example: "one_two" to "One Two"
+ if type(string) in STRING_TYPES:
+ return " ".join(word.capitalize() for word in string.replace("_", " ").split())
+ else:
+ return_error('The key is not a string: {}'.format(string))
+
+
+def format_string_to_context_key_format(string):
+ # example: "one_two" to "OneTwo"
+ if type(string) in STRING_TYPES:
+ return "".join(word.capitalize() for word in string.split('_'))
+ else:
+ return_error('The key is not a string: {}'.format(string))
+
+
+def date_to_timestamp_func(date):
+ # this helper function tries to parse a date time string according to a specific format
+ # if it fails, it will just output the original value
+ try:
+ ts = datetime.strptime(date[0:16], '%Y-%m-%dT%H:%M')
+ except ValueError:
+ pass
+ else:
+ if date[19] == '+':
+ ts += timedelta(hours=int(date[20:22]), minutes=int(date[22:24]))
+ elif date[19] == '-':
+ ts -= timedelta(hours=int(date[20:22]), minutes=int(date[22:24]))
+ ts = time.mktime(ts.timetuple()) # type: ignore
+ return str(int(ts) * 1000) # type: ignore
+ return date
+
+
+def timestamp_to_date(ts):
+ if ts:
+ # Gets a timestamp (either str or int, either in seconds or milliseconds) and converts it to a date.
+ ts = str(ts)
+ if len(ts) > 10:
+ ts = ts[:10]
+ ts = int(ts)
+ return datetime.utcfromtimestamp(ts).strftime('%Y-%m-%dT%H:%M:%S')
+ return ts
+
+
+def securerank_to_dbotscore(sr):
+    # Converts a Cisco Umbrella Investigate secure rank to a DBot score
+    DBotScore = 0
+    if 0 < sr <= 100:
+        DBotScore = 1
+    elif MALICIOUS_THRESHOLD < sr < 0:
+        DBotScore = 2
+ DBotScore = 2
+ elif sr <= MALICIOUS_THRESHOLD:
+ DBotScore = 3
+ return DBotScore
+
+
+''' INTERNAL FUNCTIONS '''
+
+
+def get_co_occurences(domain):
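+    """Query the Investigate co-occurrences endpoint for a domain.
+
+    Returns False when nothing was found; otherwise a list of rows of the illustrative shape
+    {'Name': '<co-occurring domain>', 'Score': <score>}, taken from the 'pfs2' field of the response.
+    """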
+ # Build & Send request
+ endpoint_url = '/recommendations/name/' + domain + '.json'
+ res_co_occurences = http_request(endpoint_url)
+
+ # Assign and validate response
+ co_occurences = res_co_occurences.get('pfs2', [])
+    if not res_co_occurences.get('found') or not co_occurences:
+ return False
+ table_co_occurences = []
+
+ for co_occurence in co_occurences:
+ table_co_occurences.append({
+ 'Name': co_occurence[0],
+ 'Score': co_occurence[1]
+ })
+
+ return table_co_occurences
+
+
+def get_domains_categorization(domains):
+ # Build & Send request
+ endpoint_url = '/domains/categorization?showLabels'
+ res = http_request(endpoint_url, None, 'POST', domains)
+ # Validate response
+ if not res:
+ return False
+ return res
+
+
+''' BUSINESS LOGIC / COMMANDS '''
+
+
+def get_domain_categorization_command():
+ # Initialize
+ contents = [] # type: ignore
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ # Get vars
+ domain = extract_domain_name(demisto.args()['domain'])
+ # Fetch data
+ categorization = get_domain_categorization(domain)
+ if categorization:
+ # Process response - build context and markdown table
+ domain_context = {
+ 'Name': domain
+ }
+
+ contents = { # type: ignore
+ # will be overridden below if the result contains any
+ 'Content Categories': 'No Content Categories Were Found',
+ 'Malware Categories': 'No Security Categories Were Found'
+ }
+
+ if categorization.get('status'):
+ contents['Status'] = categorization['status'] # type: ignore
+ if categorization.get('content_categories'):
+ content_categories = ",".join(categorization['content_categories'])
+ contents['Content Categories'] = content_categories # type: ignore
+ domain_context['ContentCategories'] = content_categories
+ if categorization.get('security_categories'):
+ security_categories = ",".join(categorization['security_categories'])
+ contents['Malware Categories'] = security_categories # type: ignore
+ domain_context['SecurityCategories'] = security_categories
+ if categorization.get('status') == -1:
+ domain_context['Malicious'] = {
+ 'Vendor': 'Cisco Umbrella Investigate',
+ 'Description': security_categories
+ }
+
+ context[outputPaths['domain']] = domain_context
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Categorization:', contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_domain_categorization(domain):
+ # Build & Send request
+ endpoint_url = '/domains/categorization/' + domain + '?showLabels'
+ res = http_request(endpoint_url)
+
+ # Validate and assign response
+ categorization = res.get(domain, [])
+ if not categorization:
+ return False
+ return categorization
+
+
+def get_domain_search_command():
+ # Initialize
+ contents = [] # type: ignore
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ # Get vars
+ regex = demisto.args()['regex']
+ start = demisto.args().get('start', '')
+ limit = int(demisto.args().get('limit', 0))
+ # Fetch data
+ matches = get_domain_search(regex, start)
+ if matches:
+ # Process response - build context and markdown table
+ if limit:
+ matches = matches[:limit]
+ contents = matches[:]
+ for index, row in enumerate(contents):
+ contents[index] = {
+ 'Name': row['name'],
+ 'First Seen': row['firstSeenISO'],
+ 'Security Categories': ",".join(row['securityCategories'])
+ }
+
+ domain_context = []
+ for match in matches:
+ security_categories_str = ",".join(match['securityCategories'])
+ domain = {
+ 'Name': match['name'],
+ 'SecurityCategories': security_categories_str if security_categories_str else None,
+ 'FirstSeen': match['firstSeen'],
+ 'FirstSeenISO': match['firstSeenISO']
+ }
+ if 'Malware' in security_categories_str:
+ domain['Malicious'] = {
+ 'Vendor': 'Cisco Umbrella Investigate',
+ 'Description': 'Tagged as malware'
+ }
+ domain_context.append(domain)
+
+ context[outputPaths['domain']] = domain_context
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Search Results:', contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_domain_search(regex, start):
+ # Build & Send request
+ matches = {} # type: ignore
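+ # normalize the user-supplied start value (strip all whitespace); default to the last 31 days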
+ start = "".join(start.split()) if start else '-31days'
+ endpoint_url = '/search/' + regex
+ params = {
+ 'start': start,
+ 'includecategory': 'true'
+ }
+ res = http_request(endpoint_url, params)
+
+ # Validate and assign response
+ matches = res.get('matches')
+ if not matches or not isinstance(matches, list):
+ return False
+ return matches
+
+
+def get_domain_co_occurrences_command():
+ # Initialize
+ contents = []
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ # Get vars
+ domain = extract_domain_name(demisto.args()['domain'])
+ # Fetch data
+ occurrences = get_domain_co_occurrences(domain)
+ if occurrences:
+ # Process response - build context and markdown table
+ for occurrence in occurrences:
+ contents.append({
+ 'Name': occurrence[0],
+ 'Score': occurrence[1]
+ })
+
+ if contents:
+ context[outputPaths['domain']] = {
+ 'Name': domain,
+ 'CoOccurrences': contents
+ }
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Co-occurrences:', contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_domain_co_occurrences(domain):
+ # Build & Send request
+ endpoint_url = '/recommendations/name/' + domain + '.json'
+ res = http_request(endpoint_url)
+
+ # Validate and assign response
+ occurrences = res.get('pfs2')
+ if not isinstance(occurrences, list) or not occurrences:
+ return False
+ return occurrences
+
+
+def get_domain_related_command():
+ # Initialize
+ contents = []
+ context = {} # type: ignore
+ headers = [] # type: ignore
+ results = []
+ # Get vars
+ domain = extract_domain_name(demisto.args()['domain'])
+ # Fetch data
+ related_list = get_domain_related(domain)
+ if related_list:
+ # Process response - build context and markdown table
+ for related in related_list:
+ contents.append({
+ 'Name': related[0],
+ 'Score': related[1]
+ })
+
+ if contents:
+ context[outputPaths['domain']] = {
+ 'Name': domain,
+ 'Related': contents
+ }
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Related Domains:', contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_domain_related(domain):
+ # Build & Send request
+ endpoint_url = '/links/name/' + domain + '.json'
+ res = http_request(endpoint_url)
+
+ # Validate and assign response
+ related_list = res.get('tb1', [])
+ if not isinstance(related_list, list) or not related_list:
+ return False
+ return related_list
+
+
+def get_domain_security_command():
+ # Initialize
+ contents = []
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ # Get vars
+ domain = extract_domain_name(demisto.args()['domain'])
+ threshold = int(demisto.args().get('threshold', MALICIOUS_THRESHOLD))
+ # Fetch data
+ res = get_domain_security(domain)
+ if res:
+ # Process response - build context and markdown table
+ # each key in SECURITY_RESULT_INFO corresponds to a key in 'res';
+ # we take the score from 'res' and add Name & Info from SECURITY_RESULT_INFO
+ for key in SECURITY_RESULT_INFO:
+ info = SECURITY_RESULT_INFO[key]
+ contents.append({
+ 'Name': info['Name'],
+ 'Score': res[key],
+ 'Info': info['Info']
+ })
+
+ domain_security_context = {}
+
+ for key in SECURITY_RESULT_INFO:
+ context_key = SECURITY_RESULT_INFO[key].get('ContextKey', format_string_to_context_key_format(
+ SECURITY_RESULT_INFO[key]['Name']))
+ domain_security_context[context_key] = res[key]
+
+ if domain_security_context:
+ secure_rank = res.get('securerank2', False)
+ DBotScore = 0
+ if secure_rank:
+ if secure_rank < threshold:
+ DBotScore = 3
+ else:
+ DBotScore = securerank_to_dbotscore(secure_rank)
+ context[outputPaths['dbotscore']] = {
+ 'Indicator': domain,
+ 'Type': 'domain',
+ 'Vendor': 'Cisco Umbrella Investigate',
+ 'Score': DBotScore
+ }
+
+ context[outputPaths['domain']] = {
+ 'Name': domain,
+ 'Security': domain_security_context
+ }
+
+ if DBotScore == 3:
+ context[outputPaths['domain']]['Malicious'] = {
+ 'Vendor': 'Cisco Umbrella Investigate',
+ 'Description': 'Malicious domain found via umbrella-domain-security'
+ }
+ else:
+ context[outputPaths['dbotscore']] = {
+ 'Indicator': domain,
+ 'Type': 'domain',
+ 'Vendor': 'Cisco Umbrella Investigate',
+ 'Score': 0
+ }
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Domain Security Info:', contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_domain_security(domain):
+ # Build & Send request
+ endpoint_url = '/security/name/' + domain + '.json'
+ res = http_request(endpoint_url)
+
+ # Validate and assign response
+ if not res or res.get('errorMessage'):
+ return False
+ return res
+
+
+def get_domain_dns_history_command():
+ # Initialize
+ contents = {} # type: ignore
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ # Get vars
+ domain = extract_domain_name(demisto.args()['domain'])
+ # Fetch data
+ features = get_domain_dns_history(domain)
+ if features:
+ # Process response - build context and markdown table
+ dns_history_context = {}
+
+ for feature in features:
+ table_key = format_string_to_table_header_format(feature)
+ context_key = format_string_to_context_key_format(feature)
+ value = features.get(feature, '')
+ dns_history_context[context_key] = value
+ if feature == 'locations':
+ contents[table_key] = []
+ for location in features[feature]:
+ contents[table_key].append("[ " + str(location['lat']) + ", " + str(location['lon']) + " ]")
+ contents[table_key] = ','.join(contents[table_key])
+ else:
+ if isinstance(value, list):
+ contents[table_key] = ','.join(str(item) for item in value)
+ elif value:
+ contents[table_key] = value
+
+ context[outputPaths['domain']] = {
+ 'Name': domain,
+ 'DNSHistory': dns_history_context
+ }
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('DNS History:', contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_domain_dns_history(domain):
+ # this command returns 2 entries, but the context update is done with the 2nd entry
+ # Build & Send request
+ endpoint_url = '/dnsdb/name/a/' + domain + '.json'
+ res = http_request(endpoint_url)
+
+ # Validate and assign response
+ features = res.get('features', {})
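+ # the response must contain more than just the base_domain/is_subdomain keys to be useful;
+ # viewkeys() > set is a Python 2 proper-superset check over the dict's keys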
+ if not features or not features.viewkeys() > {"base_domain", "is_subdomain"}:
+ return False
+ # extract the IP address from the first resource record of the response
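+ # note: this assumes 'rrs_tf' and its first 'rrs' entry are present; a differently shaped response would raise here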
+ address = res.get('rrs_tf')[0].get('rrs')[0].get('rr')
+ features['ip'] = address
+ return features
+
+
+def get_ip_dns_history_command():
+ # Initialize
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ # Get vars
+ ip = demisto.args()['ip']
+ limit = int(demisto.args().get('limit', 0))
+ # Fetch data
+ response_object = get_ip_dns_history(ip)
+ if response_object:
+ response = response_object['response']
+ features = response_object['features']
+ # Process response - build context and markdown table
+ response_contents = []
+ if limit:
+ response = response[:limit]
+ for item in response:
+ response_contents.append({
+ 'RR': item['rr'],
+ 'TTL': item['ttl'],
+ 'Class': item['class'],
+ 'Type': item['type'],
+ 'Name': item['name']
+ })
+
+ features_contents = {}
+ features_context = {}
+ for key in IP_DNS_FEATURE_INFO:
+ features_contents[IP_DNS_FEATURE_INFO[key]] = features[key]
+ context_key = format_string_to_context_key_format(key)
+ features_context[context_key] = features[key]
+
+ context[outputPaths['ip']] = {
+ 'Address': ip,
+ 'DNSHistory': {
+ 'RRS': response_contents,
+ 'Features': features_context
+ }
+ }
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': [response_contents, features_contents],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('RRS:', response_contents, headers) + tableToMarkdown('Features:',
+ features_contents,
+ headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_ip_dns_history(ip):
+ # Build & Send request
+ endpoint_url = '/dnsdb/ip/a/' + ip + '.json'
+ res = http_request(endpoint_url)
+
+ # Validate and assign response
+ features = res.get('features', [])
+ response = res.get('rrs', [])
+ if not features or not response:
+ return False
+ return {'features': features, 'response': response}
+
+
+def get_ip_malicious_domains_command():
+ # Initialize
+ contents = []
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ context_dbotscore = []
+ # Get vars
+ ip = demisto.args()['ip']
+ # Fetch data
+ res = get_ip_malicious_domains(ip)
+ if res:
+ # Process response - build context and markdown table
+ for domain in res:
+ contents.append({
+ 'Name': domain['name'],
+ 'Malicious': {
+ 'Vendor': 'Cisco Umbrella Investigate',
+ 'Description': 'For IP ' + ip
+ }
+ })
+ context_dbotscore.append({
+ 'Indicator': domain['name'],
+ 'Type': 'domain',
+ 'Vendor': 'Cisco Umbrella Investigate',
+ 'Score': 3
+ })
+
+ if contents:
+ context[outputPaths['domain']] = contents
+ context[outputPaths['dbotscore']] = context_dbotscore
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Malicious Domains:', contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_ip_malicious_domains(ip):
+ # Build & Send request
+ endpoint_url = '/ips/' + ip + '/latest_domains'
+ res = http_request(endpoint_url)
+
+ # Validate and assign response
+ if not res:
+ return False
+ return res
+
+
+def get_domain_command():
+ # Initialize
+ contents = []
+ context = {}
+ headers = [] # type: ignore
+ results = []
+
+ domain = extract_domain_name(demisto.args()['domain'])
+
+ whois = get_whois_for_domain(domain)
+ admin = {
+ 'Country': whois.get('administrativeContactCountry'),
+ 'Email': whois.get('administrativeContactEmail'),
+ 'Name': whois.get('administrativeContactName'),
+ 'Phone': whois.get('administrativeContactTelephone')
+ }
+ registrant = {
+ 'Country': whois.get('registrantCountry'),
+ 'Email': whois.get('registrantEmail'),
+ 'Name': whois.get('registrantName'),
+ 'Phone': whois.get('registrantTelephone')
+ }
+ first_queried = whois.get('created')
+ name_servers = whois.get('nameServers')
+ emails = whois.get('emails')
+ registrar = {'Name': whois.get('registrarName')}
+ creation_date = first_queried
+ domain_status = whois.get('status')
+ updated_date = whois.get('updated')
+ expiration_date = whois.get('expires')
+
+ whois = {
+ 'Name': whois.get('domainName'),
+ 'Registrar Name': whois.get('registrarName'),
+ 'Last Retrieved': timestamp_to_date(whois.get('timeOfLatestRealtimeCheck')),
+ 'Created': whois.get('created'),
+ 'Updated': whois.get('updated'),
+ 'Expires': whois.get('expires'),
+ 'IANAID': whois.get('registrarIANAID'),
+ 'Last Observed': whois.get('auditUpdatedDate')
+ }
+
+ domain_categorization = [] # type: ignore
+ domain_categorization = get_domain_categorization(domain)
+ content_categories = domain_categorization.get('content_categories') # type: ignore
+ malware_categories = domain_categorization.get('security_categories') # type: ignore
+ risk_score = domain_categorization.get('status') # type: ignore
+ domain_categorization_table = {
+ 'Content Categories': content_categories,
+ 'Malware Categories': malware_categories
+ }
+
+ domain_details = [] # type: ignore
+ domain_details = get_domain_details(domain)
+ popularity = domain_details.get('popularity') # type: ignore
+ secure_rank = domain_details.get('securerank2') # type: ignore
+ dbotscore = securerank_to_dbotscore(secure_rank)
+
+ context[outputPaths['domain']] = {
+ 'Name': domain,
+ 'Admin': admin,
+ 'Registrant': registrant,
+ 'Registrar': registrar,
+ 'CreationDate': creation_date,
+ 'DomainStatus': domain_status,
+ 'UpdatedDate': updated_date,
+ 'ExpirationDate': expiration_date,
+ 'Umbrella': {
+ 'RiskScore': risk_score,
+ 'SecureRank': secure_rank,
+ 'FirstQueriedTime': first_queried,
+ 'ContentCategories': content_categories,
+ 'MalwareCategories': malware_categories
+ }
+ }
+
+ # Add malicious if needed
+ if risk_score == -1 or (secure_rank is not None and secure_rank < MALICIOUS_THRESHOLD):
+ context[outputPaths['domain']]['Malicious'] = {
+ 'Vendor': 'Cisco Umbrella Investigate',
+ 'Description': 'Malicious domain found with risk score -1'
+ }
+ dbotscore = 3
+
+ context[outputPaths['dbotscore']] = {
+ 'Indicator': domain,
+ 'Type': 'domain',
+ 'Vendor': 'Cisco Umbrella Investigate',
+ 'Score': dbotscore
+ }
+
+ contents.append({
+ 'Risk Score': risk_score,
+ 'Secure Rank': secure_rank,
+ 'Popularity': popularity,
+ 'Demisto Reputation': scoreToReputation(dbotscore),
+ 'First Queried time': first_queried,
+ })
+
+ # Domain reputation + [whois -> whois nameservers -> whois emails] + domain categorization
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': [contents, whois, name_servers, emails, domain_categorization_table],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('"Umbrella Investigate" Domain Reputation for: ' + domain, contents,
+ headers) + tableToMarkdown(
+ '"Umbrella Investigate" WHOIS Record Data for: ' + domain, whois, headers) + tableToMarkdown(
+ 'Name Servers:', {'Name Servers': name_servers}, headers) + tableToMarkdown('Emails:', {'Emails': emails},
+ headers) + tableToMarkdown(
+ 'Domain Categorization:', domain_categorization_table, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_related_domains_command():
+ # Initialize
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ # Get vars
+ is_co_occurences = bool(strtobool(demisto.args().get('coOccurences', 'false')))
+ domain = extract_domain_name(demisto.args()['domain'])
+ # Fetch data
+ related_domains = get_related_domains(domain, is_co_occurences)
+ contents_related_domains = []
+ contents_co_occurences = {} # type: ignore
+ co_occurences_md = ''
+ if related_domains:
+ # Process response - build context and markdown table
+ for related_domain in related_domains:
+ contents_related_domains.append({
+ 'Name': related_domain[0],
+ 'Score': related_domain[1]
+ })
+
+ if related_domains:
+ context['Umbrella.RelatedDomains(val.Domain && val.Domain == obj.Domain)'] = {
+ 'Data': contents_related_domains,
+ 'Domain': domain
+ }
+
+ # make another request if the co-occurrences flag is set, and attach the results to the main entry
+ if is_co_occurences:
+ contents_co_occurences = get_co_occurences(domain)
+ if contents_co_occurences:
+ co_occurences_md = tableToMarkdown('"Umbrella Investigate" Domain Co-occurences for: ' + domain,
+ contents_co_occurences, headers)
+ context['Umbrella.CoOccurences(val.Domain && val.Domain == obj.Domain)'] = {
+ 'Domain': domain,
+ 'Data': contents_co_occurences
+ }
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': [contents_related_domains, contents_co_occurences],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('"Umbrella Investigate" Related Domains for a Domain: ',
+ contents_related_domains, headers) + co_occurences_md,
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_related_domains(domain, is_co_occurences):
+ # Main Request
+ # Build & Send request
+ endpoint_url = '/links/name/' + domain + '.json'
+ res_related_domains = http_request(endpoint_url)
+
+ # Assign and validate response
+ related_domains = res_related_domains.get('tb1', [])
+ if not related_domains:
+ return False
+ return related_domains
+
+
+def get_domain_classifiers_command():
+ # Initialize
+ contents = {}
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ # Get vars
+ domain = extract_domain_name(demisto.args()['domain'])
+ # Fetch data
+ res = get_domain_classifiers(domain)
+ if res:
+ # Process response - build context and markdown table
+ security_categories = res.get('securityCategories', [])
+ attacks = res.get('attacks', [])
+ threat_types = res.get('threatTypes', [])
+ contents['Security Categories'] = security_categories
+ contents['Attacks'] = attacks
+ contents['Threat Types'] = threat_types
+
+ if contents:
+ context['Umbrella.DomainClassifiers(val.Domain && val.Domain == obj.Domain)'] = {
+ 'Data': {
+ 'MalwareCategories': security_categories,
+ 'Attacks': attacks,
+ 'ThreatTypes': threat_types
+ },
+ 'Domain': domain
+ }
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('"Umbrella Investigate" Domain Classifiers: ' + domain, contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_domain_classifiers(domain):
+ # Build & Send request
+ endpoint_url = '/url/' + domain + '/classifiers'
+ res = http_request(endpoint_url)
+
+ # Assign and validate response
+ if not res.get('securityCategories') and not res.get('attacks') and not res.get('threatTypes'):
+ return False
+ return res
+
+
+def get_domain_query_volume_command():
+ # Initialize
+ contents = []
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ queries_context = []
+ # Get vars
+ domain = extract_domain_name(demisto.args()['domain'])
+ start_date_string = demisto.args()['start']
+ stop_date_string = demisto.args()['stop']
+ match = demisto.args()['match']
+ limit = int(demisto.args().get('limit', 0))
+
+ # validation and user input conversion
+ if match not in ('all', 'exact', 'component'):
+ return_error('Not a valid type. Valid options are all, exact, or component.')
+
+ # Fetch data
+ response_object = get_domain_query_volume(domain, start_date_string, stop_date_string, match)
+ if response_object:
+ dates = response_object.get('dates')
+ queries = response_object.get('queries')
+
+ # Process response - build context and markdown table
+ start_date = dates[0]
+ stop_date = dates[1]
+ # Query timestamp/hour needs to be calculated manually; every entry represents 1 hour (3600 seconds).
+ # ts is in milliseconds
+ query_ts = start_date / 1000
+ if limit:
+ queries = queries[:limit]
+ for query in queries:
+ contents.append({
+ 'Queries': query,
+ 'Query Hour': timestamp_to_date(query_ts)
+ })
+ queries_context.append({
+ 'Queries': query,
+ 'QueryHour': timestamp_to_date(query_ts)
+ })
+ query_ts = query_ts + 3600
+
+ context['Umbrella.QueryVolume(val.Domain && val.Domain == obj.Domain)'] = {
+ 'Domain': domain,
+ 'Data': {
+ 'StartDate': timestamp_to_date(start_date),
+ 'StopDate': timestamp_to_date(stop_date),
+ 'QueriesInfo': queries_context
+ }
+ }
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(
+ '"Umbrella Investigate" Domain Volume: ' + domain + '\nStart Date ' + timestamp_to_date(
+ start_date) + ' - Stop Date ' + timestamp_to_date(stop_date), contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_domain_query_volume(domain, start_date_string, stop_date_string, match):
+ # user input conversion
+ start_ts = date_to_timestamp_func(start_date_string)
+ stop_ts = date_to_timestamp_func(stop_date_string)
+
+ # Build & Send request
+ params = {
+ 'start': start_ts,
+ 'stop': stop_ts,
+ 'match': match
+ }
+ endpoint_url = '/domains/volume/' + domain
+ res = http_request(endpoint_url, params)
+
+ # Assign and validate response
+ dates = res.get('dates', [])
+ queries = res.get('queries', [])
+ if not dates or not queries:
+ return False
+ return {'dates': dates, 'queries': queries}
+
+
+def get_domain_details_command():
+ # Initialize
+ contents = []
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ # Get vars
+ domain = extract_domain_name(demisto.args()['domain'])
+ threshold = int(demisto.args().get('threshold', MALICIOUS_THRESHOLD))
+ # Fetch data
+ res = get_domain_details(domain)
+ if res:
+ # Process response - build context and markdown table
+ # each key in SECURITY_RESULT_INFO corresponds to a key in 'res';
+ # we take the score from 'res' and add Name & Info from SECURITY_RESULT_INFO
+ for key in SECURITY_RESULT_INFO:
+ info = SECURITY_RESULT_INFO[key]
+ contents.append({
+ 'Score': res[key],
+ 'Name': info['Name'],
+ 'Info': info['Info']
+ })
+
+ domain_security_context = {}
+
+ for key in SECURITY_RESULT_INFO:
+ context_key = SECURITY_RESULT_INFO[key].get('ContextKey', format_string_to_context_key_format(
+ SECURITY_RESULT_INFO[key]['Name']))
+ domain_security_context[context_key] = res[key]
+
+ if domain_security_context:
+ context['Umbrella.DomainDetails(val.Domain && val.Domain == obj.Domain)'] = {
+ 'Domain': domain,
+ 'Data': domain_security_context
+ }
+ secure_rank = res.get('securerank2', False)
+ if secure_rank:
+ if secure_rank < threshold:
+ dbotscore = 3
+ else:
+ dbotscore = securerank_to_dbotscore(secure_rank)
+ context[outputPaths['dbotscore']] = {
+ 'Indicator': domain,
+ 'Type': 'domain',
+ 'Vendor': 'Cisco Umbrella Investigate',
+ 'Score': dbotscore
+ }
+ if dbotscore == 3:
+ context[outputPaths['domain']] = {}
+ context[outputPaths['domain']]['Malicious'] = {
+ 'Vendor': 'Cisco Umbrella Investigate',
+ 'Description': 'Malicious domain found via get-domain-details'
+ }
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('"Umbrella Investigate" Domain Reputation: ' + domain, contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_domain_details(domain):
+ # Build & Send request
+ endpoint_url = '/security/name/' + domain
+ res = http_request(endpoint_url)
+
+ # Assign and validate response
+ if not res:
+ return False
+ return res
+
+
+def get_domains_for_email_registrar_command():
+ # Initialize
+ contents = [] # type: ignore
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ markdown = ''
+ # Get vars
+ emails = argToList(demisto.args()['emails'])
+ offset = demisto.args().get('offset', '')
+ sort = demisto.args().get('sort', '')
+ limit = demisto.args().get('limit', '')
+ # user input validation
+ if not isinstance(emails, list):
+ return_error('Emails list is not formatted correctly, please try again.')
+ if sort:
+ if sort not in ('created', 'updated'):
+ return_error('The sort parameter accepts only these values: created, updated.')
+ for email in emails:
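+ # lightweight sanity check only - this pattern is not a full RFC 5322 email validator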
+ if re.match(r'^[_a-z0-9-]+(\.[_a-z0-9-]+)*@[a-z0-9-]+(\.[a-z0-9-]+)*(\.[a-z]{2,4})$', email) is None:
+ return_error('The provided email is not valid: ' + email)
+ # Fetch data
+ res = get_domains_for_email_registrar(emails, offset, sort, limit)
+ if res:
+ # Process response - build context and markdown table
+ domains = [] # type: ignore
+ for email in emails:
+ domains_contents = []
+ emails_contents = [] # type: ignore
+ domains_list = []
+ emails_context = []
+ # get the entry that matches the provided emails each time
+ email_res = res[email]
+ domains = email_res.get('domains', [])
+ if not email_res or not domains:
+ continue
+
+ # go over all the domains associated with this email, making a POST request to get each categorization
+ for domain in domains:
+ domains_list.append(domain['domain'])
+ domains_info = get_domains_categorization(domains_list)
+ if domains_info:
+ for domain in domains:
+ domains_contents.append({
+ 'Name': domain['domain'],
+ 'Security Categories': domains_info[domain['domain']]['security_categories'],
+ 'Content Categories': domains_info[domain['domain']]['content_categories'],
+ 'Is Current': domain['current']
+ })
+
+ # each email has its own data + associated domains attached
+ emails_context.append({
+ 'TotalResults': email_res['totalResults'],
+ 'MoreDataAvailable': email_res['moreDataAvailable'],
+ 'ResultLimit': email_res['limit'],
+ 'Domains': domains_contents
+ })
+ # each email is represented by 2 tables
+ # Build Output
+ markdown = markdown + tableToMarkdown('Domains Associated with: ' + email, domains_contents, headers)
+ contents.extend((emails_contents, domains_contents))
+
+ context['Umbrella.AssociatedDomains(val.Email && val.Email == obj.Email)'] = {
+ 'Email': email,
+ 'Data': emails_context
+ }
+
+ if not markdown:
+ markdown = tableToMarkdown('Domains Associated with: ' + email, domains_contents, headers)
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': markdown,
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_domains_for_email_registrar(emails, offset, sort, limit):
+ # Build & Send request
+ params = {} # type: ignore
+ # a single email uses a different API call than multiple emails
+ if len(emails) == 1:
+ endpoint_url = '/whois/emails/' + emails[0]
+ if sort or limit or offset:
+ params = {
+ 'sortField': sort,
+ 'limit': limit,
+ 'offset': offset
+ }
+
+ elif len(emails) > 1:
+ emails_string = ','.join(emails)
+ endpoint_url = '/whois/emails'
+ if sort or limit or offset:
+ params = {
+ 'emailList': emails_string,
+ 'sortField': sort,
+ 'limit': limit,
+ 'offset': offset
+ }
+ else:
+ params = {
+ 'emailList': emails_string,
+ }
+
+ res = http_request(endpoint_url, params)
+ if not res:
+ return False
+ return res
+
+
+def get_domains_for_nameserver_command():
+ # Initialize
+ contents = [] # type: ignore
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ markdown = ''
+ # Get vars
+ nameservers = argToList(demisto.args()['nameservers'])
+ offset = demisto.args().get('offset', '')
+ sort = demisto.args().get('sort', '')
+ limit = demisto.args().get('limit', '')
+ # user input validation
+ if not isinstance(nameservers, list):
+ return_error('Name Servers list is not formatted correctly, please try again.')
+ if sort:
+ if sort not in ('created', 'updated'):
+ return_error('The sort parameter accepts only these values: created, updated.')
+ for nameserver in nameservers:
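+ # hostname-style check (letters, digits, and hyphens per label) - not a full RFC 1035 validator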
+ if re.match(r'^(([a-zA-Z]|[a-zA-Z][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z]|[A-Za-z][A-Za-z0-9\-]*[A-Za-z0-9])$',
+ nameserver) is None:
+ return_error('The provided name server is not valid: ' + nameserver)
+ # Fetch data
+ res = get_domains_for_nameserver(nameservers, offset, sort, limit)
+ if res:
+ # Process response - build context and markdown table
+ domains = [] # type: ignore
+ for nameserver in nameservers:
+ domains_contents = []
+ nameservers_contents = [] # type: ignore
+ domains_list = []
+ nameservers_context = []
+ # get the entry that matches the provided nameservers each time
+ nameserver_res = res[nameserver]
+ domains = nameserver_res.get('domains', [])
+ if not nameserver_res or not domains:
+ continue
+
+ # go over the domains associated with this nameserver, making a POST request to get each categorization
+ for domain in domains:
+ domains_list.append(domain['domain'])
+ domains_info = get_domains_categorization(domains_list)
+ if domains_info:
+ for domain in domains:
+ domains_contents.append({
+ 'Name': domain['domain'],
+ 'Security Categories': domains_info[domain['domain']]['security_categories'],
+ 'Content Categories': domains_info[domain['domain']]['content_categories'],
+ 'Is Current': domain['current']
+ })
+
+ # each nameserver has its own data + associated domains attached
+ nameservers_context.append({
+ 'TotalResults': nameserver_res['totalResults'],
+ 'MoreDataAvailable': nameserver_res['moreDataAvailable'],
+ 'ResultLimit': nameserver_res['limit'],
+ 'Domains': domains_contents
+ })
+ # each nameserver is represented by 2 tables
+ # Build Output
+ markdown = markdown + tableToMarkdown('Domains Associated with: ' + nameserver, domains_contents, headers)
+ contents.extend((nameservers_contents, domains_contents))
+
+ context['Umbrella.AssociatedDomains(val.Nameserver && val.Nameserver == obj.Nameserver)'] = {
+ 'Nameserver': nameserver,
+ 'Data': nameservers_context
+ }
+
+ if not markdown:
+ markdown = tableToMarkdown('Domains Associated with: ' + nameserver, domains_contents, headers)
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': markdown,
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_domains_for_nameserver(nameservers, offset, sort, limit):
+ # Build & Send request
+ params = {} # type: ignore
+ # a single name server uses a different API call than multiple name servers
+ if len(nameservers) == 1:
+ endpoint_url = '/whois/nameservers/' + nameservers[0]
+ if sort or limit or offset:
+ params = {
+ 'sortField': sort,
+ 'limit': limit,
+ 'offset': offset
+ }
+ elif len(nameservers) > 1:
+ nameservers_string = ','.join(nameservers)
+ endpoint_url = '/whois/nameservers'
+ if sort or limit or offset:
+ params = {
+ 'nameServerList': nameservers_string,
+ 'sortField': sort,
+ 'limit': limit,
+ 'offset': offset
+ }
+ else:
+ params = {
+ 'nameServerList': nameservers_string,
+ }
+
+ res = http_request(endpoint_url, params)
+ if not res:
+ return False
+ return res
+
+
+def get_whois_for_domain_command():
+ # Initialize
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ contents_nameserver = {} # type: ignore
+ contents_email = {} # type: ignore
+
+ original_domain = demisto.args()['domain']
+ domain = extract_domain_name(original_domain)
+
+ res = get_whois_for_domain(domain)
+ if res:
+ # Process response - build context and markdown table
+ nameservers = res.get('nameServers')
+ emails = res.get('emails')
+ whois = {
+ 'Name': res.get('domainName'),
+ 'RegistrarName': res.get('registrarName'),
+ 'LastRetrieved': res.get('timeOfLatestRealtimeCheck'),
+ 'Created': res.get('created'),
+ 'Updated': res.get('updated'),
+ 'Expires': res.get('expires'),
+ 'IANAID': res.get('registrarIANAID'),
+ 'LastObserved': res.get('auditUpdatedDate')
+ }
+
+ table_whois = {
+ 'Name': whois.get('Name'),
+ 'Registrar Name': whois.get('RegistrarName'),
+ 'Last Retrieved': timestamp_to_date(whois.get('LastRetrieved')),
+ 'Created': whois.get('Created'),
+ 'Updated': whois.get('Updated'),
+ 'Expires': whois.get('Expires'),
+ 'IANAID': whois.get('IANAID'),
+ 'Last Observed': whois.get('LastObserved')
+ }
+
+ admin = {
+ 'Country': res.get('administrativeContactCountry'),
+ 'Email': res.get('administrativeContactEmail'),
+ 'Name': res.get('administrativeContactName'),
+ 'Phone': res.get('administrativeContactTelephone')
+ }
+ registrant = {
+ 'Country': res.get('registrantCountry'),
+ 'Email': res.get('registrantEmail'),
+ 'Name': res.get('registrantName'),
+ 'Phone': res.get('registrantTelephone'),
+ }
+ creation_date = res.get('created')
+ registrar = {'Name': res.get('registrarName')}
+ domain_status = res.get('status')
+ updated_date = res.get('updated')
+ expiration_date = res.get('expires')
+
+ context[outputPaths['domain']] = {
+ 'Name': domain,
+ 'Admin': admin,
+ 'Registrant': registrant,
+ 'Registrar': registrar,
+ 'CreationDate': creation_date,
+ 'DomainStatus': domain_status,
+ 'UpdatedDate': updated_date,
+ 'ExpirationDate': expiration_date,
+ }
+
+ contents_nameserver = {'Nameservers': nameservers}
+ contents_email = {'Emails': emails}
+
+ whois.update({
+ 'Nameservers': nameservers,
+ 'Emails': emails
+ })
+ context['Domain.Umbrella.Whois(val.Name && val.Name == obj.Name)'] = whois
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': [table_whois, contents_nameserver, contents_email],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('"Umbrella Investigate" WHOIS Record Data for: ' + whois['Name'], table_whois,
+ headers) + tableToMarkdown('Nameservers: ', contents_nameserver,
+ headers) + tableToMarkdown('Email Addresses: ',
+ contents_email, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_whois_for_domain(domain):
+ # Build & Send request
+ endpoint_url = '/whois/' + domain
+ res = http_request(endpoint_url)
+
+ # Assign and validate response
+ if not res or res.get('errorMessage'):
+ return False
+ return res
+
+
+def get_malicious_domains_for_ip_command():
+ # Initialize
+ contents = []
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ context_dbotscore = []
+ context_malicious = []
+ # Get vars
+ ip = demisto.args()['ip']
+ # Fetch data
+ res = get_malicious_domains_for_ip(ip)
+ if res:
+ # Process response - build context and markdown table
+ domains = []
+ for item in res:
+ domains.append(item['name'])
+ domains = get_domains_categorization(domains)
+ domains_context = []
+ if domains:
+ for domain in domains:
+ domains_context.append({
+ 'Name': domain,
+ 'MalwareCategories': domains[domain]['security_categories'],
+ 'ContentCategories': domains[domain]['content_categories']
+ })
+ contents.append({
+ 'Name': domain,
+ 'Malware Categories': domains[domain]['security_categories'],
+ 'Content Categories': domains[domain]['content_categories']
+ })
+ context_dbotscore.append({
+ 'Indicator': domain,
+ 'Type': 'domain',
+ 'Vendor': 'Cisco Umbrella Investigate',
+ 'Score': 3
+ })
+ context_malicious.append({
+ 'Name': domain,
+ 'Malicious': {
+ 'Vendor': 'Cisco Umbrella Investigate',
+ 'Description': 'For IP ' + ip
+ }
+ })
+
+ context['Umbrella.MaliciousDomains(val.IP && val.IP == obj.IP)'] = {
+ 'IP': ip,
+ 'Data': domains_context
+ }
+ context[outputPaths['domain']] = context_malicious # type: ignore
+ context[outputPaths['dbotscore']] = context_dbotscore # type: ignore
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('"Umbrella Investigate" Malicious Domains for an IP: ' + ip, contents,
+ headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_malicious_domains_for_ip(ip):
+ # Build & Send request
+ endpoint_url = '/ips/' + ip + '/latest_domains'
+ res = http_request(endpoint_url)
+ # Assign and validate response
+ if not res:
+ return False
+ return res
+
+
+def get_domain_using_regex_command():
+ # Initialize
+ contents = []
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ title_contents = [] # type: ignore
+ # Get vars
+ regex = demisto.args()['expression']
+ start = date_to_timestamp_func(demisto.args()['start'])
+ stop = date_to_timestamp_func(demisto.args().get('stop'))
+ is_include_category = bool(strtobool(demisto.args().get('includeCategory', 'false')))
+ limit = demisto.args().get('limit')
+ node_type = demisto.args().get('type')
+ # Fetch data
+ res = get_domain_using_regex(regex, start, is_include_category, stop, limit, node_type)
+ if res:
+ matches = res.get('matches', [])
+ # Process response - build context and markdown table
+ domain_context = []
+ for match in matches:
+ contents.append({
+ 'Name': match['name'],
+ 'First Seen': match['firstSeenISO'],
+ 'Security Categories': match['securityCategories']
+ })
+ domain_context.append({
+ 'Name': match['name'],
+ 'FirstSeen': match['firstSeen'],
+ 'SecurityCategories': match['securityCategories']
+ })
+
+ title_contents = [{
+ 'Total Results': res['totalResults'],
+ 'More Data Available': res['moreDataAvailable'],
+ 'Limit': res['limit']
+ }]
+
+ context['Umbrella.DomainSearch(val.Expression && val.Expression == obj.Expression)'] = {
+ 'Expression': res.get('expression', regex),
+ 'TotalResults': res.get('totalResults', None),
+ 'Data': domain_context
+ }
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': [title_contents, contents],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('"Umbrella Investigate" Domain Pattern Search for: ' + regex, title_contents,
+ headers) + tableToMarkdown('Matches: ', contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_domain_using_regex(regex, start, is_include_category, stop, limit, node_type):
+ # Build params dict
+ params = {
+ 'start': start,
+ 'includecategory': is_include_category,
+ 'stop': stop,
+ 'limit': limit,
+ 'type': node_type
+ }
+
+ # Build & Send request
+ endpoint_url = '/search/' + regex
+ res = http_request(endpoint_url, params)
+ # Assign and validate response
+ results = res.get('totalResults', 0)
+ if not results:
+ return False
+ return res
+
+
+def get_domain_timeline_command():
+ # Initialize
+ contents = []
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ # Get vars
+ domain = extract_domain_name(demisto.args()['domain'])
+ if re.match(r'^[a-zA-Z\d-]{1,63}(\.[a-zA-Z\d-]{1,63})*$', domain) is None:
+ return_error('Domain is not valid')
+ # Fetch data
+ timeline = get_domain_timeline(domain)
+ if timeline:
+ # Process response - build context and markdown table
+ timeline_context = []
+ for item in timeline:
+ contents.append({
+ 'Malware Categories': item['categories'],
+ 'Attacks': item['attacks'],
+ 'Threat Types': item['threatTypes'],
+ 'Timestamp': timestamp_to_date(item['timestamp']),
+ })
+ timeline_context.append({
+ 'MalwareCategories': item['categories'],
+ 'Attacks': item['attacks'],
+ 'ThreatTypes': item['threatTypes'],
+ 'Timestamp': timestamp_to_date(item['timestamp']),
+ })
+
+ context['Umbrella.Timeline(val.Domain && val.Domain == obj.Domain)'] = {
+ 'Domain': domain,
+ 'Data': timeline_context
+ }
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('"Umbrella Investigate" Domain Timeline: ' + domain, contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_domain_timeline(domain):
+ # Build & Send request
+ endpoint_url = '/timeline/' + domain
+ timeline = http_request(endpoint_url)
+
+ # Assign and validate response
+ if not timeline:
+ return False
+ return timeline
+
+
+def get_ip_timeline_command():
+ # Initialize
+ contents = []
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ # Get vars
+ ip = demisto.args()['ip']
+ is_valid = is_ip_valid(ip)
+ if not is_valid:
+ return_error('IP is not valid')
+ # Fetch data
+ timeline = get_ip_timeline(ip)
+ if timeline:
+ # Process response - build context and markdown table
+ timeline_context = []
+ for item in timeline:
+ contents.append({
+ 'Malware Categories': item['categories'],
+ 'Attacks': item['attacks'],
+ 'Threat Types': item['threatTypes'],
+ 'Timestamp': timestamp_to_date(item['timestamp']),
+ })
+ timeline_context.append({
+ 'MalwareCategories': item['categories'],
+ 'Attacks': item['attacks'],
+ 'ThreatTypes': item['threatTypes'],
+ 'Timestamp': timestamp_to_date(item['timestamp']),
+ })
+
+ context['Umbrella.Timeline(val.IP && val.IP == obj.IP)'] = {
+ 'IP': ip,
+ 'Data': timeline_context
+ }
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('"Umbrella Investigate" IP Timeline: ' + ip, contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_ip_timeline(ip):
+ # Build & Send request
+ endpoint_url = '/timeline/' + ip
+ timeline = http_request(endpoint_url)
+
+ # Assign and validate response
+ if not timeline:
+ return False
+ return timeline
+
+
+def get_url_timeline_command():
+ # Initialize
+ contents = []
+ context = {}
+ headers = [] # type: ignore
+ results = []
+ # Get vars
+ url = demisto.args()['url']
+ parsed_url = urlparse(url)
+ if not (parsed_url.scheme or parsed_url.netloc or parsed_url.path):
+ return_error('URL is not valid')
+ # Fetch data
+ timeline = get_url_timeline(url)
+ if timeline:
+ # Process response - build context and markdown table
+ timeline_context = []
+ for item in timeline:
+ contents.append({
+ 'Malware Categories': item['categories'],
+ 'Attacks': item['attacks'],
+ 'Threat Types': item['threatTypes'],
+ 'Timestamp': timestamp_to_date(item['timestamp']),
+ })
+ timeline_context.append({
+ 'MalwareCategories': item['categories'],
+ 'Attacks': item['attacks'],
+ 'ThreatTypes': item['threatTypes'],
+ 'Timestamp': timestamp_to_date(item['timestamp']),
+ })
+
+ context['Umbrella.Timeline(val.URL && val.URL == obj.URL)'] = {
+ 'URL': url,
+ 'Data': timeline_context
+ }
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('"Umbrella Investigate" URL Timeline: ' + url, contents, headers),
+ 'EntryContext': context
+ })
+
+ return results
+
+
+def get_url_timeline(url):
+ # percent-encode the URL, otherwise the API does not return a 200 response
+ encoded_url = urllib.quote_plus(url.encode('utf-8'))
+
+ # Build & Send request
+ endpoint_url = '/timeline/' + encoded_url
+ timeline = http_request(endpoint_url)
+
+ # Assign and validate response
+ if not timeline:
+ return False
+ return timeline
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('command is %s' % (demisto.command(),))
+try:
+ handle_proxy()
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ http_request('/domains/categorization/google.com?showLabels')
+ demisto.results('ok')
+ sys.exit(0)
+ elif demisto.command() == 'investigate-umbrella-domain-categorization' or demisto.command() == \
+ 'umbrella-domain-categorization':
+ demisto.results(get_domain_categorization_command())
+ elif demisto.command() == 'investigate-umbrella-domain-search' or demisto.command() == 'umbrella-domain-search':
+ demisto.results(get_domain_search_command())
+ elif demisto.command() == 'investigate-umbrella-domain-co-occurrences' or demisto.command() == \
+ 'umbrella-domain-co-occurrences':
+ demisto.results(get_domain_co_occurrences_command())
+ elif demisto.command() == 'investigate-umbrella-domain-related' or demisto.command() == 'umbrella-domain-related':
+ demisto.results(get_domain_related_command())
+ elif demisto.command() == 'investigate-umbrella-domain-security' or demisto.command() == 'umbrella-domain-security':
+ demisto.results(get_domain_security_command())
+ elif demisto.command() == 'investigate-umbrella-domain-dns-history' or demisto.command() == \
+ 'umbrella-domain-dns-history':
+ demisto.results(get_domain_dns_history_command())
+ elif demisto.command() == 'investigate-umbrella-ip-dns-history' or demisto.command() == 'umbrella-ip-dns-history':
+ demisto.results(get_ip_dns_history_command())
+ elif demisto.command() == 'investigate-umbrella-ip-malicious-domains' or demisto.command() == \
+ 'umbrella-ip-malicious-domains':
+ demisto.results(get_ip_malicious_domains_command())
+ # new-commands:
+ elif demisto.command() == 'domain':
+ demisto.results(get_domain_command())
+ elif demisto.command() == 'umbrella-get-related-domains':
+ demisto.results(get_related_domains_command())
+ elif demisto.command() == 'umbrella-get-domain-classifiers':
+ demisto.results(get_domain_classifiers_command())
+ elif demisto.command() == 'umbrella-get-domain-queryvolume':
+ demisto.results(get_domain_query_volume_command())
+ elif demisto.command() == 'umbrella-get-domain-details':
+ demisto.results(get_domain_details_command())
+ elif demisto.command() == 'umbrella-get-domains-for-email-registrar':
+ demisto.results(get_domains_for_email_registrar_command())
+ elif demisto.command() == 'umbrella-get-domains-for-nameserver':
+ demisto.results(get_domains_for_nameserver_command())
+ elif demisto.command() == 'umbrella-get-whois-for-domain':
+ demisto.results(get_whois_for_domain_command())
+ elif demisto.command() == 'umbrella-get-malicious-domains-for-ip':
+ demisto.results(get_malicious_domains_for_ip_command())
+ elif demisto.command() == 'umbrella-get-domains-using-regex':
+ demisto.results(get_domain_using_regex_command())
+ elif demisto.command() == 'umbrella-get-domain-timeline':
+ demisto.results(get_domain_timeline_command())
+ elif demisto.command() == 'umbrella-get-ip-timeline':
+ demisto.results(get_ip_timeline_command())
+ elif demisto.command() == 'umbrella-get-url-timeline':
+ demisto.results(get_url_timeline_command())
+
+except Exception, e:
+ LOG(e.message)
+ LOG.print_log()
+ return_error(e.message)
diff --git a/Integrations/Cisco-umbrella/Cisco-umbrella.yml b/Integrations/Cisco-umbrella/Cisco-umbrella.yml
new file mode 100644
index 000000000000..2b58064b037f
--- /dev/null
+++ b/Integrations/Cisco-umbrella/Cisco-umbrella.yml
@@ -0,0 +1,1557 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: Cisco Umbrella Investigate
+ version: -1
+configuration:
+- display: Cisco Umbrella API token
+ name: APIToken
+ required: true
+ type: 4
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (insecure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: https://investigate.api.umbrella.com
+ display: Base URL
+ name: baseURL
+ required: true
+ type: 0
+- defaultvalue: '-100'
+ display: DBot Score Malicious Threshold (-100 to 100)
+ name: dboscore_threshold
+ required: false
+ type: 0
+description: Cisco Umbrella Investigate
+display: Cisco Umbrella Investigate
+name: Cisco Umbrella Investigate
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: Enter the domain you would like to categorize (e.g. amazon.com)
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the category of a domain. For example, domain=amazon.com
+ returns Ecommerce/Shopping
+ execution: false
+ name: umbrella-domain-categorization
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: string
+ - contextPath: Domain.SecurityCategories
+ description: The Umbrella security category, or categories, that match this
+ domain
+ type: string
+ - contextPath: Domain.ContentCategories
+ description: The Umbrella content category or categories that match this domain
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the reason for the vendor to make the decision
+ type: string
+ - arguments:
+ - default: true
+ description: Enter the domain you would like to categorize (e.g. amazon.com)
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: true
+ description: Returns the category of a domain. For example, domain=amazon.com
+ returns Ecommerce/Shopping
+ execution: false
+ name: investigate-umbrella-domain-categorization
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: Unknown
+ - contextPath: Domain.SecurityCategories
+ description: The Umbrella security category, or categories, that match this
+ domain
+ type: Unknown
+ - contextPath: Domain.ContentCategories
+ description: The Umbrella content category or categories that match this domain
+ type: Unknown
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision
+ type: Unknown
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the reason for the vendor to make the decision
+ type: Unknown
+ - arguments:
+ - default: true
+ description: Enter a domain (e.g. www.cnn.com)
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns a list of co-occurrences for the specified domain. A co-occurrence
+ is when two or more domains are being accessed by the same users within a small
+ window of time. A co-occurrence isn't necessarily a bad thing; legitimate sites
+ co-occur with each other as part of normal web activity. However, unusual or
+ suspicious co-occurrences can provide additional information regarding attacks
+ execution: false
+ name: umbrella-domain-co-occurrences
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: string
+ - contextPath: Domain.CoOccurrences.Score
+ description: Domain score - value range between 0 and 1
+ type: number
+ - contextPath: Domain.CoOccurrences.Name
+ description: Domain name
+ type: string
+ - arguments:
+ - default: true
+ description: Enter a domain (e.g. www.cnn.com)
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: true
+ description: Returns a list of co-occurrences for the specified domain. A co-occurrence
+ is when two or more domains are being accessed by the same users within a small
+ window of time. A co-occurrence isn't necessarily a bad thing; legitimate sites
+ co-occur with each other as part of normal web activity. However, unusual or
+ suspicious co-occurrences can provide additional information regarding attacks
+ execution: false
+ name: investigate-umbrella-domain-co-occurrences
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: Unknown
+ - contextPath: Domain.Score
+ description: Domain score - value range between 0 and 1
+ type: Unknown
+ - arguments:
+ - default: true
+ description: Enter a domain (e.g. www.cnn.com)
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: This will return a list of domain names that have been frequently
+ seen requested around the same time (up to 60 seconds before or after) as the
+ given domain name, but that are not frequently associated with other domain
+ names.
+ execution: false
+ name: umbrella-domain-related
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: string
+ - contextPath: Domain.Related.Score
+ description: This is a score reflecting the number of client IPs looking up
+ related sites within 60 seconds of the original request
+ type: number
+ - contextPath: Domain.Related.Name
+ description: Related domain name
+ type: string
+ - arguments:
+ - default: true
+ description: Enter a domain (e.g. www.cnn.com)
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: true
+ description: This will return a list of domain names that have been frequently
+ seen requested around the same time (up to 60 seconds before or after) as the
+ given domain name, but that are not frequently associated with other domain
+ names.
+ execution: false
+ name: investigate-umbrella-domain-related
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: Unknown
+ - contextPath: Domain.Score
+ description: This is a score reflecting the number of client IPs looking up
+ related sites within 60 seconds of the original request
+ type: Unknown
+ - arguments:
+ - default: true
+ description: Enter a domain (e.g. www.cnn.com)
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '-100'
+ description: Manually set a threshold, instead of the secure rank, to determine
+ whether a domain is malicious. From -100 (malicious) to 100 (good)
+ isArray: false
+ name: threshold
+ required: false
+ secret: false
+ deprecated: false
+ description: This contains multiple scores or security features, each of which
+ can be used to determine relevant datapoints to build insight on the reputation
+ or security risk posed by the site. See security information about this specific
+ domain at https://investigate-api.readme.io/docs/security-information-for-a-domain-1
+ execution: false
+ name: umbrella-domain-security
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: string
+ - contextPath: Domain.Security.DGA
+ description: Domain Generation Algorithm. This score is generated based on the
+ likelihood of the domain name being generated by an algorithm rather than
+ a human
+ type: number
+ - contextPath: Domain.Security.Perplexity
+ description: A second score on the likelihood of the name being algorithmically
+ generated, on a scale from 0 to 1
+ type: number
+ - contextPath: Domain.Security.Entropy
+ description: The number of bits required to encode the domain name, as a score
+ type: number
+ - contextPath: Domain.Security.SecureRank
+ description: Suspicious rank for a domain, based on the lookup behavior
+ of client IPs for the domain
+ type: number
+ - contextPath: Domain.Security.PageRank
+ description: Popularity according to Google's pagerank algorithm
+ type: number
+ - contextPath: Domain.Security.ASNScore
+ description: ASN reputation score, ranges from -100 to 0 with -100 being very
+ suspicious
+ type: Unknown
+ - contextPath: Domain.Security.PrefixScore
+ description: Prefix ranks domains given their IP prefixes (an IP prefix is the
+ first three octets in an IP address) and the reputation score of these prefixes.
+ Ranges from -100 to 0, -100 being very suspicious
+ type: number
+ - contextPath: Domain.Security.RipScore
+ description: RIP ranks domains given their IP addresses and the reputation score
+ of these IP addresses. Ranges from -100 to 0, -100 being very suspicious
+ type: number
+ - contextPath: Domain.Security.Popularity
+ description: The number of unique client IPs visiting this site, relative to
+ all requests to all sites
+ type: number
+ - contextPath: Domain.Security.GeoScore
+ description: A score that represents how far the different physical locations
+ serving this name are from each other
+ type: number
+ - contextPath: Domain.Security.KolmoorovSmirnov
+ description: Kolmogorov–Smirnov test on geodiversity. 0 means that the client
+ traffic matches what is expected for this TLD
+ type: number
+ - contextPath: Domain.Security.AttackName
+ description: The name of any known attacks associated with this domain, or blank
+ if no known threat
+ type: string
+ - contextPath: Domain.Security.ThreatType
+ description: The type of the known attack, such as botnet or APT, or blank if
+ no known threat
+ type: string
+ - arguments:
+ - default: true
+ description: Enter a domain (e.g. www.cnn.com)
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: true
+ description: This contains multiple scores or security features, each of which
+ can be used to determine relevant datapoints to build insight into the reputation
+ or security risk posed by the site. See security information about this specific
+ domain at https://investigate-api.readme.io/docs/security-information-for-a-domain-1
+ execution: false
+ name: investigate-umbrella-domain-security
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: Unknown
+ - contextPath: Domain.Security.DGA
+ description: Domain Generation Algorithm. This score is generated based on the
+ likelihood of the domain name being generated by an algorithm rather than
+ a human
+ type: Unknown
+ - contextPath: Domain.Security.Perplexity
+ description: A second score on the likelihood of the name being algorithmically
+ generated, on a scale from 0 to 1
+ type: Unknown
+ - contextPath: Domain.Security.Entropy
+ description: The number of bits required to encode the domain name, as a score
+ type: Unknown
+ - contextPath: Domain.Security.SecureRank
+ description: Suspicious rank for a domain, based on the lookup behavior
+ of client IPs for the domain
+ type: Unknown
+ - contextPath: Domain.Security.PageRank
+ description: Popularity according to Google's pagerank algorithm
+ type: Unknown
+ - contextPath: Domain.Security.ASNScore
+ description: ASN reputation score, ranges from -100 to 0 with -100 being very
+ suspicious
+ type: Unknown
+ - contextPath: Domain.Security.PrefixScore
+ description: Prefix ranks domains given their IP prefixes (an IP prefix is the
+ first three octets in an IP address) and the reputation score of these prefixes.
+ Ranges from -100 to 0, -100 being very suspicious
+ type: Unknown
+ - contextPath: Domain.Security.RipScore
+ description: RIP ranks domains given their IP addresses and the reputation score
+ of these IP addresses. Ranges from -100 to 0, -100 being very suspicious
+ type: Unknown
+ - contextPath: Domain.Security.Popularity
+ description: The number of unique client IPs visiting this site, relative to
+ all requests to all sites
+ type: Unknown
+ - contextPath: Domain.Security.GeoScore
+ description: A score that represents how far the different physical locations
+ serving this name are from each other
+ type: Unknown
+ - contextPath: Domain.Security.KolmoorovSmirnov
+ description: Kolmogorov–Smirnov test on geodiversity. 0 means that the client
+ traffic matches what is expected for this TLD
+ type: Unknown
+ - contextPath: Domain.Security.AttackName
+ description: The name of any known attacks associated with this domain, or blank
+ if no known threat
+ type: Unknown
+ - contextPath: Domain.Security.ThreatType
+ description: The type of the known attack, such as botnet or APT, or blank if
+ no known threat
+ type: Unknown
+ - arguments:
+ - default: true
+ description: Enter a domain (e.g. www.cnn.com)
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: The DNS database can be used to query the history that Umbrella has
+ seen for a given domain. The most common use case is to obtain the RRs (Resource
+ Record) history for a given domain, passing in the record query type as a parameter,
+ to help build intelligence around a domain.
+ execution: false
+ name: umbrella-domain-dns-history
+ outputs:
+ - contextPath: Domain.Address
+ description: IP address
+ type: Unknown
+ - contextPath: Domain.DNSHistory.Age
+ description: The time in days between now and the last request for this domain.
+ This value is only useful if present
+ type: Unknown
+ - contextPath: Domain.DNSHistory.TtlsMin
+ description: Minimum amount of time set that DNS records should be cached
+ type: Unknown
+ - contextPath: Domain.DNSHistory.TtlsMax
+ description: Maximum amount of time set that DNS records should be cached
+ type: Unknown
+ - contextPath: Domain.DNSHistory.TtlsMean
+ description: Average amount of time set that DNS records should be cached
+ type: Unknown
+ - contextPath: Domain.DNSHistory.TtlsMedian
+ description: Median amount of time set that DNS records should be cached
+ type: Unknown
+ - contextPath: Domain.DNSHistory.TtlsStddev
+ description: Standard deviation of the amount of time set that DNS records should
+ be cached
+ type: Unknown
+ - contextPath: Domain.DNSHistory.CountryCodes
+ description: 'List of country codes (ex: US, FR, TW) for the IPs the name maps
+ to'
+ type: Unknown
+ - contextPath: Domain.DNSHistory.CountryCount
+ description: Number of countries the IPs are hosted in
+ type: Unknown
+ - contextPath: Domain.DNSHistory.Asns
+ description: List of ASN numbers the IPs are in
+ type: Unknown
+ - contextPath: Domain.DNSHistory.AsnsCount
+ description: Number of ASNs the IPs map to
+ type: Unknown
+ - contextPath: Domain.DNSHistory.Prefixes
+ description: List of network prefixes the IPs map to
+ type: Unknown
+ - contextPath: Domain.DNSHistory.PrefixesCount
+ description: Number of network prefixes the IPs map to
+ type: Unknown
+ - contextPath: Domain.DNSHistory.Rips
+ description: Number of IPs seen for the domain name
+ type: Unknown
+ - contextPath: Domain.DNSHistory.DivRips
+ description: The number of prefixes over the number of IPs
+ type: Unknown
+ - contextPath: Domain.DNSHistory.Locations
+ description: List of geo coordinates (WGS84 datum, decimal format) the IPs are
+ mapping to
+ type: Unknown
+ - contextPath: Domain.DNSHistory.LocationsCount
+ description: Number of distinct geo coordinates the IPs are mapping to
+ type: Unknown
+ - contextPath: Domain.DNSHistory.GeoDistanceSum
+ description: Minimum sum of distance between locations, in kilometers
+ type: Unknown
+ - contextPath: Domain.DNSHistory.GeoDistancMean
+ description: Mean distance between the geo median and each location, in kilometers
+ type: Unknown
+ - contextPath: Domain.DNSHistory.MailExchanger
+ description: Boolean. If an MX query for this domain name has been seen
+ type: Unknown
+ - contextPath: Domain.DNSHistory.NonRoutable
+ description: Boolean. If one of the IPs is in a reserved, non-routable IP range
+ type: Unknown
+ - contextPath: Domain.DNSHistory.FfCandidate
+ description: Boolean. If the domain name looks like a candidate for fast flux.
+ This does not necessarily mean the domain is in fast flux, but rather that
+ the IP address the domain resolves to changes rapidly
+ type: Unknown
+ - contextPath: Domain.DNSHistory.RipsStability
+ description: 1.0 divided by the number of times the set of IP addresses changed
+ type: Unknown
+ - contextPath: Domain.DNSHistory.BaseDomain
+ description: The base domain of the requested domain
+ type: Unknown
+ - contextPath: Domain.DNSHistory.IsSubdomain
+ description: Boolean. True if the requested domain is a subdomain of another
+ type: Unknown
+ - arguments:
+ - default: true
+ description: Enter a domain (e.g. www.cnn.com)
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: true
+ description: The DNS database can be used to query the history that Umbrella has
+ seen for a given domain. The most common use case is to obtain the RRs (Resource
+ Record) history for a given domain, passing in the record query type as a parameter,
+ to help build intelligence around a domain.
+ execution: false
+ name: investigate-umbrella-domain-dns-history
+ outputs:
+ - contextPath: IP.Address
+ description: IP address
+ type: Unknown
+ - contextPath: IP.DNSHistory.Age
+ description: The time in days between now and the last request for this domain.
+ This value is only useful if present
+ type: Unknown
+ - contextPath: IP.DNSHistory.TtlsMin
+ description: Minimum amount of time set that DNS records should be cached
+ type: Unknown
+ - contextPath: IP.DNSHistory.TtlsMax
+ description: Maximum amount of time set that DNS records should be cached
+ type: Unknown
+ - contextPath: IP.DNSHistory.TtlsMean
+ description: Average amount of time set that DNS records should be cached
+ type: Unknown
+ - contextPath: IP.DNSHistory.TtlsMedian
+ description: Median amount of time set that DNS records should be cached
+ type: Unknown
+ - contextPath: IP.DNSHistory.TtlsStddev
+ description: Standard deviation of the amount of time set that DNS records should
+ be cached
+ type: Unknown
+ - contextPath: IP.DNSHistory.CountryCodes
+ description: 'List of country codes (ex: US, FR, TW) for the IPs the name maps
+ to'
+ type: Unknown
+ - contextPath: IP.DNSHistory.CountryCount
+ description: Number of countries the IPs are hosted in
+ type: Unknown
+ - contextPath: IP.DNSHistory.Asns
+ description: List of ASN numbers the IPs are in
+ type: Unknown
+ - contextPath: IP.DNSHistory.AsnsCount
+ description: Number of ASNs the IPs map to
+ type: Unknown
+ - contextPath: IP.DNSHistory.Prefixes
+ description: List of network prefixes the IPs map to
+ type: Unknown
+ - contextPath: IP.DNSHistory.PrefixesCount
+ description: Number of network prefixes the IPs map to
+ type: Unknown
+ - contextPath: IP.DNSHistory.Rips
+ description: Number of IPs seen for the domain name
+ type: Unknown
+ - contextPath: IP.DNSHistory.DivRips
+ description: The number of prefixes over the number of IPs
+ type: Unknown
+ - contextPath: IP.DNSHistory.Locations
+ description: List of geo coordinates (WGS84 datum, decimal format) the IPs are
+ mapping to
+ type: Unknown
+ - contextPath: IP.DNSHistory.LocationsCount
+ description: Number of distinct geo coordinates the IPs are mapping to
+ type: Unknown
+ - contextPath: IP.DNSHistory.GeoDistanceSum
+ description: Minimum sum of distance between locations, in kilometers
+ type: Unknown
+ - contextPath: IP.DNSHistory.GeoDistancMean
+ description: Mean distance between the geo median and each location, in kilometers
+ type: Unknown
+ - contextPath: IP.DNSHistory.MailExchanger
+ description: Boolean. If an MX query for this domain name has been seen
+ type: Unknown
+ - contextPath: IP.DNSHistory.NonRoutable
+ description: Boolean. If one of the IPs is in a reserved, non-routable IP range
+ type: Unknown
+ - contextPath: IP.DNSHistory.FfCandidate
+ description: Boolean. If the domain name looks like a candidate for fast flux.
+ This does not necessarily mean the domain is in fast flux, but rather that
+ the IP address the domain resolves to changes rapidly
+ type: Unknown
+ - contextPath: IP.DNSHistory.RipsStability
+ description: 1.0 divided by the number of times the set of IP addresses changed
+ type: Unknown
+ - contextPath: IP.DNSHistory.BaseDomain
+ description: The base domain of the requested domain
+ type: Unknown
+ - contextPath: IP.DNSHistory.IsSubdomain
+ description: Boolean. True if the requested domain is a subdomain of another
+ type: Unknown
+ - arguments:
+ - default: true
+ description: 'Enter an IP Address:'
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: Limit fetched results; removing the limit can fetch a large amount
+ of results into context.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: The DNS database can be used to query the history that Umbrella has
+ seen for a given IP address. The most common use case is to obtain the DNS Resource
+ Record (RR) history for a given IP, passing in the record query type as a parameter,
+ to help build intelligence around an IP or a range of IPs. The information provided
+ is from within the last 90 days.
+ execution: false
+ name: umbrella-ip-dns-history
+ outputs:
+ - contextPath: IP.Address
+ description: IP address
+ type: Unknown
+ - contextPath: IP.DNSHistory.RRS.Name
+ description: The looked-up IP address
+ type: Unknown
+ - contextPath: IP.DNSHistory.RRS.Class
+ description: DNS class type
+ type: Unknown
+ - contextPath: IP.DNSHistory.RRS.Type
+ description: Query type
+ type: Unknown
+ - contextPath: IP.DNSHistory.RRS.RR
+ description: Resource record owner
+ type: Unknown
+ - contextPath: IP.DNSHistory.RRS.TTL
+ description: Time to live for this record
+ type: Unknown
+ - contextPath: IP.DNSHistory.Features.RrCount
+ description: Number of records of that type mapping to the given IP
+ type: Unknown
+ - contextPath: IP.DNSHistory.Features.Ld2Count
+ description: Number of 2-level names mapping to the given IP
+ type: Unknown
+ - contextPath: IP.DNSHistory.Features.Ld3Count
+ description: Number of 3-level names mapping to the given IP
+ type: Unknown
+ - contextPath: IP.DNSHistory.Features.Ld21Count
+ description: Number of 2-level names, without the TLD, mapping to the given
+ IP
+ type: Unknown
+ - contextPath: IP.DNSHistory.Features.Ld22Count
+ description: Number of 3-level names, without the TLD, mapping to the given
+ IP
+ type: Unknown
+ - contextPath: IP.DNSHistory.Features.DivLd2
+ description: ld2_count divided by the number of records
+ type: Unknown
+ - contextPath: IP.DNSHistory.Features.DivLd3
+ description: ld3_count divided by the number of records
+ type: Unknown
+ - contextPath: IP.DNSHistory.Features.DivLd21
+ description: ld2_1_count divided by the number of records
+ type: Unknown
+ - contextPath: IP.DNSHistory.Features.DivLd22
+ description: ld2_2_count divided by the number of records
+ type: Unknown
+ - arguments:
+ - default: true
+ description: 'Enter an IP Address:'
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: true
+ description: The DNS database can be used to query the history that Umbrella has
+ seen for a given IP address. The most common use case is to obtain the DNS Resource
+ Record (RR) history for a given IP, passing in the record query type as a parameter,
+ to help build intelligence around an IP or a range of IPs. The information provided
+ is from within the last 90 days.
+ execution: false
+ name: investigate-umbrella-ip-dns-history
+ outputs:
+ - contextPath: IP.Address
+ description: IP address
+ type: Unknown
+ - contextPath: IP.DNSHistory.RRS.Name
+ description: The looked-up IP address
+ type: Unknown
+ - contextPath: IP.DNSHistory.RRS.Class
+ description: DNS class type
+ type: Unknown
+ - contextPath: IP.DNSHistory.RRS.Type
+ description: Query type
+ type: Unknown
+ - contextPath: IP.DNSHistory.RRS.RR
+ description: Resource record owner
+ type: Unknown
+ - contextPath: IP.DNSHistory.RRS.TTL
+ description: Time to live for this record
+ type: Unknown
+ - contextPath: IP.DNSHistory.Feature.RrCount
+ description: Number of records of that type mapping to the given IP
+ type: Unknown
+ - contextPath: IP.DNSHistory.Feature.Ld2Count
+ description: Number of 2-level names mapping to the given IP
+ type: Unknown
+ - contextPath: IP.DNSHistory.Feature.Ld3Count
+ description: Number of 3-level names mapping to the given IP
+ type: Unknown
+ - contextPath: IP.DNSHistory.Feature.Ld21Count
+ description: Number of 2-level names, without the TLD, mapping to the given
+ IP
+ type: Unknown
+ - contextPath: IP.DNSHistory.Feature.Ld22Count
+ description: Number of 3-level names, without the TLD, mapping to the given
+ IP
+ type: Unknown
+ - contextPath: IP.DNSHistory.Feature.DivLd2
+ description: ld2_count divided by the number of records
+ type: Unknown
+ - contextPath: IP.DNSHistory.Feature.DivLd3
+ description: ld3_count divided by the number of records
+ type: Unknown
+ - contextPath: IP.DNSHistory.Feature.DivLd21
+ description: ld2_1_count divided by the number of records
+ type: Unknown
+ - contextPath: IP.DNSHistory.Feature.DivLd22
+ description: ld2_2_count divided by the number of records
+ type: Unknown
+ - arguments:
+ - default: true
+ description: An IP Address
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: true
+ description: This command shows whether the IP address you’ve entered as input
+ has any known malicious domains associated with it. The domains that appear
+ when using this endpoint are those that currently exist in the Umbrella block
+ list. This endpoint will return an array with a single domain name for each
+ domain associated with the IP, along with an id number that can be ignored.
+ execution: false
+ name: investigate-umbrella-ip-malicious-domains
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: Unknown
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision
+ type: Unknown
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the reason for the vendor to make the decision
+ type: Unknown
+ - arguments:
+ - default: true
+ description: An IP Address
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: This command shows whether the IP address you’ve entered as input
+ has any known malicious domains associated with it. The domains that appear
+ when using this endpoint are those that currently exist in the Umbrella block
+ list. This endpoint will return an array with a single domain name for each
+ domain associated with the IP, along with an id number that can be ignored.
+ execution: false
+ name: umbrella-ip-malicious-domains
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: Unknown
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision
+ type: Unknown
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the reason for the vendor to make the decision
+ type: Unknown
+ - contextPath: DBotScore.Score
+ description: The DBot score
+ type: number
+ - contextPath: DBotScore.Type
+ description: The Indicator type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: The DBot score vendor
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The Indicator
+ type: string
+ - arguments:
+ - default: true
+ description: Enter a domain regular expression (e.g. "cn.*\\\\.com"). Note that
+ double backslashes ("\\\\") must be used
+ isArray: false
+ name: regex
+ required: true
+ secret: false
+ - default: false
+ description: 'Example: -2weeks, -1 day, -1000minutes, EPOCH unix time, MAX:
+ -31days'
+ isArray: false
+ name: start
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: Limit fetched results; removing the limit can fetch a large amount
+ of results into context.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: This produces a list of matching domains based on a regular expression.
+ You could use this to detect domain squatting. The pattern search functionality in
+ Investigate uses regular expressions (RegEx) to search against the Investigate
+ database. There are several excellent tools online such as http://regexr.com
+ to help if you’re not familiar with building RegEx.
+ execution: false
+ name: umbrella-domain-search
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: string
+ - contextPath: Domain.FirstSeen
+ description: First seen time in Epoch format
+ type: string
+ - contextPath: Domain.FirstSeenISO
+ description: First seen time in ISO format
+ type: Unknown
+ - contextPath: Domain.SecurityCategories
+ description: Matching Umbrella Security Categories
+ type: string
+ - arguments:
+ - default: true
+ description: Enter a domain regular expression (e.g. "cn.*\\\\.com"). Note that
+ double backslashes ("\\\\") must be used
+ isArray: false
+ name: regex
+ required: true
+ secret: false
+ - default: false
+ description: 'Example: -2weeks, -1 day, -1000minutes, EPOCH unix time'
+ isArray: false
+ name: start
+ required: false
+ secret: false
+ deprecated: true
+ description: This produces a list of matching domains based on a regular expression.
+ You could use this to detect domain squatting. The pattern search functionality in
+ Investigate uses regular expressions (RegEx) to search against the Investigate
+ database. There are several excellent tools online such as http://regexr.com
+ to help if you’re not familiar with building RegEx.
+ execution: false
+ name: investigate-umbrella-domain-search
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: Unknown
+ - contextPath: Domain.FirstSeen
+ description: First seen time in Epoch format
+ type: Unknown
+ - contextPath: Domain.FirstSeenISO
+ description: First seen time in ISO format
+ type: Unknown
+ - contextPath: Domain.SecurityCategories
+ description: Matching Umbrella Security Categories
+ type: Unknown
+ - arguments:
+ - default: true
+ description: 'The domain name you would like to categorize. (e.g. : www.amazon.com)
+ Comma separated list allowed. (e.g. : www.amazon.com,www.facebook.com,www.yahoo.com)'
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Get Domain Reputation info using Cisco Umbrella Investigate. Domain
+ reputation score is either true or false from the service and cannot be modified
+ using a threshold.
+ execution: false
+ name: domain
+ outputs:
+ - contextPath: Domain.Name
+ description: The domain's name.
+ type: string
+ - contextPath: Domain.Umbrella.RiskScore
+ description: The status will be "-1" if the domain is believed to be malicious,
+ "1" if the domain is believed to be benign, "0" if it hasn't been classified
+ yet.
+ type: number
+ - contextPath: Domain.Umbrella.SecureRank
+ description: Suspicious rank for a domain, based on the lookup behavior
+ of client IPs for the domain. Securerank is designed to identify hostnames
+ requested by known infected clients but never requested by clean clients,
+ assuming these domains are more likely to be bad. Scores returned range from
+ -100 (suspicious) to 100 (benign).
+ type: number
+ - contextPath: Domain.Umbrella.FirstQueriedTime
+ description: The time when this domain was first queried.
+ type: number
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: Domain.Umbrella.ContentCategories
+ description: The Umbrella content category or categories that match this domain.
+ If none of them match, the return will be blank.
+ type: string
+ - contextPath: Domain.Umbrella.MalwareCategories
+ description: The Umbrella security category, or categories, that match this
+ domain or that this domain is associated with. If none match, the return will
+ be blank.
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the reason for the vendor to make the decision
+ type: string
+ - contextPath: Domain.Admin.Country
+ description: The country of the domain administrator.
+ type: String
+ - contextPath: Domain.Admin.Email
+ description: The email address of the domain administrator.
+ type: String
+ - contextPath: Domain.Admin.Name
+ description: The name of the domain administrator.
+ type: String
+ - contextPath: Domain.Admin.Phone
+ description: The phone number of the domain administrator.
+ type: String
+ - contextPath: Domain.Registrant.Country
+ description: The country of the registrant.
+ type: String
+ - contextPath: Domain.Registrant.Email
+ description: The email address of the registrant.
+ type: String
+ - contextPath: Domain.Registrant.Name
+ description: The name of the registrant.
+ type: String
+ - contextPath: Domain.Registrant.Phone
+ description: The phone number of the registrant.
+ type: String
+ - contextPath: Domain.CreationDate
+ description: The date on which the domain was created.
+ type: Date
+ - contextPath: Domain.DomainStatus
+ description: The status of the domain.
+ type: String
+ - contextPath: Domain.UpdatedDate
+ description: The date on which the domain was last updated.
+ type: Date
+ - contextPath: Domain.ExpirationDate
+ description: The expiration date of the domain.
+ type: Date
+ - contextPath: Domain.Registrar.Name
+ description: The name of the registrar, such as "GoDaddy".
+ type: String
+ - arguments:
+ - default: false
+ description: 'The domain name you would like to see related domains for. (e.g.
+ : www.cnn.com)'
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Set to true to get a list of co-occurrences. (A co-occurrence is
+ when two or more domains are being accessed by the same users within a small
+ window of time.) By default, this value will be false.
+ isArray: false
+ name: coOccurences
+ predefined:
+ - 'false'
+ - 'true'
+ required: false
+ secret: false
+ deprecated: false
+ description: Get a list of domain names that have been frequently seen requested
+ around the same time (up to 60 seconds before or after) as the given domain
+ name, as well as a list of co-occurrences.
+ execution: false
+ name: umbrella-get-related-domains
+ outputs:
+ - contextPath: Umbrella.RelatedDomains.Domain
+ description: The domain's name.
+ type: string
+ - contextPath: Umbrella.RelatedDomains.Data.Name
+ description: Domain names that have been frequently seen requested around the
+ same time (up to 60 seconds before or after) as the given domain name.
+ type: string
+ - contextPath: Umbrella.CoOccurences.Data.Name
+ description: All co-occurrences of requests from client IPs are returned for
+ the previous seven days, whether the co-occurrence is suspicious or not.
+ type: string
+ - contextPath: Umbrella.CoOccurences.Data.Score
+ description: The values range between 0 and 1.
+ type: number
+ - contextPath: Umbrella.RelatedDomains.Data.Score
+ description: A score reflecting the number of client IPs looking up related
+ sites within 60 seconds of the original request.
+ type: number
+ - contextPath: Umbrella.CoOccurences.Domain
+ description: The domain's name.
+ type: string
+ - arguments:
+ - default: false
+ description: 'The domain name you would like to see classifiers for. (e.g. : www.cnn.com)'
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: List all the classifiers used for a particular domain to assign a
+ particular security categorization or threat type (indicators of compromise).
+ execution: false
+ name: umbrella-get-domain-classifiers
+ outputs:
+ - contextPath: Umbrella.DomainClassifiers.Domain
+ description: The domain's name.
+ type: string
+ - contextPath: Umbrella.DomainClassifiers.Data.MalwareCategories
+ description: Which Umbrella security category, if any, matched the input
+ type: string
+ - contextPath: Umbrella.DomainClassifiers.Data.AttackNames
+ description: Which named attacks, if any, matched the input
+ type: string
+ - contextPath: Umbrella.DomainClassifiers.Data.ThreatTypes
+ description: Which threat type, if any, matched in the input.
+ type: string
+ - arguments:
+ - default: false
+ description: 'The domain name you would like to see volume for. (e.g. : www.cnn.com)'
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ - default: false
+ defaultValue: -7days
+ description: 'Point in time in the past, expressed as a timestamp in the following
+ format or as relative time. Valid formats: start=-2days, start=-2hours, start=1997-07-16T19:20:30+01:00
+ (i.e. YYYY-MM-DDThh:mm:ssTZD). Note the negative sign. The max is 30 days.'
+ isArray: false
+ name: start
+ required: false
+ secret: false
+ - default: false
+ defaultValue: now
+ description: 'Point in time in the past, expressed as a timestamp in milliseconds
+ or relative time. Also valid is ''now''. Valid formats: stop=-1days, stop=now,
+ stop=1997-07-16T19:20:30+01:00 (i.e. YYYY-MM-DDThh:mm:ssTZD). Note the negative
+ sign. The max is 30 days.'
+ isArray: false
+ name: stop
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: all
+ description: 'Valid options are: exact, component, or all (default). 1. Using
+ "cisco.com" as an example, "exact" only gives results for cisco.com. 2. "component"
+ gives results for every component of cisco.com, but not cisco.com itself; examples
+ are www.cisco.com, mail.cisco.com, wwwin.cisco.com, something.else.cisco.com. 3. "all"
+ returns the sum of component and exact; this is the default.'
+ isArray: false
+ name: match
+ predefined:
+ - all
+ - exact
+ - component
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: Limit fetched results; removing the limit can fetch a large amount
+ of results into context.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: The domain volume command shows the number of DNS queries made per
+ hour to the specified domain by users of Umbrella's recursive DNS servers.
+ execution: false
+ name: umbrella-get-domain-queryvolume
+ outputs:
+ - contextPath: Umbrella.QueryVolume.Domain
+ description: The domain's name.
+ type: string
+ - contextPath: Umbrella.QueryVolume.Data.StartDate
+ description: Start date for which the volume data is returned.
+ type: string
+ - contextPath: Umbrella.QueryVolume.Data.StopDate
+ description: Stop date for which the volume data is returned.
+ type: string
+ - contextPath: Umbrella.QueryVolume.Data.QueriesInfo.QueryHour
+ description: Query hour for which the queries data is returned.
+ type: string
+ - contextPath: Umbrella.QueryVolume.Data.QueriesInfo.Queries
+ description: Number of DNS queries per hour, in ascending order, to the specified
+ domain.
+ type: string
+ - arguments:
+ - default: false
+ description: 'The domain name you would like to see the security info for. (e.g.
+ : www.cnn.com)'
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '-100'
+ description: Manually set a threshold instead of secure rank in order to determine
+ whether a domain is malicious, from -100 (malicious) to 100 (good)
+ isArray: false
+ name: threshold
+ required: false
+ secret: false
+ deprecated: false
+ description: The security information API method contains multiple scores or security
+ features, which can act as relevant datapoints to build insight into the domain's reputation.
+ execution: false
+ name: umbrella-get-domain-details
+ outputs:
+ - contextPath: Umbrella.DomainDetails.Domain
+ description: The domain's name.
+ type: string
+ - contextPath: Umbrella.DomainDetails.Data.DGA
+ description: Domain Generation Algorithm. This score is generated based on the
+ likelihood of the domain name being generated by an algorithm rather than
+ a human. This score ranges from -100 (suspicious) to 0 (benign).
+ type: string
+ - contextPath: Umbrella.DomainDetails.Data.Entropy
+ description: The number of bits required to encode the domain name, as a score.
+ This score is to be used in conjunction with DGA and Perplexity.
+ type: number
+ - contextPath: Umbrella.DomainDetails.Data.SecureRank
+ description: Suspicious rank for a domain, based on the lookup behavior
+ of client IPs for the domain. Securerank is designed to identify hostnames
+ requested by known infected clients but never requested by clean clients,
+ assuming these domains are more likely to be bad. Scores returned range from
+ -100 (suspicious) to 100 (benign).
+ type: number
+ - contextPath: Umbrella.DomainDetails.Data.PrefixScore
+ description: Prefix ranks domains given their IP prefixes (an IP prefix is the
+ first three octets in an IP address) and the reputation score of these prefixes.
+ Ranges from -100 to 0, -100 being very suspicious.
+ type: number
+ - contextPath: Umbrella.DomainDetails.Data.RipScore
+ description: RIP ranks domains given their IP addresses and the reputation score
+ of these IP addresses. Ranges from -100 to 0, -100 being very suspicious.
+ type: number
+ - contextPath: Umbrella.DomainDetails.Data.Popularity
+ description: The number of unique client IPs visiting this site, relative to
+ all requests to all sites. A score of how many different client/unique
+ IPs go to this domain compared to others.
+ type: number
+ - contextPath: Umbrella.DomainDetails.Data.Geodiversity
+ description: A score representing the number of queries from clients visiting
+ the domain, broken down by country. Score is a non-normalized ratio between
+ 0 and 1.
+ type: number
+ - contextPath: Umbrella.DomainDetails.Data.TldGeodiversity
+ description: A score that represents the TLD country code geodiversity as a
+ percentage of clients visiting the domain. Occurs most often with domains
+ that have a ccTLD. Score is a normalized ratio between 0 and 1.
+ type: number
+ - contextPath: Umbrella.DomainDetails.Data.KolmogorovSmirnovTest
+ description: Kolmogorov–Smirnov test on geodiversity. 0 means that the client
+ traffic matches what is expected for this TLD.
+ type: number
+ - contextPath: DBotScore.Indicator
+ description: The Indicator
+ type: string
+ - contextPath: DBotScore.Score
+ description: The DBot score
+ type: number
+ - contextPath: DBotScore.Type
+ description: The Indicator type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: The DBot score vendor
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the reason for the vendor to make the decision
+ type: string
+ - arguments:
+ - default: false
+ description: 'Email address following RFC 5322 conventions. (e.g. : admin@google.com)
+ Comma separated list allowed. (e.g. : admin@google.com, dns-admin@google.com,
+ hostmaster@charter.com)'
+ isArray: true
+ name: emails
+ required: true
+ secret: false
+ - default: false
+ description: For paging through domains with more than 500 results, set the
+ offset URL parameter. Default value is 10.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'To sort the list of domains based on timestamp. By default, domains
+ are simply sorted by name in alphabetical order. Possible values are: "created",
+ "updated", and "expired", each of which sorts from the most recent date
+ for the value of the WHOIS entry.'
+ isArray: false
+ name: sort
+ predefined:
+ - created
+ - updated
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: To limit the total number of results (domains).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: The command will return the domains associated with the email address
+ or addresses of the registrar that are looked up.
+ execution: false
+ name: umbrella-get-domains-for-email-registrar
+ outputs:
+ - contextPath: Umbrella.AssociatedDomains.Email
+ description: Email address.
+ type: string
+ - contextPath: Umbrella.AssociatedDomains.Data.TotalResults
+ description: Total number of results for this email.
+ type: number
+ - contextPath: Umbrella.AssociatedDomains.Data.MoreDataAvailable
+ description: Whether or not there are more than 500 results for this email,
+ either yes or no.
+ type: boolean
+ - contextPath: Umbrella.AssociatedDomains.Data.ResultLimit
+ description: Total number of results for this page of results, default 500.
+ type: number
+ - contextPath: Umbrella.AssociatedDomains.Data.Domains.Name
+ description: Domains registered by this email
+ type: string
+ - contextPath: Umbrella.AssociatedDomains.Data.Domains.Name.SecurityCategories
+ description: Security Categories associated with the domain.
+ type: string
+ - contextPath: Umbrella.AssociatedDomains.Data.Domains.Name.ContentCategories
+ description: Content Categories associated with the domain.
+ type: string
+ - contextPath: Umbrella.AssociatedDomains.Data.Domains.LastObserved
+ description: 'Whether the domain is current, meaning currently registered by
+ this email address. Values: Past or Current'
+ type: string
+ - arguments:
+ - default: false
+ description: 'Enter the Nameserver’s domain name. (e.g. : ns2.google.com) Comma
+ separated list allowed. (e.g. : ns2.google.com, ns1.google.com)'
+ isArray: true
+ name: nameservers
+ required: true
+ secret: false
+ - default: false
+ description: For paging through domains with more than 500 results, set the
+ offset URL parameter. Default value is 10.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: '"To sort the list of domains based on timestamp. By default, domains
+ are simply sorted by name in alphabetical order. Possible values are: ""created"",
+ ""updated"", and ""expired"", each of which sorts from the most recent date
+ for the value of the WHOIS entry."'
+ isArray: false
+ name: sort
+ predefined:
+ - created
+ - updated
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: To limit the total number of results (domains).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: The Nameserver command allows you to search a nameserver to find
+ all domains registered by that nameserver. You can search against a single nameserver
+ or multiple nameservers in a query.
+ execution: false
+ name: umbrella-get-domains-for-nameserver
+ outputs:
+ - contextPath: Umbrella.AssociatedDomains.Nameserver
+ description: Nameserver's domain name.
+ type: string
+ - contextPath: Umbrella.AssociatedDomains.Data.TotalResults
+ description: Total number of results for this nameserver domain name.
+ type: number
+ - contextPath: Umbrella.AssociatedDomains.Data.MoreDataAvailable
+ description: Whether or not there are more than 500 results for this nameserver,
+ either yes or no.
+ type: boolean
+ - contextPath: Umbrella.AssociatedDomains.Data.ResultLimit
+ description: Total number of results for this page of results, default 500.
+ type: number
+ - contextPath: Umbrella.AssociatedDomains.Data.Domains.Name
+ description: Domains registered by this nameserver.
+ type: string
+ - contextPath: Umbrella.AssociatedDomains.Data.Domains.Name.SecurityCategories
+ description: Security Categories associated with the domain.
+ type: string
+ - contextPath: Umbrella.AssociatedDomains.Data.Domains.Name.ContentCategories
+ description: Content Categories associated with the domain.
+ type: string
+ - contextPath: Umbrella.AssociatedDomains.Data.Domains.LastObserved
+ description: 'Whether the domain is current, meaning currently registered by
+ this nameserver. Values: Past or Current'
+ type: string
+ - arguments:
+ - default: false
+ description: 'Domain name without wildcards and including TLD. (e.g. : www.cnn.com)'
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: This command will provide a standard WHOIS response record for a
+ single domain with all available WHOIS data returned.
+ execution: false
+ name: umbrella-get-whois-for-domain
+ outputs:
+ - contextPath: Domain.Umbrella.Whois.Name
+ description: Domain's name.
+ type: string
+ - contextPath: Domain.Umbrella.Whois.RegistrarName
+ description: Domain registrar name
+ type: string
+ - contextPath: Domain.Umbrella.Whois.LastRetrieved
+ description: Domain last retrieved date
+ type: string
+ - contextPath: Domain.Umbrella.Whois.Created
+ description: Domain created date
+ type: string
+ - contextPath: Domain.Umbrella.Whois.Updated
+ description: Domain updated date
+ type: string
+ - contextPath: Domain.Umbrella.Whois.Expires
+ description: Domain expiry date
+ type: string
+ - contextPath: Domain.Umbrella.Whois.IANAID
+ description: IANA ID
+ type: string
+ - contextPath: Domain.Umbrella.Whois.LastObserved
+ description: Domain last observed
+ type: string
+ - contextPath: Domain.Umbrella.Whois.Nameservers.Name
+ description: Domain's name servers
+ type: string
+ - contextPath: Domain.Umbrella.Whois.Emails.Name
+ description: Domain's email
+ type: string
+ - contextPath: Domain.Name
+ description: The domain name e.g. google.com
+ type: Unknown
+ - contextPath: Domain.Admin.Country
+ description: The country of the domain administrator.
+ type: String
+ - contextPath: Domain.Admin.Email
+ description: The email address of the domain administrator.
+ type: String
+ - contextPath: Domain.Admin.Name
+ description: The name of the domain administrator.
+ type: String
+ - contextPath: Domain.Admin.Phone
+ description: The phone number of the domain administrator.
+ type: String
+ - contextPath: Domain.Registrant.Country
+ description: The country of the registrant.
+ type: String
+ - contextPath: Domain.Registrant.Email
+ description: The email address of the registrant.
+ type: String
+ - contextPath: Domain.Registrant.Name
+ description: The name of the registrant.
+ type: String
+ - contextPath: Domain.Registrant.Phone
+ description: The phone number of the registrant.
+ type: String
+ - contextPath: Domain.CreationDate
+ description: The date on which the domain was created.
+ type: Date
+ - contextPath: Domain.DomainStatus
+ description: The status of the domain.
+ type: String
+ - contextPath: Domain.UpdatedDate
+ description: The date on which the domain was last updated.
+ type: Date
+ - contextPath: Domain.ExpirationDate
+ description: The expiration date of the domain.
+ type: Date
+ - contextPath: Domain.Registrar.Name
+ description: The name of the registrar, such as "GoDaddy".
+ type: String
+ - arguments:
+ - default: false
+ description: IP Address to check for malicious domains.
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: Test whether the IP address you’ve entered as input has any known
+ malicious domains associated with it.
+ execution: false
+ name: umbrella-get-malicious-domains-for-ip
+ outputs:
+ - contextPath: Umbrella.MaliciousDomains.IP
+ description: IP address.
+ type: string
+ - contextPath: Umbrella.MaliciousDomains.Data.Name
+ description: The block list domain associated with the IP
+ type: string
+ - contextPath: Umbrella.MaliciousDomains.Data.LastObserved
+ description: 'Whether the domain is current. Values: Past or Current'
+ type: string
+ - contextPath: Umbrella.MaliciousDomains.Data.MalwareCategories
+ description: Security Categories associated with the domain.
+ type: string
+ - contextPath: Umbrella.MaliciousDomains.Data.ContentCategories
+ description: Content Categories associated with the domain.
+ type: string
+ - arguments:
+ - default: false
+ description: 'A standard RegEx search pattern, which must be enclosed in double
+ quotes.'
+ isArray: false
+ name: expression
+ required: true
+ secret: false
+ - default: false
+ defaultValue: -7days
+ description: 'Can either be specified in relative or absolute time. Point in
+ time in the past, expressed as a timestamp in the following format or relative
+ time. Valid formats: start=-2days start=-2hours start=-1000minutes start=-3weeks
+ start=1997-07-16T19:20:30+01:00 i.e YYYY-MM-DDThh:mm:ssTZD Note the negative
+ sign for relative time. Max is -30days.'
+ isArray: false
+ name: start
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: If set to true, security categories are included in the results,
+ which may slow the return times.
+ isArray: false
+ name: includeCategory
+ predefined:
+ - 'false'
+ - 'true'
+ required: false
+ secret: false
+ - default: false
+ defaultValue: now
+ description: 'The exclusive end time in milliseconds absolute or relative time
+ (eg: ''now'', ''-2days'',''1997-07-16T19:20:30+01:00'') for a query.'
+ isArray: false
+ name: stop
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: The maximum number of items to return. Combine with offset for
+ result pagination.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: HOST
+ description: Search database node type (URL, IP, HOST).
+ isArray: false
+ name: type
+ predefined:
+ - URL
+ - IP
+ - HOST
+ required: false
+ secret: false
+ deprecated: false
+ description: Get the list of matching domains (Investigate Database) based on
+ a regular expression.
+ execution: false
+ name: umbrella-get-domains-using-regex
+ outputs:
+ - contextPath: Umbrella.DomainSearch.TotalResults
+ description: Total results from this search string. The default number of results
+ is 100 and can be expanded using the limit parameter.
+ type: number
+ - contextPath: Umbrella.DomainSearch.Data.Name
+ description: Name of the domain found.
+ type: string
+ - contextPath: Umbrella.DomainSearch.Data.FirstSeen
+ description: First Seen of the domain found.
+ type: string
+ - contextPath: Umbrella.DomainSearch.Data.SecurityCategories
+ description: Security Categories associated with the domain.
+ type: string
+ - arguments:
+ - default: false
+ description: 'The domain name you would like to see the timeline for. (e.g. : www.cnn.com)'
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: The timeline command shows when a domain was given attribution of
+ a particular security categorization or threat type (indicators of compromise).
+ execution: false
+ name: umbrella-get-domain-timeline
+ outputs:
+ - contextPath: Umbrella.Timeline.Domain
+ description: Domain name
+ type: string
+ - contextPath: Umbrella.Timeline.Data.MalwareCategories
+ description: Which Umbrella security category, if any, matched the input
+ type: string
+ - contextPath: Umbrella.Timeline.Data.Attacks
+ description: Which named attacks, if any, matched the input
+ type: string
+ - contextPath: Umbrella.Timeline.Data.ThreatTypes
+ description: Which threat type, if any, matched in the input.
+ type: string
+ - contextPath: Umbrella.Timeline.Data.Timestamp
+ description: The time when the attribution for this Domain changed.
+ type: string
+ - arguments:
+ - default: false
+ description: 'The IP you would like to see the timeline for. (e.g. : 8.8.8.8)'
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: The timeline command shows when an IP was given attribution of a particular
+ security categorization or threat type (indicators of compromise).
+ execution: false
+ name: umbrella-get-ip-timeline
+ outputs:
+ - contextPath: Umbrella.Timeline.IP
+ description: IP address
+ type: string
+ - contextPath: Umbrella.Timeline.Data.MalwareCategories
+ description: Which Umbrella security category, if any, matched the input
+ type: string
+ - contextPath: Umbrella.Timeline.Data.Attacks
+ description: Which named attacks, if any, matched the input
+ type: string
+ - contextPath: Umbrella.Timeline.Data.ThreatTypes
+ description: Which threat type, if any, matched in the input.
+ type: string
+ - contextPath: Umbrella.Timeline.Data.Timestamp
+ description: The time when the attribution for this IP changed.
+ type: string
+ - arguments:
+ - default: false
+ description: 'The URL you would like see timeline for. (e.g. : www.aws.amazon.com)'
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ deprecated: false
+ description: The timeline command shows when a URL was given attribution of a
+ particular security categorization or threat type (indicators of compromise).
+ execution: false
+ name: umbrella-get-url-timeline
+ outputs:
+ - contextPath: Umbrella.Timeline.URL
+ description: URL value
+ type: string
+ - contextPath: Umbrella.Timeline.Data.MalwareCategories
+ description: Which Umbrella security category, if any, matched the input
+ type: string
+ - contextPath: Umbrella.Timeline.Data.Attacks
+ description: Which named attacks, if any, matched the input
+ type: string
+ - contextPath: Umbrella.Timeline.Data.ThreatTypes
+ description: Which threat type, if any, matched in the input.
+ type: string
+ - contextPath: Umbrella.Timeline.Data.Timestamp
+ description: The time when the attribution for this URL changed.
+ type: date
+ isfetch: false
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- Cisco Umbrella Test
diff --git a/Integrations/Cloaken/Cloaken.py b/Integrations/Cloaken/Cloaken.py
new file mode 100644
index 000000000000..a86da6b39858
--- /dev/null
+++ b/Integrations/Cloaken/Cloaken.py
@@ -0,0 +1,72 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+# The command demisto.command() holds the command sent from the user.
+from cloakensdk.client import SyncClient
+from cloakensdk.resources import Url
+
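+# handle_proxy (from CommonServerPython) reads the "proxy" integration parameter;
+# when the checkbox is off it clears the proxy environment variables so requests
+# are sent directly, and when on it returns the proxies mapping from the environment.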
+PROXY = handle_proxy("proxy", False)
+
+
+def get_client():
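+ # Build a cloakensdk SyncClient from the integration parameters:
+ # server URL, username/password credentials, and the SSL-verification flag.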
+ server = demisto.params()["server_url"]
+ verify = not demisto.params().get('insecure', False)
+ password = demisto.params()["credentials"]["password"]
+ username = demisto.params()["credentials"]["identifier"]
+ client = SyncClient(
+ server_url=server,
+ username=username,
+ verify=verify,
+ password=password
+ )
+ return client
+
+
+if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ # Instantiating the client validates that the configured parameters are well formed.
+ get_client()
+ demisto.results('ok')
+
+if demisto.command() == 'cloaken-unshorten-url':
+ client = get_client()
+ url = demisto.args()['url']
+ resource = Url(client)
+ resource.unshorten(url)
+ response = resource.full_request()
+
+ response_code = response.get('response_code', 'NA')
+ response_status = response.get('status', 'FAILED')
+ if response_status == 'Success':
+ # successfully unshortened the url
+ url_data = response.get('data', {}).get('unshortened_url')
+ cloaken_context = {
+ 'OriginalURL': url,
+ 'UnshortenedURL': url_data,
+ 'Status': response_code
+ }
+ ec = {
+ outputPaths['url']: {
+ 'Data': url_data
+ },
+ 'Cloaken': cloaken_context
+ }
+ return_outputs(
+ tableToMarkdown('Cloakened URL', cloaken_context),
+ ec,
+ cloaken_context
+ )
+ elif response_code == 400:
+ # url was malformed
+ context = {
+ 'original_url': url,
+ 'unshortened_url': '',
+ 'status': response_code
+ }
+ return_outputs(
+ tableToMarkdown("Not able to resolve or malformed URL ", context),
+ {},
+ context
+ )
+ else:
+ # server error or unavailable
+ return_error('Error Cloaken Unshorten: ' + str(response.get('data', 'key missing')))
diff --git a/Integrations/Cloaken/Cloaken.yml b/Integrations/Cloaken/Cloaken.yml
new file mode 100644
index 000000000000..449d156ec89c
--- /dev/null
+++ b/Integrations/Cloaken/Cloaken.yml
@@ -0,0 +1,56 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: Cloaken
+ version: -1
+configuration:
+- display: Server URL (e.g., https://cloaken.cypherint.com)
+ name: server_url
+ defaultvalue: "https://cloaken.cypherint.com"
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+description: Unshorten URLs onsite using the power of a Tor proxy server to prevent leaking IP addresses to adversaries.
+display: Cloaken
+name: Cloaken
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: URL to unshorten.
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ deprecated: false
+ description: Unshortens a URL.
+ execution: false
+ name: cloaken-unshorten-url
+ outputs:
+ - contextPath: Cloaken.UnshortenedURL
+ description: The unshortened URL.
+ type: string
+ - contextPath: Cloaken.OriginalURL
+ description: The original URL.
+ type: string
+ - contextPath: URL.Data
+ description: The unshortened URL.
+ type: string
+ - contextPath: Cloaken.Status
+ description: 'Status of the response: BADREQUEST or OK.'
+ type: number
+ dockerimage: demisto/cloaken:1.0.0.346
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
diff --git a/Integrations/Cloaken/Cloaken_description.md b/Integrations/Cloaken/Cloaken_description.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Integrations/Cloaken/Cloaken_image.png b/Integrations/Cloaken/Cloaken_image.png
new file mode 100644
index 000000000000..fd1c11ebafbc
Binary files /dev/null and b/Integrations/Cloaken/Cloaken_image.png differ
diff --git a/Integrations/CofenseTriage/CHANGELOG.md b/Integrations/CofenseTriage/CHANGELOG.md
new file mode 100644
index 000000000000..a84dcf51e484
--- /dev/null
+++ b/Integrations/CofenseTriage/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.8.0] - 2019-08-06
+#### New Integration
+Use the Cofense Triage integration to check the reputation of URLs, IP addresses, file hashes, and email addresses.
\ No newline at end of file
diff --git a/Integrations/CofenseTriage/CofenseTriage.py b/Integrations/CofenseTriage/CofenseTriage.py
new file mode 100644
index 000000000000..d9d474eac354
--- /dev/null
+++ b/Integrations/CofenseTriage/CofenseTriage.py
@@ -0,0 +1,380 @@
+import demistomock as demisto
+from CommonServerPython import *
+
+'''IMPORTS'''
+import requests
+from typing import Any, List, Dict
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS '''
+BASE_URL = demisto.getParam('host').rstrip('/') + '/api/public/v1' # type: str
+TOKEN = demisto.getParam('token') # type: str
+USER = demisto.getParam('user') # type: str
+USE_SSL = not demisto.params().get('insecure', False) # type: bool
+
+HEADERS = {
+ "Authorization": f"Token token={USER}:{TOKEN}",
+ "Accept": "application/json"
+} # type: dict
+DEFAULT_TIME_RANGE = '7 days' # type: str
+TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ' # type: str
+
+CATEGORIES = {
+ 1: 'Non-Malicious',
+ 2: 'Spam',
+ 3: 'Crimeware',
+ 4: 'Advanced Threats',
+ 5: 'Phishing Simulation'
+}
+
+# Severity levels are 4 - Critical, 3 - High, 2 - Medium, 1 - Low, 0 - Unknown
+CATEGORIES_SEVERITY = {
+ 1: 1, # non malicious -> low
+ 2: 0, # spam -> unknown
+ 3: 2, # crimeware -> medium
+ 4: 2, # advanced threats -> medium
+ 5: 1 # phishing simulation -> low
+}
+
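+# Report fields kept when a command requests terse output (an assumption based
+# on the field name; the commands that consume this list appear later in the file).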
+TERSE_FIELDS = [
+ 'id',
+ 'cluster_id',
+ 'reporter_id',
+ 'location',
+ 'created_at',
+ 'reported_at',
+ 'report_subject',
+ 'report_body',
+ 'md5',
+ 'sha256',
+ 'category_id',
+ 'match_priority',
+ 'tags',
+ 'email_attachments'
+]
+
+
+# HELPER FUNCTIONS #
+def snake_to_camel_keys(snake_list: List[Dict]) -> List[Dict]:
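+ # Converts each dict's snake_case keys to CamelCase (with 'id' mapped to 'ID'),
+ # matching the context output convention used by this integration.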
+ def snake_to_camel(snake_str) -> str:
+ if snake_str == 'id':
+ return 'ID'
+ components = snake_str.split('_')
+ return ''.join(x.title() for x in components)
+
+ return [{snake_to_camel(k): v for k, v in snake_d.items()} for snake_d in snake_list]
+
+
+def split_snake(string: str) -> str:
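+ # Turns a snake_case field such as "report_subject" into "Report Subject"
+ # for human-readable markdown table headers.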
+ return string.replace("_", " ").title()
+
+
+# MAIN FUNCTIONS #
+def http_request(url_suffix: str, params=None, body=None, raw_response=False) -> Any:
+ """
+ Generic request to Cofense Triage. Client applications can make 25 requests to Cofense Triage
+ within a five-minute interval using the Cofense Triage API.
+ """
+ response = requests.get(
+ BASE_URL + url_suffix,
+ headers=HEADERS,
+ params=params,
+ data=body,
+ verify=USE_SSL,
+ )
+ try:
+ if not response.ok:
+ return_error(f'Call to Cofense Triage failed [{response.status_code}] - [{response.text}]')
+
+ elif response.status_code == 206: # 206 indicates Partial Content, reason will be in the warning header
+ demisto.debug(str(response.headers))
+
+ if raw_response:
+ return response
+ data = response.json() if response.text and response.text != '[]' else {} # type: Any
+ return data
+
+ except TypeError as ex:
+ demisto.debug(str(ex))
+ return_error(f'Error in API call to Cofense Triage, could not parse result [{response.status_code}]')
+ return {}
+
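+# Sketch: the http_request docstring above notes a limit of 25 requests per
+# five-minute window. A minimal client-side throttle under that assumption
+# (the limit and window come from that note, not from the API) could look like:
+_CALL_TIMES = []  # type: List[float]
+
+
+def throttle(limit=25, window=300.0):
+    """Sleep just long enough for a new request to fit inside the rate-limit window."""
+    import time  # local import keeps this sketch self-contained
+    now = time.time()
+    # keep only the calls made inside the current window
+    _CALL_TIMES[:] = [t for t in _CALL_TIMES if now - t < window]
+    if len(_CALL_TIMES) >= limit:
+        # wait until the oldest call rolls out of the window
+        time.sleep(window - (now - _CALL_TIMES[0]))
+    _CALL_TIMES.append(time.time())
+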
+
+def test_function() -> None:
+ try:
+ response = requests.get(
+ BASE_URL + '/processed_reports',
+ headers=HEADERS,
+ params="",
+ verify=USE_SSL,
+ )
+ if demisto.getParam("isFetch"):
+ fetch_reports()
+
+ if response.ok:
+ demisto.results('ok')
+
+ else:
+ return_error(
+                'API call to Cofense Triage failed. Please check the Server URL or authentication-related'
+                f' parameters. [{response.status_code}] - {response.reason}')
+
+ except Exception as ex:
+ demisto.debug(str(ex))
+        return_error('API call to Cofense Triage failed. Please check the Server URL or authentication-related parameters.')
+
+
+def fetch_reports() -> None:
+ # parameters importing
+ start_date, _ = parse_date_range(demisto.getParam('date_range'), date_format=TIME_FORMAT)
+ max_fetch = int(demisto.getParam('max_fetch')) # type: int
+ params = {
+ 'category_id': demisto.getParam('category_id'),
+ 'match_priority': demisto.getParam('match_priority'),
+ 'tags': demisto.getParam('tags'),
+ 'start_date': start_date,
+ }
+
+ # running the API command
+ reports = http_request(url_suffix='/processed_reports', params=params)
+
+ # loading last_run
+ last_run = json.loads(demisto.getLastRun().get('value', '{}'))
+ already_fetched = last_run.get('already_fetched', [])
+
+ # parsing outputs
+ incidents = []
+ for report in reports:
+ if report.get('id') not in already_fetched:
+ category_id, report_id = report.get('category_id'), report['id']
+ report_body = report.pop('report_body')
+ incident = {
+ 'name': f"cofense triage report {report_id}: {CATEGORIES.get(category_id, 'Unknown')}",
+ 'occurred': report.get('created_at'),
+ 'rawJSON': json.dumps(report),
+ 'severity': CATEGORIES_SEVERITY.get(category_id, 0)
+ }
+
+ # load HTML attachment into the incident
+ attachment = load_attachment(report_body, report_id)
+ if attachment:
+ incident['attachment'] = attachment
+ else:
+ # attachment is not HTML file, keep it as plain text
+ report['report_body'] = report_body
+ incident['rawJSON'] = json.dumps(report)
+
+ incidents.append(incident)
+ already_fetched.append(report_id)
+ if len(incidents) >= max_fetch:
+ break
+
+ demisto.incidents(incidents)
+ last_run = {'already_fetched': already_fetched}
+ demisto.setLastRun({'value': json.dumps(last_run)})
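+    # last_run round-trip sketch: on the next fetch, getLastRun() returns
+    # {'value': '{"already_fetched": [101, 102]}'} (IDs illustrative), and those
+    # report IDs are skipped by the dedup check above.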
+
+
+def load_attachment(report_body: Any, report_id: int) -> list:
+ if report_body and 'HTML' in report_body:
+ html_attachment = fileResult(filename=f'{report_id}-report.html', data=report_body.encode())
+ attachment = {
+ 'path': html_attachment.get('FileID'),
+ 'name': html_attachment.get('FileName')
+ }
+ return [attachment]
+ return []
+
+
+def search_reports_command() -> None:
+ # arguments importing
+ subject = demisto.getArg('subject') # type: str
+ url = demisto.getArg('url') # type: str
+ file_hash = demisto.getArg('file_hash') # type: str
+ reported_at, _ = parse_date_range(demisto.args().get('reported_at', DEFAULT_TIME_RANGE))
+ created_at, _ = parse_date_range(demisto.args().get('created_at', DEFAULT_TIME_RANGE))
+ reporter = demisto.getArg('reporter') # type: str
+ max_matches = int(demisto.getArg('max_matches')) # type: int
+ verbose = demisto.getArg('verbose') == "true"
+
+ # running the API command
+ results = search_reports(subject, url, file_hash, reported_at, created_at, reporter, verbose, max_matches)
+
+ # parsing outputs
+ if results:
+ ec = {'Cofense.Report(val.ID && val.ID == obj.ID)': snake_to_camel_keys(results)}
+ hr = tableToMarkdown("Reports:", results, headerTransform=split_snake, removeNull=True)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': results if results else "no results were found",
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+ else:
+ return_outputs("no results were found.", {})
+
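+# Example invocation from the CLI (argument values are hypothetical):
+#   !cofense-search-reports reported_at="2 days" max_matches="10" verbose="false"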
+
+def search_reports(subject=None, url=None, file_hash=None, reported_at=None, created_at=None, reporter=None,
+ verbose=False, max_matches=30) -> list:
+ params = {'start_date': datetime.strftime(reported_at, TIME_FORMAT)}
+ reports = http_request(url_suffix='/processed_reports', params=params)
+
+ if not isinstance(reports, list):
+ reports = [reports]
+
+ reporters = [] # type: list
+ if reporter:
+ reporters = get_all_reporters(time_frame=min(reported_at, created_at))
+
+ matches = []
+
+ for report in reports:
+ if subject and subject != report.get('subject'):
+ continue
+ if url and url != report.get('url'):
+ continue
+ if created_at and 'created_at' in report and created_at >= datetime.strptime(report['created_at'], TIME_FORMAT):
+ continue
+ if file_hash and file_hash != report.get('md5') and file_hash != report.get('sha256'):
+ continue
+ if reporter and int(reporter) != report.get('reporter_id') and reporter not in reporters:
+ continue
+
+ if not verbose:
+ # extract only relevant fields
+ report = {key: report[key] for key in report.keys() & TERSE_FIELDS}
+
+ matches.append(report)
+ if len(matches) >= max_matches:
+ break
+
+ return matches
+
+
+def get_all_reporters(time_frame) -> list:
+    res = http_request('/reporters', params={'start_date': time_frame})
+ if not isinstance(res, list):
+ res = [res]
+ reporters = [reporter.get('email') for reporter in res]
+
+ return reporters
+
+
+def get_reporter_command() -> None:
+ # arguments importing
+ reporter_id = demisto.getArg('reporter_id') # type: str
+
+ # running the API command
+ res = get_reporter(reporter_id)
+
+ # parsing outputs
+ ec = {'Cofense.Reporter(val.ID && val.ID == obj.ID)': {'ID': reporter_id, 'Email': res}}
+ hr = f'Reporter: {res}' if res else 'Could not find reporter with matching ID'
+ return_outputs(readable_output=hr, outputs=ec)
+
+
+def get_reporter(reporter_id) -> str:
+ res = http_request(url_suffix=f'/reporters/{reporter_id}')
+ if not isinstance(res, list):
+ res = [res]
+ reporter = res[0].get('email')
+
+ return reporter
+
+
+def get_attachment_command() -> None:
+ # arguments importing
+ attachment_id = demisto.getArg('attachment_id') # type: str
+ file_name = demisto.getArg('file_name') or attachment_id # type: str
+
+ # running the command
+ res = get_attachment(attachment_id)
+
+ # parsing outputs
+ context_data = {'ID': attachment_id}
+ demisto.results(fileResult(file_name, res.content))
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': '',
+ 'HumanReadable': '',
+ 'EntryContext': {'Cofense.Attachment(val.ID == obj.ID)': context_data}
+ })
+
+
+def get_attachment(attachment_id):
+ response = http_request(f'/attachment/{attachment_id}', params={'attachment_id': attachment_id}, raw_response=True)
+ if not response.ok:
+ return_error(f'Call to Cofense Triage failed [{response.status_code}]')
+ else:
+ return response
+
+
+def get_report_by_id_command() -> None:
+ # arguments importing
+ report_id = int(demisto.getArg('report_id')) # type: int
+ verbose = demisto.getArg('verbose') == "true"
+
+ # running the command
+ res = get_report_by_id(report_id)[0]
+
+ # parsing outputs
+ if not verbose:
+ # extract only relevant fields
+ res = {k: res[k] for k in res.keys() & TERSE_FIELDS}
+
+ # get the report body, and create html file if necessary
+ if res:
+ parse_report_body(res)
+ res['reporter'] = get_reporter(res.get('reporter_id')) # enrich: id -> email
+ hr = tableToMarkdown("Report Summary:", res, headerTransform=split_snake, removeNull=True)
+ ec = {'Cofense.Report(val.ID && val.ID == obj.ID)': snake_to_camel_keys([res])}
+ return_outputs(readable_output=hr, outputs=ec)
+
+ else:
+ return_error('Could not find report with matching ID')
+
+
+def parse_report_body(report) -> None:
+ if 'report_body' in report and 'HTML' in report['report_body']:
+ attachment = fileResult(
+ filename=f'{report.get("id")}-report.html',
+ data=report.get('report_body').encode(),
+ )
+ attachment['HumanReadable'] = '### Cofense HTML Report:\nHTML report download request has been completed'
+ demisto.results(attachment)
+ del report['report_body']
+
+
+def get_report_by_id(report_id):
+ response = http_request(url_suffix=f'/reports/{report_id}', params={'report_id': report_id})
+ return response
+
+
+try:
+ handle_proxy()
+
+ # COMMANDS
+ if demisto.command() == 'test-module':
+ test_function()
+
+    elif demisto.command() == 'fetch-incidents':
+ fetch_reports()
+
+ elif demisto.command() == 'cofense-search-reports':
+ search_reports_command()
+
+ elif demisto.command() == 'cofense-get-attachment':
+ get_attachment_command()
+
+ elif demisto.command() == 'cofense-get-reporter':
+ get_reporter_command()
+
+ elif demisto.command() == 'cofense-get-report-by-id':
+ get_report_by_id_command()
+
+except Exception as e:
+ return_error(str(e))
+ raise
diff --git a/Integrations/CofenseTriage/CofenseTriage.yml b/Integrations/CofenseTriage/CofenseTriage.yml
new file mode 100644
index 000000000000..ca13c0e70f3a
--- /dev/null
+++ b/Integrations/CofenseTriage/CofenseTriage.yml
@@ -0,0 +1,250 @@
+commonfields:
+ id: Cofense Triage
+ version: -1
+name: Cofense Triage
+description: Use the Cofense Triage integration to manage reports and attachments.
+display: Cofense Triage
+category: Data Enrichment & Threat Intelligence
+configuration:
+ - display: Server URL (e.g., https://192.168.0.1)
+ name: host
+ defaultvalue: ""
+ type: 0
+ required: true
+ - display: User
+ name: user
+ defaultvalue: ""
+ type: 0
+ required: true
+ - display: API Token
+ name: token
+ defaultvalue: ""
+ type: 4
+ required: true
+ - display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+ - display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+  - display: First fetch time range (<number> <time unit>, e.g., 12 hours, 7 days, 3 months, 1 year)
+ name: date_range
+ defaultvalue: 3 days
+ type: 0
+ required: false
+ - display: Category ID to fetch - corresponds to the ranking that determines the Cofense
+ Triage prioritization (1-5)
+ name: category_id
+ defaultvalue: ""
+ type: 0
+ required: false
+ - display: Match Priority - the highest match priority based on rule hits for the
+ report
+ name: match_priority
+ defaultvalue: ""
+ type: 0
+ required: false
+  - display: Tags - CSV list of tags of processed reports to filter by
+ name: tags
+ defaultvalue: ""
+ type: 0
+ required: false
+ - display: Maximum number of incidents to fetch each time
+ name: max_fetch
+ defaultvalue: "30"
+ type: 0
+ required: false
+  - display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: "false"
+ type: 8
+ required: false
+ - display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+script:
+ script: '-'
+ type: python
+ subtype: python3
+ commands:
+ - name: cofense-search-reports
+ arguments:
+ - name: file_hash
+ description: File hash, MD5 or SHA256.
+ - name: url
+ description: The reported URLs.
+ - name: subject
+      description: The report's subject.
+ - name: reported_at
+      description: 'Retrieve reports that were reported after this time, for example: "2 hours,
+        4 minutes, 6 months, 1 day".'
+ defaultValue: 60 days
+ - name: created_at
+      description: 'Retrieve reports that were created after this time, for example: "2 hours,
+        4 minutes, 6 months, 1 day".'
+ defaultValue: 60 days
+ - name: reporter
+ description: Name or ID of the reporter.
+ - name: max_matches
+ default: true
+ description: Maximum number of matches to fetch. Default is 30.
+ defaultValue: "30"
+ - name: verbose
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Returns all fields of a report.
+ outputs:
+ - contextPath: Report.ID
+ description: ID number of the report.
+ - contextPath: Report.EmailAttachments
+ description: Email attachments.
+ - contextPath: Report.EmailAttachments.id
+ description: Email attachment ID.
+ - contextPath: Report.Tags
+ description: Report tags.
+ type: string
+ - contextPath: Report.ClusterId
+ description: Cluster ID number.
+ type: number
+ - contextPath: Report.CategoryId
+ description: Report category.
+ type: number
+ - contextPath: Report.CreatedAt
+ description: Report creation date.
+ type: date
+ - contextPath: Report.ReportedAt
+ description: Reporting time.
+ type: string
+ - contextPath: Report.MatchPriority
+ description: The highest match priority based on rule hits for the report.
+ type: number
+ - contextPath: Report.ReporterId
+ description: Reporter ID.
+ type: number
+ - contextPath: Report.Location
+ description: Location of the report.
+ type: string
+ - contextPath: Report.Reporter
+ description: Reporter email address.
+ type: string
+ - contextPath: Report.SuspectFromAddress
+ description: Suspect from address.
+ type: string
+ - contextPath: Report.ReportSubject
+ description: Report subject.
+ type: string
+ - contextPath: Report.ReportBody
+ description: Report body.
+ type: string
+    - contextPath: Report.Md5
+      description: MD5 hash of the file.
+      type: string
+    - contextPath: Report.Sha256
+      description: SHA256 hash of the file.
+      type: string
+ description: 'Runs a query for reports.'
+ - name: cofense-get-attachment
+ arguments:
+ - name: attachment_id
+ required: true
+ description: ID of the attachment.
+ outputs:
+    - contextPath: Attachment.ID
+      description: The ID of the report that contains the attachment.
+      type: string
+    - contextPath: File.Size
+      description: The size of the attachment file, in bytes.
+      type: string
+    - contextPath: File.EntryID
+      description: The entry ID of the attachment file in the War Room.
+      type: string
+    - contextPath: File.Name
+      description: The name of the attachment file.
+      type: string
+    - contextPath: File.SHA1
+      description: The SHA1 hash of the file.
+      type: string
+    - contextPath: File.SHA256
+      description: The SHA256 hash of the file.
+      type: string
+    - contextPath: File.MD5
+      description: The MD5 hash of the file.
+      type: string
+    description: Retrieves an attachment by attachment ID.
+ - name: cofense-get-reporter
+ arguments:
+ - name: reporter_id
+ required: true
+ description: ID of the reporter.
+ outputs:
+    - contextPath: Reporter.ID
+      description: ID of the reporter.
+    - contextPath: Reporter.Email
+      description: Email address of the reporter.
+    description: Retrieves the email address of a reporter by reporter ID.
+ - name: cofense-get-report-by-id
+ arguments:
+ - name: report_id
+ required: true
+      description: ID of the report.
+ outputs:
+ - contextPath: Report.ID
+      description: ID number of the report.
+ - contextPath: Report.EmailAttachments
+ description: Email attachments.
+ - contextPath: Report.EmailAttachments.id
+ description: Email attachment ID.
+ - contextPath: Report.Tags
+ description: Report tags.
+ type: string
+ - contextPath: Report.ClusterId
+ description: Cluster ID number.
+ type: number
+ - contextPath: Report.CategoryId
+ description: Report category.
+ type: number
+ - contextPath: Report.CreatedAt
+ description: Report creation date.
+ type: date
+ - contextPath: Report.ReportedAt
+ description: Reporting time.
+ type: string
+ - contextPath: Report.MatchPriority
+ description: The highest match priority based on rule hits for the report.
+ type: number
+ - contextPath: Report.ReporterId
+ description: Reporter ID.
+ type: number
+ - contextPath: Report.Location
+ description: Location of the report.
+ type: string
+ - contextPath: Report.Reporter
+ description: Reporter email address.
+ type: string
+ - contextPath: Report.SuspectFromAddress
+ description: Suspect from address.
+ type: string
+ - contextPath: Report.ReportSubject
+ description: Report subject.
+ type: string
+ - contextPath: Report.ReportBody
+ description: Report body.
+ type: string
+    - contextPath: Report.Md5
+      description: MD5 hash of the file.
+      type: string
+    - contextPath: Report.Sha256
+      description: SHA256 hash of the file.
+      type: string
+    description: Retrieves a report by report ID.
+ dockerimage: demisto/python3:3.7.2.200
+ isfetch: true
+ runonce: false
+tests:
+ - Cofense Triage Test
diff --git a/Integrations/CofenseTriage/CofenseTriage_description.md b/Integrations/CofenseTriage/CofenseTriage_description.md
new file mode 100644
index 000000000000..a12ac240864f
--- /dev/null
+++ b/Integrations/CofenseTriage/CofenseTriage_description.md
@@ -0,0 +1,4 @@
+Cofense Triage provides an API that superusers can use to
+programmatically extract data from Cofense Triage in JSON format.
+
+This integration was tested with Cofense Triage version 1.14.0.
diff --git a/Integrations/CofenseTriage/CofenseTriage_image.png b/Integrations/CofenseTriage/CofenseTriage_image.png
new file mode 100644
index 000000000000..7f4cfdda2d61
Binary files /dev/null and b/Integrations/CofenseTriage/CofenseTriage_image.png differ
diff --git a/Integrations/Confluence/Confluence.py b/Integrations/Confluence/Confluence.py
new file mode 100644
index 000000000000..3a774fee8b08
--- /dev/null
+++ b/Integrations/Confluence/Confluence.py
@@ -0,0 +1,490 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+'''IMPORTS'''
+import requests
+import json
+
+requests.packages.urllib3.disable_warnings()
+
+"""
+GLOBAL VARIABLES
+"""
+
+SERVER = demisto.params()['url'][:-1] if demisto.params()['url'].endswith('/') else demisto.params()['url']
+BASE_URL = SERVER + '/rest/api'
+VERIFY_CERTIFICATE = not demisto.params().get('unsecure', False)
+
+# Support Credentials
+USERNAME = demisto.params()['credentials']['identifier']
+PASSWORD = demisto.params()['credentials']['password']
+HEADERS = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
+
+"""
+Helper Functions
+"""
+
+
+def http_request(method, full_url, data=None, params=None, is_test=False):
+ try:
+ res = requests.request(
+ method,
+ full_url,
+ verify=VERIFY_CERTIFICATE,
+ auth=(USERNAME, PASSWORD),
+ data=data,
+ headers=HEADERS,
+ params=params
+ )
+    except requests.exceptions.RequestException:  # connection-level failure (DNS, refused connection, timeout)
+ return_error('Failed to connect to - {} - Please check the URL'.format(full_url))
+
+ # Handle error responses gracefully
+ if res.status_code < 200 or res.status_code >= 400:
+ if is_test:
+ return res
+
+ return_error('Failed to execute command.\nURL: {}, Status Code: {}\nResponse: {}'.format(full_url,
+ res.status_code,
+ res.text))
+
+ if is_test:
+ return res
+ try:
+ return res.json()
+
+ except ValueError as err:
+ return_error('Failed to parse response from service, received the following error:\n{}'.format(str(err)))
+
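+# Usage sketch: http_request('GET', BASE_URL + '/space', params={'limit': 25})
+# returns the parsed JSON body on success (see list_spaces below).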
+
+"""
+Confluence Commands
+"""
+
+
+def update_content(page_id, content_title, space_key, content_body, content_type, content_version):
+ content_data = {}
+ # Populate the content_data dictionary
+ content_data['type'] = content_type
+ if space_key is not None:
+ content_data['space'] = {"key": space_key}
+ if content_title is not None:
+ content_data['title'] = content_title
+
+ content_data['body'] = {
+ "storage": {
+ "value": content_body,
+ "representation": "storage"
+ }
+ }
+ content_data['version'] = {
+ "number": content_version
+ }
+
+ full_url = BASE_URL + '/content/' + page_id
+
+ res = http_request('PUT', full_url, json.dumps(content_data))
+
+ return res
+
+
+def update_content_command():
+ """
+ Confluence Update Content method
+ """
+
+ page_id = demisto.args().get('pageid')
+ content_title = demisto.args().get('title')
+ space_key = demisto.args().get('space')
+ content_body = demisto.args().get('body')
+ content_type = demisto.args().get('type')
+ content_version = int(demisto.args().get('currentversion')) + 1
+
+ raw_content = update_content(page_id, content_title, space_key, content_body, content_type, content_version)
+ content = {
+ "ID": page_id,
+ "Title": content_title,
+ "Type": content_type,
+ "Body": content_body
+ }
+
+    # create a markdown table from the context entries;
+    # the headers list defines which columns appear in the table, and in what order
+ md = tableToMarkdown('Updated Content', content, ['ID', 'Title', 'Type', 'Body'])
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_content,
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'Confluence.Content(val.ID == obj.ID)': content
+ }
+ })
+
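+# Example invocation from the CLI (argument values are hypothetical):
+#   !confluence-update-content pageid="123456" currentversion="3" title="Runbook"
+#   type="page" space="DEV" body="<p>updated</p>"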
+
+def create_content(content_type, content_title, space_key, content_body):
+ content_data = {
+ "type": content_type,
+ "space": {
+ "key": space_key
+ },
+ "title": content_title,
+ "body": {
+ "storage": {
+ "value": content_body,
+ "representation": "storage"
+ }
+ }
+ }
+
+ full_url = BASE_URL + '/content'
+
+ res = http_request('POST', full_url, json.dumps(content_data))
+
+ return res
+
+
+def create_content_command():
+ """
+ Confluence Create Content method
+ """
+ content_type = demisto.args().get('type')
+ content_title = demisto.args().get('title')
+ space_key = demisto.args().get('space')
+ content_body = demisto.args().get('body')
+
+ raw_content = create_content(content_type, content_title, space_key, content_body)
+
+ content = {
+ "ID": raw_content['id'],
+ "Title": content_title,
+ "Type": content_type,
+ "Body": content_body
+ }
+
+    # create a markdown table from the context entries;
+    # the headers list defines which columns appear in the table, and in what order
+ md = tableToMarkdown('New Content', content, ['ID', 'Title', 'Type', 'Body'])
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_content,
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'Confluence.Content(val.ID == obj.ID)': content
+ }
+ })
+
+
+def create_space(space_description, space_key, space_name):
+ space_data = {
+ "type": "global",
+ "description": {
+ "plain": {
+ "value": space_description,
+ "representation": "plain"
+ }
+ },
+ "name": space_name,
+ "key": space_key
+ }
+
+ full_url = BASE_URL + '/space'
+
+ res = http_request('POST', full_url, json.dumps(space_data))
+
+ return res
+
+
+def create_space_command():
+ """
+ Confluence Create Space method
+ """
+ space_description = demisto.args().get('description')
+ space_key = demisto.args().get('key')
+ space_name = demisto.args().get('name')
+
+ raw_space = create_space(space_description, space_key, space_name)
+
+ space = {
+ "ID": raw_space['id'],
+ "Key": raw_space['key'],
+ "Name": raw_space['name']
+ }
+
+    # create a markdown table from the context entries;
+    # the headers list defines which columns appear in the table, and in what order
+ md = tableToMarkdown('Space created successfully', space, ['ID', 'Key', 'Name'])
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_space,
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'Confluence.Space(val.ID == obj.ID)': space
+ }
+ })
+
+
+def get_content(key, title):
+ params = {
+ "title": title,
+ "spaceKey": key,
+ "expand": "body.view,version"
+ }
+
+ full_url = BASE_URL + '/content'
+
+ res = http_request('GET', full_url, None, params)
+
+ return res
+
+
+def get_content_command():
+ """
+ Confluence Get Content method
+ """
+ space_key = demisto.args().get('key')
+ content_title = demisto.args().get('title')
+ raw_content = get_content(space_key, content_title)
+
+ content_list = []
+ for obj in raw_content['results']:
+ content = {
+ "ID": obj['id'],
+ "Title": obj['title'],
+ "Type": obj['type']
+ }
+ if obj.get('version') is not None:
+ content["Version"] = obj['version']['number']
+ if obj.get('body') is not None:
+ content["Body"] = obj['body']['view']['value']
+
+ content_list.append(content)
+
+    # create a markdown table from the context entries;
+    # the headers list defines which columns appear in the table, and in what order
+ md = tableToMarkdown('Content', content_list, ['ID', 'Title', 'Type', 'Version', 'Body'])
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_content,
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'Confluence.Content(val.ID == obj.ID)': content_list
+ }
+ })
+
+
+def search_content(cql, cql_context, expand, start, limit):
+ params = {
+ 'limit': limit,
+ 'cql': cql
+ }
+ if cql_context is not None:
+ params['cqlcontext'] = cql_context
+
+ if expand is not None:
+ params['expand'] = expand
+
+ if start is not None:
+ params['start'] = start
+
+ full_url = BASE_URL + '/content/search'
+
+ res = http_request('GET', full_url, None, params)
+
+ return res
+
+
+def search_content_command():
+ """
+ Confluence Search Content method
+ Reference: https://developer.atlassian.com/server/confluence/advanced-searching-using-cql/
+ """
+
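+    # Example invocation (argument values are hypothetical):
+    #   !confluence-search-content cql="space = DEV order by created" limit="10"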
+ cql = demisto.args().get('cql')
+ cql_context = demisto.args().get('cqlcontext')
+ expand = demisto.args().get('expand')
+ start = demisto.args().get('start')
+ limit = demisto.args().get('limit')
+
+ raw_search = search_content(cql, cql_context, expand, start, limit)
+
+ searches = []
+ for result in raw_search['results']:
+ search = {}
+
+ search['ID'] = result['id']
+ search['Title'] = result['title']
+ search['Type'] = result['type']
+ if result.get('version') is not None:
+ search['Version'] = result['version']['number']
+
+ searches.append(search)
+
+    # create a markdown table from the context entries;
+    # the headers list defines which columns appear in the table, and in what order
+ md = tableToMarkdown('Content Search', searches, ['ID', 'Title', 'Type', 'Version'])
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_search,
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'Confluence.Content(val.ID == obj.ID)': searches
+ }
+ })
+
+
+def list_spaces(limit, status, space_type):
+ full_url = BASE_URL + '/space'
+
+ params = {
+ 'limit': limit
+ }
+
+ if status:
+ params['status'] = status
+
+ if space_type:
+ params['type'] = space_type
+
+ res = http_request('GET', full_url, params=params)
+
+ return res
+
+
+def list_spaces_command():
+ """
+ Confluence list Spaces method
+ """
+ limit = demisto.args().get('limit', 25)
+ status = demisto.args().get('status')
+ space_type = demisto.args().get('type')
+ space_list = list_spaces(limit, status, space_type)
+
+ spaces = []
+ for raw_space in space_list['results']:
+ space = {}
+
+ space['ID'] = raw_space['id']
+ space['Key'] = raw_space['key']
+ space['Name'] = raw_space['name']
+
+ spaces.append(space)
+
+    # create a markdown table from the context entries;
+    # the headers list defines which columns appear in the table, and in what order
+ md = tableToMarkdown('Spaces', spaces, ['ID', 'Key', 'Name'])
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': space_list,
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'Confluence.Space(val.ID == obj.ID)': spaces
+ }
+ })
+
+
+def delete_content(content_id):
+
+ full_url = BASE_URL + '/content/' + content_id
+ http_request('DELETE', full_url, is_test=True)
+ result = {
+ "Results": "Successfully Deleted Content ID " + content_id,
+ "ID": content_id
+ }
+ return result
+
+
+def delete_content_command():
+ """
+    Confluence Delete Content method
+ """
+
+ content_id = demisto.args().get('id')
+
+ deleted_content = delete_content(content_id)
+
+    # create a markdown table from the context entries;
+    # the headers list defines which columns appear in the table, and in what order
+ md = tableToMarkdown('Content', deleted_content, ['ID', 'Results'])
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': deleted_content,
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'Confluence.Content(val.ID == obj.ID)': deleted_content
+ }
+ })
+
+
+def test():
+ full_url = BASE_URL + '/user/current'
+ res = http_request('GET', full_url, is_test=True)
+
+ if not res:
+ return_error('Test failed. \nCheck URL and Username/Password.\nURL: {}, Status Code: {}, Response: {}'.format(
+ full_url, res.status_code, res.text))
+
+ demisto.results('ok')
+
+
+"""
+CODE EXECUTION STARTS HERE
+
+demisto.command() returns the name of the command currently being executed
+"""
+LOG('Confluence integration is executing the command %s' % (demisto.command(), ))
+try:
+ handle_proxy()
+ if demisto.command() == 'test-module':
+ """
+        demisto.command() returns 'test-module' when the Test button on the integration page is clicked
+ """
+ test()
+
+ elif demisto.command() == 'confluence-create-space':
+ create_space_command()
+
+ elif demisto.command() == 'confluence-create-content':
+ create_content_command()
+
+ elif demisto.command() == 'confluence-get-content':
+ get_content_command()
+
+ elif demisto.command() == 'confluence-list-spaces':
+ list_spaces_command()
+
+ elif demisto.command() == 'confluence-delete-content':
+ delete_content_command()
+
+ elif demisto.command() == 'confluence-update-content':
+ update_content_command()
+
+ elif demisto.command() == 'confluence-search-content':
+ search_content_command()
+
+except Exception as e:
+ return_error(str(e))
diff --git a/Integrations/Confluence/Confluence.yml b/Integrations/Confluence/Confluence.yml
new file mode 100644
index 000000000000..4b62031fcb8a
--- /dev/null
+++ b/Integrations/Confluence/Confluence.yml
@@ -0,0 +1,229 @@
+commonfields:
+ id: Atlassian Confluence Server
+ version: -1
+name: Atlassian Confluence Server
+display: Atlassian Confluence Server
+category: Utilities
+description: Atlassian Confluence Server API
+configuration:
+- display: Server URL (e.g. http://1.2.3.4:8090)
+ name: url
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Username
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: true
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: "false"
+ type: 8
+ required: false
+- display: Trust any certificate (not secure)
+ name: unsecure
+ defaultvalue: "false"
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ commands:
+ - name: confluence-create-space
+ arguments:
+ - name: name
+ required: true
+ description: 'Space name, for example: "Test Space".'
+ - name: description
+ required: true
+ description: A description for the space.
+ - name: key
+ required: true
+ description: Space key, which will be used as input when creating or updating
+ child components from a space.
+ outputs:
+ - contextPath: Confluence.Space.ID
+ description: Space ID.
+ type: String
+ - contextPath: Confluence.Space.Key
+ description: Space key.
+ type: String
+ - contextPath: Confluence.Space.Name
+ description: Space name.
+ type: String
+ description: Creates a new Confluence space.
+ - name: confluence-create-content
+ arguments:
+ - name: title
+ required: true
+ description: Confluence page title.
+ - name: type
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - page
+ - blogpost
+ description: Confluence content type. Can be "page" or "blogpost".
+ defaultValue: page
+ - name: space
+ required: true
+ description: Space key to add content to a specific space.
+ - name: body
+ description: Confluence page body to add.
+ outputs:
+ - contextPath: Confluence.Content.ID
+ description: Page content ID.
+ type: String
+ - contextPath: Confluence.Content.Title
+ description: Content title.
+ type: String
+ - contextPath: Confluence.Content.Type
+ description: Content type.
+ type: String
+ - contextPath: Confluence.Content.Body
+ description: Content body.
+ type: String
+ description: Creates Confluence content for a given space.
+ - name: confluence-list-spaces
+ arguments:
+ - name: limit
+ defaultValue: 25
+ description: Maximum number of spaces to return.
+ - name: type
+ auto: PREDEFINED
+ predefined:
+ - global
+ - personal
+ description: Filter the returned list of spaces by type. Can be "global" or "personal".
+ - name: status
+ auto: PREDEFINED
+ predefined:
+ - current
+ - archived
+ description: Filter the returned list of spaces by status. Can be "current" or "archived".
+ outputs:
+ - contextPath: Confluence.Space.ID
+ description: Space ID.
+ type: String
+ - contextPath: Confluence.Space.Key
+ description: Space key.
+ type: String
+ - contextPath: Confluence.Space.Name
+ description: Space name.
+ type: String
+ description: Returns a list of all Confluence spaces.
+ - name: confluence-get-content
+ arguments:
+ - name: key
+ required: true
+ description: Space key.
+ - name: title
+ required: true
+ description: Content title.
+ outputs:
+ - contextPath: Confluence.Content.ID
+ description: Content ID.
+ type: String
+ - contextPath: Confluence.Content.Title
+ description: Content title.
+ type: String
+ - contextPath: Confluence.Content.Type
+ description: Content type.
+ type: String
+ - contextPath: Confluence.Content.Version
+ description: Content version.
+ type: String
+ - contextPath: Confluence.Content.Body
+ description: Content body.
+ type: String
+ description: Returns Confluence content by space key and title.
+ - name: confluence-delete-content
+ arguments:
+ - name: id
+ required: true
+      description: Content ID.
+ outputs:
+ - contextPath: Confluence.Content.Result
+ description: Content delete result.
+ type: String
+ - contextPath: Confluence.Content.ID
+ description: Content ID deleted.
+ type: String
+ description: Deletes Confluence content.
+ execution: true
+ - name: confluence-update-content
+ arguments:
+ - name: pageid
+ required: true
+ description: Page ID used to find and update the page.
+ - name: currentversion
+ required: true
+      description: The current version number, as returned by a content search; the integration increments it by 1 before updating.
+ - name: title
+ required: true
+ description: Title of the page to update.
+ - name: type
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - page
+ - blogpost
+ description: Content type. Can be "page" or "blogpost".
+ defaultValue: page
+ - name: space
+ required: true
+ description: Space key to update.
+ - name: body
+ description: Content body to replace (overwrite) existing content of a Confluence page.
+ outputs:
+ - contextPath: Confluence.Content.ID
+ description: Content ID.
+ type: String
+ - contextPath: Confluence.Content.Title
+ description: Content title.
+ type: String
+ - contextPath: Confluence.Content.Type
+ description: Content type.
+ type: String
+ - contextPath: Confluence.Content.Body
+ description: Content body.
+ type: String
+ description: Update (overwrite) the existing content of a Confluence page with new content.
+ execution: true
+ - name: confluence-search-content
+ arguments:
+ - name: cql
+ required: true
+ description: 'A CQL query string to use to locate content, for example: "space = DEV
+ order by created".'
+ - name: cqlcontext
+ description: The context in which to execute a CQL search. The context is the JSON serialized
+ form of SearchContext.
+ - name: expand
+ description: A CSV list of properties to expand on the content.
+ defaultValue: version
+ - name: start
+ description: The start point of the collection to return.
+ - name: limit
+ description: Maximum number of items to return. This can be restricted
+ by fixed system limits. Default is 25.
+ defaultValue: "25"
+ outputs:
+ - contextPath: Confluence.Content.ID
+ description: Content ID.
+ type: String
+ - contextPath: Confluence.Content.Title
+ description: Content title.
+ type: String
+ - contextPath: Confluence.Content.Type
+ description: Content type.
+ type: String
+ - contextPath: Confluence.Content.Version
+ description: Content version.
+ type: String
+ description: 'Fetches a list of content using the Confluence Query Language (CQL).
+ For more information about CQL syntax, see https://developer.atlassian.com/server/confluence/advanced-searching-using-cql/'
+ runonce: false
+tests:
+  - No test - the test requires personal credentials to the Confluence account
diff --git a/Integrations/Confluence/Confluence_description.md b/Integrations/Confluence/Confluence_description.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Integrations/Confluence/Confluence_image.png b/Integrations/Confluence/Confluence_image.png
new file mode 100644
index 000000000000..6229476ac67a
Binary files /dev/null and b/Integrations/Confluence/Confluence_image.png differ
diff --git a/Integrations/CounterTack/CounterTack.py b/Integrations/CounterTack/CounterTack.py
new file mode 100644
index 000000000000..1734d420c88d
--- /dev/null
+++ b/Integrations/CounterTack/CounterTack.py
@@ -0,0 +1,1432 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+
+import json
+import requests
+import os
+import os.path
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+# remove proxy environment variables if the integration is not configured to use a proxy
+if not demisto.params().get('proxy'):
+    # pop() avoids a KeyError when a variable is not present in the environment
+    for env_var in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
+        os.environ.pop(env_var, None)
+
+''' GLOBALS/PARAMS '''
+
+USERNAME = demisto.params().get('credentials').get('identifier')
+PASSWORD = demisto.params().get('credentials').get('password')
+SERVER_URL = demisto.params().get('server')[:-1] if demisto.params().get('server').endswith('/') else \
+ demisto.params().get('server')
+FETCH_TIME = demisto.params().get('fetch_time', '3 days').strip()
+FETCH_NOTIFICATIONS = demisto.params().get('fetch_notifications')
+FETCH_BEHAVIORS = demisto.params().get('fetch_behviors')
+
+# Should we use SSL
+USE_SSL = not demisto.params().get('unsecure', False)
+# Service base URL
+BASE_PATH = '{}/api/v2/'.format(SERVER_URL)
+# Headers to be sent in requests
+DEFAULT_HEADERS = {
+ 'Content-Type': 'application/json'
+}
+
+
+def http_request(method, suffix_url, headers=DEFAULT_HEADERS, body=None):
+    """
+    Performs an HTTP request against the CounterTack API and returns the parsed JSON response.
+    """
+ url = BASE_PATH + suffix_url
+
+ response = requests.request(
+ method,
+ url,
+ auth=(USERNAME, PASSWORD),
+ headers=headers,
+ verify=USE_SSL,
+ data=body
+ )
+ # handle request failure
+ if response.status_code not in {200}:
+ message = parse_error_response(response)
+ return_error('Error in API call to CounterTack with status code {}\n{}'.format(response.status_code, message))
+
+ try:
+ response = response.json()
+ except Exception:
+ return_error(response.content)
+
+ return response
+
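+# Usage sketch: http_request('GET', 'endpoints') returns the parsed JSON
+# collection of endpoints (see get_endpoints_request below).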
+
+def parse_error_response(response):
+ try:
+ res = response.json()
+ msg = res.get('message')
+ if res.get('details') is not None and res.get('details')[0].get('message') is not None:
+ msg = msg + "\n" + json.dumps(res.get('details')[0])
+ except Exception:
+ return response.text
+ return msg
+
+
+"""
+
+ENDPOINTS
+
+"""
+
+
+def get_endpoints_request():
+ """
+ This request returns a collection of endpoints.
+ """
+ suffix_url = 'endpoints'
+ response = http_request('GET', suffix_url)
+ return response
+
+
+def get_endpoints():
+
+ """
+ Returns the information on existing endpoints
+ """
+ data = []
+ endpoint_standards = []
+ endpoints = get_endpoints_request()
+ for endpoint in endpoints:
+ data.append({
+ 'Id': endpoint.get('id'),
+ 'Name': endpoint.get('name'),
+ 'OS': endpoint.get('product_name'),
+ 'IP': endpoint.get('ips'),
+ 'Status': endpoint.get('status'),
+ 'Threat': endpoint.get('threat')
+ })
+ endpoint_standards.append({
+ 'Id': endpoint.get('id'),
+ 'IPAddress': endpoint.get('ips'),
+ 'Domain': endpoint.get('domain'),
+ 'MACAddress': endpoint.get('mac'),
+ 'OS': endpoint.get('product_name'),
+ 'OSVersion': endpoint.get('driver_version'),
+ 'Model': endpoint.get('current_profile'),
+ 'Memory': endpoint.get('memory'),
+ 'Processors': endpoint.get('num_cpus')
+ })
+
+ context = {
+ 'CounterTack.Endpoint(val.Id && val.Id === obj.Id)': createContext(endpoints,
+ keyTransform=underscoreToCamelCase),
+ 'Endpoint': endpoint_standards
+ }
+
+ headers = ['OS', 'Name', 'Threat', 'Status', 'Id', 'IP']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': endpoints,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(
+ 'CounterTack Endpoints', data, headers, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
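+# createContext(..., keyTransform=underscoreToCamelCase) rewrites the raw API keys
+# before they are written to context, e.g. {'product_name': 'Windows 10'} becomes
+# {'ProductName': 'Windows 10'}.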
+
+def get_endpoint_request(endpoint_id):
+
+ """
+ Request for a specific endpoint
+ """
+ suffix_url = 'endpoints/' + endpoint_id
+
+ response = http_request('GET', suffix_url)
+
+ return response
+
+
+def get_endpoint():
+ """
+ Get the information for the requested endpoint
+
+ demisto parameter: (string) endpoint_id
+ The unique ID of the endpoint
+
+ returns:
+ The information about the specified endpoint
+ """
+ endpoint_id = demisto.args().get('endpoint_id')
+
+ response = get_endpoint_request(endpoint_id)
+
+ content = {
+ 'OS': response.get('product_name'),
+ 'Domain': response.get('domain'),
+ 'IP': response.get('ip'),
+ 'Threat': response.get('threat'),
+ 'MaxImpact': response.get('max_impact'),
+ 'TenantID': response.get('tenant'),
+ 'IsQuarantined': response.get('is_quarantined'),
+ 'Profile': response.get('current_profile'),
+ 'Cluster_hosts': response.get('cluster_hosts'),
+ 'Status': response.get('status'),
+ 'Tags': response.get('tags')
+ }
+
+ endpoint_standards = {
+ 'Id': response.get('id'),
+ 'IPAddress': response.get('ips'),
+ 'Domain': response.get('domain'),
+ 'MACAddress': response.get('mac'),
+ 'OS': response.get('product_name'),
+ 'OSVersion': response.get('driver_version'),
+ 'Model': response.get('current_profile'),
+ 'Memory': response.get('memory'),
+ 'Processors': response.get('num_cpus')
+ }
+
+ context = {
+ 'CounterTack.Endpoint(val.Id && val.Id === obj.Id)': createContext(response,
+ keyTransform=underscoreToCamelCase),
+ 'Endpoint': endpoint_standards
+ }
+
+    headers = ['OS', 'Domain', 'IP', 'Threat', 'MaxImpact', 'TenantID', 'IsQuarantined',
+               'Profile', 'Tags', 'Cluster_hosts', 'Status']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(
+ 'CounterTack Endpoint information:', content, headers, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+"""
+ENDPOINTS TAGS
+"""
+
+
+def endpoint_tags_request(endpoint_id):
+ """
+    This request retrieves tags from the specified endpoint
+ """
+
+ suffix_url = 'endpoints/' + endpoint_id + '/tags'
+
+ response = http_request('GET', suffix_url)
+ return response
+
+
+def get_endpoint_tags():
+
+ """
+ Get the tags for the specified endpoint
+
+ demisto parameter: (string) endpoint_id
+ The unique ID of the endpoint
+ """
+ endpoint_id = demisto.args().get('endpoint_id')
+    tags = endpoint_tags_request(endpoint_id)
+
+    response = {
+        'tags': tags
+    }
+
+    # use the raw tag list so the context value is not nested twice
+    tags_context = {
+        'Id': endpoint_id,
+        'tags': tags
+    }
+
+ context = {
+ 'CounterTack.Endpoint(val.Id && val.Id === obj.Id)': createContext(tags_context,
+ keyTransform=underscoreToCamelCase)
+ }
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('CounterTack tags for the specified endpoint:', response, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+def add_tags_request(endpoint_id, body):
+ """
+    This request adds tags to the specified endpoint.
+
+ The request gets the endpoint ID and the tags the user wants to add.
+ """
+ suffix_url = 'endpoints/' + endpoint_id + '/tags'
+
+ response = http_request('POST', suffix_url, body=json.dumps(body))
+ return response
+
+
+def add_tags():
+
+ """
+    Adds tags to the specified endpoint.
+
+ demisto parameter: (string) endpoint_id
+ The unique ID of the endpoint
+ demisto parameter: (array) body
+ The tags to add to the endpoint
+ """
+
+ endpoint_id = demisto.args().get('endpoint_id')
+ body = argToList(demisto.args().get('tags'))
+
+ response = add_tags_request(endpoint_id, body)
+ response = endpoint_tags_request(endpoint_id)
+
+ response = {
+ 'tags': response,
+ 'Id': endpoint_id
+ }
+
+ context = {
+ 'CounterTack.Endpoint(val.Id && val.Id === obj.Id)': createContext(response, keyTransform=underscoreToCamelCase)
+ }
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown("Endpoint tags were added successfully", response),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+def delete_tags_request(endpoint_id, body):
+
+ """
+    This request deletes specific tags from the specified endpoint.
+
+ demisto parameter: (string) endpoint_id
+ The unique ID of the endpoint
+ demisto parameter: (array) body
+ The tags to delete from the endpoint
+ """
+
+ suffix_url = 'endpoints/' + endpoint_id + '/tags'
+
+ response = http_request('DELETE', suffix_url, body=json.dumps(body))
+ return response
+
+
+def delete_tags():
+ """
+    Deletes tags from the specified endpoint.
+
+ demisto parameter: (string) endpoint_id
+ The unique ID of the endpoint
+ demisto parameter: (array) body
+ The tags to delete from the endpoint
+ """
+
+ endpoint_id = demisto.args().get('endpoint_id')
+ body = argToList(demisto.args().get('tags'))
+
+ response = delete_tags_request(endpoint_id, body)
+ response = endpoint_tags_request(endpoint_id)
+
+ response = {
+ 'tags': response,
+ 'Id': endpoint_id
+ }
+
+ context = {
+ 'CounterTack.Endpoint(val.Id && val.Id === obj.Id)': createContext(response, keyTransform=underscoreToCamelCase)
+ }
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(
+ 'Endpoint tags were deleted successfully', response),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+"""
+ENDPOINTS COMMANDS
+"""
+
+
+def endpoint_quarantine_request(endpoint_id, body):
+
+ """
+ Request to quarantine a specified endpoint
+
+ demisto parameter: (string) endpoint_id
+ The unique ID of the endpoint
+ demisto parameter: (string) type
+ The type of the command: quarantine
+ """
+
+ suffix_url = 'endpoints/' + endpoint_id + '/commands'
+ response = http_request('POST', suffix_url, body=json.dumps(body))
+
+ return response
+
+
+def endpoint_quarantine():
+
+ """
+    Prevents an endpoint from any network communication, while maintaining its connection to the
+    Sentinel Cluster and to addresses defined in the Global Whitelist.
+
+ demisto parameter: (string) endpoint_id
+ The unique ID of the endpoint
+ demisto parameter: (string) type
+ The type of the command: quarantine
+ """
+
+ endpoint_id = demisto.args().get('endpoint_id')
+ body = {
+ 'type': 'quarantine'
+ }
+ response = endpoint_quarantine_request(endpoint_id, body)
+ quarantine_response = get_endpoint_request(endpoint_id)
+ quarantine_context = {
+ 'Id': endpoint_id,
+ 'is_quarantine': quarantine_response.get('is_quarantined')
+ }
+
+ context = {
+ 'CounterTack.Endpoint(val.Id && val.Id === obj.Id)': createContext(quarantine_context,
+ keyTransform=underscoreToCamelCase)
+ }
+
+ data = {
+ 'Id': response.get('id'),
+ 'user name': response.get('username'),
+ 'request time': response.get('request_time'),
+ 'endpoint ID': response.get('endpoint_ids'),
+ 'command name': response.get('command_name'),
+ 'status': response.get('status'),
+ }
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': quarantine_context,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('The command has been applied successfully:', data, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+def disable_quarantine():
+
+ """
+ Allows a previously quarantined endpoint to communicate with the network.
+
+ demisto parameter: (string) endpoint_id
+ The unique ID of the endpoint
+ demisto parameter: (string) type
+ The type of the command: lift_quarantine
+
+ """
+ endpoint_id = demisto.args().get('endpoint_id')
+ body = {
+ 'type': 'lift_quarantine'
+ }
+ response = endpoint_quarantine_request(endpoint_id, body)
+ quarantine_response = get_endpoint_request(endpoint_id)
+
+ quarantine_context = {
+ 'Id': endpoint_id,
+ 'is_quarantine': quarantine_response.get('is_quarantined')
+ }
+
+ data = {
+ 'Id': response.get('id'),
+ 'user name': response.get('username'),
+ 'request time': response.get('request_time'),
+ 'endpoint ID': response.get('endpoint_ids'),
+ 'command name': response.get('command_name'),
+ 'status': response.get('status'),
+ }
+
+ context = {
+ 'CounterTack.Endpoint(val.Id && val.Id === obj.Id)': createContext(quarantine_context,
+ keyTransform=underscoreToCamelCase)
+ }
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': quarantine_context,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('The command has been applied successfully:', data, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+def file_extract_request(endpoint_id, body):
+
+ """
+    Request for extracting a file from the specified endpoint
+ """
+
+ suffix_url = 'endpoints/' + endpoint_id + '/commands'
+
+ response = http_request('POST', suffix_url, body=json.dumps(body))
+ return response
+
+
+def extract_file():
+
+ """
+ Enables an API consumer to extract the file in addition to some file metadata.
+
+ demisto parameter: (string) endpoint_id
+ The unique ID of the endpoint
+ demisto parameter: (string) body
+ The type of the command: extract file and the file path
+ """
+ endpoint_id = demisto.args().get('endpoint_id')
+ paths = argToList(demisto.args().get('file_path'))
+ body = {
+ 'type': 'extract_files',
+ 'paths': paths
+ }
+
+ response = file_extract_request(endpoint_id, body)
+ data = {
+ 'Id': response.get('id'),
+ 'User Name': response.get('username'),
+ 'Request Time': response.get('request_time'),
+ 'Endpoint ID': response.get('endpoint_ids'),
+ 'Command Name': response.get('command_name'),
+ 'Command Arguments': response.get('command_arg'),
+ 'Status': response.get('status'),
+ }
+
+ context = {
+ 'CounterTack.File(val.Id && val.Id === obj.Id)': createContext(response, keyTransform=underscoreToCamelCase)
+ }
+ headers = ['Id', 'User Name', 'Request Time', 'Endpoint ID', 'Command Name', 'Command Arguments', 'Status']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(
+ 'The file has been extracted successfully:', data, headers, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+def delete_file_request(endpoint_id, body):
+
+ """
+ Deletes a file from the specified endpoint
+ """
+
+ suffix_url = 'endpoints/' + endpoint_id + '/commands'
+
+ response = http_request('POST', suffix_url, body=json.dumps(body))
+ return response
+
+
+def delete_file():
+
+ """
+ Deletes a file from the specified endpoint
+
+ demisto parameter: (string) endpoint_id
+ The unique ID of the endpoint
+ demisto parameter: (string) body
+ The type of the command: delete_file and the file path
+ """
+ endpoint_id = demisto.args().get('endpoint_id')
+ path = demisto.args().get('file_path')
+ body = {
+ 'type': 'delete_file',
+ 'path': path
+ }
+
+ delete_file_request(endpoint_id, body)
+
+ demisto.results('The file has been deleted successfully')
+
+
+def kill_process_request(endpoint_id, body):
+ """
+    Request to terminate all instances of the process identified in the command.
+    """
+ suffix_url = 'endpoints/' + endpoint_id + '/commands'
+
+ response = http_request('POST', suffix_url, body=json.dumps(body))
+ return response
+
+
+def kill_process():
+ """
+ Terminates all instances of the process identified in the command.
+ Processes can be identified by the PID or process name.
+
+ demisto parameter: (string) endpoint_id
+ The unique ID of the endpoint
+ demisto parameter: (string) process_id
+ The ID of the process to terminate
+ demisto parameter: (string) process_name
+ The name of the process to terminate
+
+ """
+
+ endpoint_id = demisto.args().get('endpoint_id')
+ pid = demisto.args().get('process_id')
+ name = demisto.args().get('process_name')
+ if not pid and not name:
+ return_error('Please provide either process_id or process_name')
+ body = {
+ 'type': 'kill_process',
+ 'pid': pid,
+ 'name': name
+ }
+
+ response = kill_process_request(endpoint_id, body)
+
+ data = {
+ 'Id': response.get('id'),
+ 'User Name': response.get('username'),
+ 'Request Time': response.get('request_time'),
+ 'Endpoint ID': response.get('endpoint_ids'),
+ 'Command Name': response.get('command_name'),
+ 'Status': response.get('status'),
+ }
+
+ context = {
+ 'CounterTack.Endpoint(val.Id && val.Id === obj.Id)': createContext(response,
+ keyTransform=underscoreToCamelCase,
+ removeNull=True)
+ }
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(
+ 'The process has been terminated', data, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+"""
+ENDPOINT FILES
+
+"""
+
+
+def file_request():
+ """
+ This request retrieves all extracted files for all endpoints on the cluster
+ """
+
+ suffix_url = 'endpoints/files'
+
+ response = http_request('GET', suffix_url)
+ return response
+
+
+def get_all_files():
+
+ data = []
+ files_standards = []
+
+ files = file_request()
+ for file in files:
+ data.append({
+ 'Id': file.get('id'),
+ 'user': file.get('user'),
+ 'endpoint_id': file.get('endpoint_id'),
+ 'path': file.get('path'),
+ 'extraction_time': file.get('extraction_time'),
+ 'Status': file.get('status')
+ })
+
+ files_standards.append({
+ 'Size': file.get('size'),
+ 'MD5': file.get('md5'),
+ 'SHA256': file.get('sha256'),
+ 'SSDeep': file.get('ssdeep'),
+ 'Path': file.get('path')
+ })
+
+ context = {
+ 'CounterTack.File(val.Id && val.Id === obj.Id)': createContext(files, keyTransform=underscoreToCamelCase),
+ outputPaths['file']: files_standards
+ }
+
+ headers = ['Status', 'Id', 'path', 'endpoint_id', 'extraction_time', 'user']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': files,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(
+ 'CounterTack Endpoints Files', data, headers, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+def endpoint_files_request(endpoint_id):
+
+ """
+    This request returns all extracted files from the specified endpoint
+ """
+
+ suffix_url = 'endpoints/' + endpoint_id + '/files'
+
+ response = http_request('GET', suffix_url)
+ return response
+
+
+def get_endpoint_files():
+
+ """
+    Returns extracted files from a specific endpoint
+
+ demisto parameter: (string) endpoint_id
+ The unique ID of the endpoint
+ """
+
+ endpoint_id = demisto.args().get('endpoint_id')
+ data = []
+ files_standards = []
+
+ files = endpoint_files_request(endpoint_id)
+ for file in files:
+ data.append({
+ 'Id': file.get('id'),
+ 'User': file.get('user'),
+ 'EndpointId': file.get('endpoint_id'),
+ 'Path': file.get('path'),
+ 'ExtractionTime': file.get('extraction_time'),
+ 'Status': file.get('status')
+ })
+ files_standards.append({
+ 'Size': file.get('size'),
+ 'MD5': file.get('md5'),
+ 'SHA256': file.get('sha256'),
+ 'SSDeep': file.get('ssdeep'),
+ 'Path': file.get('path')
+ })
+ context = {
+ 'CounterTack.File(val.Id && val.Id === obj.Id)': createContext(files, keyTransform=underscoreToCamelCase),
+ outputPaths['file']: files_standards
+ }
+
+ headers = ['Status', 'Id', 'path', 'endpoint_id', 'extraction_time', 'user']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': data,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(
+ 'The extracted files from the endpoint:', data, headers, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+def file_information_request(file_id):
+ """
+    Request information for a specific file
+ """
+ suffix_url = 'endpoints/files/' + file_id
+ response = http_request('GET', suffix_url)
+
+ return response
+
+
+def get_file_information():
+
+ """
+ Get the information of a specific file
+
+ demisto parameter: (string) file_id
+ The unique ID of the extracted file
+ """
+ context = {}
+ files_standards = []
+ file_id = demisto.args().get('file_id')
+ response = file_information_request(file_id)
+
+ data = {
+        'EndpointName': response.get('endpoint_name'),
+        'Path': response.get('path'),
+        'Size': response.get('size'),
+        'ExtractionTime': response.get('extraction_time'),
+        'Status': response.get('status')
+ }
+
+ files_standards.append({
+ 'Size': response.get('size'),
+ 'MD5': response.get('md5'),
+ 'SHA256': response.get('sha256'),
+ 'SSDeep': response.get('ssdeep'),
+ 'Path': response.get('path')
+ })
+
+ context['CounterTack.File(val.Id && val.Id === obj.Id)'] = createContext(response,
+ keyTransform=underscoreToCamelCase)
+ context[outputPaths['file']] = files_standards
+    headers = ['EndpointName', 'Path', 'Size', 'Status', 'ExtractionTime']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('CounterTack File Information:', data, headers, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+def download_file_request(file_id):
+    """
+    This request downloads an extracted file.
+    """
+ suffix_url = 'downloads/extractedfiles/' + file_id
+ response = http_request('GET', suffix_url)
+ return response
+
+
+def download_file():
+ """
+ Download an extracted file in a ZIP format.
+
+ demisto parameter: (string) file_id
+ The unique ID of the extracted file
+ """
+
+ file_id = demisto.args().get('file_id')
+ response = download_file_request(file_id)
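+    # http_request is expected to return the raw HTTP response for this download
+    # endpoint, so response.content carries the ZIP bytes handed to fileResult.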
+
+ demisto.results(fileResult(file_id + '.zip', response.content))
+
+
+"""
+
+BEHAVIORS
+
+"""
+
+
+def get_behaviors_request():
+ """
+ This request retrieves information on a collection of behaviors.
+ """
+ suffix_url = 'behaviors'
+
+ response = http_request('GET', suffix_url)
+ return response
+
+
+def get_behaviors():
+ """
+    Retrieve information on a collection of behaviors.
+ """
+ data = []
+ behaviors = get_behaviors_request()
+ for behavior in behaviors:
+ data.append({
+ 'Id': behavior.get('id'),
+ 'Name': behavior.get('name'),
+ 'Type': behavior.get('type'),
+ 'ImpactLevel': behavior.get('impact_level'),
+            'LastReported': behavior.get('last_reported'),
+ 'EndpointId': behavior.get('endpoint_id')
+ })
+
+ context = {
+ 'CounterTack.Behavior(val.Id && val.Id === obj.Id)': createContext(behaviors,
+ keyTransform=underscoreToCamelCase)
+ }
+    headers = ['Name', 'Id', 'Type', 'ImpactLevel', 'EndpointId', 'LastReported']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': behaviors,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('CounterTack Endpoints Behaviors', data, headers, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+def get_behavior_request(behavior_id):
+ """
+    Request for getting the specified behavior
+ """
+ suffix_url = 'behaviors/' + behavior_id
+
+ response = http_request('GET', suffix_url)
+ return response
+
+
+def get_behavior():
+ """
+ Get behavior information
+
+    demisto parameter: (string) behavior_id
+    The unique ID of the behavior
+
+ """
+
+ behavior_id = demisto.args().get('behavior_id')
+ response = get_behavior_request(behavior_id)
+
+ data = {
+ 'Id': response.get('id'),
+ 'Name': response.get('name'),
+ 'ImpactLevel': response.get('impact_level'),
+ 'LastActive': response.get('last_active'),
+ 'EventCount': response.get('event_count'),
+ 'MaxImpact': response.get('max_impact'),
+ 'EndpointId': response.get('endpoint_id'),
+ 'Type': response.get('type'),
+ }
+
+ context = {
+ 'CounterTack.Behavior(val.Id && val.Id === obj.Id)': createContext(response, keyTransform=underscoreToCamelCase)
+ }
+ headers = ['Name', 'Id', 'ImpactLevel', 'MaxImpact', 'EventCount', 'Type', 'EndpointId', 'LastActive']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('CounterTack Behavior information', data, headers, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+"""
+BEHAVIORS TAGS
+"""
+
+
+def behaviour_add_tags_request(behaviour_id, body):
+ """
+    This request adds tags to the specified behavior
+ """
+ suffix_url = 'behaviors/' + behaviour_id + '/tags'
+ response = http_request('POST', suffix_url, body=json.dumps(body))
+ return response
+
+
+def add_behavior_tags():
+
+ """
+    Add specific tags to the specified behavior
+
+    demisto parameter: (string) behaviour_id
+    The unique ID of the behavior
+
+    demisto parameter: (Array) tags
+    The tags to add to the behavior. Separate multiple tags with a comma.
+ """
+ behaviour_id = demisto.args().get('behaviour_id')
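+    # argToList splits the comma-separated "tags" argument into the list sent as the request body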
+ body = argToList(demisto.args().get('tags'))
+
+    behaviour_add_tags_request(behaviour_id, body)
+ behavior_tags = get_behavior_request(behaviour_id)
+
+ response = {
+ 'tags': behavior_tags.get('tags'),
+ 'Id': behaviour_id
+ }
+
+ context = {
+ 'CounterTack.Behavior(val.Id && val.Id === obj.Id)': createContext(response, keyTransform=underscoreToCamelCase)
+ }
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Behavior tags were added successfully', response),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+def delete_tags_behavior_request(behaviour_id, body):
+    """
+    This request deletes tags from the specified behavior
+    """
+    suffix_url = 'behaviors/' + behaviour_id + '/tags'
+ response = http_request('DELETE', suffix_url, body=json.dumps(body))
+ return response
+
+
+def delete_behavior_tags():
+
+ """
+    Delete specific tags from the specified behavior
+
+    demisto parameter: (string) behaviour_id
+    The unique ID of the behavior
+
+    demisto parameter: (Array) tags
+    The tags to delete from the behavior. Separate multiple tags with a comma.
+
+ """
+ behaviour_id = demisto.args().get('behaviour_id')
+ body = argToList(demisto.args().get('tags'))
+
+    delete_tags_behavior_request(behaviour_id, body)
+    behavior_tags = get_behavior_request(behaviour_id)
+
+    response = {
+        'tags': behavior_tags.get('tags'),
+ 'Id': behaviour_id
+ }
+
+ context = {
+ 'CounterTack.Behavior(val.Id && val.Id === obj.Id)': createContext(response, keyTransform=underscoreToCamelCase)
+ }
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+        'HumanReadable': tableToMarkdown('Behavior tags were deleted successfully', response, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+
+
+"""
+SEARCH
+"""
+
+
+def search_endpoints_request(exp):
+ """
+ Request for endpoints search using CQL expression
+
+ """
+ suffix_url = 'search/endpoints' + exp
+ response = http_request('GET', suffix_url)
+
+ return response
+
+
+def search_behaviors_request(exp):
+ """
+    Request for behaviors search using CQL expression
+
+ """
+ suffix_url = 'search/behaviors' + exp
+ response = http_request('GET', suffix_url)
+
+ return response
+
+
+def search_events_request(exp):
+ """
+ Request for events search using CQL expression
+
+ """
+ suffix_url = 'search/events' + exp
+ response = http_request('GET', suffix_url)
+
+ return response
+
+
+def search_events():
+ """
+ Request for events search using CQL expression
+ demisto parameter: (dict) expression
+ The CQL expression to be used for the search
+ """
+
+ data = []
+ expression = demisto.args().get('expression')
+ exp = '?expression=' + expression
+ events = search_events_request(exp)
+ if events.get('results'):
+ results = events.get('results')
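+        # The search API prefixes result keys with the table name (e.g. 'events.id'),
+        # so strip the prefix before building the context entries.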
+        results_lst = []
+        for result in results:
+            results_lst.append({k.replace('events.', ''): v for k, v in result.items()})
+        events['results'] = results_lst
+ for event in events.get('results'):
+ data.append({
+ 'Id': event.get('id'),
+ 'Events Action': event.get('action'),
+ 'Events Impact': event.get('impact'),
+ 'Events EndpointID': event.get('endpoint_id'),
+ 'Event Type': event.get('event_type'),
+ 'Collected time': event.get('time_stamp'),
+ 'Source process PID': event.get('source_process_pid'),
+ 'Source process name': event.get('source_process_name')
+ })
+
+ context = {
+ 'CounterTack.Event(val.Id && val.Id === obj.Id)': createContext(results_lst,
+ keyTransform=underscoreToCamelCase,
+ removeNull=True)
+ }
+        headers = ['Id', 'Event Type', 'Events Action', 'Events EndpointID', 'Events Impact',
+ 'Collected time', 'Source process PID', 'Source process name']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': results_lst,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Results of the events search', data, headers, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+ else:
+ demisto.results('No results found')
+
+
+def search_endpoints():
+ """
+ Request for endpoints search using CQL expression
+ demisto parameter: (dict) expression
+ The CQL expression to be used for the search
+ """
+
+ data = []
+ endpoint_standards = []
+ expression = demisto.args().get('expression')
+ exp = '?expression=' + expression
+ endpoints = search_endpoints_request(exp)
+ if endpoints.get('results'):
+ results = endpoints.get('results')
+        results_lst = []
+        for result in results:
+            results_lst.append({k.replace('endpoints.', ''): v for k, v in result.items()})
+        endpoints['results'] = results_lst
+ for endpoint in endpoints.get('results'):
+ data.append({
+ 'Id': endpoint.get('id'),
+ 'Name': endpoint.get('name'),
+ 'OS': endpoint.get('product_name'),
+ 'IP': endpoint.get('ips'),
+ 'Status': endpoint.get('status'),
+ 'Threat': endpoint.get('threat')
+ })
+ endpoint_standards.append({
+ 'Id': endpoint.get('id'),
+ 'IPAddress': endpoint.get('ips'),
+ 'Domain': endpoint.get('domain'),
+ 'MACAddress': endpoint.get('mac'),
+ 'OS': endpoint.get('product_name'),
+ 'OSVersion': endpoint.get('driver_version'),
+ 'Model': endpoint.get('current_profile'),
+ 'Memory': endpoint.get('memory'),
+ 'Processors': endpoint.get('num_cpus')
+ })
+ context = {
+ 'CounterTack.Endpoint(val.Id && val.Id === obj.Id)': createContext(results_lst,
+ keyTransform=underscoreToCamelCase,
+ removeNull=True),
+ 'Endpoint': endpoint_standards
+ }
+
+        headers = ['Status', 'Name', 'Id', 'OS', 'Threat', 'IP']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': results_lst,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Results of the endpoints search', data, headers, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+ else:
+ demisto.results('No results found')
+
+
+def search_behaviors():
+ """
+ Request for behaviors search using CQL expression
+ demisto parameter: (dict) expression
+ The CQL expression to be used for the search
+ """
+
+ data = []
+ expression = demisto.args().get('expression')
+ exp = '?expression=' + expression
+ behaviors = search_behaviors_request(exp)
+ if behaviors.get('results'):
+ results = behaviors.get('results')
+        results_lst = []
+        for result in results:
+            results_lst.append({k.replace('behaviors.', ''): v for k, v in result.items()})
+        behaviors['results'] = results_lst
+ for behavior in behaviors.get('results'):
+ data.append({
+ 'Id': behavior.get('id'),
+ 'Name': behavior.get('name'),
+ 'Type': behavior.get('type'),
+                'ImpactLevel': behavior.get('impact_level'),
+                'LastReported': behavior.get('last_reported'),
+                'EndpointId': behavior.get('endpoint_id')
+ })
+
+ context = {
+ 'CounterTack.Behavior(val.Id && val.Id === obj.Id)': createContext(results_lst,
+ keyTransform=underscoreToCamelCase,
+ removeNull=True)
+ }
+        headers = ['Name', 'Type', 'ImpactLevel', 'Id', 'EndpointId', 'LastReported']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': results_lst,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Results of the behaviors search', data, headers, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+ else:
+ demisto.results('No results found')
+
+
+def hashes_search_request(exp):
+ """
+    Request for hashes search using CQL expression
+
+ """
+ suffix_url = 'search/hashes' + exp
+ response = http_request('GET', suffix_url)
+
+ return response
+
+
+def search_hashes():
+ """
+ Request for hashes search using CQL expression
+ demisto parameter: (dict) expression
+ The CQL expression to be used for the search
+ """
+ data = []
+ file_standards = []
+ expression = demisto.args().get('expression')
+ exp = '?expression=' + expression
+ hashes = hashes_search_request(exp)
+ if hashes.get('results'):
+ results = hashes.get('results')
+        results_lst = []
+        for result in results:
+            results_lst.append({k.replace('hashes.', ''): v for k, v in result.items()})
+        hashes['results'] = results_lst
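+        # Map the reported hash type onto the File standard context field name
+        # (the API reports 'SSDEEP'; the context standard uses 'SSDeep').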
+ for hash_type in hashes.get('results'):
+ file_hash_type = hash_type.get('type', '').upper()
+ if file_hash_type == 'SSDEEP':
+ file_hash_type = 'SSDeep'
+ hash_id = hash_type.get('id')
+ data.append({
+ file_hash_type: hash_id,
+ 'Type': file_hash_type,
+ 'Impact': hash_type.get('impact'),
+ 'VT report location': hash_type.get('vt_report_location'),
+ 'AV Coverage': hash_type.get('av_coverage')
+ })
+
+ if file_hash_type:
+ file_standards.append({
+ file_hash_type: hash_id
+ })
+
+ context = {
+ 'CounterTack.Hash(val.hash_id && val.hash_id === obj.hash_id)': createContext(data),
+ outputPaths['file']: file_standards
+ }
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': results_lst,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Results of the hashes search:', data, removeNull=True),
+ 'EntryContext': context
+ }
+ demisto.results(entry)
+ else:
+ demisto.results('No results found')
+
+
+"""
+
+FETCH INCIDENTS
+
+"""
+
+
+def search_notifications_request(params=''):
+ """
+ Request for notifications search using CQL expression
+
+ """
+ suffix_url = 'search/notifications?expression=' + params
+ response = http_request('GET', suffix_url)
+
+ return response
+
+
+def fetch_behaviors_request(params=''):
+ """
+ Request for behaviors search using CQL expression
+
+ """
+ suffix_url = 'search/behaviors?expression=' + params
+ response = http_request('GET', suffix_url)
+
+ return response
+
+
+def fetch_incidents():
+ incidents = []
+ last_run = demisto.getLastRun()
+
+    if last_run and last_run.get('time_stamp'):
+        last_update_time = last_run['time_stamp']
+    else:
+        # First run: the [:-3] slice drops the trailing '.%f' from the format string,
+        # so the timestamp is generated at second resolution.
+        last_update_time, _ = parse_date_range(FETCH_TIME, date_format='%Y-%m-%dT%H:%M:%S.%f'[:-3])
+
+ max_timestamp = last_update_time
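+    # ISO-8601 timestamps sort lexicographically, so plain string comparison tracks chronological order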
+ if FETCH_BEHAVIORS:
+ params = 'behaviors.time_stamp>' + last_update_time
+ behaviors = fetch_behaviors_request(params)
+
+        for behavior in behaviors.get('results', []):
+            incident = behavior_to_incident(behavior)
+            # Trim fractional seconds so the timestamp compares with last_update_time
+            time_stamp = behavior.get('behaviors.time_stamp')[:-5]
+ if time_stamp > max_timestamp:
+ max_timestamp = time_stamp
+ incidents.append(incident)
+
+ if FETCH_NOTIFICATIONS:
+ params = 'notifications.time_stamp>' + last_update_time
+ notifications = search_notifications_request(params)
+
+        for notification in notifications.get('results', []):
+            incident = notifications_to_incidents(notification)
+            time_stamp = notification.get('notifications.time_stamp')[:-5]
+
+ if time_stamp > max_timestamp:
+ max_timestamp = time_stamp
+ incidents.append(incident)
+
+ demisto.setLastRun({
+ 'time_stamp': max_timestamp
+ })
+
+ demisto.incidents(incidents)
+
+
+def behavior_to_incident(behavior):
+ incident = {}
+ incident['name'] = 'CounterTack Behavior - ' + behavior.get('behaviors.name')
+ incident['rawJSON'] = json.dumps(behavior)
+ return incident
+
+
+def notifications_to_incidents(notification):
+ incident = {}
+ incident['name'] = 'CounterTack Notification - ' + notification.get('notifications.message')
+ incident['rawJSON'] = json.dumps(notification)
+ return incident
+
+
+"""
+
+EXECUTION
+
+"""
+
+command = demisto.command()
+LOG('Running command "{}"'.format(command))
+try:
+ if command == 'test-module':
+ get_endpoints_request()
+ demisto.results('ok')
+ elif command == 'fetch-incidents':
+ fetch_incidents()
+ elif command == 'countertack-get-endpoints':
+ get_endpoints()
+ elif command == 'countertack-get-endpoint':
+ get_endpoint()
+ elif command == 'countertack-get-endpoint-tags':
+ get_endpoint_tags()
+ elif command == 'countertack-add-tags':
+ add_tags()
+ elif command == 'countertack-delete-tags':
+ delete_tags()
+ elif command == 'countertack-endpoint-quarantine':
+ endpoint_quarantine()
+ elif command == 'countertack-disable-quarantine':
+ disable_quarantine()
+ elif command == 'countertack-extract-file':
+ extract_file()
+ elif command == 'countertack-delete-file':
+ delete_file()
+ elif command == 'countertack-get-all-files':
+ get_all_files()
+ elif command == 'countertack-get-endpoint-files':
+ get_endpoint_files()
+ elif command == 'countertack-get-file-information':
+ get_file_information()
+ elif command == 'countertack-download-file':
+ download_file()
+ elif command == 'countertack-get-behaviors':
+ get_behaviors()
+ elif command == 'countertack-get-behavior':
+ get_behavior()
+ elif command == 'countertack-add-behavior-tags':
+ add_behavior_tags()
+ elif command == 'countertack-delete-behavior-tags':
+ delete_behavior_tags()
+ elif command == 'countertack-search-events':
+ search_events()
+ elif command == 'countertack-search-hashes':
+ search_hashes()
+ elif command == 'countertack-search-endpoints':
+ search_endpoints()
+ elif command == 'countertack-search-behaviors':
+ search_behaviors()
+ elif command == 'countertack-kill-process':
+ kill_process()
+except Exception as e:
+    LOG(str(e))
+    return_error(str(e))
diff --git a/Integrations/CounterTack/CounterTack.yml b/Integrations/CounterTack/CounterTack.yml
new file mode 100644
index 000000000000..3d4feb5cb744
--- /dev/null
+++ b/Integrations/CounterTack/CounterTack.yml
@@ -0,0 +1,1303 @@
+category: Endpoint
+commonfields:
+ id: CounterTack
+ version: -1
+configuration:
+- defaultvalue: https://democloud.countertack.com
+ display: Server URL (e.g. https://democloud.countertack.com)
+ name: server
+ required: true
+ type: 0
+- display: User Name
+ name: credentials
+ required: true
+ type: 9
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (unsecure)
+ name: unsecure
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: 3 days
+  display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days, 3
+    months, 1 year)
+ name: fetch_time
+ required: false
+ type: 0
+- display: Fetch notifications incidents
+ name: fetch_notifications
+ required: false
+ type: 8
+- display: Fetch behaviors incidents
+ name: fetch_behviors
+ required: false
+ type: 8
+description: CounterTack empowers endpoint security teams to assure endpoint protection
+  for identifying cyber threats by integrating a predictive endpoint protection platform.
+display: CounterTack
+name: CounterTack
+script:
+ commands:
+ - deprecated: false
+ description: Returns information for endpoints.
+ execution: false
+ name: countertack-get-endpoints
+ outputs:
+ - contextPath: CounterTack.Endpoint.IsQuarantined
+    description: Whether the endpoint is currently quarantined.
+ type: boolean
+ - contextPath: CounterTack.Endpoint.MaxImpact
+ description: Impact of the highest scoring behavior
+ type: number
+ - contextPath: CounterTack.Endpoint.Memory
+ description: The RAM of the endpoint (in megabytes).
+ type: number
+ - contextPath: CounterTack.Endpoint.DriverVersion
+ description: Endpoint sensor version
+ type: string
+ - contextPath: CounterTack.Endpoint.ProfileVersion
+ description: Version of the current profile used for collection
+ type: string
+ - contextPath: CounterTack.Endpoint.BehaviorCount
+ description: Number of behaviors detected
+ type: number
+ - contextPath: CounterTack.Endpoint.CurrentProfile
+ description: Currently active analysis profile
+ type: string
+ - contextPath: CounterTack.Endpoint.Domain
+ description: DNS suffix for the endpoint
+ type: string
+ - contextPath: CounterTack.Endpoint.NumCpus
+ description: Number of CPUs
+ type: number
+ - contextPath: CounterTack.Endpoint.Macs
+ description: MAC addresses associated with the endpoint
+ type: string
+ - contextPath: CounterTack.Endpoint.WinRdpPort
+ description: RDP port used by the endpoint
+ type: number
+ - contextPath: CounterTack.Endpoint.Ip
+ description: IP address used to connect to the analysis cluster
+ type: string
+ - contextPath: CounterTack.Endpoint.ClusterHosts
+ description: The list of hosts that the endpoint tries to connect through (in
+ order).
+ type: string
+ - contextPath: CounterTack.Endpoint.Vendor
+ description: OS vendor
+ type: string
+ - contextPath: CounterTack.Endpoint.SensorMode
+ description: Specifies the sensor mode of the driver
+ type: string
+ - contextPath: CounterTack.Endpoint.Identifier
+ description: OS identifier
+ type: string
+ - contextPath: CounterTack.Endpoint.CurrentResponsePolicy
+ description: Currently active response policy
+ type: string
+ - contextPath: CounterTack.Endpoint.Tenant
+ description: Tenant ID set at the time of KM installation
+ type: string
+ - contextPath: CounterTack.Endpoint.Name
+ description: Product name of the endpoint OS
+ type: string
+ - contextPath: CounterTack.Endpoint.ImpactLevel
+    description: Threat level of the endpoint (LOW, MEDIUM, HIGH, CRITICAL).
+ type: string
+ - contextPath: CounterTack.Endpoint.Ips
+ description: IP addresses associated with the endpoint
+ type: string
+ - contextPath: CounterTack.Endpoint.ClusterConnectionRoute
+ description: List of hosts the endpoint is currently connected through
+ type: string
+ - contextPath: CounterTack.Endpoint.LastActive
+ description: Time of last event captured on the endpoint
+ type: date
+ - contextPath: CounterTack.Endpoint.TimeStarted
+ description: Time kernel module collection last engaged
+ type: date
+ - contextPath: CounterTack.Endpoint.Mac
+ description: The endpoint MAC address
+ type: string
+ - contextPath: CounterTack.Endpoint.EventStartTime
+ description: The time that the event was captured
+ type: date
+ - contextPath: CounterTack.Endpoint.CpuType
+ description: Bit length of the CPU architecture.
+ type: string
+ - contextPath: CounterTack.Endpoint.Status
+ description: Collection status of the endpoint (ON, PAUSE, OFF, INIT)
+ type: string
+ - contextPath: CounterTack.Endpoint.OsType
+ description: The OS type.
+ type: number
+ - contextPath: CounterTack.Endpoint.Version
+ description: OS version
+ type: string
+ - contextPath: CounterTack.Endpoint.Tags
+ description: List of user assigned tags
+ type: string
+ - contextPath: CounterTack.Endpoint.Threat
+ description: Threat level associated with the endpoint
+ type: string
+ - contextPath: CounterTack.Endpoint.Id
+ description: Endpoints ID
+ type: string
+ - contextPath: CounterTack.Endpoint.ProductName
+ description: Product name of the endpoint OS
+ type: string
+ - contextPath: Endpoint.Memory
+ description: Endpoint RAM (megabytes)
+ type: number
+ - contextPath: Endpoint.Processors
+ description: Number of CPUs
+ type: number
+ - contextPath: Endpoint.Domain
+ description: DNS suffix for the endpoint
+ type: string
+ - contextPath: Endpoint.OS
+ description: Product name of the endpoint OS
+ type: string
+ - contextPath: Endpoint.MACAddress
+ description: The MAC address of the endpoint.
+ type: string
+ - contextPath: Endpoint.Model
+ description: The analysis profile that is currently active.
+ type: string
+ - contextPath: Endpoint.IPAddress
+ description: The IP addresses that are associated with the endpoint.
+ type: string
+ - contextPath: Endpoint.OSVersion
+ description: The endpoint sensor version.
+ type: string
+ - contextPath: Endpoint.ID
+ description: The ID of the Endpoints.
+ type: string
+ - deprecated: false
+ description: Returns information for all behaviors.
+ execution: false
+ name: countertack-get-behaviors
+ outputs:
+ - contextPath: CounterTack.Behavior.MaxImpact
+ description: The impact of the highest scoring event (0-100)
+ type: number
+ - contextPath: CounterTack.Behavior.EndpointId
+ description: The ID of the endpoint, based on the UUID of the last installed
+ endpoint sensor
+ type: string
+ - contextPath: CounterTack.Behavior.Tenant
+ description: The tenant of the behavior.
+ type: string
+ - contextPath: CounterTack.Behavior.EventCount
+ description: The number of events detected.
+ type: number
+ - contextPath: CounterTack.Behavior.Name
+ description: The name of the condition that triggered the behavior.
+ type: string
+ - contextPath: CounterTack.Behavior.ImpactLevel
+ description: The threat level of the behavior (LOW, MEDIUM, HIGH, CRITICAL).
+ type: string
+ - contextPath: CounterTack.Behavior.LastActive
+ description: The time that the behavior was last active.
+ type: date
+ - contextPath: CounterTack.Behavior.FirstEventId
+ description: The ID of the first event.
+ type: date
+ - contextPath: CounterTack.Behavior.TimeStamp
+ description: The start time for the behavior.
+ type: date
+ - contextPath: CounterTack.Behavior.Type
+ description: The type of behavior (CLASSIFICATION, TRACE)
+ type: string
+ - contextPath: CounterTack.Behavior.Id
+ description: The ID of the behaviors.
+ type: string
+ - contextPath: CounterTack.Behavior.LastReported
+ description: The time that the behavior was last seen.
+ type: date
+ - arguments:
+ - default: false
+ description: The ID of the endpoint. To get the "endpoint_id", run the `get-endpoints`
+ command.
+ isArray: false
+ name: endpoint_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Get information on specific endpoint
+ execution: false
+ name: countertack-get-endpoint
+ outputs:
+ - contextPath: CounterTack.Endpoint.MaxImpact
+ description: The impact of the highest scoring behavior.
+ type: number
+ - contextPath: CounterTack.Endpoint.Memory
+ description: The RAM of the endpoint (in megabytes)
+ type: number
+ - contextPath: CounterTack.Endpoint.DriverVersion
+ description: The sensor version of the endpoint.
+ type: string
+ - contextPath: CounterTack.Endpoint.ProfileVersion
+ description: The version of the current profile used for collection.
+ type: string
+ - contextPath: CounterTack.Endpoint.BehaviorCount
+ description: The number of behaviors that were detected.
+ type: number
+ - contextPath: CounterTack.Endpoint.CurrentProfile
+ description: The analysis profile that is currently active.
+ type: string
+ - contextPath: CounterTack.Endpoint.Domain
+ description: DNS suffix for the endpoint.
+ type: string
+ - contextPath: CounterTack.Endpoint.NumCpus
+ description: The number of CPUs for the endpoint.
+ type: number
+ - contextPath: CounterTack.Endpoint.WinRdpPort
+ description: The RDP port used by the endpoint.
+ type: number
+ - contextPath: CounterTack.Endpoint.Macs
+ description: The MAC addresses associated with the endpoint.
+ type: string
+ - contextPath: CounterTack.Endpoint.Ip
+ description: The IP address used to connect to the analysis cluster.
+ type: string
+ - contextPath: CounterTack.Endpoint.ClusterHosts
+ description: The list of hosts that the endpoint tries to connect through (in
+ order).
+ type: string
+ - contextPath: CounterTack.Endpoint.Vendor
+ description: The OS vendor.
+ type: string
+ - contextPath: CounterTack.Endpoint.SensorMode
+ description: The sensor mode of the driver.
+ type: string
+ - contextPath: CounterTack.Endpoint.Identifier
+ description: The identifier of the OS.
+ type: string
+ - contextPath: CounterTack.Endpoint.Tenant
+ description: The tenant ID that was set at the time of KM installation.
+ type: string
+ - contextPath: CounterTack.Endpoint.Name
+ description: The machine name of the endpoint.
+ type: string
+ - contextPath: CounterTack.Endpoint.ImpactLevel
+ description: The threat level of the endpoint.
+ type: string
+ - contextPath: CounterTack.Endpoint.Ips
+ description: The IP addresses associated with the endpoint.
+ type: string
+ - contextPath: CounterTack.Endpoint.ClusterConnectionRoute
+ description: The list of hosts that the endpoint is currently connected through.
+ type: string
+ - contextPath: CounterTack.Endpoint.LastActive
+ description: The time of the last event that was captured on the endpoint.
+ type: date
+ - contextPath: CounterTack.Endpoint.TimeStarted
+ description: The first time that the endpoint started to work.
+ type: date
+ - contextPath: CounterTack.Endpoint.Mac
+ description: The MAC address of the endpoint.
+ type: string
+ - contextPath: CounterTack.Endpoint.EventStartTime
+ description: The time that the event was captured.
+ type: date
+ - contextPath: CounterTack.Endpoint.CpuType
+ description: The bit length of the CPU architecture.
+ type: number
+ - contextPath: CounterTack.Endpoint.Status
+ description: The collection status of the endpoint (ON, PAUSE, OFF, INIT).
+ type: string
+ - contextPath: CounterTack.Endpoint.OsType
+ description: The OS type.
+ type: number
+ - contextPath: CounterTack.Endpoint.Version
+ description: The version of the endpoint.
+ type: string
+ - contextPath: CounterTack.Endpoint.Threat
+ description: The threat level associated with the endpoint.
+ type: string
+ - contextPath: CounterTack.Endpoint.Id
+ description: The ID of the endpoint.
+ type: string
+ - contextPath: CounterTack.Endpoint.ProductName
+ description: The product name of the endpoint OS.
+ type: string
+ - contextPath: CounterTack.Endpoint.Tags
+ description: The list of user assigned tags.
+ type: string
+ - contextPath: CounterTack.Endpoint.IsQuarantined
+ description: Whether the endpoint is currently quarantined.
+ type: boolean
+ - contextPath: Endpoint.Memory
+ description: The RAM of the endpoint (in megabytes).
+ type: number
+ - contextPath: Endpoint.Processors
+ description: The number of CPUs.
+ type: number
+ - contextPath: Endpoint.Domain
+ description: The DNS suffix for the endpoint.
+ type: string
+ - contextPath: Endpoint.OS
+ description: The product name of the endpoint OS.
+ type: string
+ - contextPath: Endpoint.MACAddress
+ description: The MAC address of the endpoint.
+ type: string
+ - contextPath: Endpoint.Model
+ description: The analysis profile that is currently active.
+ type: string
+ - contextPath: Endpoint.IPAddress
+ description: The IP addresses associated with the endpoint.
+ type: string
+ - contextPath: Endpoint.OSVersion
+ description: The version of the endpoint sensor.
+ type: string
+ - arguments:
+ - default: false
+ description: The ID of the behavior.
+ isArray: false
+ name: behavior_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets information of a given behavior.
+ execution: false
+ name: countertack-get-behavior
+ outputs:
+ - contextPath: CounterTack.Behavior.MaxImpact
+ description: The maximum impact of the behavior.
+ type: number
+ - contextPath: CounterTack.Behavior.EndpointId
+ description: The ID of the endpoint.
+ type: string
+ - contextPath: CounterTack.Behavior.Tenant
+ description: The tenant of the behavior.
+ type: string
+ - contextPath: CounterTack.Behavior.EventCount
+ description: The event count of the behavior.
+ type: number
+ - contextPath: CounterTack.Behavior.ReportedOn
+ description: The time that the behavior was first seen.
+ type: date
+ - contextPath: CounterTack.Behavior.Name
+ description: The name of the behavior.
+ type: string
+ - contextPath: CounterTack.Behavior.ImpactLevel
+ description: The impact level of the behavior.
+ type: string
+ - contextPath: CounterTack.Behavior.LastActive
+ description: The last time that the behavior was active.
+ type: date
+ - contextPath: CounterTack.Behavior.TimeStamp
+ description: The time stamp of the behavior.
+ type: date
+ - contextPath: CounterTack.Behavior.FirstEventId
+ description: The ID of the first event.
+ type: string
+ - contextPath: CounterTack.Behavior.Type
+ description: The type of behavior.
+ type: string
+ - contextPath: CounterTack.Behavior.Id
+ description: The ID of the behavior.
+ type: string
+ - contextPath: CounterTack.Behavior.LastReported
+ description: The time that the behavior was last seen.
+ type: date
+ - arguments:
+ - default: false
+ description: The ID of the endpoint to get tags for.
+ isArray: false
+ name: endpoint_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets the tags of a given endpoint.
+ execution: false
+ name: countertack-get-endpoint-tags
+ outputs:
+ - contextPath: CounterTack.Endpoint.Tags
+ description: The list of user assigned tags.
+ type: string
+ - contextPath: CounterTack.Endpoint.EndpointId
+ description: The ID of the endpoints.
+ type: string
+ - arguments:
+ - default: false
+    description: The ID of the endpoint. To get the "endpoint_id", run the `get-endpoints`
+ command.
+ isArray: false
+ name: endpoint_id
+ required: true
+ secret: false
+ - default: false
+ description: A CSV list of tags you want to add to the endpoint, for example,
+ "test1,test2".
+ isArray: true
+ name: tags
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds tags to a given endpoint.
+ execution: false
+ name: countertack-add-tags
+ outputs:
+ - contextPath: CounterTack.Endpoint.EndpointId
+ description: The ID of the endpoint.
+ type: string
+ - contextPath: CounterTack.Endpoint.Tags
+ description: The tags that were added to the endpoint.
+ type: string
+ - arguments:
+ - default: false
+    description: The tags to delete from the specified endpoint. To delete more
+      than one, separate the tags with a comma, for example, "test1,test2".
+ isArray: true
+ name: tags
+ required: true
+ secret: false
+ - default: false
+    description: The endpoint ID. To get the ID, run the `get-endpoints` command.
+ isArray: false
+ name: endpoint_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the supplied tags from a given endpoint.
+ execution: false
+ name: countertack-delete-tags
+ outputs:
+ - contextPath: CounterTack.Endpoint.Id
+ description: The ID of the endpoint
+ type: string
+ - contextPath: CounterTack.Endpoint.Tags
+ description: The tags of the specified endpoint
+ type: string
+ - arguments:
+ - default: false
+ description: The ID of the behavior.
+ isArray: false
+ name: behaviour_id
+ required: true
+ secret: false
+ - default: false
+ description: A CSV list of tags to add to the behavior, for example, "test1,test2".
+ isArray: true
+ name: tags
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds tags to a given behavior.
+ execution: false
+ name: countertack-add-behavior-tags
+ outputs:
+ - contextPath: CounterTack.Behavior.Id
+ description: The ID of the behavior.
+ type: string
+ - contextPath: CounterTack.Behavior.Tags
+ description: The tags of the behavior.
+ type: string
+ - arguments:
+ - default: false
+    description: The ID of the behavior.
+ isArray: false
+ name: behaviour_id
+ required: true
+ secret: false
+ - default: false
+    description: The tags to delete from the behavior. To delete more than one,
+      separate the tags with a comma, for example, "test1,test2".
+ isArray: true
+ name: tags
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the supplied tags from a given behavior.
+ execution: false
+ name: countertack-delete-behavior-tags
+ outputs:
+ - contextPath: CounterTack.Behavior.Id
+ description: The ID of the behavior.
+ type: string
+ - contextPath: CounterTack.Behavior.Tags
+ description: The tags of the behavior.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The ID of the endpoint to quarantine.
+ isArray: false
+ name: endpoint_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Quarantines a given endpoint.
+ execution: false
+ name: countertack-endpoint-quarantine
+ outputs:
+ - contextPath: CounterTack.Endpoint.Id
+ description: The ID of the endpoint.
+ type: string
+ - contextPath: CounterTack.Endpoint.IsQuarantine
+    description: Whether the endpoint is currently quarantined.
+ type: boolean
+ - arguments:
+ - default: false
+ description: The ID of the endpoint to remove from quarantine.
+ isArray: false
+ name: endpoint_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes a given endpoint from quarantine.
+ execution: false
+ name: countertack-disable-quarantine
+ outputs:
+ - contextPath: CounterTack.Endpoint.Id
+ description: The ID of the endpoint that was removed from quarantine.
+ type: string
+ - contextPath: CounterTack.Endpoint.IsQuarantine
+    description: Whether the endpoint is currently quarantined.
+    type: boolean
+ - arguments:
+ - default: false
+ description: The ID of the endpoint to extract a file from.
+ isArray: false
+ name: endpoint_id
+ required: true
+ secret: false
+ - default: false
+ description: The path of the file to extract, for example, "C:\\test1.txt".
+ isArray: true
+ name: file_path
+ required: true
+ secret: false
+ deprecated: false
+ description: Extracts a file from given endpoint.
+ execution: false
+ name: countertack-extract-file
+ outputs:
+ - contextPath: CounterTack.File.CommandArg.contents
+ description: The contents of the extracted file.
+ type: boolean
+ - contextPath: CounterTack.File.CommandArg.extracted_ids
+ description: The IDs of the extracted file.
+ type: string
+ - contextPath: CounterTack.File.CommandArg.md5
+ description: The MD5 hash of the extracted file.
+ type: boolean
+ - contextPath: CounterTack.File.CommandArg.paths
+ description: The path of the extracted file.
+ type: string
+ - contextPath: CounterTack.File.CommandArg.sha256
+    description: The SHA-256 hash of the extracted file.
+ type: boolean
+ - contextPath: CounterTack.File.CommandArg.ssdeep
+ description: The ssdeep hash of the extracted file.
+ type: boolean
+ - contextPath: CounterTack.File.CommandArg
+ description: The command arguments.
+ type: Unknown
+ - contextPath: CounterTack.File.CommandName
+ description: The name of the command that is sent.
+ type: string
+ - contextPath: CounterTack.File.Username
+ description: The username of the user that requested the command.
+ type: string
+ - contextPath: CounterTack.File.TargetType
+ description: The type of resource or collection this command is being sent to.
+ type: string
+ - contextPath: CounterTack.File.Status
+ description: The status of the command (initial, pending, complete, error).
+ type: string
+ - contextPath: CounterTack.File.RequestTime
+ description: The time at which the client requested the command.
+ type: date
+ - contextPath: CounterTack.File.Id
+ description: The ID of the commands.
+ type: string
+ - contextPath: CounterTack.File.EndpointIds
+ description: The ID of the source this command is being sent to.
+ type: string
+ - arguments:
+ - default: false
+ description: The ID of the endpoint to delete a file from.
+ isArray: false
+ name: endpoint_id
+ required: true
+ secret: false
+ - default: false
+ description: The path of the file to delete.
+ isArray: false
+ name: file_path
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes a file from the given endpoint.
+ execution: false
+ name: countertack-delete-file
+ - deprecated: false
+ description: Gets all extracted files for all endpoints.
+ execution: false
+ name: countertack-get-all-files
+ outputs:
+ - contextPath: CounterTack.File.Size
+ description: The size of the extracted file (in bytes).
+ type: number
+ - contextPath: CounterTack.File.EndpointId
+ description: The ID of the endpoint that contains the extracted file.
+ type: string
+ - contextPath: CounterTack.File.ExtractionTime
+ description: The time that the file was extracted.
+ type: date
+ - contextPath: CounterTack.File.Path
+ description: The full file system path of the extracted file, including the
+ filename, as seen on the endpoint.
+ type: string
+ - contextPath: CounterTack.File.Sha256
+ description: The SHA-256 digest of the file contents.
+ type: string
+ - contextPath: CounterTack.File.Tenant
+ description: The tenant ID for the endpoint.
+ type: string
+ - contextPath: CounterTack.File.User
+ description: The name of the user requesting the file.
+ type: string
+ - contextPath: CounterTack.File.Ssdeep
+ description: The ssdeep digest of the file contents.
+ type: string
+ - contextPath: CounterTack.File.EndpointIp
+ description: The IP address of the endpoint with the extracted file.
+ type: string
+ - contextPath: CounterTack.File.AvCoverage
+ description: The percentage of AV engines that determined that the hash is malicious.
+ type: number
+ - contextPath: CounterTack.File.Status
+ description: The status of the contents.
+ type: string
+ - contextPath: CounterTack.File.VtStatus
+ description: The Virus Total report status.
+ type: string
+ - contextPath: CounterTack.File.EndpointName
+ description: The name of the endpoint with the extracted file.
+ type: string
+ - contextPath: CounterTack.File.Id
+ description: The file ID of the extracted file.
+ type: string
+ - contextPath: CounterTack.File.Md5
+ description: The MD5 digest of the file contents.
+ type: string
+ - contextPath: CounterTack.File.VtReportLocation
+ description: The VirusTotal report location path.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 digest of the file contents.
+ type: string
+ - contextPath: File.Path
+ description: The full file system path of the extracted file, including the
+ filename, as seen on the endpoint.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA-256 digest of the file contents.
+ type: string
+ - contextPath: File.SSDeep
+ description: The ssdeep digest of the file contents.
+ type: string
+ - contextPath: File.Size
+ description: The size of the extracted file (in bytes).
+ type: number
+ - arguments:
+ - default: false
+ description: The ID of the endpoint. To get the endpoint_id, run the `get-endpoints`
+ command.
+ isArray: false
+ name: endpoint_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns all extracted files from a given endpoint.
+ execution: false
+ name: countertack-get-endpoint-files
+ outputs:
+ - contextPath: CounterTack.File.Id
+ description: The file ID of the extracted file.
+ type: string
+ - contextPath: CounterTack.File.Status
+ description: The status of the contents.
+ type: string
+ - contextPath: CounterTack.File.EndpointId
+ description: The ID of the endpoint with the extracted file.
+ type: string
+ - contextPath: CounterTack.File.ExtractionTime
+ description: The time that the file was extracted.
+ type: date
+ - contextPath: CounterTack.File.Tenant
+ description: The tenant ID for the endpoint.
+ type: string
+ - contextPath: CounterTack.File.User
+ description: The name of the user requesting the file.
+ type: string
+ - contextPath: CounterTack.File.Path
+ description: The full file system path of the extracted file, including the
+ filename, as seen on the endpoint.
+ type: string
+ - contextPath: CounterTack.File.Sha256
+ description: The SHA-256 digest of the file contents.
+ type: string
+ - contextPath: CounterTack.File.Ssdeep
+ description: The ssdeep digest of the file contents.
+ type: string
+ - contextPath: CounterTack.File.EndpointIp
+ description: The IP address of the endpoint with the extracted file.
+ type: string
+ - contextPath: CounterTack.File.VtStatus
+ description: The VirusTotal report status.
+ type: string
+ - contextPath: CounterTack.File.VtReportLocation
+ description: The location path of the VirusTotal report.
+ type: string
+ - contextPath: CounterTack.File.Size
+ description: The size of the extracted file (in bytes).
+ type: number
+ - contextPath: CounterTack.File.EndpointName
+ description: The name of the endpoint with the extracted file.
+ type: string
+ - contextPath: CounterTack.File.Md5
+ description: The MD5 digest of the file contents.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 digest of the file contents.
+ type: string
+ - contextPath: File.Path
+ description: The full file system path of the extracted file, including the
+ filename, as seen on the endpoint.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA-256 digest of the file contents.
+ type: string
+ - contextPath: File.SSDeep
+ description: The ssdeep digest of the file contents.
+ type: string
+ - contextPath: File.Size
+ description: The size of the extracted file (bytes).
+ type: number
+ - arguments:
+ - default: false
+    description: The ID of the requested file. To get the "file_id", run the `get-all-files`
+      command.
+ isArray: false
+ name: file_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets the information of a given file.
+ execution: false
+ name: countertack-get-file-information
+ outputs:
+ - contextPath: CounterTack.File.Size
+ description: The size of the extracted file (in bytes).
+ type: number
+ - contextPath: CounterTack.File.EndpointId
+ description: The ID of the endpoint with the extracted file.
+ type: string
+ - contextPath: CounterTack.File.ExtractionTime
+ description: The time that the file was extracted.
+ type: date
+ - contextPath: CounterTack.File.Path
+ description: Full file system path of the extracted file, including the filename,
+ as seen on the endpoint.
+ type: string
+ - contextPath: CounterTack.File.Sha256
+ description: The SHA-256 digest of the file contents.
+ type: string
+ - contextPath: CounterTack.File.Tenant
+ description: The tenant ID for the endpoint.
+ type: string
+ - contextPath: CounterTack.File.User
+ description: The name of the user requesting the file.
+ type: string
+ - contextPath: CounterTack.File.Ssdeep
+ description: The ssdeep digest of the file contents.
+ type: string
+ - contextPath: CounterTack.File.EndpointIp
+ description: The IP address of the endpoint with the extracted file.
+ type: string
+ - contextPath: CounterTack.File.AvCoverage
+ description: The percentage of AV engines that determined that the hash is malicious.
+ type: number
+ - contextPath: CounterTack.File.Status
+ description: The status of the contents.
+ type: string
+ - contextPath: CounterTack.File.VtStatus
+ description: The status of the VirusTotal report.
+ type: string
+ - contextPath: CounterTack.File.EndpointName
+ description: The name of the endpoint with the extracted file.
+ type: string
+ - contextPath: CounterTack.File.Id
+ description: The ID of the extracted file.
+ type: string
+ - contextPath: CounterTack.File.Md5
+ description: The MD5 digest of the file contents.
+ type: string
+ - contextPath: CounterTack.File.VtReportLocation
+ description: The location path of the VirusTotal report.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 digest of the file contents.
+ type: string
+ - contextPath: File.Path
+ description: The full file system path of the extracted file, including the
+ filename, as seen on the endpoint.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA-256 digest of the file contents.
+ type: string
+ - contextPath: File.SSDeep
+ description: The ssdeep digest of the file contents.
+ type: string
+ - contextPath: File.Size
+ description: The size of the extracted file (in bytes).
+ type: number
+ - arguments:
+ - default: false
+ description: The ID of the extracted file. To get the "file_id", run the `get-all-files`
+ command.
+ isArray: false
+ name: file_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Downloads an extracted file in ZIP format. The password to unlock
+ the ZIP file is `sentinel`.
+ execution: false
+ name: countertack-download-file
+ outputs:
+ - contextPath: File.Size
+ description: The size of the extracted file (in bytes).
+ type: number
+ - contextPath: File.SHA1
+ description: The SHA-1 digest of the file contents.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA-256 digest of the file contents.
+ type: string
+ - contextPath: File.Name
+ description: The name of the file.
+ type: string
+ - contextPath: File.SSDeep
+ description: The ssdeep digest of the file contents.
+ type: string
+ - contextPath: File.EntryID
+ description: The EntryID of the file.
+ type: string
+ - contextPath: File.Info
+ description: The file information.
+ type: string
+ - contextPath: File.Type
+ description: The file type.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 digest of the file contents.
+ type: string
+ - contextPath: File.Extension
+ description: The extension of the file (.zip).
+ type: string
+ - arguments:
+ - default: false
+ description: The CQL expression to be used for the search, for example, "events.event_type=basic".
+ isArray: false
+ name: expression
+ required: true
+ secret: false
+ deprecated: false
+ description: Searches for events, using CQL expression.
+ execution: false
+ name: countertack-search-events
+ outputs:
+ - contextPath: CounterTack.Event.SourceProcessTimeStarted
+ description: The start time for the source process.
+ type: date
+ - contextPath: CounterTack.Event.SourceThreadProcessPid
+ description: The process PID of the source thread.
+ type: number
+ - contextPath: CounterTack.Event.IsTaintTransfer
+ description: Is the event a malignant transfer.
+ type: boolean
+ - contextPath: CounterTack.Event.IsBasic
+ description: Is the event a basic event.
+ type: boolean
+ - contextPath: CounterTack.Event.SourceThreadTimeFinished
+ description: The exit time of the source thread.
+ type: date
+ - contextPath: CounterTack.Event.SourceThreadTid
+ description: The TID of the source thread.
+ type: number
+ - contextPath: CounterTack.Event.Tenant
+ description: The tenant of the event.
+ type: string
+ - contextPath: CounterTack.Event.SourceThreadProcessTimeStarted
+ description: The start time of the parent process for the source thread.
+ type: date
+ - contextPath: CounterTack.Event.TargetType
+ description: The system object type that was target of the event (PROCESS, THREAD,
+ REGISTRY, DRIVER, TCPIP,FILE, MUTEX, MEMORY_REGION).
+ type: string
+ - contextPath: CounterTack.Event.ConditionNames
+ description: The names of the condition triggered by the event.
+ type: Unknown
+ - contextPath: CounterTack.Event.IsOrigin
+ description: Is the event an origin for a trace.
+ type: boolean
+  - contextPath: CounterTack.Event.EndpointId
+ description: The endpoint ID, based on the UUID of the last installed endpoint
+ sensor.
+ type: string
+ - contextPath: CounterTack.Event.TargetFilePath
+ description: The path of the target file.
+ type: string
+  - contextPath: CounterTack.Event.SourceThreadProcessBackingFilePath
+ description: The backing file of the source thread.
+ type: string
+ - contextPath: CounterTack.Event.EventType
+ description: The type of event.
+ type: string
+ - contextPath: CounterTack.Event.IsKey
+ description: Is the event a key event in a trace.
+ type: boolean
+ - contextPath: CounterTack.Event.SourceType
+ description: The system object that was the source of the event.
+ type: string
+ - contextPath: CounterTack.Event.SourceThreadProcessName
+ description: The name of the parent process for the source thread.
+ type: string
+ - contextPath: CounterTack.Event.SourceThreadProcessUser
+ description: The user associated with the process of the thread.
+ type: string
+ - contextPath: CounterTack.Event.TimeStamp
+ description: The time that the event was collected.
+ type: date
+ - contextPath: CounterTack.Event.Action
+ description: The system interaction that characterizes the event.
+ type: string
+ - contextPath: CounterTack.Event.IsTainted
+ description: Are the objects in the event tainted.
+ type: boolean
+ - contextPath: CounterTack.Event.SourceThreadProcessParentPid
+ description: The parent PID of the source thread process.
+ type: number
+ - contextPath: CounterTack.Event.SourceProcessPid
+ description: The PID of the source process.
+ type: number
+ - contextPath: CounterTack.Event.SourceThreadStartAddress
+ description: The start address of the thread.
+ type: number
+ - contextPath: CounterTack.Event.SourceProcessSid
+ description: The user SIDs associated with the process.
+ type: number
+ - contextPath: CounterTack.Event.Id
+ description: The ID of the event.
+ type: string
+ - contextPath: CounterTack.Event.ConditionIds
+ description: The IDs of the condition triggered by the event.
+ type: Unknown
+ - contextPath: CounterTack.Event.SourceProcessName
+ description: The name of the process that was the source of the event.
+ type: string
+ - contextPath: CounterTack.Event.SourceProcessUser
+ description: The user associated with the process
+ type: string
+ - arguments:
+ - default: false
+ description: The ID of the endpoint. To get the "endpoint_id", run the `get-endpoints`
+ command.
+ isArray: false
+ name: endpoint_id
+ required: true
+ secret: false
+ - default: false
+ description: The process PID. To get the "process_id", run the `search-events`
+ command.
+ isArray: false
+ name: process_id
+ required: false
+ secret: false
+ - default: false
+ description: The name of the process. To get the "process_name", run the `search-events`
+ command.
+ isArray: false
+ name: process_name
+ required: false
+ secret: false
+ deprecated: false
+ description: Terminates all instances of the process identified in the command.
+ Processes can be identified by the PID or process name.
+ execution: false
+ name: countertack-kill-process
+ outputs:
+ - contextPath: CounterTack.Endpoint.EndpointIds
+ description: The ID of the source this command is being sent to.
+ type: string
+ - contextPath: CounterTack.Endpoint.TargetType
+ description: The type of resource or collection this command is being sent to.
+ type: string
+ - contextPath: CounterTack.Endpoint.CommandArg.name
+ description: The name of the process that was terminated.
+ type: string
+ - contextPath: CounterTack.Endpoint.CommandArg.pid
+ description: The PID of the process that was terminated.
+ type: number
+ - contextPath: CounterTack.Endpoint.CommandArg
+ description: The command arguments.
+ type: string
+ - contextPath: CounterTack.Endpoint.Status
+ description: The status of the command (initial, pending, complete, error).
+ type: string
+ - contextPath: CounterTack.Endpoint.CommandName
+ description: The name of the command that is sent.
+ type: string
+ - contextPath: CounterTack.Endpoint.Username
+ description: The username of the user that requested the command.
+ type: string
+ - contextPath: CounterTack.Endpoint.Id
+ description: The ID of the commands.
+ type: string
+ - contextPath: CounterTack.Endpoint.RequestTime
+ description: The time at which the client requested the command.
+ type: date
+ - arguments:
+ - default: false
+      description: The CQL expression to be used for the search, for example, "hashes.type
+        = md5".
+ isArray: false
+ name: expression
+ required: true
+ secret: false
+ deprecated: false
+ description: Searches for hashes using CQL expressions (Contextual Query Language)
+ to represent queries.
+ execution: false
+ name: countertack-search-hashes
+ outputs:
+ - contextPath: CounterTack.Hash.AvCoverage
+ description: The percentage of AV engines that determined that the hash is malicious.
+ type: number
+ - contextPath: CounterTack.Hash.Id
+ description: The ID of the hashes.
+ type: string
+ - contextPath: CounterTack.Hash.Impact
+ description: The impact score for the event in the hash (1-100).
+ type: number
+ - contextPath: CounterTack.Hash.Type
+ description: The type of hash (sha256, md5, or ssdeep).
+ type: string
+ - contextPath: CounterTack.Hash.VtReportLocation
+ description: The report location for VirusTotal report.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 of the file
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA-256 of the file.
+ type: string
+ - contextPath: File.SSDeep
+ description: The ssdeep of the file.
+ type: string
+ - arguments:
+ - default: false
+      description: The CQL expression to be used for the search, for example, "endpoints.status=on".
+ isArray: false
+ name: expression
+ required: true
+ secret: false
+ deprecated: false
+    description: Searches for endpoints, using CQL expressions (Contextual Query
+      Language) to represent queries.
+ execution: false
+ name: countertack-search-endpoints
+ outputs:
+  - contextPath: CounterTack.Endpoint.Memory
+    description: The RAM of the endpoint (in megabytes).
+    type: Number
+  - contextPath: CounterTack.Endpoint.CpuType
+    description: The bit length of the CPU architecture.
+    type: Number
+  - contextPath: CounterTack.Endpoint.WinRdpPort
+    description: The RDP port used by the endpoint.
+    type: Number
+  - contextPath: CounterTack.Endpoint.Macs
+    description: The MAC addresses associated with the endpoint.
+    type: String
+  - contextPath: CounterTack.Endpoint.Ip
+    description: The IP address used to connect to the analysis cluster.
+    type: String
+  - contextPath: CounterTack.Endpoint.Vendor
+    description: The OS vendor.
+    type: String
+  - contextPath: CounterTack.Endpoint.Identifier
+    description: The OS identifier.
+    type: String
+  - contextPath: CounterTack.Endpoint.Tenant
+    description: The tenant ID, set at the time of KM installation.
+    type: String
+  - contextPath: CounterTack.Endpoint.MaxImpact
+    description: The impact of the highest scoring behavior.
+    type: Number
+  - contextPath: CounterTack.Endpoint.Name
+    description: The product name of the endpoint OS.
+    type: String
+  - contextPath: CounterTack.Endpoint.Ips
+    description: The IP addresses associated with the endpoint.
+    type: String
+  - contextPath: CounterTack.Endpoint.CurrentResponsePolicy
+    description: The currently active response policy.
+    type: String
+  - contextPath: CounterTack.Endpoint.ProfileVersion
+    description: The version of the current profile used for collection.
+    type: String
+  - contextPath: CounterTack.Endpoint.CurrentProfile
+    description: The currently active analysis profile.
+    type: String
+  - contextPath: CounterTack.Endpoint.DriverVersion
+    description: The endpoint sensor version.
+    type: String
+  - contextPath: CounterTack.Endpoint.NumCpus
+    description: The number of CPUs.
+    type: Number
+  - contextPath: CounterTack.Endpoint.ClusterConnectionRoute
+    description: The list of hosts the endpoint is currently connected through.
+    type: String
+  - contextPath: CounterTack.Endpoint.ClusterHosts
+    description: The list of hosts that the endpoint tries to connect through (in
+      order).
+    type: String
+  - contextPath: CounterTack.Endpoint.Status
+    description: The collection status of the endpoint (ON, PAUSE, OFF, INIT).
+    type: String
+  - contextPath: CounterTack.Endpoint.TimeStarted
+    description: The time that kernel module collection last engaged.
+    type: Date
+  - contextPath: CounterTack.Endpoint.EventStartTime
+    description: The time that the event was captured.
+    type: Date
+  - contextPath: CounterTack.Endpoint.Version
+    description: The OS version.
+    type: String
+  - contextPath: CounterTack.Endpoint.Threat
+    description: The threat level associated with the endpoint.
+    type: String
+  - contextPath: CounterTack.Endpoint.ProductName
+    description: The product name of the endpoint OS.
+    type: String
+  - contextPath: CounterTack.Endpoint.Id
+    description: The ID of the endpoint.
+    type: String
+  - contextPath: CounterTack.Endpoint.LastActive
+    description: The time of the last event captured on the endpoint.
+    type: Date
+  - contextPath: CounterTack.Endpoint.SensorMode
+    description: The sensor mode of the driver.
+    type: String
+  - contextPath: CounterTack.Endpoint.BehaviorCount
+    description: The number of behaviors detected.
+    type: Number
+  - contextPath: CounterTack.Endpoint.ImpactLevel
+    description: The threat level of the endpoint (LOW, MEDIUM, HIGH, CRITICAL).
+    type: String
+  - contextPath: CounterTack.Endpoint.OsType
+    description: The OS type.
+    type: Number
+  - contextPath: Endpoint.Memory
+    description: The RAM of the endpoint (in megabytes).
+    type: Number
+  - contextPath: Endpoint.Processors
+    description: The number of CPUs.
+    type: Number
+  - contextPath: Endpoint.Domain
+    description: The DNS suffix of the endpoint.
+    type: String
+  - contextPath: Endpoint.OS
+    description: The product name of the endpoint OS.
+    type: String
+  - contextPath: Endpoint.MACAddress
+    description: The MAC address of the endpoint.
+    type: String
+  - contextPath: Endpoint.Model
+    description: The analysis profile that is currently active.
+    type: String
+  - contextPath: Endpoint.IPAddress
+    description: The IP addresses that are associated with the endpoint.
+    type: String
+  - contextPath: Endpoint.OSVersion
+    description: The endpoint sensor version.
+    type: String
+  - contextPath: Endpoint.Id
+    description: The ID of the endpoint.
+    type: String
+ - arguments:
+ - default: false
+      description: The CQL expression to be used for the search (e.g., behaviors.event_count<60).
+ isArray: false
+ name: expression
+ required: true
+ secret: false
+ deprecated: false
+    description: Searches for behaviors, using a CQL (Contextual Query Language)
+      expression to represent the query.
+ execution: false
+ name: countertack-search-behaviors
+ outputs:
+ - contextPath: CounterTack.Behavior.FirstEventId
+ description: The ID of the first event.
+ type: String
+ - contextPath: CounterTack.Behavior.LastReported
+ description: The time that the behavior was last seen.
+ type: Date
+ - contextPath: CounterTack.Behavior.Tenant
+ description: The tenant of the behavior.
+ type: String
+ - contextPath: CounterTack.Behavior.MaxImpact
+    description: The impact of the highest scoring event (0-100).
+ type: Number
+ - contextPath: CounterTack.Behavior.Name
+ description: The name of the condition that triggered the behavior.
+ type: String
+ - contextPath: CounterTack.Behavior.EndpointId
+    description: The ID of the endpoint, based on the UUID of the last installed
+      endpoint sensor.
+ type: String
+ - contextPath: CounterTack.Behavior.ReportedOn
+ description: The time that the behavior was first seen.
+ type: Date
+ - contextPath: CounterTack.Behavior.EventCount
+ description: The number of events detected.
+ type: Number
+ - contextPath: CounterTack.Behavior.TimeStamp
+ description: The start time for the behavior.
+ type: Date
+ - contextPath: CounterTack.Behavior.Type
+    description: The type of behavior (CLASSIFICATION, TRACE).
+ type: String
+ - contextPath: CounterTack.Behavior.Id
+    description: The ID of the behavior.
+ type: String
+ - contextPath: CounterTack.Behavior.LastActive
+ description: The time that the behavior was last active.
+ type: Date
+ - contextPath: CounterTack.Behavior.ImpactLevel
+ description: The threat level of the behavior (LOW, MEDIUM, HIGH, CRITICAL).
+ type: String
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- no tests
diff --git a/Integrations/CounterTack/CounterTack_description.md b/Integrations/CounterTack/CounterTack_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/CounterTack/CounterTack_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/CounterTack/CounterTack_image.png b/Integrations/CounterTack/CounterTack_image.png
new file mode 100644
index 000000000000..a6a55cb8d812
Binary files /dev/null and b/Integrations/CounterTack/CounterTack_image.png differ
diff --git a/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py b/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
new file mode 100644
index 000000000000..a765a241c486
--- /dev/null
+++ b/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
@@ -0,0 +1,805 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+
+import json
+import requests
+import base64
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+CLIENT_ID = demisto.params().get('client_id')
+SECRET = demisto.params().get('secret')
+# Remove trailing slash to prevent wrong URL path to service
+SERVER = demisto.params()['url'][:-1] if (demisto.params()['url'] and demisto.params()['url'].endswith('/')) else \
+ demisto.params()['url']
+# Should we use SSL
+USE_SSL = not demisto.params().get('insecure', False)
+# How much time before the first fetch to retrieve incidents
+FETCH_TIME = demisto.params().get('fetch_time', '3 days')
+BYTE_CREDS = '{name}:{password}'.format(name=CLIENT_ID, password=SECRET).encode('utf-8')
+# Headers to be sent in requests
+HEADERS = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json',
+ 'Authorization': 'Basic {}'.format(base64.b64encode(BYTE_CREDS).decode())
+}
+# Note: the actual token lifetime is 30 minutes
+TOKEN_LIFE_TIME = 28
+INCIDENTS_PER_FETCH = int(demisto.params().get('incidents_per_fetch', 15))
+# Remove proxy if not set to true in params (pop avoids a KeyError when the
+# environment variables are unset)
+if not demisto.params().get('proxy'):
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+''' KEY DICTIONARY '''
+
+DETECTIONS_BASE_KEY_MAP = {
+ 'device.hostname': 'System',
+ 'device.cid': 'CustomerID',
+ 'hostinfo.domain': 'MachineDomain',
+ 'detection_id': 'ID',
+ 'created_timestamp': 'ProcessStartTime',
+ 'max_severity': 'MaxSeverity',
+ 'show_in_ui': 'ShowInUi',
+ 'status': 'Status'
+}
+
+DETECTIONS_BEHAVIORS_KEY_MAP = {
+ 'filename': 'FileName',
+ 'scenario': 'Scenario',
+ 'md5': 'MD5',
+ 'sha256': 'SHA256',
+ 'ioc_type': 'IOCType',
+ 'ioc_value': 'IOCValue',
+ 'cmdline': 'CommandLine',
+ 'user_name': 'UserName',
+ 'behavior_id': 'ID',
+}
+
+SEARCH_IOC_KEY_MAP = {
+ 'type': 'Type',
+ 'value': 'Value',
+ 'policy': 'Policy',
+ 'source': 'Source',
+ 'share_level': 'ShareLevel',
+ 'expiration_timestamp': 'Expiration',
+ 'description': 'Description',
+ 'created_timestamp': 'CreatedTime',
+ 'created_by': 'CreatedBy',
+ 'modified_timestamp': 'ModifiedTime',
+ 'modified_by': 'ModifiedBy'
+}
+
+SEARCH_DEVICE_KEY_MAP = {
+ 'device_id': 'ID',
+ 'external_ip': 'ExternalIP',
+ 'local_ip': 'LocalIP',
+ 'hostname': 'Hostname',
+ 'os_version': 'OS',
+ 'mac_address': 'MacAddress',
+ 'first_seen': 'FirstSeen',
+ 'last_seen': 'LastSeen'
+}
+
+''' SPLIT KEY DICTIONARY '''
+
+"""
+ Pattern:
+ {
+ 'Path': 'Path to item',
+ 'NewKey': 'Value of output key',
+ 'Delim': 'Delimiter char',
+ 'Index': Split Array Index
+ }
+"""
+DETECTIONS_BEHAVIORS_SPLIT_KEY_MAP = [
+ {
+ 'Path': 'parent_details.parent_process_graph_id',
+ 'NewKey': 'SensorID',
+ 'Delim': ':',
+ 'Index': 1
+ },
+ {
+ 'Path': 'parent_details.parent_process_graph_id',
+ 'NewKey': 'ParentProcessID',
+ 'Delim': ':',
+ 'Index': 2
+ },
+ {
+ 'Path': 'triggering_process_graph_id',
+ 'NewKey': 'ProcessID',
+ 'Delim': ':',
+ 'Index': 2
+ },
+]
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, params=None, data=None, headers=HEADERS, safe=False, get_token_flag=True):
+ """
+ A wrapper for requests lib to send our requests and handle requests and responses better.
+
+ :type method: ``str``
+ :param method: HTTP method for the request.
+
+ :type url_suffix: ``str``
+ :param url_suffix: The suffix of the URL (endpoint)
+
+ :type params: ``dict``
+ :param params: The URL params to be passed.
+
+ :type data: ``str``
+ :param data: The body data of the request.
+
+ :type headers: ``dict``
+ :param headers: Request headers
+
+ :type safe: ``bool``
+ :param safe: If set to true will return None in case of http error
+
+ :type get_token_flag: ``bool``
+ :param get_token_flag: If set to True will call get_token()
+
+ :return: Returns the http request response json
+ :rtype: ``dict``
+ """
+ if get_token_flag:
+ token = get_token()
+ headers['Authorization'] = 'Bearer {}'.format(token)
+ url = SERVER + url_suffix
+ try:
+ res = requests.request(
+ method,
+ url,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ headers=headers,
+ )
+ except requests.exceptions.RequestException:
+ return_error('Error in connection to the server. Please make sure you entered the URL correctly.')
+ # Handle error responses gracefully
+ if res.status_code not in {200, 201, 202}:
+ err_msg = 'Error in API call. code:{code}; reason: {reason}'.format(code=res.status_code, reason=res.reason)
+ # try to create a new token
+ if res.status_code == 403 and get_token_flag:
+ LOG(err_msg)
+ token = get_token(new_token=True)
+ headers['Authorization'] = 'Bearer {}'.format(token)
+ return http_request(method, url_suffix, params, data, headers, safe, get_token_flag=False)
+ elif safe:
+ return None
+ return_error(err_msg)
+ return res.json()
+
+
+def create_entry_object(contents='', ec=None, hr=''):
+ """
+ Creates an entry object
+
+ :type contents: ``dict``
+ :param contents: Raw response to output
+
+ :type ec: ``dict``
+ :param ec: Entry context of the entry object
+
+ :type hr: ``str``
+ :param hr: Human readable
+
+ :return: Entry object
+ :rtype: ``dict``
+ """
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': contents,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ }
+
+
+def detection_to_incident(detection):
+ """
+ Creates an incident of a detection.
+
+ :type detection: ``dict``
+ :param detection: Single detection object
+
+ :return: Incident representation of a detection
+ :rtype ``dict``
+ """
+ incident = {
+ 'name': 'Detection ID: ' + str(detection.get('detection_id')),
+ 'occurred': str(detection.get('first_behavior')),
+ 'rawJSON': json.dumps(detection),
+ 'severity': severity_string_to_int(detection.get('max_severity_displayname'))
+ }
+ return incident
+
+
+def severity_string_to_int(severity):
+ """
+ Converts a severity string to DBot score representation
+
+ :type severity: ``str``
+ :param severity: String representation of a severity
+
+ :return: DBot score representation of the severity
+ :rtype ``int``
+ """
+ if severity in ('Critical', 'High'):
+ return 3
+ elif severity in ('Medium', 'Low'):
+ return 2
+ return 0
+
+
+def get_transformed_dict(old_dict, transformation_dict):
+ """
+ Returns a dictionary with the same values as old_dict, with the correlating key:value in transformation_dict
+
+ :type old_dict: ``dict``
+ :param old_dict: Old dictionary to pull values from
+
+ :type transformation_dict: ``dict``
+ :param transformation_dict: Transformation dictionary that contains oldkeys:newkeys
+
+    :return Transformed dictionary (according to transformation_dict values)
+ :rtype ``dict``
+ """
+ new_dict = {}
+ for k in list(old_dict.keys()):
+ if k in transformation_dict:
+ new_dict[transformation_dict[k]] = old_dict[k]
+ return new_dict
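+
+# Minimal usage sketch (illustrative only, not called by the integration):
+# with SEARCH_DEVICE_KEY_MAP defined above,
+#   get_transformed_dict({'device_id': 'abc', 'ignored': 1}, SEARCH_DEVICE_KEY_MAP)
+# returns {'ID': 'abc'} -- keys absent from the transformation dict are dropped.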
+
+
+def extract_transformed_dict_with_split(old_dict, transformation_dict_arr):
+ """
+ Extracts new values out of old_dict using a json structure of:
+ {'Path': 'Path to item', 'NewKey': 'Value of output key', 'Delim': 'Delimiter char', 'Index': Split Array Index}
+ """
+ new_dict = {}
+ for trans_dict in transformation_dict_arr:
+ try:
+ val = demisto.get(old_dict, trans_dict['Path'])
+ if 'split' in dir(val):
+ i = trans_dict['Index']
+ new_dict[trans_dict['NewKey']] = val.split(trans_dict['Delim'])[i]
+ except Exception as ex:
+ LOG('Error {exception} with: {tdict}'.format(exception=ex, tdict=trans_dict))
+ return new_dict
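+
+# Illustrative sketch, assuming graph IDs are colon-delimited strings (the sample
+# value below is hypothetical):
+#   extract_transformed_dict_with_split(
+#       {'triggering_process_graph_id': 'pid:aaa111:222'},
+#       DETECTIONS_BEHAVIORS_SPLIT_KEY_MAP)
+# returns {'ProcessID': '222'}: the value is split on ':' and index 2 is taken,
+# while entries whose 'Path' is missing from the input are silently skipped.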
+
+
+def get_passed_mins(start_time, end_time_str):
+    """
+    Returns the time passed in minutes
+    :param start_time: Start time as a datetime object
+    :param end_time_str: End time as an epoch timestamp (in seconds)
+    :return: The number of minutes passed
+    """
+    time_delta = start_time - datetime.fromtimestamp(end_time_str)
+    return time_delta.total_seconds() / 60
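+
+# Worked example (sketch): if start_time is now and end_time_str is the epoch
+# timestamp from 29 minutes ago, get_passed_mins returns 29.0, which is >=
+# TOKEN_LIFE_TIME (28), so get_token() below will request a fresh token.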
+
+
+''' COMMAND SPECIFIC FUNCTIONS '''
+
+
+def get_token(new_token=False):
+ """
+ Retrieves the token from the server if it's expired and updates the global HEADERS to include it
+
+ :param new_token: If set to True will generate a new token regardless of time passed
+
+ :rtype: ``str``
+ :return: Token
+ """
+ now = datetime.now()
+ ctx = demisto.getIntegrationContext()
+ if ctx and not new_token:
+ passed_mins = get_passed_mins(now, ctx.get('time'))
+ if passed_mins >= TOKEN_LIFE_TIME:
+ # token expired
+ auth_token = get_token_request()
+ demisto.setIntegrationContext({'auth_token': auth_token, 'time': date_to_timestamp(now) / 1000})
+ else:
+ # token hasn't expired
+ auth_token = ctx.get('auth_token')
+ else:
+ # there is no token
+ auth_token = get_token_request()
+ demisto.setIntegrationContext({'auth_token': auth_token, 'time': date_to_timestamp(now) / 1000})
+ return auth_token
+
+
+def get_token_request():
+ """
+ Sends token request
+
+ :rtype ``str``
+ :return: Access token
+ """
+ body = {
+ 'client_id': CLIENT_ID,
+ 'client_secret': SECRET
+ }
+ headers = {
+ 'Authorization': HEADERS['Authorization']
+ }
+ token_res = http_request('POST', '/oauth2/token', data=body, headers=headers, safe=True,
+ get_token_flag=False)
+ if not token_res:
+ err_msg = 'Authorization Error: User has no authorization to create a token. Please make sure you entered the' \
+ ' credentials correctly.'
+ raise Exception(err_msg)
+ return token_res.get('access_token')
+
+
+def get_detections(last_behavior_time=None, behavior_id=None, filter_arg=None):
+ """
+ Sends detections request. The function will ignore the arguments passed according to priority:
+ filter_arg > behavior_id > last_behavior_time
+
+ :param last_behavior_time: 3rd priority. The last behavior time of results will be greater than this value
+ :param behavior_id: 2nd priority. The result will only contain the detections with matching behavior id
+ :param filter_arg: 1st priority. The result will be filtered using this argument.
+ :return: Response json of the get detection endpoint (IDs of the detections)
+ """
+ endpoint_url = '/detects/queries/detects/v1'
+ params = {
+ 'sort': 'first_behavior.asc'
+ }
+ if filter_arg:
+ params['filter'] = filter_arg
+ elif behavior_id:
+ params['filter'] = "behaviors.behavior_id:'{0}'".format(behavior_id)
+ elif last_behavior_time:
+ params['filter'] = "first_behavior:>'{0}'".format(last_behavior_time)
+
+ response = http_request('GET', endpoint_url, params)
+ return response
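+
+# For example (sketch, hypothetical ID): get_detections(behavior_id='123') sends
+# GET /detects/queries/detects/v1 with params
+#   {'sort': 'first_behavior.asc', 'filter': "behaviors.behavior_id:'123'"}
+# whereas a non-empty filter_arg would take priority over the other arguments.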
+
+
+def get_detections_entities(detections_ids):
+ """
+ Sends detection entities request
+ :param detections_ids: IDs of the requested detections.
+ :return: Response json of the get detection entities endpoint (detection objects)
+ """
+ ids_json = {'ids': detections_ids}
+ if detections_ids:
+ response = http_request(
+ 'POST',
+ '/detects/entities/summaries/GET/v1',
+ data=json.dumps(ids_json)
+ )
+ return response
+ return detections_ids
+
+
+def create_ioc():
+ """
+ UNTESTED - Creates an IoC
+ :return: Response json of create IoC request
+ """
+ args = demisto.args()
+ input_args = {}
+ # req args:
+ input_args['type'] = args['ioc_type']
+ input_args['value'] = args['ioc_value']
+ input_args['policy'] = args['policy']
+ # opt args:
+ input_args['expiration_days'] = args.get('expiration_days')
+ input_args['source'] = args.get('source')
+ input_args['description'] = args.get('description')
+
+ payload = {k: str(v) for k, v in input_args.items() if v}
+ headers = {'Authorization': HEADERS['Authorization']}
+ return http_request('POST', '/indicators/entities/iocs/v1', params=payload, headers=headers)
+
+
+def search_iocs():
+ """
+    UNTESTED IN OAUTH 2 - Searches for IoCs
+ :return: IoCs that were found in the search
+ """
+ args = demisto.args()
+ ids = args.get('ids')
+ if not ids:
+ search_args = {
+ 'types': str(args.get('ioc_types', '')).split(','),
+ 'values': str(args.get('ioc_values', '')).split(','),
+ 'policies': str(args.get('policy', '')),
+ 'sources': str(args.get('sources', '')).split(','),
+ 'from.expiration_timestamp': str(args.get('expiration_from', '')),
+ 'to.expiration_timestamp': str(args.get('expiration_to', '')),
+ 'limit': str(args.get('limit', 50))
+ }
+ payload = {}
+ for k, arg in search_args.items():
+ if type(arg) is list:
+ if arg[0]:
+ payload[k] = arg
+ elif arg:
+ payload[k] = arg
+ ids = http_request('GET', '/indicators/queries/iocs/v1', payload).get('resources')
+ if not ids:
+ return None
+ else:
+ ids = str(ids)
+ payload = {
+ 'ids': ids
+ }
+ return http_request('GET', '/indicators/entities/iocs/v1', params=payload)
+
+
+def enrich_ioc_dict_with_ids(ioc_dict):
+ """
+ Enriches the provided ioc_dict with IoC ID
+ :param ioc_dict: IoC dict transformed using the SEARCH_IOC_KEY_MAP
+ :return: ioc_dict with its ID key:value updated
+ """
+ for ioc in ioc_dict:
+ ioc['ID'] = '{type}:{val}'.format(type=ioc.get('Type'), val=ioc.get('Value'))
+ return ioc_dict
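+
+# Sketch: enrich_ioc_dict_with_ids([{'Type': 'md5', 'Value': 'abc'}]) sets the
+# entry's ID to 'md5:abc', matching the '{type}:{val}' format used above.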
+
+
+def delete_ioc():
+ """
+ UNTESTED - Sends a delete IoC request
+ :return: Response json of delete IoC
+ """
+ ids = str(demisto.args().get('ids'))
+ payload = {
+ 'ids': ids
+ }
+ return http_request('DELETE', '/indicators/entities/iocs/v1', payload)
+
+
+def update_iocs():
+ """
+    UNTESTED - Updates the values of one or more IoCs
+ :return: Response json of update IoC request
+ """
+ args = demisto.args()
+ input_args = {
+ 'ids': args.get('ids'),
+ 'policy': args.get('policy', ''),
+ 'expiration_days': args.get('expiration_days', ''),
+ 'source': args.get('source'),
+ 'description': args.get('description')
+ }
+ payload = {k: str(v) for k, v in input_args.items() if v}
+ headers = {'Authorization': HEADERS['Authorization']}
+ return http_request('PATCH', '/indicators/entities/iocs/v1', params=payload, headers=headers)
+
+
+def search_device():
+ """
+    Searches for devices using the arguments provided by the command execution. Returns an empty
+    result if no device was found
+ :return: Search device response json
+ """
+ args = demisto.args()
+ input_arg_dict = {
+ 'device_id': str(args.get('ids', '')).split(','),
+ 'status': str(args.get('status', '')).split(','),
+ 'hostname': str(args.get('hostname', '')).split(','),
+ 'platform_name': str(args.get('platform_name', '')).split(','),
+ 'site_name': str(args.get('site_name', '')).split(',')
+ }
+ url_filter = '{}'.format(str(args.get('filter', '')))
+ for k, arg in input_arg_dict.items():
+ if arg:
+ if type(arg) is list:
+ arg_filter = ''
+ for arg_elem in arg:
+ if arg_elem:
+ first_arg = '{filter},{inp_arg}'.format(filter=arg_filter, inp_arg=k) if arg_filter else k
+ arg_filter = "{first}:'{second}'".format(first=first_arg, second=arg_elem)
+ if arg_filter:
+ url_filter = "{url_filter}{arg_filter}".format(url_filter=url_filter + '+' if url_filter else '',
+ arg_filter=arg_filter)
+ else:
+                # All args should be lists; this is a fallback
+ url_filter = "{url_filter}+{inp_arg}:'{arg_val}'".format(url_filter=url_filter, inp_arg=k, arg_val=arg)
+ raw_res = http_request('GET', '/devices/queries/devices/v1', params={'filter': url_filter})
+ device_ids = raw_res.get('resources')
+ if not device_ids:
+ return None
+ return http_request('GET', '/devices/entities/devices/v1', params={'ids': device_ids})
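+
+# Sketch of the filter construction above, with hypothetical input
+# args {'hostname': 'host1,host2'}: the inner loop builds
+#   "hostname:'host1',hostname:'host2'"
+# and multiple argument groups are joined with '+' into a single FQL filter.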
+
+
+def behavior_to_entry_context(behavior):
+ """
+ Transforms a behavior to entry context representation
+ :param behavior: Behavior dict in the format of crowdstrike's API response
+ :return: Behavior in entry context representation
+ """
+    raw_entry = get_transformed_dict(behavior, DETECTIONS_BEHAVIORS_KEY_MAP)
+ raw_entry.update(extract_transformed_dict_with_split(behavior, DETECTIONS_BEHAVIORS_SPLIT_KEY_MAP))
+ return raw_entry
+
+
+def resolve_detection(ids, status, assigned_to_uuid, show_in_ui):
+ """
+ Sends a resolve detection request
+ :param ids: Single or multiple ids in an array string format
+ :param status: New status of the detection
+ :param assigned_to_uuid: uuid to assign the detection to
+ :param show_in_ui: Boolean flag in string format (true/false)
+ :return: Resolve detection response json
+ """
+ payload = {
+ 'ids': ids
+ }
+ if status:
+ payload['status'] = status
+ if assigned_to_uuid:
+ payload['assigned_to_uuid'] = assigned_to_uuid
+ if show_in_ui:
+ payload['show_in_ui'] = show_in_ui
+    # Replace the quoted values so show_in_ui is sent as a JSON boolean rather than a string
+ data = json.dumps(payload).replace('"show_in_ui": "false"', '"show_in_ui": false').replace('"show_in_ui": "true"',
+ '"show_in_ui": true')
+ return http_request('PATCH', '/detects/entities/detects/v2', data=data)
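+
+# Sketch: for payload {'ids': ['abc'], 'show_in_ui': 'false'}, the replace calls
+# above turn '"show_in_ui": "false"' into '"show_in_ui": false', so the API
+# receives a JSON boolean rather than a quoted string.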
+
+
+def contain_host(ids):
+ """
+ Contains host(s) with matching ids
+ :param ids: IDs of host to contain
+ :return: Contain host response json
+ """
+ payload = {
+ 'ids': ids
+ }
+ data = json.dumps(payload)
+ params = {
+ 'action_name': 'contain'
+ }
+ return http_request('POST', '/devices/entities/devices-actions/v2', data=data, params=params)
+
+
+def lift_host_containment(ids):
+ """
+    Lifts containment off host(s) with matching IDs
+ :param ids: IDs of host to lift off containment from
+ :return: Lift off containment response json
+ """
+ payload = {
+ 'ids': ids
+ }
+ data = json.dumps(payload)
+ params = {
+ 'action_name': 'lift_containment'
+ }
+ return http_request('POST', '/devices/entities/devices-actions/v2', data=data, params=params)
+
+
+''' COMMANDS FUNCTIONS '''
+
+
+def fetch_incidents():
+ """
+ Fetches incident using the detections API
+ :return: Fetched detections in incident format
+ """
+ last_run = demisto.getLastRun()
+ # Get the last fetch time, if exists
+ last_fetch = last_run.get('first_behavior_time')
+
+ # Handle first time fetch, fetch incidents retroactively
+ if last_fetch is None:
+ last_fetch, _ = parse_date_range(FETCH_TIME, date_format='%Y-%m-%dT%H:%M:%SZ')
+ last_fetch_timestamp = date_to_timestamp(last_fetch, date_format='%Y-%m-%dT%H:%M:%SZ')
+ fetch_query = demisto.params().get('fetch_query')
+ if fetch_query:
+ fetch_query = "first_behavior:>'{time}'+{query}".format(time=last_fetch, query=fetch_query)
+ detections_ids = demisto.get(get_detections(filter_arg=fetch_query), 'resources')
+ else:
+ detections_ids = demisto.get(get_detections(last_behavior_time=last_fetch), 'resources')
+ incidents = []
+ if detections_ids:
+        # Limit the results to INCIDENTS_PER_FETCH
+ detections_ids = detections_ids[0:INCIDENTS_PER_FETCH]
+ raw_res = get_detections_entities(detections_ids)
+ if "resources" in raw_res:
+ for detection in demisto.get(raw_res, "resources"):
+ incident = detection_to_incident(detection)
+ incident_date = incident['occurred']
+ incident_date_timestamp = date_to_timestamp(incident_date, date_format='%Y-%m-%dT%H:%M:%SZ')
+ # Update last run and add incident if the incident is newer than last fetch
+ if incident_date_timestamp > last_fetch_timestamp:
+ last_fetch = incident_date
+ incidents.append(incident)
+ demisto.setLastRun({'first_behavior_time': last_fetch})
+ return incidents
+
+
+def create_ioc_command():
+ """
+ UNTESTED - Creates an IoC
+ :return: EntryObject of create IoC command
+ """
+ raw_res = create_ioc()
+ return create_entry_object(contents=raw_res, hr="Custom IoC was created successfully.")
+
+
+def search_iocs_command():
+ """
+    UNTESTED IN OAUTH 2 - Searches for an IoC
+ :return: EntryObject of search IoC command
+ """
+ raw_res = search_iocs()
+ if not raw_res:
+ return create_entry_object(hr='Could not find any Indicators of Compromise.')
+ iocs = raw_res.get('resources')
+    ec = [get_transformed_dict(ioc, SEARCH_IOC_KEY_MAP) for ioc in iocs]
+ enrich_ioc_dict_with_ids(ec)
+ return create_entry_object(contents=raw_res, ec={'CrowdStrike.IoC(val.ID === obj.ID)': ec},
+ hr=tableToMarkdown('Indicators of Compromise', ec))
+
+
+def delete_iocs_command():
+ """
+ UNTESTED - Deletes an IoC
+ :return: EntryObject of delete IoC command
+ """
+ raw_res = delete_ioc()
+ ids = demisto.args().get('ids')
+ return create_entry_object(contents=raw_res, hr="Custom IoC {0} successfully deleted.".format(ids))
+
+
+def update_iocs_command():
+ """
+ UNTESTED - Updates an IoC
+ :return: EntryObject of update IoC command
+ """
+ raw_res = update_iocs()
+ ids = demisto.args().get('ids')
+ return create_entry_object(contents=raw_res, hr="Custom IoC {0} successfully updated.".format(ids))
+
+
+def search_device_command():
+ """
+ Searches for a device
+ :return: EntryObject of search device command
+ """
+ raw_res = search_device()
+ if not raw_res:
+ return create_entry_object(hr='Could not find any devices.')
+ devices = raw_res.get('resources')
+    entries = [get_transformed_dict(device, SEARCH_DEVICE_KEY_MAP) for device in devices]
+ headers = ['ID', 'Hostname', 'OS', 'MacAddress', 'LocalIP', 'ExternalIP', 'FirstSeen', 'LastSeen']
+ hr = tableToMarkdown('Devices', entries, headers=headers, headerTransform=pascalToSpace)
+ ec = {'CrowdStrike.Device(val.ID === obj.ID)': entries}
+ return create_entry_object(contents=raw_res, ec=ec, hr=hr)
+
+
+def get_behavior_command():
+ """
+ Gets a behavior by ID
+ :return: EntryObject of get behavior command
+ """
+ behavior_id = demisto.args().get('behavior_id')
+ detections_ids = demisto.get(get_detections(behavior_id=behavior_id), 'resources')
+ raw_res = get_detections_entities(detections_ids)
+ entries = []
+ if "resources" in raw_res:
+ for resource in demisto.get(raw_res, "resources"):
+ for behavior in demisto.get(resource, 'behaviors'):
+ entries.append(behavior_to_entry_context(behavior))
+ hr = tableToMarkdown('Behavior ID: {}'.format(behavior_id), entries, headerTransform=pascalToSpace)
+    # No DT (e.g. val.ID === obj.ID) is used, since behaviors vary by more than their ID
+ ec = {'CrowdStrike.Behavior': entries}
+ return create_entry_object(contents=raw_res, ec=ec, hr=hr)
+
+
+def search_detections_command():
+ """
+ Searches for a detection
+ :return: EntryObject of search detections command
+ """
+ d_args = demisto.args()
+ detections_ids = argToList(d_args.get('ids'))
+ if not detections_ids:
+ filter_arg = d_args.get('filter')
+ if not filter_arg:
+ return_error('Command Error: Please provide at least one argument.')
+ detections_ids = get_detections(filter_arg=filter_arg).get('resources')
+ raw_res = get_detections_entities(detections_ids)
+ entries = []
+ headers = ['ID', 'Status', 'System', 'ProcessStartTime', 'CustomerID', 'MaxSeverity']
+ if "resources" in raw_res:
+ for detection in demisto.get(raw_res, "resources"):
+ detection_entry = {}
+ for path, new_key in DETECTIONS_BASE_KEY_MAP.items():
+ detection_entry[new_key] = demisto.get(detection, path)
+ behaviors = []
+ for behavior in demisto.get(detection, 'behaviors'):
+ behaviors.append(behavior_to_entry_context(behavior))
+ detection_entry['Behavior'] = behaviors
+ entries.append(detection_entry)
+ hr = tableToMarkdown('Detections Found:', entries, headers=headers, removeNull=True, headerTransform=pascalToSpace)
+ ec = {'CrowdStrike.Detection(val.ID === obj.ID)': entries}
+ return create_entry_object(contents=raw_res, ec=ec, hr=hr)
+
+
+def resolve_detection_command():
+ """
+ Resolves single or multiple detections
+ :return: EntryObject of resolve detection command
+ """
+ args = demisto.args()
+ ids = argToList(args.get('ids'))
+ status = args.get('status')
+ assigned_to_uuid = args.get('assigned_to_uuid')
+ show_in_ui = args.get('show_in_ui')
+ raw_res = resolve_detection(ids, status, assigned_to_uuid, show_in_ui)
+ args.pop('ids')
+ hr = "Detection {0} updated\n".format(str(ids)[1:-1])
+ hr += 'With the following values:\n'
+ for k, arg in args.items():
+ hr += '\t{name}:{val}\n'.format(name=k, val=arg)
+ return create_entry_object(contents=raw_res, hr=hr)
+
+
+def contain_host_command():
+ """
+ Contains hosts with user arg ids
+ :return: EntryObject of contain host command
+ """
+ ids = argToList(demisto.args().get('ids'))
+ raw_res = contain_host(ids)
+ hr = "Host {} contained".format(str(ids)[1:-1])
+ return create_entry_object(contents=raw_res, hr=hr)
+
+
+def lift_host_containment_command():
+ """
+ Lifts containment off a host
+ :return: EntryObject of lift host containment
+ """
+ ids = argToList(demisto.args().get('ids'))
+ raw_res = lift_host_containment(ids)
+ hr = "Containment has been lift off host {}".format(str(ids)[1:-1])
+ return create_entry_object(contents=raw_res, hr=hr)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('Command being called is {}'.format(demisto.command()))
+
+# should raise error in case of issue
+if demisto.command() == 'fetch-incidents':
+ demisto.incidents(fetch_incidents())
+
+try:
+ if demisto.command() == 'test-module':
+ get_token(new_token=True)
+ demisto.results('ok')
+ elif demisto.command() == 'cs-falcon-search-device':
+ demisto.results(search_device_command())
+ elif demisto.command() == 'cs-falcon-get-behavior':
+ demisto.results(get_behavior_command())
+ elif demisto.command() == 'cs-falcon-search-detection':
+ demisto.results(search_detections_command())
+ elif demisto.command() == 'cs-falcon-resolve-detection':
+ demisto.results(resolve_detection_command())
+ elif demisto.command() == 'cs-falcon-contain-host':
+ demisto.results(contain_host_command())
+ elif demisto.command() == 'cs-falcon-lift-host-containment':
+ demisto.results(lift_host_containment_command())
+ # Log exceptions
+except Exception as e:
+ return_error(str(e))
diff --git a/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml b/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
new file mode 100644
index 000000000000..e0bd2bb0aef4
--- /dev/null
+++ b/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
@@ -0,0 +1,332 @@
+category: Endpoint
+commonfields:
+ id: CrowdstrikeFalcon
+ version: -1
+configuration:
+- defaultvalue: https://api.crowdstrike.com
+ display: Server URL (e.g., https://api.crowdstrike.com)
+ name: url
+ required: true
+ type: 0
+- display: Client ID
+ name: client_id
+ required: true
+ type: 0
+- display: Secret
+ name: secret
+ required: true
+ type: 4
+- defaultvalue: 3 days
+  display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days)
+ name: fetch_time
+ required: false
+ type: 0
+- defaultvalue: '15'
+ display: Max incidents per fetch
+ name: incidents_per_fetch
+ required: false
+ type: 0
+- display: Fetch query
+ name: fetch_query
+ required: false
+ type: 0
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+description: The CrowdStrike Falcon OAuth 2 API (formerly the Falcon Firehose
+  API) enables fetching and resolving detections, searching devices, getting behaviors
+  by ID, containing hosts, and lifting host containment.
+display: CrowdStrike Falcon
+name: CrowdstrikeFalcon
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Returns devices that match the query.
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ - default: false
+      description: A CSV list of device IDs by which to limit the results.
+ isArray: false
+ name: ids
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Returns devices that match the specified status.
+ isArray: false
+ name: status
+ predefined:
+ - Normal
+ - containment_pending
+ - contained
+ - lift_containment_pending
+ required: false
+ secret: false
+ - default: false
+ description: Returns devices that match the specified hostname.
+ isArray: false
+ name: hostname
+ predefined:
+ - ''
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Returns devices that match the specified platform name.
+ isArray: false
+ name: platform_name
+ predefined:
+ - Windows
+ - Mac
+ - Linux
+ required: false
+ secret: false
+ - default: false
+ description: Returns devices that match the specified site name.
+ isArray: false
+ name: site_name
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for a device that matches the query.
+ execution: false
+ name: cs-falcon-search-device
+ outputs:
+ - contextPath: CrowdStrike.Device.ID
+ description: The ID of the device.
+ type: String
+ - contextPath: CrowdStrike.Device.LocalIP
+ description: The local IP address of the device.
+ type: String
+ - contextPath: CrowdStrike.Device.ExternalIP
+ description: The external IP address of the device.
+ type: String
+ - contextPath: CrowdStrike.Device.Hostname
+ description: The hostname of the device.
+ type: String
+ - contextPath: CrowdStrike.Device.OS
+ description: The operating system of the device.
+ type: String
+ - contextPath: CrowdStrike.Device.MacAddress
+ description: The Mac address of the device.
+ type: String
+ - contextPath: CrowdStrike.Device.FirstSeen
+ description: The first seen time of the device.
+ type: String
+ - contextPath: CrowdStrike.Device.LastSeen
+ description: The last seen time of the device.
+ type: String
+ - contextPath: CrowdStrike.Device.PolicyType
+ description: The policy types of the device.
+ type: Unknown
+ - arguments:
+ - default: false
+      description: The ID of the behavior.
+ isArray: false
+ name: behavior_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Searches for and fetches the behavior that matches the query.
+ execution: false
+ name: cs-falcon-get-behavior
+ outputs:
+ - contextPath: CrowdStrike.Behavior.FileName
+ description: The file name in the behavior.
+ type: String
+ - contextPath: CrowdStrike.Behavior.Scenario
+ description: The scenario name in the behavior.
+ type: String
+ - contextPath: CrowdStrike.Behavior.MD5
+ description: The MD5 hash of the IoC in the behavior.
+ type: String
+ - contextPath: CrowdStrike.Behavior.SHA256
+ description: The SHA256 hash of the IoC in the behavior.
+ type: String
+ - contextPath: CrowdStrike.Behavior.IOCType
+ description: Type of the indicator of compromise.
+ type: String
+ - contextPath: CrowdStrike.Behavior.IOCValue
+ description: The value of the IoC.
+ type: String
+ - contextPath: CrowdStrike.Behavior.CommandLine
+ description: The command line executed in the behavior.
+ type: String
+ - contextPath: CrowdStrike.Behavior.UserName
+ description: The user name related to the behavior.
+ type: String
+ - contextPath: CrowdStrike.Behavior.SensorID
+ description: The sensor ID related to the behavior.
+ type: String
+ - contextPath: CrowdStrike.Behavior.ParentProcessID
+ description: The ID of the parent process.
+ type: String
+ - contextPath: CrowdStrike.Behavior.ProcessID
+ description: The process ID of the behavior.
+ type: String
+ - contextPath: CrowdStrike.Behavior.ID
+ description: The ID of the behavior.
+ type: String
+ - arguments:
+ - default: false
+ description: IDs of the detections to search. If provided, will override other arguments.
+ isArray: true
+ name: ids
+ required: false
+ secret: false
+ - default: false
+ description: |-
+ Filter detections using a query in Falcon Query Language (FQL).
+ e.g. filter="device.hostname:'CS-SE-TG-W7-01'"
+
+ The full list of valid filter options is extensive. Review it in the following URL: https://falcon.crowdstrike.com/support/documentation/2/query-api-reference#detectionsearch
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ deprecated: false
+ description: Search for details of specific detections, either using a filter query, or by providing the IDs of the detections.
+ execution: false
+ name: cs-falcon-search-detection
+ outputs:
+ - contextPath: CrowdStrike.Detection.Behavior.FileName
+ description: The file name in the behavior.
+ type: String
+ - contextPath: CrowdStrike.Detection.Behavior.Scenario
+ description: The scenario name in the behavior.
+ type: String
+ - contextPath: CrowdStrike.Detection.Behavior.MD5
+ description: The MD5 hash of the IoC in the behavior.
+ type: String
+ - contextPath: CrowdStrike.Detection.Behavior.SHA256
+ description: The SHA256 hash of the IoC in the behavior.
+ type: String
+ - contextPath: CrowdStrike.Detection.Behavior.IOCType
+ description: The type of the IoC.
+ type: String
+ - contextPath: CrowdStrike.Detection.Behavior.IOCValue
+ description: The value of the IoC.
+ type: String
+ - contextPath: CrowdStrike.Detection.Behavior.CommandLine
+ description: The command line executed in the behavior.
+ type: String
+ - contextPath: CrowdStrike.Detection.Behavior.UserName
+ description: The user name related to the behavior.
+ type: String
+ - contextPath: CrowdStrike.Detection.Behavior.SensorID
+ description: The sensor ID related to the behavior.
+ type: String
+ - contextPath: CrowdStrike.Detection.Behavior.ParentProcessID
+ description: The ID of the parent process.
+ type: String
+ - contextPath: CrowdStrike.Detection.Behavior.ProcessID
+ description: The process ID of the behavior.
+ type: String
+ - contextPath: CrowdStrike.Detection.Behavior.ID
+ description: The ID of the behavior.
+ type: String
+ - contextPath: CrowdStrike.Detection.System
+ description: The system name of the detection.
+ type: String
+ - contextPath: CrowdStrike.Detection.CustomerID
+ description: The ID of the customer (CID).
+ type: String
+ - contextPath: CrowdStrike.Detection.MachineDomain
+ description: The name of the domain of the detection machine.
+ type: String
+ - contextPath: CrowdStrike.Detection.ID
+ description: The detection ID.
+ type: String
+ - contextPath: CrowdStrike.Detection.ProcessStartTime
+ description: The start time of the process that generated the detection.
+ type: Date
+ - arguments:
+ - default: false
+      description: A CSV list of one or more IDs to resolve.
+ isArray: false
+ name: ids
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The status to which you want to transition a detection.
+ isArray: false
+ name: status
+ predefined:
+ - new
+ - in_progress
+ - true_positive
+ - false_positive
+ - ignored
+ required: false
+ secret: false
+ - default: false
+      description: 'A user ID, for example: 1234567891234567891.'
+ isArray: false
+ name: assigned_to_uuid
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: If set to true, will display the detection in the UI.
+ isArray: false
+ name: show_in_ui
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Resolves and updates a detection.
+ execution: false
+ name: cs-falcon-resolve-detection
+ - arguments:
+ - default: false
+ description: The host agent ID (AID) of the host to contain. Get an
+ agent ID from a detection.
+ isArray: true
+ name: ids
+ required: true
+ secret: false
+ deprecated: false
+ description: Contains or lifts containment for a specified host. When contained,
+ a host can only communicate with the CrowdStrike cloud and any IPs specified
+ in your containment policy.
+ execution: false
+ name: cs-falcon-contain-host
+ - arguments:
+ - default: false
+      description: The host agent ID (AID) of the host for which to lift containment.
+        Get an agent ID from a detection.
+ isArray: true
+ name: ids
+ required: true
+ secret: false
+ deprecated: false
+ description: Lifts containment on the host, which returns its network communications to normal.
+ execution: false
+ name: cs-falcon-lift-host-containment
+ dockerimage: demisto/python3:3.7.2.200
+ isfetch: true
+ runonce: false
+ script: ''
+ type: python
+ subtype: python3
+tests:
+ - Test - CrowdStrike Falcon
diff --git a/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_description.md b/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_description.md
new file mode 100644
index 000000000000..671360319b2f
--- /dev/null
+++ b/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_description.md
@@ -0,0 +1 @@
+To get the API client ID and secret, contact CrowdStrike support: support@crowdstrike.com
\ No newline at end of file
diff --git a/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_image.png b/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_image.png
new file mode 100644
index 000000000000..42dde6b74ce9
Binary files /dev/null and b/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_image.png differ
diff --git a/Integrations/Cybereason/CHANGELOG.md b/Integrations/Cybereason/CHANGELOG.md
new file mode 100644
index 000000000000..4e9ffa1698d6
--- /dev/null
+++ b/Integrations/Cybereason/CHANGELOG.md
@@ -0,0 +1,10 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+- Fixed the *Filters* argument in the ***cybereason-query-malops*** command.
+
+
+
+## [19.9.0] - 2019-09-04
+-
\ No newline at end of file
diff --git a/Integrations/Cybereason/Cybereason.py b/Integrations/Cybereason/Cybereason.py
new file mode 100644
index 000000000000..6be196e0406d
--- /dev/null
+++ b/Integrations/Cybereason/Cybereason.py
@@ -0,0 +1,1476 @@
+import demistomock as demisto
+from CommonServerPython import *
+''' IMPORTS '''
+import requests
+import os
+import json
+from datetime import datetime, timedelta
+import time
+import re
+import sys
+
+# Define utf8 as default encoding
+reload(sys)
+sys.setdefaultencoding('utf8') # pylint: disable=maybe-no-member
+
+# Remove proxy settings if not configured in params (pop avoids a KeyError when unset)
+if not demisto.params()['proxy']:
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+SERVER = demisto.params()['server'][:-1] if demisto.params()['server'].endswith('/') else demisto.params()['server']
+USERNAME = demisto.params().get('credentials').get('identifier')
+PASSWORD = demisto.params().get('credentials').get('password')
+USE_SSL = not demisto.params().get('unsecure', False)
+CERTIFICATE = demisto.params().get('credentials').get('credentials').get('sshkey')
+FETCH_TIME_DEFAULT = '3 days'
+FETCH_TIME = demisto.params().get('fetch_time', FETCH_TIME_DEFAULT)
+FETCH_TIME = FETCH_TIME if FETCH_TIME and FETCH_TIME.strip() else FETCH_TIME_DEFAULT
+FETCH_BY = demisto.params().get('fetch_by', 'MALOP CREATION TIME')
+
+STATUS_MAP = {
+ 'To Review': 'TODO',
+ 'Remediated': 'CLOSED',
+ 'Unread': 'UNREAD',
+ 'Not Relevant': 'FP',
+ 'Open': 'OPEN'
+}
+# field = the name as received from the Cybereason API, header = the name it is mapped to in the Demisto output,
+# type = how the value received from the Cybereason API should be parsed
+PROCESS_INFO = [
+ {'field': 'elementDisplayName', 'header': 'Name', 'type': 'filterData'},
+ {'field': 'imageFile.maliciousClassificationType', 'header': 'Malicious', 'type': 'simple'},
+ {'field': 'creationTime', 'header': 'Creation Time', 'type': 'time'},
+ {'field': 'endTime', 'header': 'End Time', 'type': 'time'},
+ {'field': 'commandLine', 'header': 'Command Line', 'type': 'simple'},
+ {'field': 'isImageFileSignedAndVerified', 'header': 'Signed and Verified', 'type': 'simple'},
+ {'field': 'productType', 'header': 'Product Type', 'type': 'simple'},
+ {'field': 'children', 'header': 'Children', 'type': 'simple'},
+ {'field': 'parentProcess', 'header': 'Parent', 'type': 'element'},
+ {'field': 'ownerMachine', 'header': 'Owner Machine', 'type': 'element'},
+ {'field': 'calculatedUser', 'header': 'User', 'type': 'element'},
+ {'field': 'imageFile', 'header': 'Image File', 'type': 'element'},
+ {'field': 'imageFile.sha1String', 'header': 'SHA1', 'type': 'simple'},
+ {'field': 'imageFile.md5String', 'header': 'MD5', 'type': 'simple'},
+ {'field': 'imageFile.companyName', 'header': 'Company Name', 'type': 'simple'},
+ {'field': 'imageFile.productName', 'header': 'Product Name', 'type': 'simple'}
+]
+
+PROCESS_FIELDS = [element['field'] for element in PROCESS_INFO]
+
+PROCESS_HEADERS = [element['header'] for element in PROCESS_INFO]
+
+CONNECTION_INFO = [
+ {'field': 'elementDisplayName', 'header': 'Name', 'type': 'simple'},
+ {'field': 'direction', 'header': 'Direction', 'type': 'simple'},
+ {'field': 'serverAddress', 'header': 'Server Address', 'type': 'simple'},
+ {'field': 'serverPort', 'header': 'Server Port', 'type': 'simple'},
+ {'field': 'portType', 'header': 'Port Type', 'type': 'simple'},
+ {'field': 'aggregatedReceivedBytesCount', 'header': 'Received Bytes', 'type': 'simple'},
+ {'field': 'aggregatedTransmittedBytesCount', 'header': 'Transmitted Bytes', 'type': 'simple'},
+ {'field': 'remoteAddressCountryName', 'header': 'Remote Country', 'type': 'simple'},
+ {'field': 'ownerMachine', 'header': 'Owner Machine', 'type': 'element'},
+ {'field': 'ownerProcess', 'header': 'Owner Process', 'type': 'element'},
+ {'field': 'calculatedCreationTime', 'header': 'Creation Time', 'type': 'time'},
+ {'field': 'endTime', 'header': 'End Time', 'type': 'time'}
+]
+
+CONNECTION_FIELDS = [element['field'] for element in CONNECTION_INFO]
+
+CONNECTION_HEADERS = [element['header'] for element in CONNECTION_INFO]
+
+HEADERS = {
+ 'Content-Type': 'application/json',
+ 'Connection': 'close'
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def build_query(query_fields, path, template_context='SPECIFIC'):
+
+ limit = demisto.args().get('limit')
+ results_limit = int(limit) if limit else 10000
+ group_limit = int(limit) if limit else 100
+
+ query = {
+ 'customFields': query_fields,
+ 'perFeatureLimit': 100,
+ 'perGroupLimit': group_limit,
+ 'queryPath': path,
+ 'queryTimeout': 120000,
+ 'templateContext': template_context,
+ 'totalResultLimit': results_limit
+ }
+
+ return query
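+
+# Sketch (hypothetical values): build_query(['pylumId'], path) with no 'limit'
+# argument yields a query dict with 'totalResultLimit': 10000 and
+# 'perGroupLimit': 100, which the callers below POST to
+# /rest/visualsearch/query/simple.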
+
+
+def http_request(method, url_suffix, data=None, json=None, headers=HEADERS):
+ LOG('running request with url=%s' % (SERVER + url_suffix))
+ try:
+ res = session.request(
+ method,
+ SERVER + url_suffix,
+ headers=headers,
+ data=data,
+ json=json,
+ verify=USE_SSL
+ )
+ if res.status_code not in {200, 204}:
+ raise Exception('Your request failed with the following error: ' + res.content + str(res.status_code))
+ except Exception, e:
+ LOG(e)
+ raise
+ return res
+
+
+def translate_timestamp(timestamp):
+ return datetime.fromtimestamp(int(timestamp) / 1000).isoformat()
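+
+# Sketch: translate_timestamp(1500000000000) returns '2017-07-14T02:40:00' on a
+# UTC machine; the input is in milliseconds and fromtimestamp() uses the
+# server's local timezone.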
+
+
+def update_output(output, simple_values, element_values, info_dict):
+ for i in range(len(info_dict)):
+ info_type = info_dict[i]['type']
+ if info_type == 'simple':
+ field = simple_values.get(info_dict[i]['field'])
+ if field:
+ output[info_dict[i]['header']] = field['values'][0]
+ elif info_type == 'element':
+ field = element_values.get(info_dict[i]['field'])
+ if field:
+ output[info_dict[i]['header']] = field['elementValues'][0]['name']
+ elif info_type == 'time':
+ field = simple_values.get(info_dict[i]['field'])
+ if field:
+ output[info_dict[i]['header']] = translate_timestamp(field['values'][0])
+ return output
+
+
+def get_pylum_id(machine):
+
+ query_fields = ['pylumId']
+ path = [
+ {
+ 'requestedType': 'Machine',
+ 'filters': [
+ {'facetName': 'elementDisplayName', 'values': [machine]}
+ ],
+ 'isResult': True
+ }
+ ]
+ json = build_query(query_fields, path)
+ cmd_url = '/rest/visualsearch/query/simple'
+ response = http_request('POST', cmd_url, json=json).json()
+ data = response['data'].get('resultIdToElementDataMap')
+
+ if not data:
+ return_error('Could not find machine')
+
+ guid = data.keys()[0]
+ simple_values = data[guid]['simpleValues']
+ pylum_id = simple_values['pylumId']['values'][0]
+ return pylum_id
+
+
+def get_machine_guid(machine_name):
+
+ query_fields = ['elementDisplayName']
+ path = [
+ {
+ 'requestedType': 'Machine',
+ 'filters': [
+ {'facetName': 'elementDisplayName', 'values': [machine_name]}
+ ],
+ 'isResult': True
+ }
+ ]
+ json = build_query(query_fields, path)
+ cmd_url = '/rest/visualsearch/query/simple'
+ response = http_request('POST', cmd_url, json=json).json()
+ data = response['data'].get('resultIdToElementDataMap')
+ if not data:
+ return_error('Could not find machine')
+ machine_guid = data.keys()[0]
+ return machine_guid
+
+
+''' FUNCTIONS '''
+
+
+def is_probe_connected_command(is_remediation_command=False):
+
+ machine = demisto.args().get('machine')
+ is_connected = False
+
+ response = is_probe_connected(machine)
+
+ elements = response['data']['resultIdToElementDataMap']
+
+ for key, value in elements.iteritems():
+ machine_name = value['simpleValues']['elementDisplayName']['values'][0]
+ if machine_name.upper() == machine.upper():
+ is_connected = True
+ break
+
+    if is_remediation_command:
+ return is_connected
+
+ ec = {
+ 'Cybereason.Machine(val.Name && val.Name === obj.Name)': {
+ 'isConnected': is_connected,
+ 'Name': machine
+ }
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': is_connected,
+ 'EntryContext': ec
+ })
+
+
+def is_probe_connected(machine):
+ query_fields = ['elementDisplayName']
+ path = [
+ {
+ 'requestedType': 'Machine',
+ 'filters': [
+ {'facetName': 'elementDisplayName', 'values': [machine]},
+ {'facetName': 'isActiveProbeConnected', 'values': [True]}
+ ],
+ 'isResult': True
+ }
+ ]
+ json = build_query(query_fields, path)
+ cmd_url = '/rest/visualsearch/query/simple'
+ response = http_request('POST', cmd_url, json=json).json()
+ return response
+
+
+def query_processes_command():
+
+ machine = demisto.args().get('machine')
+ process_name = demisto.args().get('processName')
+ only_suspicious = demisto.args().get('onlySuspicious')
+ has_incoming_connection = demisto.args().get('hasIncomingConnection')
+ has_outgoing_connection = demisto.args().get('hasOutgoingConnection')
+ has_external_connection = demisto.args().get('hasExternalConnection')
+ unsigned_unknown_reputation = demisto.args().get('unsignedUnknownReputation')
+ from_temporary_folder = demisto.args().get('fromTemporaryFolder')
+ privileges_escalation = demisto.args().get('privilegesEscalation')
+    malicious_psexec = demisto.args().get('maliciousPsExec')
+
+ response = query_processes(machine, process_name, only_suspicious, has_incoming_connection, has_outgoing_connection,
+ has_external_connection, unsigned_unknown_reputation, from_temporary_folder,
+                               privileges_escalation, malicious_psexec)
+ elements = response['data']['resultIdToElementDataMap']
+ outputs = []
+ for element in elements:
+
+ simple_values = elements[element]['simpleValues']
+ element_values = elements[element]['elementValues']
+
+ output = {}
+ for i in range(len(PROCESS_INFO)):
+ if PROCESS_INFO[i]['type'] == 'filterData':
+ output[PROCESS_INFO[i]['header']] = elements[element]['filterData']['groupByValue']
+
+ output = update_output(output, simple_values, element_values, PROCESS_INFO)
+ outputs.append(output)
+
+ context = []
+ for output in outputs:
+ # Remove whitespaces from dictionary keys
+ context.append({k.translate(None, ' '): v for k, v in output.iteritems()})
+ ec = {
+ 'Process': context
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Cybereason Processes', outputs, PROCESS_HEADERS),
+ 'EntryContext': ec
+ })
+
+
+def query_processes(machine, process_name, only_suspicious=None, has_incoming_connection=None,
+ has_outgoing_connection=None, has_external_connection=None, unsigned_unknown_reputation=None,
+                    from_temporary_folder=None, privileges_escalation=None, malicious_psexec=None):
+
+ machine_filters = []
+ process_filters = []
+
+ if machine:
+ machine_filters.append({'facetName': 'elementDisplayName', 'values': [machine]})
+
+ if process_name:
+ process_filters.append({'facetName': 'elementDisplayName', 'values': [process_name]})
+
+ if only_suspicious and only_suspicious == 'true':
+ process_filters.append({'facetName': 'hasSuspicions', 'values': [True]})
+
+ if has_incoming_connection == 'true':
+ process_filters.append({'facetName': 'hasIncomingConnection', 'values': [True]})
+
+ if has_outgoing_connection == 'true':
+ process_filters.append({'facetName': 'hasOutgoingConnection', 'values': [True]})
+
+ if has_external_connection == 'true':
+ process_filters.append({'facetName': 'hasExternalConnection', 'values': [True]})
+
+ if unsigned_unknown_reputation == 'true':
+ process_filters.append({'facetName': 'unknownUnsignedEvidence', 'values': [True]})
+
+ if from_temporary_folder == 'true':
+ process_filters.append({'facetName': 'runningFromTempEvidence', 'values': [True]})
+
+ if privileges_escalation == 'true':
+ process_filters.append({'facetName': 'privilegeEscalationSuspicion', 'values': [True]})
+
+    if malicious_psexec == 'true':
+ process_filters.append({'facetName': 'executedByPsexecSuspicion', 'values': [True]})
+
+ path = [
+ {
+ 'requestedType': 'Machine',
+ 'filters': machine_filters,
+ 'connectionFeature': {'elementInstanceType': 'Machine', 'featureName': 'processes'}
+ },
+ {
+ 'requestedType': 'Process',
+ 'filters': process_filters,
+ 'isResult': True
+ }
+ ]
+
+ json = build_query(PROCESS_FIELDS, path)
+ cmd_url = '/rest/visualsearch/query/simple'
+ response = http_request('POST', cmd_url, json=json).json()
+ return response
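+
+# Sketch with hypothetical arguments: query_processes('srv01', None,
+# only_suspicious='true') builds a Machine filter on elementDisplayName plus a
+# Process filter {'facetName': 'hasSuspicions', 'values': [True]}, linked via
+# the 'processes' connection feature.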
+
+
+def query_connections_command():
+
+ machine = demisto.args().get('machine')
+ ip = demisto.args().get('ip')
+ if ip and machine:
+ demisto.results('Too many arguments given.')
+ return
+ elif not ip and not machine:
+ demisto.results('Not enough arguments given.')
+ return
+
+ response = query_connections(machine, ip)
+ elements = response['data']['resultIdToElementDataMap']
+ outputs = []
+
+ for element in elements:
+
+ simple_values = elements[element]['simpleValues']
+ element_values = elements[element]['elementValues']
+
+ output = {} # type: Dict[Any,Any]
+ output = update_output(output, simple_values, element_values, CONNECTION_INFO)
+ outputs.append(output)
+
+ context = []
+ for output in outputs:
+ # Remove whitespaces from dictionary keys
+ context.append({k.translate(None, ' '): v for k, v in output.iteritems()})
+ ec = {
+ 'Connection': context
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Cybereason Connections', outputs, CONNECTION_HEADERS),
+ 'EntryContext': ec
+ })
+
+
+def query_connections(machine, ip):
+
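+    # A machine search pivots from Connection to its ownerMachine feature and filters
+    # on the machine name; an IP search filters the Connection display name directly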
+ if machine:
+ path = [
+ {
+ 'requestedType': 'Connection',
+ 'filters': [],
+ 'connectionFeature':
+ {
+ 'elementInstanceType': 'Connection',
+ 'featureName': 'ownerMachine'
+ },
+ 'isResult': True
+ },
+ {
+ 'requestedType': 'Machine',
+ 'filters':
+ [{
+ 'facetName': 'elementDisplayName',
+ 'values': [machine],
+ 'filterType': 'Equals'
+ }]
+ }
+ ]
+ elif ip:
+ path = [
+ {
+ 'requestedType': 'Connection',
+ 'filters':
+ [{
+ 'facetName': 'elementDisplayName',
+ 'values': [ip]
+ }],
+ 'isResult': True
+ }
+ ]
+
+ json = build_query(CONNECTION_FIELDS, path)
+ cmd_url = '/rest/visualsearch/query/simple'
+ response = http_request('POST', cmd_url, json=json).json()
+ return response
+
+
+def query_malops_command():
+
+ total_result_limit = demisto.args().get('totalResultLimit')
+ per_group_limit = demisto.args().get('perGroupLimit')
+ template_context = demisto.args().get('templateContext')
+ filters = demisto.args().get('filters', [])
+ within_last_days = demisto.args().get('withinLastDays')
+ guid_list = argToList(demisto.args().get('malopGuid'))
+
+ if within_last_days:
+ current_timestamp = time.time()
+ current_datetime = datetime.fromtimestamp(current_timestamp)
+ within_last_days_datetime = current_datetime - timedelta(days=int(within_last_days))
+        within_last_days_timestamp = time.mktime(within_last_days_datetime.timetuple()) + \
+            within_last_days_datetime.microsecond / 1E6  # Convert the datetime back to an epoch timestamp
+        within_last_days_timestamp = within_last_days_timestamp * 1000  # Cybereason expects milliseconds
+ filters.append({
+ 'facetName': 'malopLastUpdateTime',
+ 'values': [within_last_days_timestamp],
+ 'filterType': 'GreaterThan'
+ })
+
+ response = query_malops(total_result_limit, per_group_limit, template_context, filters, guid_list=guid_list)
+ data = response['data']
+ malops_map = data.get('resultIdToElementDataMap')
+ if not data or not malops_map:
+ demisto.results('No malops found')
+ return
+
+ outputs = []
+ for malop_id in malops_map:
+ malop = malops_map[malop_id]
+ management_status = malop['simpleValues']['managementStatus']['values'][0]
+
+ if management_status and management_status.lower() == 'closed':
+ continue
+
+ creation_time = translate_timestamp(malop['simpleValues']['creationTime']['values'][0])
+ malop_last_update_time = translate_timestamp(malop['simpleValues']['malopLastUpdateTime']['values'][0])
+ decision_failure = malop['simpleValues']['decisionFeature']['values'][0].replace('Process.', '')
+
+ suspects_string = ''
+ raw_suspects = malop['elementValues'].get('suspects')
+ if raw_suspects:
+ suspects = raw_suspects['elementValues'][0]
+ suspects_string = '{}: {}'.format(suspects['elementType'], suspects['name'])
+
+ affected_machines = []
+ for machine in malop['elementValues']['affectedMachines']['elementValues']:
+ machine_name = machine.get('name', '')
+ affected_machines.append(machine_name)
+
+ involved_hashes = [] # type: List[str]
+ if 'rootCauseElementHashes' in malop['simpleValues']:
+ if malop['simpleValues']['rootCauseElementHashes']['totalValues'] != 0:
+ involved_hashes.extend(malop['simpleValues']['rootCauseElementHashes']['values'])
+
+ malop_output = {
+ 'GUID': malop_id,
+ 'Link': SERVER + '/#/malop/' + malop_id,
+ 'CreationTime': creation_time,
+ 'DecisionFailure': re.sub(r'\([^)]*\)', '', decision_failure),
+ 'Suspects': suspects_string,
+ 'LastUpdateTime': malop_last_update_time,
+ 'Status': management_status,
+ 'AffectedMachine': affected_machines,
+ 'InvolvedHash': involved_hashes
+ }
+ outputs.append(malop_output)
+ ec = {
+ 'Cybereason.Malops(val.GUID && val.GUID === obj.GUID)': outputs
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': data,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Cybereason Malops', outputs, ['GUID', 'Link', 'CreationTime', 'Status',
+ 'LastUpdateTime', 'DecisionFailure', 'Suspects',
+ 'AffectedMachine', 'InvolvedHash']),
+ 'EntryContext': ec
+ })
+
+
+def query_malops(total_result_limit=None, per_group_limit=None, template_context=None, filters=None, guid_list=None):
+
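+    # The unified crimes endpoint returns Malops; guidList and filters narrow the
+    # MalopProcess elements, and 'result': True marks them as the returned set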
+ body = {
+ 'totalResultLimit': int(total_result_limit) if total_result_limit else 10000,
+ 'perGroupLimit': int(per_group_limit) if per_group_limit else 10000,
+ 'perFeatureLimit': 100,
+ 'templateContext': template_context or 'MALOP',
+ 'queryPath': [
+ {
+ 'requestedType': 'MalopProcess',
+                'guidList': guid_list or [],
+ 'result': True,
+ 'filters': filters or None
+ }
+ ]
+ }
+ cmd_url = '/rest/crimes/unified'
+ response = http_request('POST', cmd_url, json=body)
+ try:
+ return response.json()
+ except Exception:
+ raise Exception('Failed to parse query malop response as JSON: {}'.format(response.text))
+
+
+def isolate_machine_command():
+
+ machine = demisto.args().get('machine')
+ response, pylum_id = isolate_machine(machine)
+ result = response.get(pylum_id)
+ if result == 'Succeeded':
+ ec = {
+ 'Cybereason(val.Machine && val.Machine === obj.Machine)': {
+ 'Machine': machine,
+ 'IsIsolated': True
+ },
+ 'Endpoint(val.Hostname && val.Hostname === obj.Hostname)': {
+ 'Hostname': machine
+ }
+ }
+ demisto.results({
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'Machine was isolated successfully.',
+ 'EntryContext': ec
+ })
+ else:
+ return_error('Failed to isolate machine.')
+
+
+def isolate_machine(machine):
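+    # Cybereason addresses sensor commands by Pylum ID (the sensor identifier),
+    # so the machine name is first resolved to its Pylum ID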
+
+ pylum_id = get_pylum_id(machine)
+
+ cmd_url = '/rest/monitor/global/commands/isolate'
+    json = {
+        'pylumIds': [pylum_id]
+    }
+ response = http_request('POST', cmd_url, json=json).json()
+ return response, pylum_id
+
+
+def unisolate_machine_command():
+
+ machine = demisto.args().get('machine')
+ response, pylum_id = unisolate_machine(machine)
+ result = response.get(pylum_id)
+ if result == 'Succeeded':
+ ec = {
+ 'Cybereason(val.Machine && val.Machine === obj.Machine)': {
+ 'Machine': machine,
+ 'IsIsolated': False
+ },
+ 'Endpoint(val.Hostname && val.Hostname === obj.Hostname)': {
+ 'Hostname': machine
+ }
+ }
+ demisto.results({
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'Machine was un-isolated successfully.',
+ 'EntryContext': ec
+ })
+ else:
+ return_error('Failed to un-isolate machine.')
+
+
+def unisolate_machine(machine):
+
+ pylum_id = get_pylum_id(machine)
+ cmd_url = '/rest/monitor/global/commands/un-isolate'
+    json = {
+        'pylumIds': [pylum_id]
+    }
+ response = http_request('POST', cmd_url, json=json).json()
+ return response, pylum_id
+
+
+def malop_processes_command():
+
+ malop_guids = demisto.args().get('malopGuids')
+ machine_name = demisto.args().get('machineName')
+
+ if isinstance(malop_guids, unicode):
+ malop_guids = malop_guids.split(',')
+ elif not isinstance(malop_guids, list):
+        return_error('malopGuids must be a list of strings')
+
+ machine_name_list = [machine.lower() for machine in argToList(machine_name)]
+
+ response = malop_processes(malop_guids)
+ elements = response['data']['resultIdToElementDataMap']
+ outputs = []
+
+ for element in elements:
+
+ simple_values = elements[element]['simpleValues']
+ element_values = elements[element]['elementValues']
+
+ if machine_name_list:
+ owner_machine = element_values.get('ownerMachine', {})
+ machine_list = owner_machine.get('elementValues', [])
+ wanted_machine = False
+ for machine in machine_list:
+ current_machine_name = machine.get('name', '').lower()
+ if current_machine_name in machine_name_list:
+ wanted_machine = True
+ break
+
+ if not wanted_machine:
+ continue
+
+ output = {}
+ for i in range(len(PROCESS_INFO)):
+ if PROCESS_INFO[i]['type'] == 'filterData':
+ output[PROCESS_INFO[i]['header']] = elements[element]['filterData']['groupByValue']
+
+ output = update_output(output, simple_values, element_values, PROCESS_INFO)
+ outputs.append(output)
+
+ context = []
+ for output in outputs:
+        # Remove spaces from the dictionary keys so they form valid context paths
+ context.append({k.translate(None, ' '): v for k, v in output.iteritems()})
+ ec = {
+ 'Process': context
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Cybereason Malop Processes', outputs, PROCESS_HEADERS, removeNull=True),
+ 'EntryContext': ec
+ })
+
+
+def malop_processes(malop_guids):
+
+ json = {
+ 'queryPath': [
+ {
+ 'requestedType': 'MalopProcess',
+ 'filters': [],
+ 'guidList': malop_guids,
+ 'connectionFeature': {
+ 'elementInstanceType': 'MalopProcess',
+ 'featureName': 'suspects'
+ }
+ },
+ {
+ 'requestedType': 'Process',
+ 'filters': [],
+ 'isResult': True
+ }
+ ],
+ 'totalResultLimit': 1000,
+ 'perGroupLimit': 1200,
+ 'perFeatureLimit': 1200,
+ 'templateContext': 'MALOP',
+ 'queryTimeout': None
+ }
+ cmd_url = '/rest/visualsearch/query/simple'
+ response = http_request('POST', cmd_url, json=json).json()
+ return response
+
+
+def add_comment_command():
+ comment = demisto.args().get('comment')
+ malop_guid = demisto.args().get('malopGuid')
+ try:
+ add_comment(malop_guid, comment.encode('utf-8'))
+ demisto.results('Comment added successfully')
+    except Exception as e:
+        return_error('Failed to add new comment. Original error: ' + e.message)
+
+
+def add_comment(malop_guid, comment):
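+    # The comment is sent as the raw request body, which is why callers pass utf-8 encoded bytes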
+ cmd_url = '/rest/crimes/comment/' + malop_guid
+ http_request('POST', cmd_url, data=comment)
+
+
+def update_malop_status_command():
+
+ status = demisto.args().get('status')
+ malop_guid = demisto.args().get('malopGuid')
+
+ if status not in STATUS_MAP:
+        return_error('Invalid status. Given status must be one of the following: To Review, Unread, Remediated or Not Relevant')
+
+ update_malop_status(malop_guid, status)
+
+ ec = {
+        'Cybereason.Malops(val.GUID && val.GUID == "{}")'.format(malop_guid): {
+ 'GUID': malop_guid,
+ 'Status': status
+ }
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': 'Successfully updated malop {0} to status {1}'.format(malop_guid, status),
+ 'ContentsFormat': formats['text'],
+ 'EntryContext': ec
+ })
+
+
+def update_malop_status(malop_guid, status):
+
+ api_status = STATUS_MAP[status]
+
+    json = {malop_guid: api_status}
+
+ cmd_url = '/rest/crimes/status'
+ response = http_request('POST', cmd_url, json=json).json()
+ if response['status'] != 'SUCCESS':
+ return_error('Failed to update malop {0} status to {1}. Message: {2}'.format(malop_guid, status,
+ response['message']))
+
+
+def prevent_file_command():
+
+ file_hash = demisto.args()['md5']
+ response = prevent_file(file_hash)
+ if response['outcome'] == 'success':
+ ec = {
+ 'Process(val.MD5 && val.MD5 === obj.MD5)': {
+ 'MD5': file_hash,
+ 'Prevent': True
+ }
+ }
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'File was prevented successfully',
+ 'EntryContext': ec
+ }
+ demisto.results(entry)
+ else:
+        return_error('Failed to prevent file')
+
+
+def prevent_file(file_hash):
+
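+    # The classification update endpoint takes a list of hash entries; setting
+    # 'prevent': True blacklists the hash so sensors block the file from executing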
+ json = [{
+ 'keys': [str(file_hash)],
+ 'maliciousType': 'blacklist',
+ 'remove': False,
+ 'prevent': True
+ }]
+ cmd_url = '/rest/classification/update'
+ response = http_request('POST', cmd_url, json=json).json()
+ return response
+
+
+def unprevent_file_command():
+ file_hash = demisto.args()['md5']
+ response = unprevent_file(file_hash)
+ if response['outcome'] == 'success':
+ ec = {
+ 'Process(val.MD5 && val.MD5 === obj.MD5)': {
+ 'MD5': file_hash,
+ 'Prevent': False
+ }
+ }
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'File was unprevented successfully',
+ 'EntryContext': ec
+ }
+ demisto.results(entry)
+ else:
+ return_error('Failed to unprevent file')
+
+
+def unprevent_file(file_hash):
+
+ json = [{
+ 'keys': [str(file_hash)],
+ 'remove': True,
+ 'prevent': False
+ }]
+ cmd_url = '/rest/classification/update'
+ response = http_request('POST', cmd_url, json=json).json()
+ return response
+
+
+def kill_process_command():
+
+ machine_name = demisto.args()['machine']
+ file_name = demisto.args()['file']
+ malop_guid = demisto.args()['malop']
+
+    is_machine_connected = is_probe_connected_command(is_remediation_commmand=True)
+    if is_machine_connected is False:
+ return_error('Machine must be connected to Cybereason in order to perform this action.')
+
+ machine_guid = get_machine_guid(machine_name)
+    process_response = query_processes(machine_name, file_name)
+    process_data = process_response['data'].get('resultIdToElementDataMap')
+ if not process_data:
+ return_error('Could not find process')
+ processes = process_data.keys()
+ for process_guid in processes:
+ response = kill_process(malop_guid, machine_guid, process_guid)
+ status_log = response['statusLog'][0]
+ status = status_log['status']
+        demisto.results('Request to kill process {0} was sent successfully and is now in status {1}'.format(process_guid,
+                                                                                                             status))
+
+
+def kill_process(malop_guid, machine_guid, process_guid):
+
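+    # Remediation actions are grouped per machine GUID; each entry targets an
+    # element on that machine (here a process GUID) with an action type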
+ json = {
+ 'malopId': malop_guid,
+ 'actionsByMachine': {
+ machine_guid: [{
+ 'targetId': process_guid,
+ 'actionType': 'KILL_PROCESS'
+ }]
+ }
+ }
+
+ cmd_url = '/rest/remediate'
+ response = http_request('POST', cmd_url, json=json).json()
+ return response
+
+
+def quarantine_file_command():
+
+ machine_name = demisto.args()['machine']
+ file_name = demisto.args()['file']
+ malop_guid = demisto.args()['malop']
+
+    is_machine_connected = is_probe_connected_command(is_remediation_commmand=True)
+    if is_machine_connected is False:
+ return_error('Machine must be connected to Cybereason in order to perform this action.')
+
+ machine_guid = get_machine_guid(machine_name)
+    process_response = query_processes(machine_name, file_name)
+    process_data = process_response['data'].get('resultIdToElementDataMap')
+ if not process_data:
+ return_error('Could not find process')
+ processes = process_data.keys()
+ for process_guid in processes:
+        response = quarantine_file(malop_guid, machine_guid, process_guid)
+ status_log = response['statusLog'][0]
+ status = status_log['status']
+ demisto.results(status)
+
+
+def quarantine_file(malop_guid, machine_guid, process_guid):
+
+ json = {
+ 'malopId': malop_guid,
+ 'actionsByMachine': {
+ machine_guid: [{
+ 'targetId': process_guid,
+ 'actionType': 'QUARANTINE_FILE'
+ }]
+ }
+ }
+
+ cmd_url = '/rest/remediate'
+ response = http_request('POST', cmd_url, json=json).json()
+ return response
+
+
+def delete_registry_key_command():
+
+ machine_name = demisto.args()['machine']
+ file_name = demisto.args()['file']
+ malop_guid = demisto.args()['malop']
+
+ machine_guid = get_machine_guid(machine_name)
+    process_response = query_processes(machine_name, file_name)
+    process_data = process_response['data'].get('resultIdToElementDataMap')
+ if not process_data:
+ return_error('Could not find process')
+ processes = process_data.keys()
+ for process_guid in processes:
+ response = delete_registry_key(malop_guid, machine_guid, process_guid)
+ status_log = response['statusLog'][0]
+ status = status_log['status']
+ demisto.results(status)
+
+
+def delete_registry_key(malop_guid, machine_guid, process_guid):
+
+ json = {
+ 'malopId': malop_guid,
+ 'actionsByMachine': {
+ machine_guid: [{
+ 'targetId': process_guid,
+ 'actionType': 'DELETE_REGISTRY_KEY'
+ }]
+ }
+ }
+
+ cmd_url = '/rest/remediate'
+ response = http_request('POST', cmd_url, json=json).json()
+ return response
+
+
+def query_file_command():
+
+ file_hash = demisto.args().get('file_hash')
+
+ filters = []
+
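+    # get_hash_type (from CommonServerPython) infers the hash type from the hash length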
+ hash_type = get_hash_type(file_hash)
+ if hash_type == 'sha1':
+ filters.append({
+ 'facetName': 'sha1String',
+ 'values': [file_hash],
+ 'filterType': 'ContainsIgnoreCase'
+ })
+ elif hash_type == 'md5':
+ filters.append({
+ 'facetName': 'md5String',
+ 'values': [file_hash],
+ 'filterType': 'ContainsIgnoreCase'
+ })
+ else:
+ return_error('Hash type is not supported.')
+
+ data = query_file(filters)
+
+ if data:
+ cybereason_outputs = []
+ file_outputs = []
+ endpoint_outputs = []
+ files = data.get('resultIdToElementDataMap')
+ for file in files.keys():
+
+ raw_machine_details = get_file_machine_details(file)['data']['resultIdToElementDataMap']
+ machine_details = raw_machine_details[raw_machine_details.keys()[0]]
+ simple_values = files[file]['simpleValues']
+
+ file_name = simple_values['elementDisplayName']['values'][0]
+ md5 = simple_values['md5String']['values'][0]
+ sha1 = simple_values['sha1String']['values'][0]
+ path = simple_values['correctedPath']['values'][0]
+ machine = files[file].get('elementValues', {}).get('ownerMachine', {}).get('elementValues')[0]['name']
+
+ machine_element_values = machine_details['elementValues']
+ machine_simple_values = machine_details['simpleValues']
+ os_version = machine_simple_values['ownerMachine.osVersionType']['values'][0]
+
+ raw_suspicions = machine_details['suspicions']
+ suspicions = {}
+ if raw_suspicions:
+ for key in raw_suspicions.keys():
+ suspicions[key] = timestamp_to_datestring(raw_suspicions[key])
+
+ evidences = []
+ for key in machine_element_values:
+ if 'evidence' in key.lower():
+ evidences.append(key)
+ for key in machine_simple_values:
+ if 'evidence' in key.lower():
+ evidences.append(key)
+
+ company_name = None
+ if 'companyName' in simple_values:
+ company_name = simple_values['companyName']['values'][0]
+
+ cybereason_outputs.append({
+ 'Name': file_name,
+ 'CreationTime': timestamp_to_datestring(simple_values['createdTime']['values'][0]),
+ 'ModifiedTime': timestamp_to_datestring(simple_values['modifiedTime']['values'][0]),
+ 'Malicious': files[file]['isMalicious'],
+ 'MD5': md5,
+ 'SHA1': sha1,
+ 'Path': path,
+ 'Machine': machine,
+ 'SuspicionsCount': machine_details['suspicionCount'],
+ 'IsConnected': (machine_simple_values['ownerMachine.isActiveProbeConnected']['values'][0] == 'true'),
+ 'OSVersion': os_version,
+ 'Suspicion': suspicions,
+ 'Evidence': evidences,
+                'Signed': simple_values['isSigned']['values'][0] == 'true',
+ 'Company': company_name
+ })
+
+ file_outputs.append({
+ 'Name': file_name,
+ 'MD5': md5,
+ 'SHA1': sha1,
+ 'Path': path,
+ 'Hostname': machine
+ })
+ endpoint_outputs.append({
+ 'Hostname': machine,
+ 'OSVersion': os_version
+ })
+
+ ec = {
+ 'Cybereason.File(val.MD5 && val.MD5===obj.MD5 || val.SHA1 && val.SHA1===obj.SHA1)': cybereason_outputs,
+ 'Endpoint(val.Hostname===obj.Hostname)': endpoint_outputs
+ }
+ ec[outputPaths['file']] = file_outputs
+
+ demisto.results({
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': data,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Cybereason file query results', cybereason_outputs),
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results('No results found.')
+
+
+def query_file(filters):
+
+ query_fields = ['md5String', 'ownerMachine', 'avRemediationStatus', 'isSigned', 'signatureVerified',
+ 'sha1String', 'maliciousClassificationType', 'createdTime', 'modifiedTime', 'size', 'correctedPath',
+ 'productName', 'productVersion', 'companyName', 'internalName', 'elementDisplayName']
+ path = [
+ {
+ 'requestedType': 'File',
+ 'filters': filters,
+ 'isResult': True
+ }
+ ]
+ json = build_query(query_fields, path)
+ cmd_url = '/rest/visualsearch/query/simple'
+ response = http_request('POST', cmd_url, json=json).json()
+ if response.get('status', '') == 'SUCCESS' and 'data' in response:
+ return response['data']
+ else:
+ return_error('Error occurred while trying to query the file.')
+
+
+def get_file_machine_details(file_guid):
+
+ query_fields = ["ownerMachine", "self", "elementDisplayName", "correctedPath", "canonizedPath", "mount",
+ "mountedAs", "createdTime", "modifiedTime", "md5String", "sha1String", "productType", "companyName",
+ "productName", "productVersion", "signerInternalOrExternal", "signedInternalOrExternal",
+ "signatureVerifiedInternalOrExternal", "signedByMicrosoft", "extensionType", "size",
+ "avRemediationStatus", "classificationDetectionName", "avScanTime", "relatedToMalop",
+ "isSuspicious", "maliciousClassificationType", "classificationBlocking", "isDownloadedFromInternet",
+ "downloadedFromDomain", "downloadedFromIpAddress", "downloadedFromUrl", "downloadedFromUrlReferrer",
+ "downloadedFromEmailFrom", "downloadedFromEmailMessageId", "downloadedFromEmailSubject",
+ "ownerMachine.isActiveProbeConnected", "ownerMachine.osVersionType", "quarantineVersion",
+ "originalVersion"]
+
+ path = [
+ {
+ 'requestedType': 'File',
+ 'guidList': [file_guid],
+ 'result': True
+ }
+ ]
+ json = build_query(query_fields, path, template_context='DETAILS')
+
+ cmd_url = '/rest/visualsearch/query/simple'
+ response = http_request('POST', cmd_url, json=json).json()
+ return response
+
+
+def query_domain_command():
+
+ domain_input = demisto.args().get('domain')
+
+ filters = [{
+ 'facetName': 'elementDisplayName',
+ 'values': [domain_input],
+ 'filterType': 'ContainsIgnoreCase'
+ }]
+
+ data = query_domain(filters)
+
+ if data:
+ cybereason_outputs = []
+ domain_outputs = []
+ domains = data.get('resultIdToElementDataMap')
+ for domain in domains.keys():
+
+ simple_values = domains[domain]['simpleValues']
+
+ reputation = simple_values['maliciousClassificationType']['values'][0]
+ is_internal_domain = simple_values['isInternalDomain']['values'][0] == 'true'
+ was_ever_resolved = simple_values['everResolvedDomain']['values'][0] == 'true'
+ was_ever_resolved_as = simple_values['everResolvedSecondLevelDomain']['values'][0] == 'true'
+ malicious = domains[domain].get('isMalicious')
+ suspicions_count = domains[domain].get('suspicionCount')
+
+ cybereason_outputs.append({
+ 'Name': domain_input,
+ 'Reputation': reputation,
+ 'Malicious': malicious,
+ 'SuspicionsCount': suspicions_count,
+ 'IsInternalDomain': is_internal_domain,
+ 'WasEverResolved': was_ever_resolved,
+ 'WasEverResolvedAsASecondLevelDomain': was_ever_resolved_as
+ })
+
+ domain_outputs.append({
+ 'Name': domain_input,
+ })
+
+ ec = {
+ 'Cybereason.Domain(val.Name && val.Name===obj.Name)': cybereason_outputs
+ }
+ ec[outputPaths['domain']] = domain_outputs
+
+ demisto.results({
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': data,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Cybereason domain query results', cybereason_outputs,
+ ['Name', 'Reputation', 'IsInternalDomain', 'WasEverResolved',
+ 'WasEverResolvedAsASecondLevelDomain', 'Malicious',
+ 'SuspicionsCount']),
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results('No results found.')
+
+
+def query_domain(filters):
+
+ query_fields = ['maliciousClassificationType', 'isInternalDomain',
+ 'everResolvedDomain', 'everResolvedSecondLevelDomain', 'elementDisplayName']
+ path = [
+ {
+ 'requestedType': 'DomainName',
+ 'filters': filters,
+ 'isResult': True
+ }
+ ]
+ json = build_query(query_fields, path)
+ cmd_url = '/rest/visualsearch/query/simple'
+ response = http_request('POST', cmd_url, json=json).json()
+ if response.get('status', '') == 'SUCCESS' and 'data' in response:
+ return response['data']
+ else:
+        return_error('Error occurred while trying to query the domain.')
+
+
+def query_user_command():
+
+ username = demisto.args().get('username')
+
+ filters = [{
+ 'facetName': 'elementDisplayName',
+ 'values': [username],
+ 'filterType': 'ContainsIgnoreCase'
+ }]
+
+ data = query_user(filters)
+
+ if data:
+ cybereason_outputs = []
+ users = data.get('resultIdToElementDataMap')
+ for user in users.keys():
+
+ simple_values = users[user]['simpleValues']
+ element_values = users[user]['elementValues']
+
+ domain = simple_values['domain']['values'][0]
+            local_system = simple_values['isLocalSystem']['values'][0] == 'true'
+ machine = element_values['ownerMachine']['elementValues'][0]['name']
+ organization = element_values['ownerOrganization']['elementValues'][0]['name']
+
+ cybereason_outputs.append({
+ 'Username': username,
+ 'Domain': domain,
+ 'LastMachineLoggedInTo': machine,
+ 'Organization': organization,
+ 'LocalSystem': local_system
+ })
+
+ ec = {
+ 'Cybereason.User(val.Username && val.Username===obj.Username)': cybereason_outputs
+ }
+
+ demisto.results({
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': data,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Cybereason user query results', cybereason_outputs,
+ ['Username', 'Domain', 'LastMachineLoggedInTo', 'Organization',
+ 'LocalSystem']),
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results('No results found.')
+
+
+def query_user(filters):
+
+ query_fields = ['domain', 'ownerMachine', 'ownerOrganization', 'isLocalSystem', 'elementDisplayName']
+ path = [
+ {
+ 'requestedType': 'User',
+ 'filters': filters,
+ 'isResult': True
+ }
+ ]
+ json = build_query(query_fields, path)
+ cmd_url = '/rest/visualsearch/query/simple'
+ response = http_request('POST', cmd_url, json=json).json()
+ if response.get('status', '') == 'SUCCESS' and 'data' in response:
+ return response['data']
+ else:
+        return_error('Error occurred while trying to query the user.')
+
+
+def malop_to_incident(malop):
+
+ incident = {} # type: Dict[Any, Any]
+ incident['rawJSON'] = json.dumps(malop)
+ incident['name'] = 'Cybereason Malop ' + malop['guidString']
+ incident['labels'] = [{'type': 'GUID', 'value': malop['guidString']}]
+ return incident
+
+
+def fetch_incidents():
+
+ last_run = demisto.getLastRun()
+
+    if last_run and last_run.get('creation_time'):
+        last_update_time = int(last_run['creation_time'])
+    else:
+        # First run - fetch starting from the configured lookback window
+        last_update_time, _ = parse_date_range(FETCH_TIME, to_timestamp=True)
+
+ max_update_time = last_update_time
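+    # max_update_time tracks the newest Malop timestamp seen in this run and is
+    # stored as the watermark for the next fetch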
+
+ if FETCH_BY == 'MALOP UPDATE TIME':
+ filters = [{
+ 'facetName': 'malopLastUpdateTime',
+ 'values': [last_update_time],
+ 'filterType': 'GreaterThan'
+ }]
+ elif FETCH_BY == 'MALOP CREATION TIME':
+ filters = [{
+ 'facetName': 'creationTime',
+ 'values': [last_update_time],
+ 'filterType': 'GreaterThan'
+ }]
+ else:
+ return_error('Given filter to fetch by is invalid.')
+
+ data = query_malops(total_result_limit=10000, per_group_limit=10000, filters=filters)['data']
+ malops = data['resultIdToElementDataMap']
+
+ incidents = []
+
+ for malop in malops:
+ malops[malop]['simpleValues'].pop('iconBase64', None)
+ malops[malop]['simpleValues'].pop('malopActivityTypes', None)
+        malop_update_time = int(malops[malop]['simpleValues']['malopLastUpdateTime']['values'][0])
+
+ incident = malop_to_incident(malops[malop])
+ incidents.append(incident)
+ if malop_update_time > max_update_time:
+ max_update_time = malop_update_time
+
+ demisto.setLastRun({
+ 'creation_time': max_update_time
+ })
+
+ demisto.incidents(incidents)
+
+
+def login():
+ cmd_url = '/login.html'
+ headers = {
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ 'Connection': 'close'
+ }
+ data = {
+ 'username': USERNAME,
+ 'password': PASSWORD
+ }
+ http_request('POST', cmd_url, data=data, headers=headers)
+
+
+def client_certificate():
+ cert = CERTIFICATE
+
+ if 'Bag Attributes' not in cert:
+ return_error('Could not find Bag Attributes')
+ if '-----BEGIN CERTIFICATE-----' not in cert:
+ return_error('Could not find certificate file')
+ if '-----BEGIN RSA PRIVATE KEY-----' in cert: # guardrails-disable-line
+ i = cert.index('-----BEGIN RSA PRIVATE KEY-----') # guardrails-disable-line
+ else:
+ return_error('Could not find certificate key')
+ client_cert = cert[:i]
+ client_key = cert[i:]
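+    # requests expects session.cert as a (certificate_file, key_file) pair, so the
+    # two halves of the PEM bundle are written to separate files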
+
+    with open('client.cert', 'wb') as f:
+        f.write(client_cert)
+    with open('client.pem', 'wb') as f:
+        f.write(client_key)
+ client_cert_file = os.path.abspath('client.cert')
+ client_key_file = os.path.abspath('client.pem')
+
+ session.cert = (client_cert_file, client_key_file)
+
+    # Check that the server accepts the client certificate
+    response = session.get(url=SERVER)
+    if response.status_code not in (200, 302):
+        return_error('Failed to connect to server')
+
+ # First time we may get a redirect, but second time should be 200
+ response = session.get(url=SERVER)
+ if response.status_code != 200:
+ return_error("Failed to login with certificate. Expected response 200. Got: " + str(response.status_code))
+
+
+def logout():
+ cmd_url = '/logout'
+ http_request('GET', cmd_url)
+
+
+''' EXECUTION CODE '''
+
+LOG('command is %s' % (demisto.command(), ))
+
+session = requests.session()
+
+
+AUTH = ''
+
+if CERTIFICATE:
+    client_certificate()
+    AUTH = 'CERT'
+elif USERNAME and PASSWORD:
+    login()
+    AUTH = 'BASIC'
+else:
+    return_error('No credentials were provided')
+
+try:
+ if demisto.command() == 'test-module':
+        # Tests connectivity and credentials on login
+ query_user([])
+ demisto.results('ok')
+
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+
+ elif demisto.command() == 'cybereason-is-probe-connected':
+ is_probe_connected_command()
+
+ elif demisto.command() == 'cybereason-query-processes':
+ query_processes_command()
+
+ elif demisto.command() == 'cybereason-query-malops':
+ query_malops_command()
+
+ elif demisto.command() == 'cybereason-query-connections':
+ query_connections_command()
+
+ elif demisto.command() == 'cybereason-isolate-machine':
+ isolate_machine_command()
+
+ elif demisto.command() == 'cybereason-unisolate-machine':
+ unisolate_machine_command()
+
+ elif demisto.command() == 'cybereason-malop-processes':
+ malop_processes_command()
+
+ elif demisto.command() == 'cybereason-add-comment':
+ add_comment_command()
+
+ elif demisto.command() == 'cybereason-update-malop-status':
+ update_malop_status_command()
+
+ elif demisto.command() == 'cybereason-prevent-file':
+ prevent_file_command()
+
+ elif demisto.command() == 'cybereason-unprevent-file':
+ unprevent_file_command()
+
+ elif demisto.command() == 'cybereason-kill-process': # To be added as a command in the future
+ kill_process_command()
+
+ elif demisto.command() == 'cybereason-quarantine-file': # To be added as a command in the future
+ quarantine_file_command()
+
+ elif demisto.command() == 'cybereason-delete-registry-key': # To be added as a command in the future
+ delete_registry_key_command()
+
+ elif demisto.command() == 'cybereason-query-file':
+ query_file_command()
+
+ elif demisto.command() == 'cybereason-query-domain':
+ query_domain_command()
+
+ elif demisto.command() == 'cybereason-query-user':
+ query_user_command()
+
+except Exception as e:
+ LOG(e.message)
+ LOG.print_log()
+ return_error(e.message)
+finally:
+ logout()
+ if AUTH == 'CERT':
+ os.remove(os.path.abspath('client.pem'))
+ os.remove(os.path.abspath('client.cert'))
diff --git a/Integrations/Cybereason/Cybereason.yml b/Integrations/Cybereason/Cybereason.yml
new file mode 100644
index 000000000000..e4c4364bc03f
--- /dev/null
+++ b/Integrations/Cybereason/Cybereason.yml
@@ -0,0 +1,735 @@
+category: Endpoint
+commonfields:
+ id: Cybereason
+ version: -1
+configuration:
+- display: Server URL (e.g. https://192.168.0.1)
+ name: server
+ required: true
+ type: 0
+- display: Credentials
+ name: credentials
+ required: false
+ type: 9
+- display: Trust any certificate (not secure)
+ name: unsecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: 3 days
+  display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days, 3
+    months, 1 year)
+ name: fetch_time
+ required: false
+ type: 0
+- defaultvalue: MALOP CREATION TIME
+ display: Fetch by "MALOP CREATION TIME" or by "MALOP UPDATE TIME" (Fetching by Malop
+ update time might create duplicates of Malops as incidents)
+ name: fetch_by
+ required: false
+ type: 0
+description: Endpoint detection and response to manage and query malops, connections
+ and processes.
+display: Cybereason
+name: Cybereason
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The hostname of the machine.
+ isArray: false
+ name: machine
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Show only suspicious processes.
+ isArray: false
+ name: onlySuspicious
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10000'
+ description: Maximum number of results to retrieve.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: Process name to filter by.
+ isArray: false
+ name: processName
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: If true, save the result to the context.
+ isArray: false
+ name: saveToContext
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Filter only processes with incoming connections.
+ isArray: false
+ name: hasIncomingConnection
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Filter only processes with outgoing connections.
+ isArray: false
+ name: hasOutgoingConnection
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Filter only processes with external connections.
+ isArray: false
+ name: hasExternalConnection
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Filter only processes that are unknown to reputation services
+        and have an unsigned image file.
+ isArray: false
+ name: unsignedUnknownReputation
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Filter only processes running from a temporary folder.
+ isArray: false
+ name: fromTemporaryFolder
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Filter only processes identified as elevating their privileges
+        to the local system user.
+ isArray: false
+ name: privilegesEscalation
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Filter only processes that were executed by the PsExec service
+        and are suspected of being executed maliciously.
+ isArray: false
+ name: maliciousPsExec
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for processes with various filters.
+ execution: false
+ name: cybereason-query-processes
+ outputs:
+ - contextPath: Process.Name
+ description: The process name
+ type: Unknown
+ - contextPath: Process.Malicious
+ description: Malicious status of the process
+ type: Unknown
+ - contextPath: Process.CreationTime
+ description: The process creation time
+ type: Unknown
+ - contextPath: Process.EndTime
+ description: The process end time
+ type: Unknown
+ - contextPath: Process.CommandLine
+ description: The command line of the process
+ type: Unknown
+ - contextPath: Process.SignedAndVerified
+ description: Is the process signed and verified
+ type: Unknown
+ - contextPath: Process.ProductType
+ description: The product type
+ type: Unknown
+ - contextPath: Process.Children
+ description: Children of the process
+ type: Unknown
+ - contextPath: Process.Parent
+ description: The parent process
+ type: Unknown
+ - contextPath: Process.OwnerMachine
+ description: The machine's hostname
+ type: Unknown
+ - contextPath: Process.User
+ description: The user who ran the process
+ type: Unknown
+ - contextPath: Process.ImageFile
+ description: Image file of the process
+ type: Unknown
+ - contextPath: Process.SHA1
+ description: SHA1 of the process file
+ type: Unknown
+ - contextPath: Process.MD5
+ description: MD5 of the process file
+ type: Unknown
+ - contextPath: Process.CompanyName
+ description: The company's name
+ type: Unknown
+ - contextPath: Process.ProductName
+ description: The product's name
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The hostname of the machine to check.
+ isArray: false
+ name: machine
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks if the machine is currently connected to the Cybereason server
+ execution: false
+ name: cybereason-is-probe-connected
+ outputs:
+ - contextPath: Cybereason.Machine.isConnected
+ description: true if machine is connected, else false
+ type: boolean
+ - contextPath: Cybereason.Machine.Name
+ description: Machine name
+ type: string
+ - arguments:
+ - default: false
+ description: Filter connections which contain this IP (in or out).
+ isArray: false
+ name: ip
+ required: false
+ secret: false
+ - default: false
+ description: Filter connections on the given machine.
+ isArray: false
+ name: machine
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: If true, save the result to the context.
+ isArray: false
+ name: saveToContext
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for connections.
+ execution: false
+ name: cybereason-query-connections
+ outputs:
+ - contextPath: Connection.Name
+ description: The connection's name
+ type: Unknown
+ - contextPath: Connection.Direction
+ description: OUTGOING/INCOMING
+ type: Unknown
+ - contextPath: Connection.ServerAddress
+ description: Address of the Cybereason machine
+ type: Unknown
+ - contextPath: Connection.ServerPort
+ description: Port of the Cybereason machine
+ type: Unknown
+ - contextPath: Connection.PortType
+ description: Type of the connection
+ type: Unknown
+ - contextPath: Connection.ReceivedBytes
+ description: Received bytes count
+ type: Unknown
+ - contextPath: Connection.TransmittedBytes
+ description: Transmitted bytes count
+ type: Unknown
+ - contextPath: Connection.RemoteCountry
+ description: The connection's remote country
+ type: Unknown
+ - contextPath: Connection.OwnerMachine
+ description: The machine's hostname
+ type: Unknown
+ - contextPath: Connection.OwnerProcess
+ description: The process which performed the connection
+ type: Unknown
+ - contextPath: Connection.CreationTime
+ description: Creation time of the connection
+ type: Unknown
+ - contextPath: Connection.EndTime
+ description: End time of the connection
+ type: Unknown
+ - arguments:
+ - default: true
+ description: Machine name to be isolated
+ isArray: false
+ name: machine
+ required: true
+ secret: false
+ deprecated: false
+ description: Isolates a machine that has been infected from the rest of the network
+ execution: true
+ name: cybereason-isolate-machine
+ outputs:
+ - contextPath: Cybereason.Machine
+ description: Machine name
+ type: string
+ - contextPath: Cybereason.IsIsolated
+ description: Is the machine isolated
+ type: boolean
+ - contextPath: Endpoint.Hostname
+ description: Machine name
+ type: string
+ - arguments:
+ - default: true
+ description: Machine name to be un-isolated
+ isArray: false
+ name: machine
+ required: true
+ secret: false
+ deprecated: false
+ description: Stops isolation of a machine
+ execution: true
+ name: cybereason-unisolate-machine
+ outputs:
+ - contextPath: Cybereason.Machine
+ description: Machine name
+ type: string
+ - contextPath: Cybereason.IsIsolated
+ description: Is the machine isolated
+ type: boolean
+ - contextPath: Endpoint.Hostname
+ description: Machine name
+ type: string
+ - arguments:
+ - default: false
+      description: Filter to apply to the response, in Cybereason API syntax.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+      description: The total number of results to return. Keep the limit reasonable
+        to maintain server performance and avoid overloading the system.
+ isArray: false
+ name: totalResultLimit
+ required: false
+ secret: false
+ - default: false
+ description: The number of items to return per Malop group.
+ isArray: false
+ name: perGroupLimit
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: MALOP
+      description: 'The level of detail to provide in the response. Possible values
+        include: SPECIFIC: Reference values contain only the count in the ElementValues
+        class. The Suspicions map is calculated for each result, with the suspicion
+        name and the first time the suspicion appeared. The Evidence map is not calculated
+        for the results. CUSTOM: Reference values contain the specific Elements,
+        up to the limit defined in the perFeatureLimit parameter. The Suspicions map
+        is not calculated for the results. The Evidence map is not calculated for
+        the results. DETAILS: Reference values contain the specific Elements, up
+        to the limit defined in the perFeatureLimit parameter. The Suspicions map
+        is calculated for each result, containing the suspicion name and the first
+        time the suspicion appeared. The Evidence map is not calculated for the results.'
+ isArray: false
+ name: templateContext
+ predefined:
+ - MALOP
+ - SPECIFIC
+ - CUSTOM
+ - DETAILS
+ - OVERVIEW
+ required: false
+ secret: false
+ - default: false
+      description: Return all the Malops within the last given number of days.
+ isArray: false
+ name: withinLastDays
+ required: false
+ secret: false
+ - default: false
+ description: Malop GUIDs to filter by (Comma separated values supported, e.g.
+ 11.5681864988155542407,11.1773255057963879999)
+ isArray: false
+ name: malopGuid
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of all Malops and details on the Malops.
+ execution: false
+ name: cybereason-query-malops
+ outputs:
+ - contextPath: Cybereason.Malops.GUID
+ description: The unique globally unique identifier (guid) for the Malop.
+ type: string
+ - contextPath: Cybereason.Malops.CreationTime
+ description: The time reported as when the malicious behavior began on the system.
+ This is not the time that the Malop was first detected by Cybereason.
+ type: string
+    - contextPath: Cybereason.Malops.DecisionFailure
+ description: The reason that Cybereason has raised the Malop.
+ type: string
+ - contextPath: Cybereason.Malops.Link
+ description: Link to the Malop on Cybereason.
+ type: string
+ - contextPath: Cybereason.Malops.Suspects
+ description: Malop suspect type and name
+ type: string
+    - contextPath: Cybereason.Malops.LastUpdateTime
+ description: Last updated time of malop
+ type: string
+ - contextPath: Cybereason.Malops.AffectedMachine
+ description: List of machines affected by this Malop
+ type: string
+ - contextPath: Cybereason.Malops.InvolvedHash
+ description: List of file hashes involved in this Malop
+ type: string
+ - contextPath: Cybereason.Malops.Status
+      description: Malop management status
+ type: string
+ - arguments:
+ - default: false
+      description: Array of Malop GUIDs, comma-separated. (Malop GUIDs can be retrieved
+        with the cybereason-query-malops command)
+ isArray: false
+ name: malopGuids
+ required: true
+ secret: false
+ - default: false
+ description: Machine names which were affected by malop. Comma separated values
+ supported (e.g., machine1,machine2)
+ isArray: false
+ name: machineName
+ required: false
+ secret: false
+ deprecated: false
+    description: Returns a list of processes for the given Malops.
+ execution: false
+ name: cybereason-malop-processes
+ outputs:
+ - contextPath: Process.Name
+ description: The process name
+ type: string
+ - contextPath: Process.Malicious
+ description: Malicious status of the process
+ type: Unknown
+ - contextPath: Process.CreationTime
+ description: The process creation time
+ type: date
+ - contextPath: Process.EndTime
+ description: The process end time
+ type: date
+ - contextPath: Process.CommandLine
+ description: The command line of the process
+ type: string
+ - contextPath: Process.SignedAndVerified
+ description: Is the process signed and verified
+ type: Unknown
+ - contextPath: Process.ProductType
+ description: The product type
+ type: Unknown
+ - contextPath: Process.Children
+ description: Children of the process
+ type: Unknown
+ - contextPath: Process.Parent
+ description: The parent process
+ type: Unknown
+ - contextPath: Process.OwnerMachine
+ description: The machine's hostname
+ type: Unknown
+ - contextPath: Process.User
+ description: The user who ran the process
+ type: string
+ - contextPath: Process.ImageFile
+ description: Image file of the process
+ type: Unknown
+ - contextPath: Process.SHA1
+ description: SHA1 of the process file
+ type: string
+ - contextPath: Process.MD5
+ description: MD5 of the process file
+ type: string
+ - contextPath: Process.CompanyName
+ description: The company's name
+ type: string
+ - contextPath: Process.ProductName
+ description: The product's name
+ type: string
+ - arguments:
+ - default: false
+ description: Comment to add to the malop
+ isArray: false
+ name: comment
+ required: true
+ secret: false
+ - default: false
+      description: Malop GUID to add the comment to. (Malop GUIDs can be retrieved
+        with the cybereason-query-malops command)
+ isArray: false
+ name: malopGuid
+ required: true
+ secret: false
+ deprecated: false
+ description: Add new comment to malop
+ execution: false
+ name: cybereason-add-comment
+ - arguments:
+ - default: false
+      description: GUID of the Malop whose status will be updated
+ isArray: false
+ name: malopGuid
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: The status to set for the Malop
+ isArray: false
+ name: status
+ predefined:
+ - To Review
+ - Unread
+ - Remediated
+ - Not Relevant
+ - Open
+ required: true
+ secret: false
+ deprecated: false
+ description: Updates malop status
+ execution: false
+ name: cybereason-update-malop-status
+ outputs:
+ - contextPath: Cybereason.Malops.GUID
+ description: Malop GUID
+ type: string
+ - contextPath: Cybereason.Malops.Status
+      description: 'Malop status: To Review, Unread, Remediated, Not Relevant'
+ type: string
+ - arguments:
+ - default: true
+ description: Malop process file MD5 to prevent
+ isArray: false
+ name: md5
+ required: true
+ secret: false
+ deprecated: false
+ description: Prevent malop process file
+ execution: false
+ name: cybereason-prevent-file
+ outputs:
+ - contextPath: Process.MD5
+ description: Process file MD5
+ type: string
+ - contextPath: Process.Prevent
+ description: True if process file is prevented, else false
+ type: boolean
+ - arguments:
+ - default: true
+ description: Malop process file MD5 to unprevent
+ isArray: false
+ name: md5
+ required: true
+ secret: false
+ deprecated: false
+ description: Unprevent malop process file
+ execution: false
+ name: cybereason-unprevent-file
+ outputs:
+ - contextPath: Process.MD5
+ description: Process file MD5
+ type: string
+ - contextPath: Process.Prevent
+ description: True if process file is prevented, else false
+ type: boolean
+ - arguments:
+ - default: true
+ description: File hash (SHA-1 and MD5 supported)
+ isArray: false
+ name: file_hash
+ required: true
+ secret: false
+ deprecated: false
+ description: Query files as part of investigation
+ execution: false
+ name: cybereason-query-file
+ outputs:
+ - contextPath: Cybereason.File.Path
+ description: File path
+ type: string
+ - contextPath: Cybereason.File.SHA1
+ description: File SHA-1 hash
+ type: string
+ - contextPath: Cybereason.File.Machine
+ description: Machine name on which file is located
+ type: string
+ - contextPath: Cybereason.File.SuspicionsCount
+ description: File suspicions count
+ type: number
+ - contextPath: Cybereason.File.Name
+ description: File name
+ type: string
+ - contextPath: Cybereason.File.CreationTime
+ description: File creation time
+ type: date
+ - contextPath: Cybereason.File.Suspicion
+ description: File suspicions object of suspicion as key and detected date as
+ value
+ type: string
+ - contextPath: Cybereason.File.OSVersion
+ description: Machine OS version on which file is located
+ type: string
+ - contextPath: Cybereason.File.ModifiedTime
+ description: File modified date
+ type: date
+ - contextPath: Cybereason.File.Malicious
+ description: Is file malicious
+ type: boolean
+ - contextPath: Cybereason.File.Company
+ description: Company name
+ type: string
+ - contextPath: Cybereason.File.MD5
+ description: File MD5 hash
+ type: string
+ - contextPath: Cybereason.File.IsConnected
+ description: Is machine connected to Cybereason
+ type: boolean
+ - contextPath: Cybereason.File.Signed
+ description: Is file signed
+ type: boolean
+ - contextPath: Cybereason.File.Evidence
+ description: File evidences
+ type: string
+ - contextPath: Endpoint.Hostname
+ description: Hostname on which file is located
+ type: string
+ - contextPath: Endpoint.OSVersion
+ description: Machine OS version on which file is located
+ type: string
+ - contextPath: File.Hostname
+ description: Hostname on which file is located
+ type: string
+ - contextPath: File.MD5
+ description: File MD5 hash
+ type: string
+ - contextPath: File.SHA1
+ description: File SHA-1 hash
+ type: string
+ - contextPath: File.Name
+ description: File name
+ type: string
+ - contextPath: File.Path
+ description: File path
+ type: string
+ - arguments:
+ - default: true
+ description: Domain to query
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Query domains as part of investigation
+ execution: false
+ name: cybereason-query-domain
+ outputs:
+ - contextPath: Cybereason.Domain.Name
+ description: Domain name
+ type: string
+ - contextPath: Cybereason.Domain.Malicious
+ description: Is domain malicious
+ type: boolean
+ - contextPath: Cybereason.Domain.IsInternalDomain
+ description: Is domain internal
+ type: boolean
+ - contextPath: Cybereason.Domain.Reputation
+ description: Domain reputation
+ type: string
+ - contextPath: Cybereason.Domain.SuspicionsCount
+ description: Domain suspicions count
+ type: number
+ - contextPath: Cybereason.Domain.WasEverResolved
+ description: Was domain ever resolved
+ type: boolean
+ - contextPath: Cybereason.Domain.WasEverResolvedAsASecondLevelDomain
+ description: Was domain ever resolved as a second level domain
+ type: boolean
+ - contextPath: Domain.Name
+ description: Domain name
+ type: string
+ - arguments:
+ - default: true
+ description: Username to query
+ isArray: false
+ name: username
+ required: true
+ secret: false
+ deprecated: false
+ description: Query users as part of investigation
+ execution: false
+ name: cybereason-query-user
+ outputs:
+ - contextPath: Cybereason.User.Username
+ description: User name
+ type: string
+ - contextPath: Cybereason.User.Domain
+ description: User domain
+ type: string
+ - contextPath: Cybereason.User.LastMachineLoggedInTo
+ description: Last machine which user logged in to
+ type: string
+ - contextPath: Cybereason.User.LocalSystem
+ description: Is local system
+ type: boolean
+ - contextPath: Cybereason.User.Organization
+ description: User organization
+ type: string
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- Cybereason Test
diff --git a/Integrations/Cybereason/Cybereason_image.png b/Integrations/Cybereason/Cybereason_image.png
new file mode 100644
index 000000000000..d1bd7cab71f5
Binary files /dev/null and b/Integrations/Cybereason/Cybereason_image.png differ
diff --git a/Integrations/Cybereason/Pipfile b/Integrations/Cybereason/Pipfile
new file mode 100644
index 000000000000..66ad1243db8b
--- /dev/null
+++ b/Integrations/Cybereason/Pipfile
@@ -0,0 +1,22 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+
+[packages]
+certifi = "==2017.11.5"
+chardet = "==3.0.4"
+idna = "==2.6"
+olefile = "==0.44"
+requests = "==2.18.4"
+urllib3 = "==1.22"
+PyYAML = "==3.12"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/Cybereason/Pipfile.lock b/Integrations/Cybereason/Pipfile.lock
new file mode 100644
index 000000000000..17b21182a32b
--- /dev/null
+++ b/Integrations/Cybereason/Pipfile.lock
@@ -0,0 +1,375 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "dbc7e9dc0a5be3767de3b107d1afe7a3e3b6c57f7cb8a820195e76b8ee681d40"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694",
+ "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
+ ],
+ "index": "pypi",
+ "version": "==2017.11.5"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "olefile": {
+ "hashes": [
+ "sha256:61f2ca0cd0aa77279eb943c07f607438edf374096b66332fae1ee64a6f0f73ad"
+ ],
+ "index": "pypi",
+ "version": "==0.44"
+ },
+ "pyyaml": {
+ "hashes": [
+ "sha256:16b20e970597e051997d90dc2cddc713a2876c47e3d92d59ee198700c5427736",
+ "sha256:3262c96a1ca437e7e4763e2843746588a965426550f3797a79fca9c6199c431f",
+ "sha256:592766c6303207a20efc445587778322d7f73b161bd994f227adaa341ba212ab",
+ "sha256:5ac82e411044fb129bae5cfbeb3ba626acb2af31a8d17d175004b70862a741a7",
+ "sha256:827dc04b8fa7d07c44de11fabbc888e627fa8293b695e0f99cb544fdfa1bf0d1",
+ "sha256:bc6bced57f826ca7cb5125a10b23fd0f2fff3b7c4701d64c439a300ce665fff8",
+ "sha256:c01b880ec30b5a6e6aa67b09a2fe3fb30473008c85cd6a67359a1b15ed6d83a4",
+ "sha256:e863072cdf4c72eebf179342c94e6989c67185842d9997960b3e69290b2fa269"
+ ],
+ "index": "pypi",
+ "version": "==3.12"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694",
+ "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
+ ],
+ "index": "pypi",
+ "version": "==2017.11.5"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:45d1272aad6cfd7a8a06cf5c73f2ceb6a190f6acc1fa707e7f82a4c053b28b18",
+ "sha256:bc37850f0cc42a1725a796ef7d92690651bf1af37d744cc63161dac62cabee17"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==3.8.1"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16",
+ "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.3.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8",
+ "sha256:80d2de76188eabfbfcf27e6a37342c2827801e59c4cc14b0371c56fed43820e3"
+ ],
+ "version": "==0.19"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
+ "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
+ ],
+ "version": "==19.1"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42",
+ "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300"
+ ],
+ "index": "pypi",
+ "version": "==1.9.5"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:8fc39199bdda3d9d025d3b1f4eb99a192c20828030ea7c9a0d2840721de7d347",
+ "sha256:d100a02770f665f5dcf7e3f08202db29857fee6d15f34c942be0a511f39814f0"
+ ],
+ "index": "pypi",
+ "version": "==4.6.5"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/Cylance_Protect_v2/CHANGELOG.md b/Integrations/Cylance_Protect_v2/CHANGELOG.md
new file mode 100644
index 000000000000..9e7170cee7e9
--- /dev/null
+++ b/Integrations/Cylance_Protect_v2/CHANGELOG.md
@@ -0,0 +1,10 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+Added the *batch_size* argument to the ***cylance-protect-delete-devices*** command, which specifies the number of devices to delete per request (batch).
+
+## [19.8.0] - 2019-08-06
+- Improved handling of error messages.
+- Improved logging functionality.
+- Added the ***Trust any certificate*** parameter.
diff --git a/Integrations/Cylance_Protect_v2/Cylance_Protect_v2-description.md b/Integrations/Cylance_Protect_v2/Cylance_Protect_v2-description.md
new file mode 100644
index 000000000000..fa39fe335d70
--- /dev/null
+++ b/Integrations/Cylance_Protect_v2/Cylance_Protect_v2-description.md
@@ -0,0 +1,2 @@
+CylancePROTECT is an integrated threat prevention solution that uses the
+power of artificial intelligence (AI) to block malware infections.
\ No newline at end of file
diff --git a/Integrations/Cylance_Protect_v2/Cylance_Protect_v2.py b/Integrations/Cylance_Protect_v2/Cylance_Protect_v2.py
new file mode 100644
index 000000000000..7f4966b916fd
--- /dev/null
+++ b/Integrations/Cylance_Protect_v2/Cylance_Protect_v2.py
@@ -0,0 +1,1437 @@
+from CommonServerPython import *
+import jwt
+import uuid
+import requests
+import json
+import re
+import zipfile
+from StringIO import StringIO
+from datetime import datetime, timedelta
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+# CONSTANTS
+TOKEN_TIMEOUT = 300 # 5 minutes
+
+URI_AUTH = 'auth/v2/token'
+URI_DEVICES = 'devices/v2'
+URI_POLICIES = 'policies/v2'
+URI_ZONES = 'zones/v2'
+URI_THREATS = 'threats/v2'
+URI_LISTS = 'globallists/v2'
+
+SCOPE_DEVICE_LIST = 'device:list'
+SCOPE_DEVICE_READ = 'device:read'
+SCOPE_DEVICE_UPDATE = 'device:update'
+SCOPE_DEVICE_THREAT_LIST = 'device:threatlist'
+SCOPE_POLICY_LIST = 'policy:list'
+SCOPE_POLICY_READ = 'policy:read'
+SCOPE_ZONE_CREATE = 'zone:create'
+SCOPE_ZONE_LIST = 'zone:list'
+SCOPE_ZONE_READ = 'zone:read'
+SCOPE_ZONE_UPDATE = 'zone:update'
+SCOPE_THREAT_READ = 'threat:read'
+SCOPE_THREAT_DEVICE_LIST = 'threat:devicelist'
+SCOPE_THREAT_UPDATE = 'threat:update'
+SCOPE_GLOBAL_LIST = 'globallist:list'
+SCOPE_THREAT_LIST = 'threat:list'
+SCOPE_GLOBAL_LIST_CREATE = 'globallist:create'
+SCOPE_GLOBAL_LIST_DELETE = 'globallist:delete'
+
+
+# PREREQUISITES
+def load_server_url():
+ """ Cleans and loads the server url from the configuration """
+ url = demisto.params()['server']
+    url = re.sub(r'/+$', '', url)  # strip any trailing slashes
+ return url
+
+
+# GLOBALS
+APP_ID = demisto.params()['app_id']
+APP_SECRET = demisto.params()['app_secret']
+TID = demisto.params()['tid']
+SERVER_URL = load_server_url()
+FILE_THRESHOLD = demisto.params()['file_threshold']
+USE_SSL = not demisto.params().get('unsecure', False)
+
+
+# HELPERS
+def generate_jwt_times():
+ """
+ Generates the epoch time window in which the token will be valid
+ Returns the current timestamp and the timeout timestamp (in that order)
+ """
+ now = datetime.utcnow()
+ timeout_datetime = now + timedelta(seconds=TOKEN_TIMEOUT)
+ epoch_time = int((now - datetime(1970, 1, 1)).total_seconds())
+ epoch_timeout = int((timeout_datetime - datetime(1970, 1, 1)).total_seconds())
+ return epoch_time, epoch_timeout
+
+
+def api_call(uri, method='post', headers={}, body={}, params={}, accept_404=False):
+ """
+ Makes an API call to the server URL with the supplied uri, method, headers, body and params
+ """
+ url = '%s/%s' % (SERVER_URL, uri)
+ res = requests.request(method, url, headers=headers, data=json.dumps(body), params=params, verify=USE_SSL)
+ if res.status_code < 200 or res.status_code >= 300:
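+        # a 409 for a hash that is already on a global list is surfaced as a warning entry rather than an error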
+ if res.status_code == 409 and str(res.content).find('already an entry for this threat') != -1:
+ raise Warning(res.content)
+        # 404 responses fall through so callers can treat "not found" as a falsy result
+        if res.status_code != 404 and not accept_404:
+ return_error(
+ 'Got status code ' + str(res.status_code) + ' with body ' + res.content + ' with headers ' + str(
+ res.headers))
+ return json.loads(res.text) if res.text else res.ok
+
+
+def get_authentication_token(scope=None):
+ """
+ Generates a JWT authorization token with an optional scope and queries the API for an access token
+ Returns the received API access token
+ """
+ # Generate token ID
+ token_id = str(uuid.uuid4())
+
+ # Generate current time & token timeout
+ epoch_time, epoch_timeout = generate_jwt_times()
+ # Token claims
+ claims = {
+ 'exp': epoch_timeout,
+ 'iat': epoch_time,
+ 'iss': 'http://cylance.com',
+ 'sub': APP_ID,
+ 'tid': TID,
+ 'jti': token_id
+ }
+
+ if scope:
+ claims['scp'] = scope
+
+ # Encode the token
+ encoded = jwt.encode(claims, APP_SECRET, algorithm='HS256')
+ payload = {'auth_token': encoded}
+ headers = {'Content-Type': 'application/json; charset=utf-8'}
+ res = api_call(method='post', uri=URI_AUTH, body=payload, headers=headers)
+ return res['access_token']
+
+
+def threat_to_incident(threat):
+ incident = {
+ 'name': 'Cylance Protect v2 threat ' + threat['name'],
+ 'occurred': threat['last_found'] + 'Z',
+ 'rawJSON': json.dumps(threat)
+ }
+
+ host_name = None
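+    # recover the host name by matching the device whose detection time equals the threat's last_found timestamp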
+ devices = get_threat_devices_request(threat['sha256'], None, None)['page_items']
+ for device in devices:
+ if device['date_found'] == threat['last_found']:
+ host_name = device['name']
+
+ labels = [{'type': 'Classification', 'value': threat['classification']}, {'type': 'MD5', 'value': threat['md5']},
+ {'type': 'SHA256', 'value': threat['sha256']}, {'type': 'ThreatLastFound', 'value': threat['last_found']},
+ {'type': 'HostName', 'value': host_name}]
+ incident['labels'] = labels
+ return incident
+
+
+def normalize_score(score):
+ """
+ Translates API raw float (-1 to 1) score to UI score (-100 to 100)
+ """
+ return score * 100
+
+
+def translate_score(score, threshold):
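+    # DBot score bands for the normalized (-100 to 100) Cylance score:
+    #   score > 0               -> 1 (good)
+    #   threshold <= score <= 0 -> 2 (suspicious)
+    #   score < threshold       -> 3 (bad)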
+ if score > 0:
+ dbot_score = 1
+ elif threshold <= score:
+ dbot_score = 2
+ else:
+ dbot_score = 3
+ return dbot_score
+
+
+# FUNCTIONS
+def test():
+ access_token = get_authentication_token()
+ if not access_token:
+ raise Exception('Unable to get access token')
+ demisto.results('ok')
+
+
+def get_devices():
+ page = demisto.args()['pageNumber'] if 'pageNumber' in demisto.args() else None
+ page_size = demisto.args()['pageSize'] if 'pageSize' in demisto.args() else None
+ result = get_devices_request(page, page_size)
+ devices = result['page_items']
+ hr = []
+ devices_context = []
+ endpoint_context = []
+ for device in devices:
+ current_device_context = {
+ 'AgentVersion': device['agent_version'],
+ 'DateFirstRegistered': device['date_first_registered'],
+ 'ID': device['id'],
+ 'IPAddress': device['ip_addresses'],
+            'MACAddress': device['mac_addresses'],
+ 'Hostname': device['name'],
+ 'State': device['state']
+ }
+ if device['policy']:
+ policy = {}
+ if device['policy']['id']:
+ policy['ID'] = device['policy']['id']
+ if device['policy']['name']:
+ policy['Name'] = device['policy']['name']
+ if policy:
+ current_device_context['Policy'] = policy
+ devices_context.append(current_device_context)
+ endpoint_context.append({
+ 'IPAddress': device['ip_addresses'],
+            'MACAddress': device['mac_addresses'],
+ 'Hostname': device['name']
+ })
+ current_device = dict(device)
+ current_device['ip_addresses'] = ', '.join(current_device['ip_addresses'])
+ current_device['mac_addresses'] = ', '.join(current_device['mac_addresses'])
+ current_device['policy'] = current_device['policy']['name']
+ hr.append(current_device)
+
+ ec = {
+ 'CylanceProtect.Device(val.ID && val.ID === obj.ID)': devices_context,
+ 'Endpoint(val.Hostname && val.Hostname === obj.Hostname)': endpoint_context
+ }
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': devices,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Cylance Protect Devices', hr, headerTransform=underscoreToCamelCase,
+ removeNull=True),
+ 'EntryContext': ec
+ }
+
+ demisto.results(entry)
+
+
+def get_devices_request(page=None, page_size=None):
+ access_token = get_authentication_token(scope=SCOPE_DEVICE_LIST)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+
+ params = {}
+ if page:
+ params['page'] = page
+ if page_size:
+ params['page_size'] = page_size
+ res = api_call(uri=URI_DEVICES, method='get', headers=headers, params=params)
+ return res
+
+
+def get_device():
+ device_id = demisto.args()['id']
+ device = get_device_request(device_id)
+ hr = []
+ if device:
+ device_context = {
+ 'IPAddress': device['ip_addresses'],
+            'MACAddress': device['mac_addresses'],
+ 'Hostname': device['host_name'],
+ 'OSVersion': device['os_version'],
+ 'UpdateAvailable': device['update_available'],
+ 'BackgroundDetection': device['background_detection'],
+ 'DateFirstRegistered': device['date_first_registered'],
+ 'DateLastModified': device['date_last_modified'],
+ 'DateOffline': device['date_offline'],
+ 'IsSafe': device['is_safe'],
+ 'LastLoggedInUser': device['last_logged_in_user'],
+ 'State': device['state'],
+ 'ID': device['id'],
+ 'Name': device['name']
+ }
+ if device['update_type']:
+ device_context['UpdateType'] = device['update_type']
+ if device['policy']:
+ policy = {}
+ if device['policy']['id']:
+ policy['ID'] = device['policy']['id']
+ if device['policy']['name']:
+ policy['Name'] = device['policy']['name']
+ if policy:
+ device_context['Policy'] = policy
+ endpoint_context = {
+ 'IPAddress': device['ip_addresses'],
+            'MACAddress': device['mac_addresses'],
+ 'Hostname': device['host_name'],
+ 'OSVersion': device['os_version']
+ }
+ ec = {
+ 'Endpoint(val.Hostname && val.Hostname === obj.Hostname)': endpoint_context,
+ 'CylanceProtect.Device(val.ID && val.ID === obj.ID)': device_context
+ }
+
+ current_device = dict(device)
+ current_device['ip_addresses'] = ', '.join(current_device['ip_addresses'])
+ current_device['mac_addresses'] = ', '.join(current_device['mac_addresses'])
+ current_device['policy'] = current_device['policy']['name']
+ hr.append(current_device)
+
+ else:
+ ec = {}
+
+ title = 'Cylance Protect Device ' + device_id
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': device,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, hr, headerTransform=underscoreToCamelCase, removeNull=True),
+ 'EntryContext': ec
+ }
+
+ demisto.results(entry)
+
+
+def get_device_request(device_id):
+ access_token = get_authentication_token(scope=SCOPE_DEVICE_READ)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+ uri = '%s/%s' % (URI_DEVICES, device_id)
+ res = api_call(uri=uri, method='get', headers=headers)
+ return res
+
+
+def update_device():
+ device_id = demisto.args()['id']
+
+ name = demisto.args()['name'] if 'name' in demisto.args() else None
+ policy_id = demisto.args()['policyId'] if 'policyId' in demisto.args() else None
+ add_zones = demisto.args()['addZones'] if 'addZones' in demisto.args() else None
+ remove_zones = demisto.args()['removeZones'] if 'removeZones' in demisto.args() else None
+
+ update_device_request(device_id, name, policy_id, add_zones, remove_zones)
+
+ hr = {}
+
+ if name:
+ hr['Name'] = name
+ if policy_id:
+ hr['PolicyID'] = policy_id
+ if add_zones:
+ hr['AddedZones'] = add_zones
+ if remove_zones:
+ hr['RemovedZones'] = remove_zones
+
+ device = hr.copy()
+ device['id'] = device_id
+
+ title = 'Device ' + device_id + ' was updated successfully.'
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': device,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, [hr])
+ }
+
+ demisto.results(entry)
+
+
+def update_device_request(device_id, name=None, policy_id=None, add_zones=None, remove_zones=None):
+ access_token = get_authentication_token(scope=SCOPE_DEVICE_UPDATE)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+
+ body = {}
+ if name:
+ body['name'] = name
+ if policy_id:
+ body['policy_id'] = policy_id
+ if add_zones:
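+        # the API expects a list of zone ids; the single argument value is wrapped here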
+ body['add_zone_ids'] = [add_zones]
+ if remove_zones:
+ body['remove_zone_ids'] = [remove_zones]
+
+ # Do we have anything to update?
+ if not body:
+ raise Exception('No changes detected')
+
+ uri = '%s/%s' % (URI_DEVICES, device_id)
+ res = api_call(uri=uri, method='put', headers=headers, body=body)
+ return res
+
+
+def get_device_threats():
+ device_id = demisto.args()['id']
+ page = demisto.args()['pageNumber'] if 'pageNumber' in demisto.args() else None
+ page_size = demisto.args()['pageSize'] if 'pageSize' in demisto.args() else None
+
+ device_threats = get_device_threats_request(device_id, page, page_size)['page_items']
+ dbot_score_array = []
+
+ for threat in device_threats:
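+        # a threat without a cylance_score keeps the default DBot score of 0 (unknown)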
+ dbot_score = 0
+ score = threat.get('cylance_score', None)
+ if score:
+ threat['cylance_score'] = normalize_score(threat['cylance_score'])
+ threshold = demisto.args().get('threshold', FILE_THRESHOLD)
+ dbot_score = translate_score(threat['cylance_score'], int(threshold))
+ dbot_score_array.append({
+ 'Indicator': threat['name'],
+ 'Type': 'file',
+ 'Vendor': 'Cylance Protect',
+ 'Score': dbot_score
+ })
+ if device_threats:
+ threats_context = createContext(data=device_threats, keyTransform=underscoreToCamelCase)
+ ec = {
+ 'File': threats_context,
+ 'DBotScore': dbot_score_array
+ }
+
+ title = 'Cylance Protect Device Threat ' + device_id
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': device_threats,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, device_threats, headerTransform=underscoreToCamelCase),
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results('No threats found.')
+
+
+def get_device_threats_request(device_id, page=None, page_size=None):
+ access_token = get_authentication_token(scope=SCOPE_DEVICE_THREAT_LIST)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+
+ params = {}
+ if page:
+ params['page'] = page
+ if page_size:
+ params['page_size'] = page_size
+ uri = '%s/%s/threats' % (URI_DEVICES, device_id)
+ res = api_call(uri=uri, method='get', headers=headers, params=params)
+ return res
+
+
+def get_policies():
+ page = demisto.args()['pageNumber'] if 'pageNumber' in demisto.args() else None
+ page_size = demisto.args()['pageSize'] if 'pageSize' in demisto.args() else None
+
+ policies = get_policies_request(page, page_size)['page_items']
+
+ context_policies = createContext(data=policies, keyTransform=underscoreToCamelCase)
+ ec = {
+ 'CylanceProtect.Policies(val.id && val.id === obj.id)': context_policies
+ }
+
+ title = 'Cylance Protect Policies'
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': policies,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, policies, headerTransform=underscoreToCamelCase),
+ 'EntryContext': ec
+ }
+
+ demisto.results(entry)
+
+
+def get_policies_request(page=None, page_size=None):
+ access_token = get_authentication_token(scope=SCOPE_POLICY_LIST)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+
+ params = {}
+ if page:
+ params['page'] = page
+ if page_size:
+ params['page_size'] = page_size
+
+ res = api_call(uri=URI_POLICIES, method='get', headers=headers, params=params)
+ return res
+
+
+def create_zone():
+ name = demisto.args()['name']
+ policy_id = demisto.args()['policy_id']
+ criticality = demisto.args()['criticality']
+
+ zone = create_zone_request(name, policy_id, criticality)
+
+ title = 'Zone ' + name + ' was created successfully.'
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': zone,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, [zone], headerTransform=underscoreToCamelCase)
+ })
+
+
+def create_zone_request(name, policy_id, criticality):
+ access_token = get_authentication_token(scope=SCOPE_ZONE_CREATE)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+ body = {
+ 'name': name,
+ 'policy_id': policy_id,
+ 'criticality': criticality
+ }
+ res = api_call(uri=URI_ZONES, method='post', headers=headers, body=body)
+ return res
+
+
+def get_zones():
+ page = demisto.args()['pageNumber'] if 'pageNumber' in demisto.args() else None
+ page_size = demisto.args()['pageSize'] if 'pageSize' in demisto.args() else None
+
+ zones = get_zones_request(page, page_size)['page_items']
+
+ context_zones = createContext(data=zones, keyTransform=underscoreToCamelCase, removeNull=True)
+ ec = {
+ 'CylanceProtect.Zones(val.Id && val.Id === obj.Id)': context_zones
+ }
+ title = 'Cylance Protect Zones'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': zones,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, zones, headerTransform=underscoreToCamelCase, removeNull=True),
+ 'EntryContext': ec
+ })
+
+
+def get_zones_request(page=None, page_size=None):
+ access_token = get_authentication_token(scope=SCOPE_ZONE_LIST)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+
+ params = {}
+ if page:
+ params['page'] = page
+ if page_size:
+ params['page_size'] = page_size
+
+ res = api_call(uri=URI_ZONES, method='get', headers=headers, params=params)
+ return res
+
+
+def get_zone():
+ zone_id = demisto.args()['id']
+ zone = get_zone_request(zone_id)
+
+ context_zone = createContext(data=zone, keyTransform=underscoreToCamelCase, removeNull=True)
+ ec = {
+ 'CylanceProtect.Zones(val.Id && val.Id === obj.Id)': context_zone
+ }
+ title = 'Cylance Protect Zone ' + zone_id
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': zone,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, zone, headerTransform=underscoreToCamelCase, removeNull=True),
+ 'EntryContext': ec
+ })
+
+
+def get_zone_request(zone_id):
+ access_token = get_authentication_token(scope=SCOPE_ZONE_READ)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+ uri = '%s/%s' % (URI_ZONES, zone_id)
+ res = api_call(uri=uri, method='get', headers=headers)
+ return res
+
+
+def update_zone():
+ zone_id = demisto.args()['id']
+
+    # Get current zone and fill in any missing required arguments
+ current_zone = get_zone_request(zone_id)
+
+ # Details to update
+ name = demisto.args()['name'] if 'name' in demisto.args() else current_zone['name']
+ policy_id = demisto.args()['policy_id'] if 'policy_id' in demisto.args() else current_zone['policy_id']
+ criticality = demisto.args()['criticality'] if 'criticality' in demisto.args() else current_zone['criticality']
+ zone = update_zone_request(zone_id, name, policy_id, criticality)
+ hr = {}
+ if name:
+ hr['Name'] = name
+ if policy_id:
+ hr['PolicyID'] = policy_id
+ if criticality:
+ hr['Criticality'] = criticality
+ title = 'Zone was updated successfully.'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': zone,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, [hr])
+ })
+
+
+def update_zone_request(zone_id, name, policy_id, criticality):
+ access_token = get_authentication_token(scope=SCOPE_ZONE_UPDATE)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+
+ body = {}
+ if name:
+ body['name'] = name
+ if policy_id:
+ body['policy_id'] = policy_id
+ if criticality:
+ body['criticality'] = criticality
+
+ # Do we have anything to update?
+ if not body:
+ raise Exception('No changes detected')
+
+ uri = '%s/%s' % (URI_ZONES, zone_id)
+ res = api_call(uri=uri, method='put', headers=headers, body=body)
+ return res
+
+
+def get_threat():
+ sha256 = demisto.args()['sha256']
+ threat = get_threat_request(sha256)
+ if threat:
+ dbot_score = 0
+ score = threat.get('cylance_score', None)
+ if score:
+ threat['cylance_score'] = normalize_score(threat['cylance_score'])
+ threshold = demisto.args().get('threshold', FILE_THRESHOLD)
+ dbot_score = translate_score(threat['cylance_score'], int(threshold))
+ context_threat = createContext(data=threat, keyTransform=underscoreToCamelCase, removeNull=True)
+ ec = {
+ 'File': context_threat,
+ 'DBotScore': {
+ 'Indicator': threat['name'],
+ 'Type': 'file',
+ 'Vendor': 'Cylance Protect',
+ 'Score': dbot_score
+ }
+ }
+ title = 'Cylance Protect Threat ' + sha256
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': threat,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, threat, headerTransform=underscoreToCamelCase, removeNull=True),
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results('Threat was not found.')
+
+
+def get_threat_request(sha256):
+ access_token = get_authentication_token(scope=SCOPE_THREAT_READ)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+ uri = '%s/%s' % (URI_THREATS, sha256)
+ res = api_call(uri=uri, method='get', headers=headers, body={}, params={}, accept_404=False)
+ return res
+
+
+def get_threats():
+ page = demisto.args()['page'] if 'page' in demisto.args() else None
+ page_size = demisto.args()['page_size'] if 'page_size' in demisto.args() else None
+
+ threats = get_threats_request(page, page_size)['page_items']
+ dbot_score_array = []
+ for threat in threats:
+ dbot_score = 0
+ score = threat.get('cylance_score', None)
+ if score:
+ threat['cylance_score'] = normalize_score(threat['cylance_score'])
+ threshold = demisto.args().get('threshold', FILE_THRESHOLD)
+ dbot_score = translate_score(threat['cylance_score'], int(threshold))
+ dbot_score_array.append({
+ 'Indicator': threat['name'],
+ 'Type': 'file',
+ 'Vendor': 'Cylance Protect',
+ 'Score': dbot_score
+ })
+ context_threat = createContext(data=threats, keyTransform=underscoreToCamelCase, removeNull=True)
+ ec = {
+ 'File': context_threat,
+ 'DBotScore': dbot_score_array
+ }
+
+ title = 'Cylance Protect Threats'
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': threats,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, threats, headerTransform=underscoreToCamelCase, removeNull=True),
+ 'EntryContext': ec
+ })
+
+
+def get_threats_request(page=None, page_size=None):
+ access_token = get_authentication_token(scope=SCOPE_THREAT_LIST)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+
+ params = {}
+    if page:
+        params['page'] = page
+    if page_size:
+        params['page_size'] = page_size
+
+ res = api_call(uri=URI_THREATS, method='get', headers=headers, params=params)
+ return res
+
+
+def get_threat_devices():
+ threat_hash = demisto.args()['sha256']
+ page = demisto.args()['pageNumber'] if 'pageNumber' in demisto.args() else None
+ page_size = demisto.args()['pageSize'] if 'pageSize' in demisto.args() else None
+
+ threats = get_threat_devices_request(threat_hash, page, page_size)['page_items']
+
+ if threats:
+ threats_context = threats[:]
+
+ for threat in threats:
+ threat['ip_addresses'] = ', '.join(threat['ip_addresses'])
+ threat['mac_addresses'] = ', '.join(threat['mac_addresses'])
+
+ file_paths = []
+ endpoint_context = []
+ devices_context = []
+ for threat in threats_context:
+ endpoint_context.append({
+ 'Hostname': threat['name'],
+ 'IPAddress': threat['ip_addresses'],
+ 'MACAddress': threat['mac_addresses']
+ })
+ current_device = {
+ 'Hostname': threat['name'],
+ 'IPAddress': threat['ip_addresses'],
+ 'MACAddress': threat['mac_addresses'],
+ 'AgentVersion': threat['agent_version'],
+ 'DateFound': threat['date_found'],
+ 'FilePath': threat['file_path'],
+ 'ID': threat['id'],
+ 'State': threat['state'],
+ 'FileStatus': threat['file_status']
+ }
+ if threat['policy_id']:
+ current_device['PolicyID'] = threat['policy_id']
+ devices_context.append(current_device)
+ file_path = threat.pop('file_path')
+ file_paths.append({
+ 'FilePath': file_path
+ })
+
+ file_context = {
+ 'SHA256': threat_hash,
+ 'Path': file_paths
+ }
+
+ ec = {
+ 'File': file_context,
+ 'Endpoint(val.Hostname && val.Hostname === obj.Hostname)': endpoint_context,
+ 'CylanceProtect.Threat(val.SHA256 && val.SHA256 === obj.SHA256)': {
+ 'SHA256': threat_hash,
+ 'Devices': devices_context
+ }
+ }
+
+ title = 'Cylance Protect Threat ' + threat_hash + ' Devices'
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': threats,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, threats, headerTransform=underscoreToCamelCase, removeNull=True),
+ 'EntryContext': ec
+ })
+ else:
+        demisto.results('No devices found for the given threat.')
+
+
+def get_threat_devices_request(threat_hash, page=None, page_size=None):
+ access_token = get_authentication_token(scope=SCOPE_THREAT_DEVICE_LIST)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+
+ params = {}
+ if page:
+ params['page'] = page
+ if page_size:
+ params['page_size'] = page_size
+
+ uri = '%s/%s/devices' % (URI_THREATS, threat_hash)
+ res = api_call(uri=uri, method='get', headers=headers, params=params)
+ return res
+
+
+def get_list():
+ page = demisto.args()['pageNumber'] if 'pageNumber' in demisto.args() else None
+ page_size = demisto.args()['pageSize'] if 'pageSize' in demisto.args() else None
+
+ lst = get_list_request(demisto.args()['listTypeId'], page, page_size)['page_items']
+ dbot_score_array = []
+ for threat in lst:
+ dbot_score = 0
+ score = threat.get('cylance_score', None)
+ if score:
+ threat['cylance_score'] = normalize_score(threat['cylance_score'])
+ threshold = demisto.args().get('threshold', FILE_THRESHOLD)
+ dbot_score = translate_score(threat['cylance_score'], int(threshold))
+ dbot_score_array.append({
+ 'Indicator': threat['name'],
+ 'Type': 'file',
+ 'Vendor': 'Cylance Protect',
+ 'Score': dbot_score
+ })
+ if lst:
+ context_list = createContext(data=lst, keyTransform=underscoreToCamelCase, removeNull=True)
+ ec = {
+ 'File': context_list,
+ 'DBotScore': dbot_score_array
+ }
+
+ title = 'Cylance Protect Global List'
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': lst,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, lst, headerTransform=underscoreToCamelCase, removeNull=True),
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results('No list of this type was found.')
+
+
+def get_list_request(list_type_id, page=None, page_size=None):
+ access_token = get_authentication_token(scope=SCOPE_GLOBAL_LIST)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+
+ params = {}
+ if list_type_id == 'GlobalQuarantine':
+ params['listTypeId'] = 0
+ else: # List Type ID is GlobalSafe
+ params['listTypeId'] = 1
+ if page:
+ params['page'] = page
+ if page_size:
+ params['page_size'] = page_size
+ res = api_call(uri=URI_LISTS, method='get', headers=headers, params=params)
+ return res
+
+
+def get_list_entry_by_hash(sha256=None, list_type_id=None):
+ if not sha256:
+ sha256 = demisto.args()['sha256']
+ if not list_type_id:
+ list_type_id = demisto.args()['listTypeId']
+ total_pages = 0
+ current_page = 0
+ found_hash = None
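+    # page through the global list (200 entries per page) until the hash is found or all pages are scanned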
+ while not found_hash and total_pages >= current_page:
+ if not current_page:
+ current_page = 1
+ lst = get_list_request(list_type_id, current_page, 200)
+ if not total_pages:
+ total_pages = lst['total_pages']
+ for i in lst['page_items']:
+ if i['sha256'] == sha256:
+ found_hash = i
+ break
+ current_page += 1
+ if demisto.command() == 'cylance-protect-get-list-entry':
+ if found_hash:
+ context_list = createContext(data=found_hash, keyTransform=underscoreToCamelCase, removeNull=True)
+ ec = {
+ 'CylanceListSearch': context_list
+ }
+ title = 'Cylance Protect Global List Entry'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': found_hash,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, found_hash, headerTransform=underscoreToCamelCase,
+ removeNull=True),
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results("Hash not found")
+ else:
+ return found_hash
+
+
+def get_indicators_report():
+ url = 'https://protect.cylance.com/Reports/ThreatDataReportV1/indicators/' + demisto.args()['token']
+ res = requests.request('GET', url, verify=USE_SSL)
+ filename = 'Indicators_Report.csv'
+ demisto.results(fileResult(filename, res.content))
+
+
+def update_device_threats():
+ device_id = demisto.args()['device_id']
+ threat_id = demisto.args()['threat_id']
+ event = demisto.args()['event']
+ update_device_threats_request(device_id, threat_id, event)
+ demisto.results('Device threat was updated successfully.')
+
+
+def update_device_threats_request(device_id, threat_id, event):
+ access_token = get_authentication_token(scope=SCOPE_THREAT_UPDATE)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+
+ body = {
+ 'threat_id': threat_id,
+ 'event': event
+ }
+
+ uri = '%s/%s/threats' % (URI_DEVICES, device_id)
+ res = api_call(uri=uri, method='post', headers=headers, body=body)
+
+ return res
+
+
+def download_threat():
+ contents = {} # type: Dict
+ context = {}
+ dbot_score = 0
+
+ sha256 = demisto.args()['sha256']
+ threat_url = download_threat_request(sha256)
+
+ threat_file = requests.get(threat_url, allow_redirects=True, verify=USE_SSL)
+ if threat_file.status_code == 200:
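+        # samples are served as password-protected zip archives (password: 'infected')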
+ if demisto.args()['unzip'] == "yes":
+ file_archive = StringIO(threat_file.content)
+ zip_file = zipfile.ZipFile(file_archive)
+ file_data = zip_file.read(sha256.upper(), pwd='infected')
+ demisto.results(fileResult(sha256, file_data))
+ else:
+            demisto.results(fileResult(sha256 + '.zip', threat_file.content))
+ else:
+ return_error('Could not fetch the file')
+
+ threat = get_threat_request(sha256)
+ if threat:
+ # add data about the threat if found
+ if threat.get('cylance_score'):
+ score = normalize_score(threat.get('cylance_score'))
+ threshold = demisto.args().get('threshold', FILE_THRESHOLD)
+ dbot_score = translate_score(score, int(threshold))
+
+ contents = {
+ 'Download URL': threat_url,
+ 'File Name': threat.get('name'),
+ 'File Size': threat.get('file_size'),
+ 'Detected By': threat.get('detected_by'),
+ 'GlobalQuarantine': threat.get('global_quarantined'),
+ 'Safelisted': threat.get('safelisted'),
+ 'Timestamp': threat.get('cert_timestamp'),
+ }
+
+ context[outputPaths['file']] = {
+ 'DownloadURL': threat_url,
+ 'SHA256': threat.get('sha256'),
+ 'Name': threat.get('name'),
+ 'Size': threat.get('file_size'),
+ 'Safelisted': threat.get('safelisted'),
+ 'Timestamp': threat.get('cert_timestamp'),
+ 'MD5': threat.get('md5')
+ }
+
+ if dbot_score == 3:
+ context[outputPaths['file']]['Malicious'] = {
+ 'Vendor': 'Cylance Protect',
+ 'Description': 'Score determined by get threat command'
+ }
+
+ context[outputPaths['dbotscore']] = {
+ 'Indicator': threat.get('name'),
+ 'Type': 'file',
+ 'Vendor': 'Cylance Protect',
+ 'Score': dbot_score
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Cylance Protect - Downloading threat attached to the following hash: '
+ + sha256, contents),
+ 'EntryContext': context
+ })
+
+
+def download_threat_request(sha256):
+ access_token = get_authentication_token(scope=SCOPE_THREAT_READ)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+    uri = '%s/%s/%s' % (URI_THREATS, 'download', sha256)
+ res = api_call(uri=uri, method='get', headers=headers)
+ if not res['url']:
+ return_error('No url was found')
+ return res['url']
+
+
+def add_hash_to_list():
+ context = {}
+
+ sha256 = demisto.args().get('sha256')
+ list_type = demisto.args().get('listType')
+ reason = demisto.args().get('reason')
+ category = demisto.args().get('category')
+
+ if list_type == "GlobalSafe" and not category:
+ return_error('Category argument is required for list type of Global Safe')
+
+ add_hash = add_hash_to_list_request(sha256, list_type, reason, category)
+ if not add_hash:
+ return_error('Could not add hash to list')
+
+ contents = {
+ 'Threat File SHA256': sha256,
+ 'List Type': list_type,
+ 'Category': category,
+ 'Reason': reason
+ }
+
+ context[outputPaths['file']] = {
+ 'SHA256': sha256,
+ 'Cylance': {
+ 'ListType': list_type,
+ 'Category': category
+ }
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(
+ 'The requested threat has been successfully added to ' + list_type + ' hashlist.', contents),
+ 'EntryContext': context
+ })
+
+
+def add_hash_to_list_request(sha256, list_type, reason, category=None):
+ access_token = get_authentication_token(scope=SCOPE_GLOBAL_LIST_CREATE)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+
+ body = {
+ 'sha256': sha256,
+ 'list_type': list_type,
+ 'reason': reason
+ }
+ if category:
+ body['category'] = category.replace(" ", "")
+ res = api_call(uri=URI_LISTS, method='post', headers=headers, body=body)
+ return res
+
+
+def delete_hash_from_lists():
+ sha256 = demisto.args().get('sha256')
+ list_type = demisto.args().get('listType')
+ context = {}
+
+ delete_hash_from_lists_request(sha256, list_type)
+
+ contents = {
+ 'Threat File SHA256': sha256,
+ 'Threat List Type': list_type
+ }
+
+ context[outputPaths['file']] = {
+ 'SHA256': sha256,
+ 'Cylance': {
+ 'ListType': list_type
+ }
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(
+ 'The requested threat has been successfully removed from ' + list_type + ' hashlist.', contents),
+ 'EntryContext': context
+ })
+
+
+def delete_hash_from_lists_request(sha256, list_type):
+ access_token = get_authentication_token(scope=SCOPE_GLOBAL_LIST_DELETE)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+
+ body = {
+ 'sha256': sha256,
+ 'list_type': list_type
+ }
+ res = api_call(uri=URI_LISTS, method='delete', headers=headers, body=body)
+ return res
+
+
+def delete_devices():
+ device_ids = demisto.args().get('deviceIds')
+ device_ids_list = argToList(device_ids)
+ contents = []
+ context_list = []
+
+ for device_id in device_ids_list:
+ device = get_device_request(device_id)
+ if not device:
+ continue
+ device_name = device.get('name')
+ context_list.append({
+ 'Id': device_id,
+ 'Name': device_name,
+ 'Deleted': True
+ })
+ contents.append({
+ 'Device Removed': device_id,
+ 'Device Name': device_name,
+ 'Deletion status': True
+ })
+ batch_size = demisto.args().get("batch_size", 20)
+ try:
+ batch_size = int(batch_size)
+ except ValueError:
+ return_error("Error: Batch Size specified must represent an int.")
+ for i in range(0, len(device_ids_list), batch_size):
+ current_deleted_devices_batch = device_ids_list[i:i + batch_size]
+ delete_devices_request(current_deleted_devices_batch)
+
+ context = {
+ 'Cylance.Device(val.Id && val.Id == obj.Id)': context_list
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(
+ 'The requested devices have been successfully removed from your organization list.', contents),
+ 'EntryContext': context
+ })
+
+
+def delete_devices_request(device_ids):
+ access_token = get_authentication_token()
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+ body = {
+ 'device_ids': device_ids
+ }
+
+ res = api_call(uri=URI_DEVICES, method='delete', headers=headers, body=body)
+ if not res or not res.get('request_id'):
+ return_error('Delete response does not contain request id')
+
+ return res
+
+
+def get_policy_details():
+ policy_id = demisto.args()['policyID']
+ contents = {} # type: Dict
+ context = {} # type: Dict
+ title = 'Could not find policy details for that ID'
+ filetype_actions_threat_contents = [] # type: list
+ filetype_actions_suspicious_contents = [] # type: list
+ safelist_contents = [] # type: list
+ title_filetype_actions_threat = 'Cylance Policy Details - FileType Actions Threat Files'
+ title_filetype_actions_suspicious = 'Cylance Policy Details - FileType Actions Suspicious Files'
+ title_safelist = 'Cylance Policy Details - File Exclusions - SafeList'
+ title_memory_exclusion = 'Cylance Policy Details - Memory Violation Actions \n' +\
+ 'This table provides detailed information about the memory violation settings. \n' +\
+ 'Memory protections Exclusion List :'
+ title_memory_violation = 'Memory Violation Settings: '
+ title_additional_settings = 'Cylance Policy Details - Policy Settings. \n' +\
+ 'Various policy settings are contained within this section.'
+
+ policy_details = get_policy_details_request(policy_id)
+ memory_violations_content = []
+ if policy_details:
+ title = 'Cylance Policy Details for: ' + policy_id
+ date_time = ''
+        # the timestamp in the response comes back as a malformed string; extract the epoch millis and convert
+ timestamp = policy_details.get('policy_utctimestamp')
+ if timestamp:
+ reg = re.search(r"\d{13}", timestamp)
+ if reg:
+ ts = float(reg.group())
+ date_time = datetime.fromtimestamp(ts / 1000).strftime('%Y-%m-%dT%H:%M:%S.%f+00:00')
+
+ context = {
+ 'Cylance.Policy(val.ID && val.ID == obj.ID)': {
+ 'ID': policy_details.get('policy_id'),
+ 'Name': policy_details.get('policy_name'),
+ 'Timestamp': date_time
+ }
+ }
+
+ contents = {
+ 'Policy Name': policy_details.get('policy_name'),
+ 'Policy Created At': date_time
+ }
+
+        suspicious_files_list = []
+        suspicious_files = policy_details.get('filetype_actions').get('suspicious_files')
+        if suspicious_files:
+            for file in suspicious_files:
+                suspicious_files_list.append({
+                    'Actions': file.get('actions'),
+                    'File Type': file.get('file_type')
+                })
+
+        threat_files_list = []
+        threat_files = policy_details.get('filetype_actions').get('threat_files')
+        if threat_files:
+            for file in threat_files:
+                threat_files_list.append({
+                    'Actions': file.get('actions'),
+                    'File Type': file.get('file_type')
+                })
+
+        filetype_actions_suspicious_contents = suspicious_files_list
+        filetype_actions_threat_contents = threat_files_list
+ safelist = policy_details.get('file_exclusions')
+ if safelist:
+ file_exclusions_list = []
+ for file_exclusion in safelist:
+ file_exclusions_list.append({
+ 'Research Class ID': file_exclusion.get('research_class_id'),
+ 'Infinity': file_exclusion.get('infinity'),
+ 'File Type': file_exclusion.get('file_type'),
+ 'AV Industry': file_exclusion.get('av_industry'),
+ 'Cloud Score': file_exclusion.get('cloud_score'),
+ 'File Hash': file_exclusion.get('file_hash'),
+ 'Research Subclass ID': file_exclusion.get('research_subclass_id'),
+ 'Reason': file_exclusion.get('reason'),
+ 'File Name': file_exclusion.get('file_name'),
+ 'Category Id': file_exclusion.get('category_id'),
+ 'MD5': file_exclusion.get('md5')
+ })
+
+ safelist_contents = file_exclusions_list
+
+ memory_violations = policy_details.get('memoryviolation_actions').get('memory_violations')
+ for memory_violation in memory_violations:
+ memory_violations_content.append({
+ 'Action': memory_violation.get('action'),
+ 'Violation Type': memory_violation.get('violation_type')
+ })
+
+ additional_settings = policy_details.get('policy')
+ additional_settings_content = []
+ for additional_setting in additional_settings:
+ additional_settings_content.append({
+ 'Name': additional_setting.get('name'),
+ 'Value': additional_setting.get('value')
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': contents,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, contents)
+ + tableToMarkdown(title_filetype_actions_suspicious, filetype_actions_suspicious_contents)
+ + tableToMarkdown(title_filetype_actions_threat, filetype_actions_threat_contents)
+ + tableToMarkdown(title_safelist, safelist_contents)
+ + tableToMarkdown(title_memory_exclusion, policy_details.get('memory_exclusion_list'))
+ + tableToMarkdown(title_memory_violation, memory_violations_content)
+            + tableToMarkdown(title_additional_settings, additional_settings_content),
+ 'EntryContext': context
+ })
+
+
+def get_policy_details_request(policy_id):
+ access_token = get_authentication_token(scope=SCOPE_POLICY_READ)
+ headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer ' + access_token
+ }
+
+ uri = '%s/%s' % (URI_POLICIES, policy_id)
+ res = api_call(uri=uri, method='get', headers=headers)
+ return res
+
+
+def fetch_incidents():
+ now = datetime.utcnow()
+ last_run = demisto.getLastRun().get('time')
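+    # on the first run there is no saved state, so look back three days for threats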
+ if last_run is None:
+ now = now - timedelta(days=3)
+ last_run = now
+ else:
+ last_run = datetime.strptime(last_run, '%Y-%m-%dT%H:%M:%S') # Converts string to datetime object
+ current_run = last_run
+ threats = get_threats_request().get('page_items', [])
+
+ incidents = []
+ for threat in threats:
+ last_found = datetime.strptime(threat['last_found'], '%Y-%m-%dT%H:%M:%S')
+ if last_found > last_run:
+ incident = threat_to_incident(threat)
+ incidents.append(incident)
+ if last_found > current_run:
+ current_run = last_found
+
+ demisto.incidents(incidents)
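+    # persist the newest detection time so the next run only fetches newer threats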
+ demisto.setLastRun({'time': current_run.isoformat().split('.')[0]})
+
+
+# EXECUTION
+LOG('command is %s' % (demisto.command(),))
+try:
+ handle_proxy()
+ if demisto.command() == 'test-module':
+ test()
+
+    elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+
+ elif demisto.command() == 'cylance-protect-get-devices':
+ get_devices()
+
+ elif demisto.command() == 'cylance-protect-get-device':
+ get_device()
+
+ elif demisto.command() == 'cylance-protect-update-device':
+ update_device()
+
+ elif demisto.command() == 'cylance-protect-get-device-threats':
+ get_device_threats()
+
+ elif demisto.command() == 'cylance-protect-get-policies':
+ get_policies()
+
+ elif demisto.command() == 'cylance-protect-create-zone':
+ create_zone()
+
+ elif demisto.command() == 'cylance-protect-get-zones':
+ get_zones()
+
+ elif demisto.command() == 'cylance-protect-get-zone':
+ get_zone()
+
+ elif demisto.command() == 'cylance-protect-update-zone':
+ update_zone()
+
+ elif demisto.command() == 'cylance-protect-get-threat':
+ get_threat()
+
+ elif demisto.command() == 'cylance-protect-get-threats':
+ get_threats()
+
+ elif demisto.command() == 'cylance-protect-get-threat-devices':
+ get_threat_devices()
+
+ elif demisto.command() == 'cylance-protect-get-indicators-report':
+ get_indicators_report()
+
+ elif demisto.command() == 'cylance-protect-update-device-threats':
+ update_device_threats()
+
+ elif demisto.command() == 'cylance-protect-get-list':
+ get_list()
+
+ elif demisto.command() == 'cylance-protect-get-list-entry':
+ get_list_entry_by_hash()
+
+ # new commands
+ elif demisto.command() == 'cylance-protect-download-threat':
+ download_threat()
+
+ elif demisto.command() == 'cylance-protect-add-hash-to-list':
+ add_hash_to_list()
+
+ elif demisto.command() == 'cylance-protect-delete-hash-from-lists':
+ delete_hash_from_lists()
+
+ elif demisto.command() == 'cylance-protect-delete-devices':
+ delete_devices()
+
+ elif demisto.command() == 'cylance-protect-get-policy-details':
+ get_policy_details()
+
+except Warning as w:
+ demisto.results({
+        'Type': 11,  # entryTypes['warning']
+ 'Contents': str(w),
+ 'ContentsFormat': formats['text']
+ })
+
+except Exception as e:
+ demisto.error('#### error in Cylance Protect v2: ' + str(e))
+ if demisto.command() == 'fetch-incidents':
+ LOG.print_log()
+ raise
+ else:
+ return_error(str(e))
diff --git a/Integrations/Cylance_Protect_v2/Cylance_Protect_v2.yml b/Integrations/Cylance_Protect_v2/Cylance_Protect_v2.yml
new file mode 100644
index 000000000000..a8db19e7d24d
--- /dev/null
+++ b/Integrations/Cylance_Protect_v2/Cylance_Protect_v2.yml
@@ -0,0 +1,972 @@
+category: Endpoint
+commonfields:
+ id: Cylance Protect v2
+ version: -1
+configuration:
+- defaultvalue: https://protectapi.cylance.com
+ display: Server URL
+ name: server
+ required: true
+ type: 0
+- display: Application ID
+ name: app_id
+ required: true
+ type: 0
+- display: Application Secret
+ name: app_secret
+ required: true
+ type: 4
+- display: Tenant API Key
+ name: tid
+ required: true
+ type: 4
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (not secure)
+ name: unsecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: '-59'
+ display: File Threshold
+ name: file_threshold
+ type: 0
+ required: false
+description: Manage endpoints using CylancePROTECT.
+display: Cylance Protect v2
+name: Cylance Protect v2
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The page number parameter is optional. When the value is not specified,
+ the default is 1.
+ isArray: false
+ name: pageNumber
+ required: false
+ secret: false
+ - default: false
+ description: The page size parameter is optional. When the value is not specified,
+ the default is 10. Max is 200.
+ isArray: false
+ name: pageSize
+ required: false
+ secret: false
+ deprecated: false
+  description: Allows a caller to request a page with a list of Console device resources
+    that belong to a tenant, sorted by registration (created) date in descending
+    order (most recent device registered listed first).
+ execution: false
+ name: cylance-protect-get-devices
+ outputs:
+ - contextPath: CylanceProtect.Device.AgentVersion
+ description: The CylancePROTECT Agent version installed on the device.
+ type: string
+ - contextPath: CylanceProtect.Device.DateFirstRegistered
+ description: The date and time (in UTC) when the device record was created.
+ type: date
+ - contextPath: CylanceProtect.Device.ID
+ description: The device’s unique identifier.
+ type: string
+ - contextPath: Endpoint.IPAddress
+ description: The list of IP addresses for the device.
+ type: Unknown
+ - contextPath: Endpoint.MACAddress
+ description: The list of MAC addresses for the device.
+ type: Unknown
+ - contextPath: Endpoint.Hostname
+ description: The device name.
+ type: string
+ - contextPath: CylanceProtect.Device.Policy.ID
+ description: Device policy ID.
+ type: string
+ - contextPath: CylanceProtect.Device.State
+ description: Machine state.
+ type: string
+ - contextPath: CylanceProtect.Device.Policy.Name
+ description: Device policy name.
+ type: string
+ - contextPath: CylanceProtect.Device.Hostname
+ description: The device name.
+ type: string
+ - contextPath: CylanceProtect.Device.MACAddress
+ description: The list of MAC addresses for the device.
+ type: unknown
+ - contextPath: CylanceProtect.Device.IPAddress
+ description: The list of IP addresses for the device.
+ type: Unknown
+ - arguments:
+ - default: true
+    description: The device ID.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Allows a caller to request a specific device resource belonging to
+    a Tenant.
+ execution: false
+ name: cylance-protect-get-device
+ outputs:
+ - contextPath: CylanceProtect.Device.AgentVersion
+ description: The CylancePROTECT Agent version installed on the device.
+ type: date
+ - contextPath: CylanceProtect.Device.DateFirstRegistered
+ description: The date and time (in UTC) when the device record was created.
+ type: date
+ - contextPath: CylanceProtect.Device.BackgroundDetection
+    description: If true, the Agent is currently running a background threat detection scan.
+ type: boolean
+ - contextPath: CylanceProtect.Device.DateLastModified
+ description: The date and time (in UTC) when the device record was last modified.
+ type: date
+ - contextPath: CylanceProtect.Device.DateOffline
+ description: The date and time (in UTC) when the device last communicated with
+ the Console.
+ type: date
+ - contextPath: CylanceProtect.Device.Hostname
+ description: The hostname for the device.
+ type: string
+ - contextPath: CylanceProtect.Device.ID
+ description: The unique identifier for the device.
+ type: string
+ - contextPath: CylanceProtect.Device.IPAddress
+ description: The list of IP addresses for the device.
+ type: Unknown
+ - contextPath: CylanceProtect.Device.MACAddress
+ description: The list of MAC addresses for the device.
+ type: Unknown
+ - contextPath: CylanceProtect.Device.IsSafe
+ description: If true, there are no outstanding threats.
+ type: boolean
+ - contextPath: CylanceProtect.Device.UpdateAvailable
+      description: If true, an update is available for the device.
+ type: boolean
+ - contextPath: CylanceProtect.Device.State
+ description: Machine state.
+ type: string
+ - contextPath: Endpoint.Hostname
+ description: Device hostname.
+ type: string
+ - contextPath: Endpoint.MACAddress
+ description: The list of MAC addresses for the device.
+ type: Unknown
+ - contextPath: Endpoint.IPAddress
+ description: The list of IP addresses for the device.
+ type: Unknown
+ - contextPath: Endpoint.OSVersion
+ description: Device OS version.
+ type: string
+ - contextPath: CylanceProtect.Device.OSVersion
+ description: Device OS version.
+ type: string
+ - contextPath: CylanceProtect.Device.Name
+ description: Device name.
+ type: string
+ - arguments:
+ - default: true
+ description: The device ID.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ description: The device name.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ description: The policy ID.
+ isArray: false
+ name: policyId
+ required: false
+ secret: false
+ - default: false
+      description: Zone IDs to add.
+ isArray: true
+ name: addZones
+ required: false
+ secret: false
+ - default: false
+      description: Zone IDs to remove.
+ isArray: true
+ name: removeZones
+ required: false
+ secret: false
+ deprecated: false
+ description: Allows a caller to update a specific Console device resource belonging
+ to a Tenant.
+ execution: false
+ name: cylance-protect-update-device
+ - arguments:
+ - default: true
+ description: The device ID.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+      description: If the threat score is less than or equal to the threshold, the file
+        is considered malicious. If the threshold is not specified, the default file
+        threshold configured in the instance settings is used.
+ isArray: false
+ name: threshold
+ required: false
+ secret: false
+ - default: false
+ description: The page number. If not specified, the default is 1.
+ isArray: false
+ name: pageNumber
+ required: false
+ secret: false
+ - default: false
+ description: The page size. If not specified, the default is 10.
+ isArray: false
+ name: pageSize
+ required: false
+ secret: false
+ deprecated: false
+ description: Allows a caller to request a page with a list of threats found on
+ a specific device.
+ execution: false
+ name: cylance-protect-get-device-threats
+ outputs:
+ - contextPath: File.Classification
+ description: The Cylance threat classification.
+ type: string
+ - contextPath: File.CylanceScore
+ description: The Cylance score assigned to the threat.
+ type: number
+ - contextPath: File.DateFound
+ description: The date and time (in UTC) when the threat was found on the device.
+ type: string
+ - contextPath: File.FilePath
+ description: The file path where the threat was found on the device.
+ type: string
+ - contextPath: File.FileStatus
+      description: The current status of the file on the device. This can be one of
+        the following: Default (0), Quarantined (1), Whitelisted (2), Suspicious (3), FileRemoved
+        (4), Corrupt (5).
+ type: number
+ - contextPath: File.Name
+ description: The name of the threat.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the threat.
+ type: string
+ - contextPath: File.SubClassification
+ description: The Cylance threat sub-classification.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The tested indicator.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - arguments:
+ - default: false
+ description: The page number. If not specified, the default is 1.
+ isArray: false
+ name: pageNumber
+ required: false
+ secret: false
+ - default: false
+ description: The page size. If not specified, the default is 10. Maximum is 200.
+ isArray: false
+ name: pageItems
+ required: false
+ secret: false
+ deprecated: false
+ description: Allows the caller to get a list of tenant policies.
+ execution: false
+ name: cylance-protect-get-policies
+ outputs:
+ - contextPath: CylanceProtect.Policies.DateAdded
+ description: The date and time (in UTC) when the Console policy resource was
+ first created.
+ type: string
+ - contextPath: CylanceProtect.Policies.DateModified
+ description: The date and time (in UTC) when the Console policy resource was
+ last modified.
+ type: string
+ - contextPath: CylanceProtect.Policies.DeviceCount
+ description: The number of devices assigned to this policy.
+ type: number
+ - contextPath: CylanceProtect.Policies.Id
+ description: The unique ID for the policy resource.
+ type: string
+ - contextPath: CylanceProtect.Policies.Name
+ description: The name of the policy.
+ type: string
+ - contextPath: CylanceProtect.Policies.ZoneCount
+ description: The number of zones assigned to this policy.
+ type: number
+ - arguments:
+ - default: false
+ description: The name of the zone.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+      description: The unique ID for the policy assigned to the zone.
+ isArray: false
+ name: policy_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: The criticality value of the zone. Can be "Low", "Medium", or "High".
+ isArray: false
+ name: criticality
+ predefined:
+ - Low
+ - Medium
+ - High
+ required: true
+ secret: false
+ deprecated: false
+ description: Creates (adds) a zone to your Console.
+ execution: false
+ name: cylance-protect-create-zone
+ - arguments:
+ - default: false
+      description: The page number. If not specified, the default is 1.
+ isArray: false
+ name: pageNumber
+ required: false
+ secret: false
+ - default: false
+      description: The page size. If not specified, the default is 10. Maximum is 200.
+ isArray: false
+ name: pageItems
+ required: false
+ secret: false
+ deprecated: false
+    description: Request zone information for your organization. This returns
+      the top 100 records.
+ execution: false
+ name: cylance-protect-get-zones
+ outputs:
+ - contextPath: CylanceProtect.Zones.Criticality
+      description: The criticality value of the zone (Low, Medium, or High).
+ type: string
+ - contextPath: CylanceProtect.Zones.DateCreated
+ description: The date and time (in UTC) when the zone was created.
+ type: string
+ - contextPath: CylanceProtect.Zones.DateModified
+ description: The date and time (in UTC) when the zone was last modified.
+ type: string
+ - contextPath: CylanceProtect.Zones.Id
+ description: The unique ID of the zone.
+ type: string
+ - contextPath: CylanceProtect.Zones.Name
+ description: The name of the zone.
+ type: string
+ - contextPath: CylanceProtect.Zones.PolicyId
+ description: The unique ID of the policy assigned to the zone.
+ type: string
+ - contextPath: CylanceProtect.Zones.UpdateType
+ description: The update type for the zone.
+ type: string
+ - contextPath: CylanceProtect.Zones.ZoneRuleId
+ description: The unique ID for the zone rule created for the zone.
+ type: string
+ - arguments:
+ - default: true
+ description: The zone ID.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Request zone information for a specific zone in your organization.
+ execution: false
+ name: cylance-protect-get-zone
+ outputs:
+ - contextPath: CylanceProtect.Zones.Criticality
+      description: The criticality value of the zone (Low, Medium, or High).
+ type: string
+ - contextPath: CylanceProtect.Zones.DateCreated
+ description: The date and time (in UTC) when the zone was created.
+ type: string
+ - contextPath: CylanceProtect.Zones.DateModified
+ description: The date and time (in UTC) when the zone was last modified.
+ type: string
+ - contextPath: CylanceProtect.Zones.Id
+ description: The unique ID of the zone.
+ type: string
+ - contextPath: CylanceProtect.Zones.Name
+ description: The name of the zone.
+ type: string
+ - contextPath: CylanceProtect.Zones.PolicyId
+ description: The unique ID of the policy assigned to the zone.
+ type: string
+ - contextPath: CylanceProtect.Zones.UpdateType
+ description: The update type for the zone.
+ type: string
+ - contextPath: CylanceProtect.Zones.ZoneRuleId
+ description: The unique ID for the zone rule created for the zone.
+ type: string
+ - arguments:
+ - default: true
+ description: The zone ID.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+      description: The unique ID for the policy assigned to the zone.
+ isArray: false
+ name: policyId
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The criticality value of the zone. Can be "Low", "Medium", or "High".
+ isArray: false
+ name: criticality
+ predefined:
+ - Low
+ - Medium
+ - High
+ required: false
+ secret: false
+ deprecated: false
+ description: Updates a zone in your organization.
+ execution: false
+ name: cylance-protect-update-zone
+ - arguments:
+ - default: true
+ description: The SHA256 hash of the threat.
+ isArray: false
+ name: sha256
+ required: true
+ secret: false
+ deprecated: false
+ description: Requests threat details for a specific threat.
+ execution: false
+ name: cylance-protect-get-threat
+ outputs:
+ - contextPath: File.AutoRun
+ description: Indicates if the file is set to automatically run on system startup.
+ type: boolean
+ - contextPath: File.AvIndustry
+ description: The score provided by the Anti-Virus industry.
+ type: number
+ - contextPath: File.CertIssuer
+ description: The ID for the certificate issuer.
+ type: string
+ - contextPath: File.CertPublisher
+ description: The ID for the certificate publisher.
+ type: string
+ - contextPath: File.CertTimestamp
+ description: The date and time (in UTC) when the file was signed using the certificate.
+ type: string
+ - contextPath: File.Classification
+ description: The threat classification for the threat.
+ type: string
+ - contextPath: File.CylanceScore
+ description: The Cylance Score assigned to the threat.
+ type: number
+ - contextPath: File.DetectedBy
+ description: The name of the Cylance module that detected the threat.
+ type: string
+ - contextPath: File.FileSize
+ description: The size of the file.
+ type: number
+ - contextPath: File.GlobalQuarantine
+ description: Identifies if the threat is on the Global Quarantine list.
+ type: boolean
+ - contextPath: File.MD5
+ description: The MD5 hash for the threat.
+ type: string
+ - contextPath: File.Name
+ description: The name of the threat.
+ type: string
+ - contextPath: File.Running
+ description: Identifies if the threat is executing, or another executable loaded
+ or called it.
+ type: boolean
+ - contextPath: File.Safelisted
+ description: Identifies if the threat is on the Safe List.
+ type: boolean
+ - contextPath: File.SHA256
+ description: The SHA256 hash for the threat.
+ type: string
+ - contextPath: File.Signed
+ description: Identifies the file as signed or not signed.
+ type: boolean
+ - contextPath: File.SubClassification
+ description: The threat sub-classification for the threat.
+ type: string
+ - contextPath: File.UniqueToCylance
+ description: Whether the threat was identified by Cylance, and not by other anti-virus
+ sources.
+ type: boolean
+ - contextPath: DBotScore.Indicator
+ description: The tested indicator.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - arguments:
+ - default: true
+ description: The SHA256 hash of the threat.
+ isArray: false
+ name: sha256
+ required: true
+ secret: false
+ deprecated: false
+    description: Allows a caller to request a list of devices affected by a specific threat.
+ execution: false
+ name: cylance-protect-get-threat-devices
+ outputs:
+ - contextPath: CylanceProtect.Threat.Devices.ID
+ description: The device ID.
+ type: string
+ - contextPath: CylanceProtect.Threat.Devices.DateFound
+ description: The date and time (in UTC) when the threat was found on the device.
+ type: date
+ - contextPath: CylanceProtect.Threat.Devices.AgentVersion
+ description: The agent version installed on the device.
+ type: string
+ - contextPath: CylanceProtect.Threat.Devices.FileStatus
+ description: Current quarantine status of the file on the device. Default (0), Quarantined
+ (1), Whitelisted (2), Suspicious (3), FileRemoved (4), Corrupt (5).
+ type: number
+ - contextPath: Endpoint.IPAddress
+ description: The list of IP addresses for the device.
+ type: Unknown
+ - contextPath: Endpoint.MACAddress
+ description: The list of MAC addresses for the device.
+ type: Unknown
+ - contextPath: Endpoint.Hostname
+      description: The device name.
+ type: string
+ - contextPath: CylanceProtect.Threat.Devices.PolicyID
+      description: The unique identifier of the policy assigned to the device. If no policy is assigned, the value is null.
+ type: string
+ - contextPath: CylanceProtect.Threat.Devices.State
+ description: The state of the device. Can be "Offline" or "Online".
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the threat.
+ type: string
+ - contextPath: File.Path
+ description: The path where the file was found on the device.
+ type: string
+ - contextPath: CylanceProtect.Threat.Devices.Hostname
+      description: The device name.
+ type: string
+ - contextPath: CylanceProtect.Threat.Devices.IPAddress
+ description: The list of IP addresses for the device.
+ type: Unknown
+ - contextPath: CylanceProtect.Threat.Devices.MACAddress
+ description: The list of MAC addresses for the device.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: Threat data report token.
+ isArray: false
+ name: token
+ required: true
+ secret: false
+ deprecated: false
+ description: Produces a CSV threat data report of the indicators.
+ execution: false
+ name: cylance-protect-get-indicators-report
+ - arguments:
+ - default: false
+ description: The page size. If not specified, the default is 10. Maximum is 200.
+ isArray: false
+ name: page_size
+ required: false
+ secret: false
+ - default: false
+ description: The page number. If not specified, the default is 1.
+ isArray: false
+ name: page
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns information about Cylance Protect threats.
+ execution: false
+ name: cylance-protect-get-threats
+ outputs:
+ - contextPath: File.Classification
+ description: The threat classification for the threat.
+ type: string
+ - contextPath: File.SubClassification
+ description: The threat sub-classification for the threat.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash for the threat.
+ type: string
+ - contextPath: File.Safelisted
+ description: Identifies if the threat is on the Safe List.
+ type: boolean
+ - contextPath: File.Name
+ description: The name of the threat.
+ type: string
+ - contextPath: File.LastFound
+ description: The date and time (in UTC) when the file was last found.
+ type: string
+ - contextPath: File.CylanceScore
+ description: The Cylance Score assigned to the threat.
+ type: number
+ - contextPath: File.GlobalQuarantine
+ description: Identifies if the threat is on the Global Quarantine list.
+ type: string
+ - contextPath: File.UniqueToCylance
+      description: Whether the threat was identified by Cylance, and not by other anti-virus
+        sources.
+ type: string
+ - contextPath: File.FileSize
+ description: The size of the file.
+ type: number
+ - contextPath: File.MD5
+ description: The MD5 hash of the threat.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The tested indicator.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - arguments:
+ - default: false
+ description: The SHA256 hash of the convicted threat.
+ isArray: false
+ name: threat_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The requested status update for the convicted threat. Can be "Quarantine" or "Waive".
+ isArray: false
+ name: event
+ predefined:
+ - Quarantine
+ - Waive
+ required: true
+ secret: false
+ - default: false
+      description: The ID of the device to update.
+ isArray: false
+ name: device_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Updates the status of a convicted threat. Can be "Quarantine" or "Waive".
+ execution: true
+ name: cylance-protect-update-device-threats
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+      description: The type of the list for which to retrieve the hashes. Can be "GlobalQuarantine" or "GlobalSafe".
+ isArray: false
+ name: listTypeId
+ predefined:
+ - GlobalQuarantine
+ - GlobalSafe
+ required: true
+ secret: false
+ - default: false
+ description: The page number. If not specified, the default is 1.
+ isArray: false
+ name: page
+ required: false
+ secret: false
+ - default: false
+ description: The page size. If not specified, the default is 10. Maximum is 200.
+ isArray: false
+ name: page_size
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of global list resources for a tenant.
+ execution: false
+ name: cylance-protect-get-list
+ outputs:
+ - contextPath: File.Added
+ description: The timestamp when the file was added to the list.
+ type: string
+ - contextPath: File.AddedBy
+      description: The ID of the tenant user who added the file to the list.
+ type: string
+ - contextPath: File.AvIndustry
+ description: The score provided by the anti-virus industry.
+ type: number
+ - contextPath: File.Category
+ description: The category for the list specified (Global Safe list only).
+ type: string
+ - contextPath: File.Classification
+ description: The Cylance threat classification.
+ type: string
+ - contextPath: File.CylanceScore
+ description: The Cylance score assigned to the threat.
+ type: number
+ - contextPath: File.ListType
+ description: The list type to which the threat belongs. Can be "GlobalQuarantine" or "GlobalSafe".
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 hash of the threat.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the threat.
+ type: string
+ - contextPath: File.Name
+ description: The name of the threat.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The tested indicator.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - arguments:
+ - default: false
+ description: The SHA256 hash for the file to download.
+ isArray: false
+ name: sha256
+ required: true
+ secret: false
+ - default: false
+ description: File threshold to determine the file reputation.
+ isArray: false
+ name: threshold
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'no'
+ description: If "yes" the file is unzipped and returned to the War Room.
+ isArray: false
+ name: unzip
+ predefined:
+ - 'yes'
+ - 'no'
+ required: false
+ secret: false
+ deprecated: false
+ description: Downloads the threat (file) attached to a specific SHA256 hash.
+ execution: false
+ name: cylance-protect-download-threat
+ outputs:
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file.
+ type: string
+ - contextPath: File.Name
+ description: File name.
+ type: string
+ - contextPath: File.Size
+ description: File size.
+ type: number
+ - contextPath: File.Safelisted
+ description: Whether the file is on the Safe List.
+ type: boolean
+ - contextPath: File.Timestamp
+ description: Timestamp.
+ type: string
+ - contextPath: File.MD5
+ description: MD5 hash of the file.
+ type: string
+ - contextPath: DBotScore.Indicator
+      description: The tested indicator.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The DBot score.
+ type: number
+ - contextPath: DBotScore.Type
+      description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: The DBot score vendor.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: string
+ - contextPath: File.Malicious.Description
+      description: For malicious files, the reason the vendor made the decision.
+ type: string
+ - arguments:
+ - default: false
+ description: SHA256 hash to add to the GlobalSafe list or GlobalQuarantine list.
+ isArray: false
+ name: sha256
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The list type to which the threat belongs. Can be "GlobalQuarantine" or "GlobalSafe".
+ isArray: false
+ name: listType
+ predefined:
+ - GlobalQuarantine
+ - GlobalSafe
+ required: true
+ secret: false
+ - default: false
+ defaultValue: Added by Demisto
+ description: The reason why the file was added to the list.
+ isArray: false
+ name: reason
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: None
+      description: This field is required only if the listType value is "GlobalSafe".
+ Can be "Admin Tool", "Commercial Software", "Drivers", "Internal Application", "Operating System", "Security Software", or "None". Default is "None".
+ isArray: false
+ name: category
+ predefined:
+ - Admin Tool
+ - Commercial Software
+ - Drivers
+ - Internal Application
+ - Operating System
+ - Security Software
+ - None
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds a convicted threat for a particular tenant to either the Global Quarantine list or the Global
+ Safe list.
+ execution: false
+ name: cylance-protect-add-hash-to-list
+ outputs:
+ - contextPath: File.SHA256
+ description: The SHA256 hash for the threat.
+ type: string
+ - contextPath: File.Cylance.ListType
+ description: The list type to which the threat belongs. Can be "GlobalQuarantine" or "GlobalSafe".
+ type: string
+ - contextPath: File.Cylance.Category
+      description: This field is relevant only if the listType value is "GlobalSafe".
+ Can be "Admin Tool", "Commercial Software", "Drivers", "Internal Application", "Operating System", "Security Software", or "None".
+ type: string
+ - arguments:
+ - default: false
+ description: The SHA256 hash of the threat.
+ isArray: false
+ name: sha256
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The list type to which the threat belongs. Can be "GlobalQuarantine" or "GlobalSafe".
+ isArray: false
+ name: listType
+ predefined:
+ - GlobalSafe
+ - GlobalQuarantine
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes a convicted threat for a particular tenant from either the Global Quarantine list or the
+ Global Safe list.
+ execution: true
+ name: cylance-protect-delete-hash-from-lists
+ outputs:
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file.
+ type: string
+ - contextPath: File.Cylance.ListType
+ description: The list type to which the threat belongs. Can be "GlobalQuarantine" or "GlobalSafe".
+ type: string
+ - arguments:
+ - default: false
+      description: The ID of the tenant policy to retrieve.
+ isArray: false
+ name: policyID
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns details for a single policy.
+ execution: false
+ name: cylance-protect-get-policy-details
+ outputs:
+ - contextPath: Cylance.Policy.ID
+ description: The ID of the policy.
+ type: string
+ - contextPath: Cylance.Policy.Name
+ description: The name of the policy.
+ type: string
+ - contextPath: Cylance.Policy.Timestamp
+ description: The date and time (in UTC) that the policy was created.
+ type: string
+ - name: cylance-protect-delete-devices
+ arguments:
+ - name: deviceIds
+ required: true
+ description: The unique identifiers for the devices to be deleted. The maximum
+ number of Device IDs per request is 20.
+ isArray: true
+ - name: batch_size
+ description: The number of devices to delete per batch. The default is 20, which is also the maximum number of devices that can be deleted per request.
+ defaultValue: "20"
+ outputs:
+ - contextPath: Cylance.Device.Id
+ description: The unique identifier of the deletion request.
+ type: string
+ - contextPath: Cylance.Device.Name
+ description: Device name.
+ type: string
+ - contextPath: Cylance.Device.Deleted
+      description: Whether the device was deleted.
+ type: boolean
+    description: Deletes one or more devices from an organization.
+ execution: true
+ dockerimage: demisto/cylance_protect_v2
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- Cylance Protect v2 Test
diff --git a/Integrations/Cylance_Protect_v2/Cylance_Protect_v2_image.png b/Integrations/Cylance_Protect_v2/Cylance_Protect_v2_image.png
new file mode 100644
index 000000000000..6a4c1a7e1ef6
Binary files /dev/null and b/Integrations/Cylance_Protect_v2/Cylance_Protect_v2_image.png differ
diff --git a/Integrations/Cymon/cymon-description.md b/Integrations/Cymon/cymon-description.md
new file mode 100644
index 000000000000..c767da0d8fe0
--- /dev/null
+++ b/Integrations/Cymon/cymon-description.md
@@ -0,0 +1,3 @@
+Analyzes suspicious domains and IP addresses.
+
+**Cymon has been deprecated because the Cymon service is no longer available.**
\ No newline at end of file
diff --git a/Integrations/Cymon/cymon.png b/Integrations/Cymon/cymon.png
new file mode 100644
index 000000000000..5948fa904761
Binary files /dev/null and b/Integrations/Cymon/cymon.png differ
diff --git a/Integrations/Cymon/integration-Cymon.py b/Integrations/Cymon/integration-Cymon.py
new file mode 100644
index 000000000000..d74337e2e1f8
--- /dev/null
+++ b/Integrations/Cymon/integration-Cymon.py
@@ -0,0 +1,464 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+import requests
+import os
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+if not demisto.params().get('useProxy', False):
+    # pop() avoids a KeyError when a proxy variable is not set
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+''' GLOBAL VARS '''
+SERVER_URL_V1 = 'https://www.cymon.io:443/api/nexus/v1'
+SERVER_DASHBOARD_URL_V1 = 'https://www.cymon.io:443/api/dashboard/v1'
+SERVER_URL_V2 = 'https://api.cymon.io/v2/ioc/search'
+
+VERIFY_CERTIFICATES = False if demisto.params().get('unsecure') else True
+
+DEFAULT_HEADERS = {
+ "Content-Type": "application/json"
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def cymon_says():
+ return_error('Cymon service discontinued. Please disable or delete the integration instance.')
+
+
+def http_request(method, url, headers):
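+    # Thin wrapper around requests: returns the parsed JSON body on 200,
+    # returns an empty dict on 404, errors out on the 204 rate-limit
+    # response, and raises for any other failure status.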
+ try:
+ res = requests.request(method,
+ url,
+ verify=VERIFY_CERTIFICATES,
+ headers=headers)
+
+ if res.status_code == 200:
+ return res.json()
+ # 204 HTTP status code is returned when api rate limit has been exceeded
+ elif res.status_code == 204:
+ return_error("You've reached your API call quota.")
+ elif res.status_code == 404:
+ return {}
+
+ res.raise_for_status()
+
+    except Exception:
+        raise
+
+
+''' DOMAIN COMMAND '''
+
+
+# def get_domain_full_report(domain):
+# report_results = []
+#
+# from_param = 0
+# size_param = 10
+# total = None
+#
+# url = '{}/{}/{}?from={}&size={}'.format(SERVER_URL_V2, 'domain', domain, from_param, size_param)
+#
+# while total is None or total > from_param:
+# response = http_request('GET', url, DEFAULT_HEADERS)
+#
+# hits = response.get('hits', [])
+# for hit in hits:
+# timestamp = datetime.strptime(
+# hit.get('timestamp', datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")),
+# '%Y-%m-%dT%H:%M:%S.%fZ')
+#
+# report_results.append({
+# 'Title': hit.get('title', "").title(),
+# 'Feed': hit.get('feed'),
+# 'Timestamp': timestamp.strftime("%Y-%m-%d %H:%M:%S"),
+# # Formatting the timestamp to human readable date and time
+# 'Tags': hit.get('tags'),
+# 'Hostname': hit.get('ioc', {}).get('hostname'),
+# 'IP': hit.get('ioc', {}).get('ip'),
+# 'Domain': hit.get('ioc', {}).get('domain'),
+# 'Reported By': hit.get('reported_by'),
+# 'Location': hit.get('location', {}).get('country')
+# })
+#
+# from_param = from_param + size_param
+# total = int(response.get('total', 0))
+#
+# url = '{}/{}/{}?from={}&size={}'.format(SERVER_URL_V2, 'domain', domain, from_param, size_param)
+#
+# return report_results
+
+
+# def get_domain_report(domain_full_report):
+# reports = {} # type:dict
+#
+# for report in domain_full_report:
+# title = report.get('Title')
+# timestamp = datetime.strptime(
+# report.get('Timestamp', datetime.now().strftime("%Y-%m-%d %H:%M:%S")), '%Y-%m-%d %H:%M:%S')
+#
+# if (title in reports and reports.get(title).get('Timestamp') < timestamp) or title not in reports: # type: ignore
+# reports.update({title: {
+# 'Feed': report.get('Feed'),
+# 'Timestamp': timestamp,
+# 'Tags': report.get('Tags'),
+# 'Hostname': report.get('Hostname'),
+# 'IP': report.get('IP'),
+# 'Domain': report.get('Domain'),
+# 'Reported By': report.get('Reported By'),
+# 'Location': report.get('Location')
+# }})
+#
+# report_results = []
+#
+# for report in reports:
+# report_results.append({
+# 'Title': report,
+# 'Feed': reports.get(report).get('Feed'), # type: ignore
+# 'Timestamp': reports.get(report).get('Timestamp').strftime("%Y-%m-%d %H:%M:%S"), # type: ignore
+# # Formatting the timestamp to human readable date and time
+# 'Tags': reports.get(report).get('Tags'), # type: ignore
+# 'Hostname': reports.get(report).get('Hostname'), # type: ignore
+# 'IP': reports.get(report).get('IP'), # type: ignore
+# 'Domain': reports.get(report).get('Domain'), # type: ignore
+# 'Reported By': reports.get(report).get('Reported By'), # type: ignore
+# 'Location': reports.get(report).get('Location') # type: ignore
+# })
+#
+# return {
+# 'reports': report_results,
+# 'total': len(domain_full_report)
+# }
+
+
+# def create_domain_command_markdown(domain, total_hits, reports, domain_full_report, is_full_response):
+# md = '## Cymon Domain report for: {}\n'.format(domain)
+#
+# md += '\n'
+#
+# md += '**Total Hits:** {}'.format(total_hits)
+#
+# md += '\n'
+#
+# md += tableToMarkdown("The following reports are the latest malicious hits resolved to the given domain:", reports,
+# ['Title', 'Hostname', 'IP', 'Timestamp', 'Feed', 'Tags', 'Location', 'Reported By', 'Domain'])
+#
+# if is_full_response:
+# md += tableToMarkdown("Full report list:", domain_full_report,
+# ['Title', 'Hostname', 'IP', 'Timestamp', 'Feed', 'Tags', 'Location', 'Reported By',
+# 'Domain'])
+#
+# return md
+
+
+# def create_context_domain_command(domain, reports):
+# cymon_domain_context_activities = []
+# description = 'Reported suspicious activities: '
+#
+# for report in reports:
+# cymon_domain_context_activities.append({
+# 'Title': report.get('Title'),
+# 'Tags': report.get('Tags'),
+# 'Time': report.get('Timestamp'),
+# 'Hostname': report.get('Hostname'),
+# 'IP': report.get('IP')
+# })
+#
+# description += '{}, '.format(report.get('Title'))
+#
+# description = description[:-2]
+#
+# context = {
+# outputPaths['domain']: {
+# 'Name': domain,
+# 'Malicious': {
+# 'Vendor': 'Cymon',
+# 'Description': description
+# }
+# },
+# 'Cymon': {
+# 'Domain': {
+# 'Activities': cymon_domain_context_activities
+# }
+# }
+# }
+#
+# return context
+
+
+# def get_domain_report_command():
+# args = demisto.args()
+#
+# domain = args.get('domain')
+# is_full_response = args.get('fullResponse') == 'true'
+#
+# domain_full_report = get_domain_full_report(domain)
+# domain_summarized_report = get_domain_report(domain_full_report)
+#
+# if len(domain_full_report) == 0:
+# return "Domain " + domain + " is not in Cymons's dataset"
+#
+# markdown = create_domain_command_markdown(domain, domain_summarized_report.get('total'),
+# domain_summarized_report.get('reports'), domain_full_report,
+# is_full_response)
+# context = create_context_domain_command(domain, domain_summarized_report.get('reports'))
+#
+# return {
+# 'Type': entryTypes['note'],
+# 'Contents': domain_full_report,
+# 'ContentsFormat': formats['json'],
+# 'HumanReadable': markdown,
+# 'EntryContext': context
+# }
+
+
+''' IP COMMAND '''
+
+
+# def get_ip_events_sources(ip):
+# url = '{}/{}/{}'.format(SERVER_URL_V1, 'ip', ip)
+# response = http_request('GET', url, DEFAULT_HEADERS)
+#
+# return response.get('sources', None)
+
+
+# def get_ip_events(ip):
+# url = '{}/{}/{}/{}?limit={}'.format(SERVER_URL_V1, 'ip', ip, 'events', 100)
+# events = {} # type:dict
+#
+# next_link = url
+#
+# while next_link is not None:
+# response = http_request('GET', next_link, DEFAULT_HEADERS)
+#
+# for event in response.get('results', []):
+# tag = event.get('tag')
+# date = datetime.strptime(
+# event.get('updated', datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")), '%Y-%m-%dT%H:%M:%SZ')
+#
+# if (tag in events and events[tag] < date) or tag not in events:
+# events.update({tag: date})
+#
+# next_link = response.get('next')
+#
+# for event in events:
+# events[event] = events[event].strftime(
+# "%Y-%m-%d %H:%M:%S") # Formatting the timestamp to human readable date and time
+#
+# return events
+
+
+# def get_ip_location(ip):
+# url = '{}/{}/{}'.format(SERVER_DASHBOARD_URL_V1, 'geolocation', ip)
+#
+# response = http_request('GET', url, DEFAULT_HEADERS)
+#
+# lon = response.get('longitude', None)
+# lat = response.get('latitude', None)
+#
+# if not lon or not lat:
+# return {}
+# else:
+# return {
+# 'lon': lon,
+# 'lat': lat
+# }
+
+
+# def get_ip_domains(ip, max_len):
+# url = '{}/{}/{}/{}?limit={}'.format(SERVER_URL_V1, 'ip', ip, 'domains', max_len)
+# domains = []
+#
+# response = http_request('GET', url, DEFAULT_HEADERS)
+#
+# for domain in response.get('results', []):
+# date = datetime.strptime(
+# domain.get('updated', datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")), '%Y-%m-%dT%H:%M:%SZ')
+#
+# domains.append({'Hostname': domain.get('name'),
+# 'Last Resolved': date.strftime("%Y-%m-%d %H:%M:%S")})
+#
+# return domains
+
+
+# def get_ip_urls(ip, max_len):
+# url = '{}/{}/{}/{}?limit={}'.format(SERVER_URL_V1, 'ip', ip, 'urls', max_len)
+# urls = {} # type:dict
+#
+# response = http_request('GET', url, DEFAULT_HEADERS)
+#
+# for response_url in response.get('results', []):
+# url = response_url.get('location')
+# if url.endswith("/"):
+# url = url[:-1]
+#
+# date = datetime.strptime(
+# response_url.get('updated', datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")),
+# '%Y-%m-%dT%H:%M:%SZ')
+#
+# if (url in urls and urls[url] < date) or url not in urls:
+# urls.update({url: date})
+#
+# urls_result = []
+# for url in urls:
+# urls_result.append({'Url': url, "Last Resolved": urls[url].strftime(
+# "%Y-%m-%d %H:%M:%S")}) # Formatting the timestamp to human readable date and time
+#
+# return urls_result
+
+
+# def get_ip_asn(ip):
+# url = '{}/{}/{}'.format(SERVER_DASHBOARD_URL_V1, 'ipwhois', ip)
+#
+# response = http_request('GET', url, DEFAULT_HEADERS)
+#
+# asn = response.get('asn')
+# asn_country_code = response.get('asn_country_code')
+#
+# if not asn or not asn_country_code:
+# return {}
+# else:
+# return {
+# 'asn': asn,
+# 'country': asn_country_code
+# }
+
+
+# def create_ip_command_markdown(ip, sources, events, domains, urls, asn):
+# md = '## Cymon IP report for: {}\n'.format(ip)
+#
+# if asn:
+# md += 'ASN: **{}** ({})\n'.format(asn.get('asn'), asn.get('country'))
+#
+# md += '\n'
+#
+# if events:
+# md += '### Reports\n'
+# for event in events:
+# md += '**{}** (Last reported on: {})\n'.format(event.title(), events[event])
+#
+# if sources:
+# md += '#### Sources\n'
+# for source in sources:
+# md += '{}\n'.format(source)
+#
+# if domains and len(domains) > 0:
+# md += tableToMarkdown("The following domains were resolved to the given IP address:", domains)
+#
+# if urls and len(urls) > 0:
+# md += tableToMarkdown("The following urls were resolved to the given IP address:", urls)
+#
+# return md
+
+
+# def create_ip_command_context(ip, asn, events, domains):
+# if events:
+# description = 'Reported suspicious activities: '
+#
+# for event in events:
+# description += '{}, '.format(event)
+#
+# description = description[:-2]
+# else:
+# description = 'No suspicious activities were reported'
+#
+# asn_in_context = {} # type:dict
+#
+# if asn:
+# asn_in_context = {
+# 'ASN': asn.get('asn'),
+# 'Geo': {
+# 'Country': asn.get('country')
+# }
+# }
+#
+# context = {'Cymon': {
+# 'IP': {
+# 'Domains': domains
+# }
+# }, outputPaths['ip']: {
+# 'Address': ip,
+# 'Malicious': {
+# 'Vendor': 'Cymon',
+# 'Description': description
+# }
+# }}
+#
+# context[outputPaths['ip']].update(asn_in_context)
+#
+# return context
+
+
+# def get_ip_report_command():
+# args = demisto.args()
+#
+# full_response = args.get('fullResponse') == 'true'
+#
+# ip = args.get('ip')
+# if not is_ip_valid(ip):
+#         return_error('An invalid IP was specified')
+#
+# sources = get_ip_events_sources(ip)
+#
+# if not sources:
+# return "IP " + ip + " is not in Cymons's dataset"
+#
+# if full_response:
+# max_len = 1000
+# else:
+# max_len = 50
+#
+# events = get_ip_events(ip)
+# location = get_ip_location(ip)
+# domains = get_ip_domains(ip, max_len)
+# urls = get_ip_urls(ip, max_len)
+# asn = get_ip_asn(ip)
+#
+# markdown = create_ip_command_markdown(ip, sources, events, domains, urls, asn)
+# context = create_ip_command_context(ip, asn, events, domains)
+#
+# return [
+# {
+# 'Type': entryTypes['map'],
+# 'Contents': {
+# 'lat': float(location.get('lat')),
+# 'lng': float(location.get('lon'))
+# },
+# 'ContentsFormat': formats['json']
+# },
+# {
+# 'Type': entryTypes['note'],
+# 'Contents': {
+# 'events': events,
+# 'sources': sources,
+# 'location': location,
+# 'domains': domains,
+# 'urls': urls,
+# 'asn': asn
+# },
+# 'HumanReadable': markdown,
+# 'EntryContext': context,
+# 'ContentsFormat': formats['json']
+# }]
+
+
+''' EXECUTION CODE '''
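+# Every command now short-circuits to a deprecation notice; the original
+# command implementations are kept above, commented out, for reference.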
+try:
+ command = demisto.command()
+
+ if command == 'test-module':
+        demisto.results('Cymon has been deprecated and is no longer in service. Please delete the instance.')
+ elif command == 'ip':
+ cymon_says()
+ elif command == 'domain':
+ cymon_says()
+
+except Exception:
+ raise
diff --git a/Integrations/Cymon/integration-Cymon.yml b/Integrations/Cymon/integration-Cymon.yml
new file mode 100644
index 000000000000..10a9ca530cee
--- /dev/null
+++ b/Integrations/Cymon/integration-Cymon.yml
@@ -0,0 +1,113 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: Cymon
+ version: -1
+configuration:
+- display: Trust any certificate (unsecure)
+ name: unsecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: useProxy
+ required: false
+ type: 8
+description: Analyzes suspicious domains and IP addresses
+display: Cymon (Deprecated)
+deprecated: true
+name: Cymon
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: IP to check the reputation of
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Returns all results, of which there can be hundreds. We recommend
+ that you do not use this argument in playbooks. Default is false.
+ isArray: false
+ name: fullResponse
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: true
+ description: Check the reputation of an IP address
+ execution: false
+ name: ip
+ outputs:
+ - contextPath: IP.Address
+ description: Malicious IP address
+ type: string
+ - contextPath: IP.ASN
+ description: Malicious IP ASN
+ type: number
+ - contextPath: IP.Geo.Country
+ description: Malicious IP country
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IPs, the vendor that made the decision
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IPs, the reason the vendor made the decision
+ type: string
+ - contextPath: Cymon.IP.Domains
+ description: Related malicious domains
+ type: Unknown
+ - arguments:
+ - default: true
+ description: A domain name
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Displays all the malicious hits of the domain
+ isArray: false
+ name: fullResponse
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: true
+ description: Retrieves a report on a given domain
+ execution: false
+ name: domain
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious Domains, the vendor that made the decision
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the reason the vendor made the decision
+ type: string
+ - contextPath: Cymon.Domain.Activities.Title
+      description: Title describing the malicious activity
+ type: Unknown
+ - contextPath: Cymon.Domain.Activities.Tags
+ description: Tags describing the malicious activity
+ type: Unknown
+ - contextPath: Cymon.Domain.Activities.Time
+ description: The time the activity was last reported
+ type: Unknown
+ - contextPath: Cymon.Domain.Activities.Hostname
+ description: The hostname on which the malicious activity was reported
+ type: Unknown
+ - contextPath: Cymon.Domain.Activities.IP
+ description: The IP on which the malicious activity was reported
+ type: Unknown
+ isfetch: false
+ runonce: false
+ script: ''
+ type: python
+image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHgAAAAyCAYAAACXpx/YAAAAAXNSR0IArs4c6QAAFC5JREFUeAHtWgl0FFXWvtXdSXfSWbuzbyQkJOwkEBb9HR2MioOKgqCgjIKiCI4yOujMr+Muv9ugHnEBRFAURREQReSgCAooIGaDhKyE7ElnI3t6ff93K+lOJyYhcThH50zfk0pVv1f16r3v7vcVkYtcCLgQcCHgQsCFgAsBFwIuBFwIuBBwIeBCwIWACwEXAi4EXAi4EHAh4ELAhYALgd8FAkKIv5jNZmGzWoXVaEy1CrGsVoiU00LofxcT7GcSUj/t/9XNYKYXALgWR7okSTn4HZyTm3vqRFpawJVjxlD+i6+SV1AguUWGkz5xfGHIJRdtr1S5bwrDvb834FS/twn91vMBM/9kMBiezcrOnhgXG1uI3ymYU1htTY1/fX0dQXvpXH4h1Z/OIyFsVLJtZ2zQhHEPR8y5YbFJiOduJXptmyRZf+t12N//CwZ/Lep9R5LXSD9SDFOT0k9JhL/zk4JIYzN1+NssNsfNCpVKKNzd10ILKh2NvS4AoGSzmZfaOsxhzl0qjUa0NzT4qbXahhaNZo+vJB137v+111VCQPfoZmNNzQiPwMCSeqKtekkq4/EwF21paembX+3dG11bW0tlZaWxl0+/fF97e7vqTFGRUqkEXJJEkkpJCgVWDPvHnCw/kUrVaRmBcSdPvvzR0iWj/y7EimRJanOe4x4h1BeRdYZXh3miuakpwLmvr2uFUmlV6/VFZWT6NlJSZ/Z1z2DaHAwuEiIk2mZbVv3DsXn1madGNBhqVNbmFqyAlzCwJbdZzDTq3nuo6rtDZPjhKCnc3MhmMlHk7OtIc8OsvRigXwavJ1LNy8r7S/Fb68eYzRZGmZSeHpT4yMN0etULZG1vp/DZs1a2w+d5SNL7g1lUf/dUChHtkZe/I/u9D5KaCovIKyKcIhctXFwmxIyITia3+fj4/KBWq6Pd3d2pqqqaduzcGW+xWAj+l3z8/PocWl4v5n36/Y/Icq5xyfi//832iRDLb+rS5GohYn3Ky9cWb//sivyTWWRuae1zHOdGKAVpgoMo9KqURrMQT7tJ0svO/YO9lhncIMQERfbpLZkbN4+pOPYTmdraSYCnktQppecbjJkZcsN1VAvQqjIyCVpLtg4jeScn2RCBiIGeB+clU0tLe3VmFpnMJpnBKq0XNdXXU11xCbWVV1Lt6Vxtkrv7GgBVECxJPw40Xn99rEG+NTVrTr6wOqkiLYOUajUZcvLIYjSOnrjm5eehvZA1Ourr65seGhJyS1VVFalUKjJhbegjm80GWYew8zXMtIDQK9w6NVp+Jxii9PCg/C++Ije9/u5Z9y9PR/tbsPG+msLC9zOfef6iyoxTJLm7deLa30S5nfUJ7xH5BVR+5EffMdWG1RDwegj4u9w9FFLlC+Ej5eZuyHz8mTF1RcWk1KhJgWNIhMlIMFmSEgdA6Tysctv5xtGxHEmSkvhZAcB4LDd4BTaFOBQAxAJgc15f6ztpWORbBUJcFSdJhvON27v/SqKH89/edG1leiapwAgmnq87tLjgzJlbS0tKbkmIj0+tNhiGn87JIdhj+R7WXn9/f4oIDyd/nd7qHRpqjl24QGM8W0w1GSepo7lZFmj5Zvxj/Ao+2U4+ieMeyxBieyTRnPwtH19UkXmKVLBMQyEJcxA2QXmbNpN2VMLjuULsTpCk2qGMoQoh65Xlu79Krj1T5Fi4gLQKmCQGmIE+H7FE2zo6IMEaWdr5flZbYbWyp3Ib6PlhRO7WtjYfK4BkYm1hC8DvtUJ7mHihjdDk/NfXTRi76sl/zRNi8VACmXphubZi245HCz77AgzQyGPyGrXhYaSHG9mxbx81NzZK2dnZk1hjZU3l92IOU6dMoXHjxn2j1+u3oinTSNQUtGJpWACpkuvT0m8v3bptTPG333UzGc+YgEfVF1+Fjrr0D3c3Z5y8tOLQD4SYQn6vrJlwRZJiYFzBV9lC8H0dMOl1Bw/FRE+dkoJBPu4caHD/VR4m6z9qIV1K+E0mXrgHwpCwq1IsGp2uBVLOTnhAsoE5PgnxqnO5eb68AJmYQQ3nEKHZJp0U4mx/A2jIMqq9qjrUYjSRQg3GYmVq+DoknGRpbesUMjysRF/pkR/Ja+N7f35vxfK0bUSv9Demc/tZIWLEz2mv5a3fiJixUyu5X2DOviNiyeLjQ+2trbLGdkBI7cTmOSUlpWXc2LEr0bYBzHbGIRdtB04IsWlSUuIj6lfWPJD34ccOJjOWFcdPUEzhmeVNRWe1bcBBXhuwYdcQt/SODrVOd87+rp5nxk+hqEtNDSj9cp+C3YBC5UZNObnk1tw8BZ1DYzBMUXJHGwI+MITJZjJT+PTLKODee/6GGeyGXlkhQwOLG2kI90Z6BAftUUgKziFl6as6/jNpP96xSqtUPsJtfZOkLvlyryebSyab1ULesTFkrK2XAyxZm7seZHDyt35K2tjhz6DIkBEgSd92dfV5Yr8bVFv7auYb62LaGhvl4M9+owQm1KafpOgzZ2nu3Lm0Z88eagMOrLXsb6dNm2YBc+/B7y32Z3qfESmzuXyQrY6x2vDA2f0HSNllfUwIUBtPZYUiWpeVhp9lRQifNoXCb7/1xWJSrO2gtj5xNZKne0JUxE7DgUOJZvCD3Vd7bR21G2pje8/hfL9VVrsp7nUn4D4VL0lnejX3+3OdEOU3xo/IU/t4TTQhwGKf3IFAKetfr3oCJM/+HmRwWLPYb8sEDQ66eCrVpWc4gHE82wV+7pvrtUnDot5E/HDFiK4Ux3GP0wX87kr43Vkc+NlNs6Mb7zU1NdGpV16j6ZvfEQGBgdLZoiKZwREREZQ4YcKWgZjrGAcXiKaejpx/44zqn34ebULULysLWNcEP815s1152Dq6wzpaSZE6eoDUkceuz842uMGsm+EyWAp4HFNrC0wbwl9J6jKTfOfA1Kk2fdyDQbsQ76Ozj6alkmTWT0zaGTJpIibT5Ts58GKJZvPfz8H97GOZbBA27+HR5DdqJFWyX3PDs0xgBvy5fMn3ttTUUsHraxMi6+tfhZns9C1yb/c/+N2Zldt3/rPQye/ax2GgNXodTX35BUrCUVZbK1UjaubclrU3KirKqtFo3ukebeCrJEk655s4/t2gpAmylsp3Qxgt8J3Wdph9Zz2FT7WQFWo5MPEcnYkVQbLBmA6R+mXwEMeRby8j2hR+y02FfuGhZIU/40naDwa3N3GEaO9nCUVRgxL/dyVVHjxEraVlcpTLjNVGRZD/uDFybs1jsNmuQHGhaMO7NwLSh3qPWyHEMJF6ck3uuo0awWEeE97PQaAucZwMOruhqkA9fQjT/OmOHWTE+9k8s+/V+ftz3p4lPzfIf/CyP6mjo4Sw2hkDRsLkq7Amjit+K7qgDI6UpHIpKXFh0nNPZcXOvJr8kYL4Iln3DgzoDDKcVwkwPfx95X5/RLMxV0ynpFVPFLhPmrihOTcP4toprCy5Ki8w/pGHyCMgwKHJb
HILtn9GFdt3PV4vxDX2oVFgcNfV1b1a8Oa64RzcsKtgYgEaufjPFHLZH2RB4XSmtrqaGurqiFMhZi4TMxiFjkZc9qhEyZ0D/IMhPoc82Nw1DIQQPv7nNPIbPZLUsBZ9CfgAw/XZBTGxDcU88yBDMsN9vrVXI8p+R+Ebpyc8M2aOubIy2dJh9FFp1P5nPvrkijMffiIpECix1nqC6ckvrSpC4PQTNLJVExaWXkC0HclEoruHxxLnYZFZk3tM9O74pXdOPfXC6kDZbwNJ1s7cte+oJ8VEv14iRE6UJBVeT7aVZza8e0MlFzMgBEzM3PBLLqbIhQsOFm7YhNhI8mILExAcTDq9noy4tjOZzx1Goy8e47ihO6zmgQYgJVn8UXVzsxsqFpgOuJI6zCMMwlu0bccAT5+/i+eF9SS019StVaJO4ObruxpSGOnd0XGpqbEp1CbkIgK7uzZ1YGABor8vUBQ6c8EZzFNF4FOD07qug8DwidDCY/jteJ8aGuTm57dfGxJyF9od1CzENMePrgsuG6rMlq+Db7ju846iovU5H2yVmcfa2Y7oOO+1N6PHP/fsaoOwfFD56WePFezY5WAum3hfWIj4e5ceO0vWJzHkHk5Zyg58T+MnJ9OCmTOpFczYuWsXcZrEOXBDQ0Mo7huL43scg6Ig5MVVZ0tQsuk2ihz1Vx86QiOXLZFrDBZUCH8tWaEUCkkKzX561VLvkfEUs3zpKMPGd//n9MHDSmsLSsp2wlq0UZEUdevND9agKNQ9G/sNOLP0IUzqjJSc2n/tJfImJWttD4K9AfjdiWmPzj5+CMF1oA0xd9z+cVjyxG5/jOCtGvXdnLfWXa/IyHo/Z907PfwuQKGEZUua2uKj71eSqgprU7JgGOvq6diDf6c0HOF6vQgOCZEDLA60SkqKlUiZ7uxjFn02pQnhdy49fZEhLR05q0OG5XtZi7nq4+bj/W+baVn4ysqpDdF/S2rapdlroMxRkdbQm2+sD1twkyFswbyaoFnXtBpy8yh/7TtRAVbrGyq5JGe3KzwluCIzJAI2aqYBIQO43Gwi86CjBD3ZWn1IXQggB/1Mn6j9slFOD0pF20Mjlt89sfHhR0e0NzXLPpZNccX+g1Rz9LiG8097VC773YULSHf1jMc12I3CpsKE7vAVZt/Xl8Y+eD+dNRikGoNBjqJZuMsAYmZm5i1wBV/j9we/nErPlkSiJzK2fjq6A/Nx5O3AlP3wsNmzyIxCirG+AdHhkIPgni/CLxZO7DRREyp7HhCahLsXHymLirrdRO0WT/IgKNPVEfX1b5d+vZ+fTUGZRLFVq9PNbynmqBWqBnNYum8/WZqaHlJHhK8EeIPWZJ6+lDK9KHd4DFdcmvkNF5riJM9SBFUr4u9avDPjxVdQOO82QqbmVgdzuWDDmj5s0cJPnid6o/c8BHbAAhKnUHtMNH26ZQtiIDCkayw+Hz12TOWp1a5FOzCjt8Fo50qWPBxStIBJNts/T7/82opiCJiDuejlMmv0vDnkNSySsl55HS3/PnPll3b94xImM9va2to6SpLO2vtQACri7IT7LJiDqlGp3KhLHD+/EqU1++4I56OlyEMR3mMUGvTOA0PtFROjrhke09NO2d9+gc46SfrKLMT/teTkPZW/83OHv7XXd+Ugzt+P4pbdVVDu77/ySUnqLHQ7vZ8LK415hRTR2EQeSGVQiyatp5aMJqPshzmo+eabb7T1dXVvoRY9F4zeisczIe3QU0uYvrMWfVv6R9vGlBxwqkXb3wGB8UJ615B5isycLnUFfPbuwZ7lVBLCiJDylyYe1gYuoYfvQ1RoY+Zy1M6CqkojOjT5upn7zp1IvUreRuOJ8IPQ5KESMxiDX2jT3Oc0fqKylybcefvFjbn5M2rgc3hP1kEWK8XftchIiWP/ip2nUke70wWD0FpRQXW7dtOVc2ZReXm5bWR8/M/4miPm+0OHAjgvZq0+dvw45eXnp4SHh6fodDrb2KgoU/W2HZriomKq5d0kuLP+sDJBaOSdNeD5a0iuAUSEUezC+XJdQRMYKGcgPJYSAmqEO2qpqEw0NTSsFpJkAvAouZyLr6xE0QYfJ2D+barpktRRJdrvGf34o5u0m7dcVnnoMHH+aOPkHPNinzQUsmGToC/iycoVLozHGsYmtD/ijQD2n0xcv2WL0psuliLby2GqR9x797dtjz0T1oKyqKTA9hqkfeT8uRQxb/bzSkn5Ze/n5HmglMpSyAys3n+ALrtj0eHhcXGPwuz8GBoaep/BULM6NS1Vzok5RmlCUFPf0EBePj6KYSqVphBRPIygbPH6Yy6/V9Yknj/WwijyGeZB7uJ/AxJ/6YKoXhs9jNwQXuat20iBUyfL6zNjE0Y/KalZPzLBO/OJZ8O9g4MflKN3LIr72pDbj7prMdk0mntlUxoieRRlCXHtiMf+cVvoyVM3NWRlj2+rqPQX/OUBf2UxSOJFeAQFaxDt9jAb+C10CQmq2Guuxl6vmwysV4Ce3Dw8nNSu8yWwGyJocrKc4/IuCu8q+cdEs7npdrZd8wmXpNwWbB1OXPXEurIv90ZbYDx9x4+1Rs6dveF7UsD19iQktjblyAS3uJvmQLM65+Hp54PSoa0Em+nfg+ESUqTkiopyeXeJy5aenp7y1xx8jYiUJR5FG87lOxVAfgMEhYXFmVhIPaFx3nFxbRpoIX/uY8VGim70KIRCyvPm1x4hIe3YdWpT6XTkgdp41NzrEYn7UNxtt5ImKNBmDQhYmfjSqktqjp9Iaa+t9YXSyJoIV2D2jYsrDZw25e2niDbLjc4Tw0yV+UTRfkRRMNZ6SABOg/sui8cBxxoPHTy4e/r06Q7JwOL9ILfXQA97+GYMnAsLcdT5/bg3ECWkyfCEgEQpoMsS9vlsYE427i1yvtd+jTwbENB0bE16t5Iiy1+SvrP3OZ/xlYbWLzj4kjayuvPY9j4EGSVqScrEu/mbrFP2b7IiIyPpipSU0yh8KI4cPpLQ2NpCs5OTKW3Fw90MBmN5I98T4MtfwLBuQwiCJ0+iYUsWba7T65/DuiGcHKu6c/XE0lFWVoqxB0yKMZfQYiI/FngUO63nAG0wkbGKTO5acrcFEHGmYjkhWkN9SOWHNci8NJO7KZWoCp8LycnxLxlsX/V/6RnAXm2oqXk2KytrEn9VCUZcDihCDh8+/ENqZoZy7pSplHrfSgeDuSI25oH7rBF/mrHLZjLyVpKAae7QhIWmVhO9h09ph1TyvNCw99CoCz34f+J40Iq9YPLhoD/+kevb/F00qqDCGBgY2KDX6QO4OuWJGjssHf5sqLdHUPDFF/3416DA+euxo/afuGbXnIEAqkjL8GUlYkirwC6RfCD9AY9tnyMvj3KB5ELAhYALARcCLgRcCLgQcCHgQsCFgAsBFwIuBFwIuBBwIeBCwIWACwEXAv0i8P/Id9hWCN0cNgAAAABJRU5ErkJggg==
diff --git a/Integrations/DuoAdminApi/CHANGELOG.md b/Integrations/DuoAdminApi/CHANGELOG.md
new file mode 100644
index 000000000000..400d9b8df853
--- /dev/null
+++ b/Integrations/DuoAdminApi/CHANGELOG.md
@@ -0,0 +1,8 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+Proxy configuration now works as expected.
+
+## [19.9.1] - 2019-09-18
+Fixed an issue in the ***duoadmin-get-authentication-logs-by-user*** command.
diff --git a/Integrations/DuoAdminApi/DuoAdminApi.py b/Integrations/DuoAdminApi/DuoAdminApi.py
new file mode 100644
index 000000000000..6beb47c15d11
--- /dev/null
+++ b/Integrations/DuoAdminApi/DuoAdminApi.py
@@ -0,0 +1,359 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+# imports
+import calendar
+import duo_client
+
+# Setup
+
+HOST = demisto.getParam('hostname')
+INTEGRATION_KEY = demisto.getParam('integration_key')
+SECRET_KEY = demisto.getParam('secret_key')
+USE_SSL = not demisto.params().get('insecure', False)
+USE_PROXY = demisto.params().get('proxy', False)
+
+# The duo client returns a signature error upon bad secret
+# Convert it to a more informative message using this
+INVALID_SECRET_ERROR_STRING = 'Invalid signature in request credentials'
+
+# Maps
+
+OPTIONS_TO_TIME = {
+ '10_seconds_ago': datetime.now() - timedelta(seconds=10),
+    # left here for backwards compatibility
+ '1_minutes_ago': datetime.now() - timedelta(minutes=1),
+ '1_minute_ago': datetime.now() - timedelta(minutes=1),
+ '10_minutes_ago': datetime.now() - timedelta(minutes=10),
+ '1_hour_ago': datetime.now() - timedelta(hours=1),
+ '10_hours_ago': datetime.now() - timedelta(hours=10),
+ '1_day_ago': datetime.now() - timedelta(days=1),
+ '1_week_ago': datetime.now() - timedelta(days=7),
+ '1_month_ago': datetime.now() - timedelta(days=30),
+ '1_year_ago': datetime.now() - timedelta(days=365),
+ '5_years_ago': datetime.now() - timedelta(days=1825),
+ '10_years_ago': datetime.now() - timedelta(days=3650)
+}
+
+
+def override_make_request(self, method, uri, body, headers):
+ """
+
+ This function is an override function to the original
+ duo_client.client.Client._make_request function in API version 4.1.0
+
+ The reason for it is that the API creates a bad uri address for the GET requests.
+
+ """
+
+ conn = self._connect()
+
+ # Ignored original code #
+ # --------------------- #
+ # if self.proxy_type == 'CONNECT':
+ # # Ensure the request uses the correct protocol and Host.
+ # if self.ca_certs == 'HTTP':
+ # api_proto = 'http'
+ # else:
+ # api_proto = 'https'
+ # uri = ''.join((api_proto, '://', self.host, uri))
+ # ------------------- #
+ # End of ignored code #
+
+ conn.request(method, uri, body, headers)
+ response = conn.getresponse()
+ data = response.read()
+ self._disconnect(conn)
+ return (response, data)
+
+
+# Utility Methods
+
+def create_api_call():
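+    # Build the Duo Admin client; ca_certs='DISABLE' tells duo_client to
+    # skip TLS certificate verification when "Trust any certificate" is set.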
+ if USE_SSL:
+ client = duo_client.Admin(
+ ikey=INTEGRATION_KEY,
+ skey=SECRET_KEY,
+ host=HOST,
+ )
+ else:
+ client = duo_client.Admin(
+ ikey=INTEGRATION_KEY,
+ skey=SECRET_KEY,
+ host=HOST,
+ ca_certs='DISABLE'
+ )
+
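+    # Monkey-patch the client's private _make_request method with the
+    # override defined above, so requests go out with the unmodified URI.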
+ client._make_request = lambda method, uri, body, headers: override_make_request(client, method, uri, body, headers)
+
+ return client
+
+
+def set_proxy():
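+    # Read the proxy address from the environment and apply it to the Duo
+    # client only when the integration's proxy option is enabled; if the
+    # value cannot be parsed, reset the client's proxy settings instead.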
+ try:
+ proxy_settings = os.environ.get('HTTP_PROXY') or os.environ.get('http_proxy', '')
+ if proxy_settings:
+ host, port = get_host_port_from_proxy_settings(proxy_settings)
+
+ if USE_PROXY:
+ admin_api.set_proxy(host=host, port=port)
+
+ # if no proxy settings have been set
+ except ValueError:
+ admin_api.set_proxy(host=None, port=None, proxy_type=None)
+
+
+def get_host_port_from_proxy_settings(proxy_settings):
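+    # Split a proxy value into a (host, port) tuple, handling IP addresses
+    # and hostnames, with or without an "http://" scheme prefix.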
+ proxy_settings_str = str(proxy_settings)
+
+ port = proxy_settings_str.split(':')[-1]
+
+ host_regex_filter = re.search(ipv4Regex, proxy_settings_str)
+
+ if host_regex_filter:
+ host = host_regex_filter.group()
+ else:
+ proxy_settings_str_args = proxy_settings_str.split(':')
+
+ if 'http' in proxy_settings_str:
+ host = ':'.join(proxy_settings_str_args[1:-1])[2:]
+ else:
+ host = ':'.join(proxy_settings_str_args[0:-1])
+
+ return host, port
+
+
+def time_to_timestamp_milliseconds(time):
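+    # Convert a datetime to a Unix-epoch-milliseconds string; calendar.timegm
+    # treats the datetime as UTC, matching the Admin API's mintime/maxtime
+    # format.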
+ return str(calendar.timegm(time.utctimetuple()) * 1000)
+
+
+# Generic function that receives a result JSON and turns it into an entry object
+def get_entry_for_object(title, obj, contents, context, headers=None, human_readable=None):
+ if len(obj) == 0:
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': contents,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': "There is no output result",
+ 'EntryContext': context
+ }
+
+ if headers:
+ if isinstance(headers, (str, unicode)):
+ headers = headers.split(',')
+
+ if isinstance(obj, dict):
+ headers = list(set(headers).intersection(set(obj.keys())))
+
+ readable = tableToMarkdown(
+ title,
+ obj,
+ headers,
+ lambda h: h.title().replace("_", " ").replace(".", ":")
+ )
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': obj,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': readable,
+ 'EntryContext': context
+ }
+
+
+def get_user_id(username):
+ res = admin_api.get_users_by_name(username)
+
+ if len(res) == 0:
+ return_error("No users found with the given username")
+
+ return res[0]['user_id']
+
+
+# Methods
+
+# The Duo client returns two different known error-message structures
+def test_instance():
+ try:
+ admin_api.get_info_summary()
+ demisto.results('ok')
+
+ except Exception as e:
+ if hasattr(e, 'data'):
+ # error data for 40103 is not informative enough so we write our own
+ if e.__getattribute__('data')['code'] == 40103:
+ demisto.results('Invalid secret key in request credentials')
+
+ else:
+ demisto.results(e.__getattribute__('data')['message'])
+
+ elif hasattr(e, 'strerror'):
+ demisto.results(e.__getattribute__('strerror'))
+
+ else:
+ demisto.results('Unknown error: ' + str(e))
+
+
+def get_all_users():
+ res = admin_api.get_users()
+
+ entry = get_entry_for_object(
+ 'Users', res, res,
+ {
+ 'DuoAdmin.UserDetails(val.username==obj.username)': res
+ },
+ headers=[
+ 'username',
+ 'user_id'
+ ]
+ )
+
+ demisto.results(entry)
+
+
+def get_authentication_logs_by_user(username, mintime):
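+    # The leading positional argument selects version 2 of the Duo
+    # authentication log API, whose response carries an 'authlogs' list.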
+ limit = demisto.args().get('limit', '50')
+ res = admin_api.get_authentication_log(
+ 2,
+ users=get_user_id(username),
+ mintime=time_to_timestamp_milliseconds(OPTIONS_TO_TIME[mintime]),
+ maxtime=time_to_timestamp_milliseconds(datetime.now()),
+ limit=limit
+ )
+
+ raw_logs = res['authlogs']
+
+ for log in raw_logs:
+ log['timestamp'] = formatEpochDate(log['timestamp'])
+
+ entry = get_entry_for_object(
+ 'Authentication logs for ' + username, raw_logs, raw_logs,
+ {
+ 'DuoAdmin.UserDetails(val.username && val.username == obj.username)':
+ {
+ 'username': username,
+ 'auth_logs': raw_logs
+ }
+ },
+ headers=[
+ 'access_device',
+ 'event_type',
+ 'result',
+ 'reason',
+ 'application',
+ 'factor',
+ 'timestamp',
+ 'auth_device'
+ ]
+ )
+ demisto.results(entry)
+
+
+def get_devices_by_user(username):
+ user_id = get_user_id(username)
+ res = admin_api.get_user_phones(user_id)
+
+ entry = get_entry_for_object(
+ 'Devices for ' + username, res, res,
+ {
+ 'DuoAdmin.UserDetails(val.username && val.username == obj.username)':
+ {
+ 'username': username,
+ 'phones': res
+ }
+ }
+ )
+
+ demisto.results(entry)
+
+
+def get_all_devices():
+ res = admin_api.get_phones()
+
+ entry = get_entry_for_object(
+ 'Devices', res, res,
+ {
+ 'DuoAdmin.Phones(val.phone_id==obj.phone_id)': res
+ }
+ )
+
+ demisto.results(entry)
+
+
+def dissociate_device_by_user(username, device_id):
+ user_id = get_user_id(username)
+ admin_api.delete_user_phone(user_id, device_id)
+
+ demisto.results('Phone with ID ' + device_id + ' was dissociated from user ' + username)
+
+
+def associate_device_to_user(username, device_id):
+ user_id = get_user_id(username)
+ admin_api.add_user_phone(user_id, device_id)
+
+ demisto.results('Phone with ID ' + device_id + ' was associated to user ' + username)
+
+
+def get_u2f_tokens_by_user(username):
+ user_id = get_user_id(username)
+ res = admin_api.get_user_u2ftokens(user_id)
+
+ for token in res:
+ token['date_added'] = formatEpochDate(token['date_added'])
+
+ entry = get_entry_for_object(
+ 'U2F Tokens for ' + username, res, res,
+ {
+ 'DuoAdmin.UserDetails(val.username && val.username == obj.username)':
+ {
+ 'username': username,
+ 'u2ftokens': res
+ }
+ }
+ )
+
+ demisto.results(entry)
+
+
+def delete_u2f_token(token_id):
+ admin_api.delete_u2ftoken(token_id)
+ demisto.results('Token with ID ' + token_id + ' deleted successfully')
+
+
+# Execution
+try:
+ admin_api = create_api_call()
+ set_proxy()
+
+ if demisto.command() == 'test-module':
+ test_instance()
+
+ if demisto.command() == 'duoadmin-get-users':
+ get_all_users()
+
+ if demisto.command() == 'duoadmin-get-authentication-logs-by-user':
+ get_authentication_logs_by_user(demisto.getArg('username'), demisto.getArg('from'))
+
+ if demisto.command() == 'duoadmin-get-devices':
+ get_all_devices()
+
+ if demisto.command() == 'duoadmin-get-devices-by-user':
+ get_devices_by_user(demisto.getArg('username'))
+
+ if demisto.command() == 'duoadmin-associate-device-to-user':
+ associate_device_to_user(demisto.getArg('username'), demisto.getArg('device_id'))
+
+ if demisto.command() == 'duoadmin-dissociate-device-from-user':
+ dissociate_device_by_user(demisto.getArg('username'), demisto.getArg('device_id'))
+
+ if demisto.command() == 'duoadmin-get-u2f-tokens-by-user':
+ get_u2f_tokens_by_user(demisto.getArg('username'))
+
+ if demisto.command() == 'duoadmin-delete-u2f-token':
+ delete_u2f_token(demisto.getArg('token_id'))
+
+except Exception as e:
+ return_error(e.message)
+sys.exit(0)
diff --git a/Integrations/DuoAdminApi/DuoAdminApi.yml b/Integrations/DuoAdminApi/DuoAdminApi.yml
new file mode 100644
index 000000000000..f82310d08485
--- /dev/null
+++ b/Integrations/DuoAdminApi/DuoAdminApi.yml
@@ -0,0 +1,244 @@
+category: Authentication
+commonfields:
+ id: DUO Admin
+ version: -1
+configuration:
+- display: API Hostname
+ name: hostname
+ required: true
+ type: 0
+- display: Integration Key
+ name: integration_key
+ required: true
+ type: 0
+- display: Secret Key
+ name: secret_key
+ required: true
+ type: 4
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: |-
+ DUO for admins.
+ Requires access to the Duo Admin API in order to use this integration.
+display: DUO Admin
+name: DUO Admin
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The user associated with the logs.
+ isArray: false
+ name: username
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Fetch logs from this time until now
+ isArray: false
+ name: from
+ predefined:
+ - 10_seconds_ago
+ - 1_minute_ago
+ - 10_minutes_ago
+ - 1_hour_ago
+ - 10_hours_ago
+ - 1_day_ago
+ - 1_week_ago
+ - 1_month_ago
+ - 1_year_ago
+ - 5_years_ago
+ - 10_years_ago
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '50'
+ description: The maximum number of authentication logs to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns authentication logs associated with a user. The number of
+ logs returned is set by the limit argument (50 by default).
+ execution: false
+ name: duoadmin-get-authentication-logs-by-user
+ outputs:
+ - contextPath: DuoAdmin.UserDetails.auth_logs.result
+ description: Result of the authentication attempt
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.event_type
+ description: Type of activity logged
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.reason
+ description: Reason for the authentication attempt result
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.access_device.ip
+ description: The GeoIP location of the access device. IP field
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.access_device.hostname
+ description: The GeoIP location of the access device. Hostname field
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.access_device.location.city
+ description: The GeoIP location of the access device. City field
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.access_device.location.state
+ description: The GeoIP location of the access device. State field
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.access_device.location.country
+ description: The GeoIP location of the access device. Country field
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.auth_device.ip
+ description: The GeoIP location of the authentication device. IP field
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.auth_device.hostname
+ description: The GeoIP location of the authentication device. Hostname field
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.auth_device.location.city
+ description: The GeoIP location of the authentication device. City field
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.auth_device.location.state
+ description: The GeoIP location of the authentication device. State field
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.auth_device.location.country
+ description: The GeoIP location of the authentication device. Country field
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.timestamp
+ description: Timestamp of the event
+ type: date
+ - contextPath: DuoAdmin.UserDetails.auth_logs.application.name
+ description: Name of the application accessed
+ type: string
+ - contextPath: DuoAdmin.UserDetails.auth_logs.factor
+ description: The authentication factor
+ type: string
+ - arguments:
+ - default: false
+ description: The user to dissociate the device from.
+ isArray: false
+ name: username
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the device to dissociate.
+ isArray: false
+ name: device_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Dissociates a device from a user
+ execution: false
+ name: duoadmin-dissociate-device-from-user
+ - arguments:
+ - default: false
+ description: The ID of the token to delete.
+ isArray: false
+ name: token_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes a U2F token.
+ execution: false
+ name: duoadmin-delete-u2f-token
+ - deprecated: false
+ description: Returns usernames and their user IDs.
+ execution: false
+ name: duoadmin-get-users
+ outputs:
+ - contextPath: DuoAdmin.UserDetails.username
+ description: Username
+ type: Unknown
+ - contextPath: DuoAdmin.UserDetails.user_id
+ description: User ID
+ type: Unknown
+ - arguments:
+ - default: false
+ description: Username
+ isArray: false
+ name: username
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns devices associated with a user.
+ execution: false
+ name: duoadmin-get-devices-by-user
+ outputs:
+ - contextPath: DuoAdmin.UserDetails.phones.phone_id
+ description: Device Id
+ type: string
+ - contextPath: DuoAdmin.UserDetails.phones.number
+ description: Device number
+ type: string
+ - contextPath: DuoAdmin.UserDetails.phones.platform
+ description: Device platform
+ type: string
+ - contextPath: DuoAdmin.UserDetails.phones.last_seen
+ description: Last time the device was used
+ type: date
+ - arguments:
+ - default: false
+ description: Username
+ isArray: false
+ name: username
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns a list of U2F tokens associated with the given username
+ execution: false
+ name: duoadmin-get-u2f-tokens-by-user
+ outputs:
+ - contextPath: DuoAdmin.UserDetails.u2ftokens
+ description: The list of tokens
+ type: Unknown
+ - deprecated: false
+ description: Returns all existing devices
+ execution: false
+ name: duoadmin-get-devices
+ outputs:
+ - contextPath: DuoAdmin.Phones.phone_id
+ description: Device Id
+ type: Unknown
+ - contextPath: DuoAdmin.Phones.number
+ description: Device number
+ type: Unknown
+ - contextPath: DuoAdmin.Phones.platform
+ description: Device platform
+ type: Unknown
+ - contextPath: DuoAdmin.Phones.last_seen
+ description: Last time the device was used
+ type: Unknown
+ - contextPath: DuoAdmin.Phones.users
+ description: Users associated with this device
+ type: Unknown
+ - arguments:
+ - default: false
+ description: Username
+ isArray: false
+ name: username
+ required: true
+ secret: false
+ - default: false
+ description: Device Id
+ isArray: false
+ name: device_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Associates a device to a user
+ execution: false
+ name: duoadmin-associate-device-to-user
+ dockerimage: demisto/duoadmin:1.0.0.147
+ isfetch: false
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- DuoAdmin API test playbook
diff --git a/Integrations/DuoAdminApi/DuoAdminApi_description.md b/Integrations/DuoAdminApi/DuoAdminApi_description.md
new file mode 100644
index 000000000000..26ed5b9049e4
--- /dev/null
+++ b/Integrations/DuoAdminApi/DuoAdminApi_description.md
@@ -0,0 +1,10 @@
+The Duo Admin API provides programmatic access to the administrative functionality of Duo Security's two-factor authentication platform.
+
+To set up an instance you will need:
+API hostname: the Admin API hostname provided by Duo (e.g. api-XXXXXXXX.duosecurity.com)
+Integration key: your integration key, provided by Duo
+Secret key: your secret key/password, provided by Duo
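+
+As a rough sketch (assuming the official `duo_client` Python library, which the integration code appears to use), these values map to the Admin API client like so:
+
+```python
+import duo_client
+
+# placeholder values -- substitute your own instance parameters
+admin_api = duo_client.Admin(
+    ikey='INTEGRATION_KEY',
+    skey='SECRET_KEY',
+    host='api-XXXXXXXX.duosecurity.com',
+)
+```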
+
+Note that the Duo Admin API requires different credentials from your regular Duo account (i.e. a separate API hostname, integration key, and secret key).
+
+For more information, see the documentation at [https://duo.com/docs/adminapi](https://duo.com/docs/adminapi) #disable-secrets-detection
\ No newline at end of file
diff --git a/Integrations/DuoAdminApi/DuoAdminApi_image.png b/Integrations/DuoAdminApi/DuoAdminApi_image.png
new file mode 100644
index 000000000000..72799a865cc4
Binary files /dev/null and b/Integrations/DuoAdminApi/DuoAdminApi_image.png differ
diff --git a/Integrations/EWSMailSender/CHANGELOG.md b/Integrations/EWSMailSender/CHANGELOG.md
new file mode 100644
index 000000000000..4d6ae72f4793
--- /dev/null
+++ b/Integrations/EWSMailSender/CHANGELOG.md
@@ -0,0 +1,10 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+Logging improvements.
+
+
+## [19.8.2] - 2019-08-22
+- Improved memory resource usage.
+- Improved logging.
diff --git a/Integrations/EWSMailSender/EWSMailSender.py b/Integrations/EWSMailSender/EWSMailSender.py
new file mode 100644
index 000000000000..8eb948b5b328
--- /dev/null
+++ b/Integrations/EWSMailSender/EWSMailSender.py
@@ -0,0 +1,329 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+from cStringIO import StringIO
+import logging
+import warnings
+import traceback
+
+import getpass
+
+
+# workaround for a bug in exchangelib: https://github.com/ecederstrand/exchangelib/issues/448
+class FixGetPass(object):
+ def __init__(self):
+ self.getpass_getuser_org = getpass.getuser
+
+ def getuser_no_fail():
+ # getuser() fails on some systems. Provide a sane default.
+ user = 'ews'
+ try:
+ if self.getpass_getuser_org:
+ user = self.getpass_getuser_org()
+ except KeyError:
+ pass
+ return user
+ getpass.getuser = getuser_no_fail
+
+ def __del__(self):
+ if self.getpass_getuser_org and getpass:
+ getpass.getuser = self.getpass_getuser_org
+
+
+_fix_getpass = FixGetPass()
+
+warnings.filterwarnings("ignore")
+
+# LOGGING
+log_stream = None
+log_handler = None
+
+
+def start_logging():
+ logging.raiseExceptions = False
+ global log_stream
+ global log_handler
+ if log_stream is None:
+ log_stream = StringIO()
+ log_handler = logging.StreamHandler(stream=log_stream)
+ log_handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
+ logger = logging.getLogger()
+ logger.addHandler(log_handler)
+ logger.setLevel(logging.DEBUG)
+
+
+from exchangelib.protocol import BaseProtocol, NoVerifyHTTPAdapter # noqa: E402
+from exchangelib.version import EXCHANGE_2007, EXCHANGE_2010, EXCHANGE_2010_SP2, EXCHANGE_2013, \
+ EXCHANGE_2016 # noqa: E402
+from exchangelib import HTMLBody, Message, FileAttachment, Account, IMPERSONATION, Credentials, Configuration, NTLM, \
+ BASIC, DIGEST, Version, DELEGATE, close_connections # noqa: E402
+
+IS_TEST_MODULE = False
+
+# load arguments
+USE_PROXY = demisto.params().get('proxy', False)
+NON_SECURE = demisto.params().get('insecure', False)
+AUTH_METHOD_STR = demisto.params().get('authType', 'Basic').lower()
+EWS_SERVER = demisto.params().get('ewsServer', 'https://outlook.office365.com/EWS/Exchange.asmx/')
+VERSION_STR = demisto.params().get('defaultServerVersion', '2013')
+FOLDER_NAME = demisto.params().get('folder', 'Inbox')
+ACCESS_TYPE = IMPERSONATION if demisto.params().get('impersonation', False) else DELEGATE
+
+# initialized in main()
+USERNAME = ""
+PASSWORD = ""
+ACCOUNT_EMAIL = ""
+
+VERSIONS = {
+ '2007': EXCHANGE_2007,
+ '2010': EXCHANGE_2010,
+ '2010_SP2': EXCHANGE_2010_SP2,
+ '2013': EXCHANGE_2013,
+ '2016': EXCHANGE_2016
+}
+
+
+def get_account(account_email):
+ return Account(
+ primary_smtp_address=account_email, autodiscover=False, config=config, access_type=ACCESS_TYPE,
+ )
+
+
+def send_email_to_mailbox(account, to, subject, body, bcc=None, cc=None, reply_to=None, html_body=None, attachments=[]):
+ message_body = HTMLBody(html_body) if html_body else body
+ m = Message(
+ account=account,
+ folder=account.sent,
+ cc_recipients=cc,
+ bcc_recipients=bcc,
+ subject=subject,
+ body=message_body,
+ to_recipients=to,
+ reply_to=reply_to
+ )
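+ # Exchange 2010 SP2 and earlier require saving the draft before attaching files;
+ # newer versions can attach first and send and save in a single call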
+ if account.protocol.version.build <= EXCHANGE_2010_SP2:
+ m.save()
+ for attachment in attachments:
+ m.attach(attachment)
+ m.send()
+ else:
+ for attachment in attachments:
+ m.attach(attachment)
+ m.send_and_save()
+ return m
+
+
+def get_auth_method(auth_method):
+ auth_method = auth_method.lower()
+ if auth_method == 'ntlm':
+ return NTLM
+ elif auth_method == 'basic':
+ return BASIC
+ elif auth_method == 'digest':
+ return DIGEST
+ raise Exception("%s auth method is not supported. Choose one of %s" % (auth_method, 'ntlm\\basic\\digest'))
+
+
+def get_version(version_str):
+ if version_str not in VERSIONS:
+ raise Exception("%s is unsupported version: %s. Choose one of" % (version_str, "\\".join(VERSIONS.keys())))
+ return Version(VERSIONS[version_str])
+
+
+def collect_manual_attachments(manualAttachObj):
+ attachments = []
+ for attachment in manualAttachObj:
+ res = demisto.getFilePath(os.path.basename(attachment['RealFileName']))
+
+ file_path = res["path"]
+ with open(file_path, 'rb') as f:
+ attachments.append(FileAttachment(content=f.read(), name=attachment['FileName']))
+
+ return attachments
+
+
+def send_email(to, subject, body="", bcc=None, cc=None, replyTo=None, htmlBody=None,
+ attachIDs="", attachCIDs="", attachNames="", from_mailbox=None, manualAttachObj=None):
+ account = get_account(from_mailbox or ACCOUNT_EMAIL)
+ bcc = bcc.split(",") if bcc else None
+ cc = cc.split(",") if cc else None
+ to = to.split(",") if to else None
+ manualAttachObj = manualAttachObj if manualAttachObj is not None else []
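+ # keep the subject within the 255-character limit, truncating with an ellipsis if needed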
+ subject = subject[:252] + '...' if len(subject) > 255 else subject
+
+ file_entries_for_attachments = [] # type: list
+ attachments_names = [] # type: list
+
+ if attachIDs:
+ file_entries_for_attachments = attachIDs if isinstance(attachIDs, list) else attachIDs.split(",")
+ if attachNames:
+ attachments_names = attachNames if isinstance(attachNames, list) else attachNames.split(",")
+ else:
+ for att_id in file_entries_for_attachments:
+ att_name = demisto.getFilePath(att_id)['name']
+ if isinstance(att_name, list):
+ att_name = att_name[0]
+ attachments_names.append(att_name)
+ if len(file_entries_for_attachments) != len(attachments_names):
+ raise Exception("attachIDs and attachNames lists should be the same length")
+
+ attachments = collect_manual_attachments(manualAttachObj)
+
+ if attachCIDs:
+ file_entries_for_attachments_inline = attachCIDs if isinstance(attachCIDs, list) else attachCIDs.split(",")
+ for att_id_inline in file_entries_for_attachments_inline:
+ try:
+ file_info = demisto.getFilePath(att_id_inline)
+ except Exception as ex:
+ demisto.info("EWS error from getFilePath: {}".format(ex))
+ raise Exception("entry %s does not contain a file" % att_id_inline)
+ att_name_inline = file_info["name"]
+ with open(file_info["path"], 'rb') as f:
+ attachments.append(FileAttachment(content=f.read(), name=att_name_inline, is_inline=True,
+ content_id=att_name_inline))
+
+ for i in range(0, len(file_entries_for_attachments)):
+ entry_id = file_entries_for_attachments[i]
+ attachment_name = attachments_names[i]
+ try:
+ res = demisto.getFilePath(entry_id)
+ except Exception as ex:
+ raise Exception("entry {} does not contain a file: {}".format(entry_id, str(ex)))
+ file_path = res["path"]
+ with open(file_path, 'rb') as f:
+ attachments.append(FileAttachment(content=f.read(), name=attachment_name))
+
+ send_email_to_mailbox(account, to, subject, body, bcc, cc, replyTo, htmlBody, attachments)
+ result_object = {
+ 'from': account.primary_smtp_address,
+ 'to': to,
+ 'subject': subject,
+ 'attachments': attachments_names
+ }
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': result_object,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Sent email', result_object),
+ }
+
+
+def prepare():
+ if NON_SECURE:
+ BaseProtocol.HTTP_ADAPTER_CLS = NoVerifyHTTPAdapter
+
+ if not USE_PROXY:
+ def remove_from_dict(d, key):
+ if key in d:
+ del d[key]
+
+ import os
+
+ remove_from_dict(os.environ, 'HTTP_PROXY')
+ remove_from_dict(os.environ, 'http_proxy')
+ remove_from_dict(os.environ, 'HTTPS_PROXY')
+ remove_from_dict(os.environ, 'https_proxy')
+
+ os.environ['NO_PROXY'] = EWS_SERVER
+
+ version = get_version(VERSION_STR)
+ credentials = Credentials(username=USERNAME, password=PASSWORD)
+ config_args = {
+ 'credentials': credentials,
+ 'auth_type': get_auth_method(AUTH_METHOD_STR),
+ 'version': version
+ }
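+ # a value containing 'http' is treated as a full service endpoint URL; anything else is used as the server hostname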
+ if 'http' in EWS_SERVER.lower():
+ config_args['service_endpoint'] = EWS_SERVER
+ else:
+ config_args['server'] = EWS_SERVER
+ config = Configuration(**config_args)
+ return config
+
+
+def prepare_args(d):
+ return dict((k.replace("-", "_"), v) for k, v in d.items())
+
+
+def test_module():
+ global IS_TEST_MODULE
+ IS_TEST_MODULE = True
+ BaseProtocol.TIMEOUT = 20
+ get_account(ACCOUNT_EMAIL)
+ demisto.results('ok')
+
+
+config = None # type: ignore
+
+
+def main():
+ global USERNAME, PASSWORD, ACCOUNT_EMAIL, log_stream
+ USERNAME = demisto.params()['credentials']['identifier']
+ PASSWORD = demisto.params()['credentials']['password']
+ ACCOUNT_EMAIL = demisto.params().get('mailbox', None)
+ if not ACCOUNT_EMAIL:
+ if "@" in USERNAME:
+ ACCOUNT_EMAIL = USERNAME
+ if ACCOUNT_EMAIL is None:
+ raise Exception("Provide a valid email address in the mailbox field")
+
+ try:
+ start_logging()
+ global config
+ config = prepare()
+ args = prepare_args(demisto.args())
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() == 'send-mail':
+ demisto.results(send_email(**args))
+ try:
+ # we don't want to leave cached connections around, as EWS limits the number of connections
+ # very aggressively. 12 seems to be the default limit
+ # see: https://blogs.msdn.microsoft.com/webdav_101/2018/06/02/you-are-doing-too-much-at-one-time-ewsmaxconcurrency-too-many-concurrent-connections-opened/ # noqa
+ close_connections()
+ except Exception as ex:
+ demisto.info("Failed close_connections (shouldn't happen). Ignoring exception: {}".format(ex))
+
+ except Exception as e:
+ import time
+
+ time.sleep(2)
+ debug_log = "=== DEBUG LOG ===\n" + (log_stream.getvalue() if log_stream else "")
+ error_message = ""
+ if "Status code: 401" in debug_log:
+ error_message = ("Got unauthorized from the server. "
+ "Check credentials are correct and authentication"
+ " method are supported. ")
+
+ error_message += ("You can try using 'domain\\username' as username for authentication. "
+ if AUTH_METHOD_STR.lower() == 'ntlm' else '')
+ if "Status code: 503" in debug_log:
+ error_message = "Got timeout from the server. " \
+ "Probably the server is not reachable with the current settings. " \
+ "Check proxy parameter. If you are using server URL - change to server IP address. "
+ error_message = error_message + "\n" + str(e)
+ stacktrace = traceback.format_exc()
+ if stacktrace:
+ debug_log += "\nFull stacktrace:\n" + stacktrace
+
+ demisto.error(
+ "EWS Mail Sender failed {}. Error: {}. Debug: {}".format(demisto.command(), error_message, debug_log))
+ if IS_TEST_MODULE:
+ demisto.results(error_message)
+ else:
+ return_error(error_message + '\n' + debug_log)
+ finally:
+ if log_stream:
+ try:
+ logging.getLogger().removeHandler(log_handler) # type: ignore
+ log_stream.close()
+ log_stream = None
+ except Exception as ex:
+ demisto.error("EWS Mail Sender: unexpected exception when trying to remove log handler: {}".format(ex))
+
+
+# python2 uses __builtin__, python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/EWSMailSender/EWSMailSender.yml b/Integrations/EWSMailSender/EWSMailSender.yml
new file mode 100644
index 000000000000..195d4337f24c
--- /dev/null
+++ b/Integrations/EWSMailSender/EWSMailSender.yml
@@ -0,0 +1,88 @@
+commonfields:
+ id: EWS Mail Sender
+ version: -1
+name: EWS Mail Sender
+display: EWS Mail Sender
+category: Messaging
+description: Exchange Web Services and Office 365 Email sender
+configuration:
+- display: Exchange URL or Server IP address
+ name: ewsServer
+ defaultvalue: https://outlook.office365.com/EWS/Exchange.asmx/
+ type: 0
+ required: true
+- display: 'Authentication: Email address (for Office 365) or DOMAIN\USERNAME (e.g.
+ DEMISTO.INT\admin)'
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: true
+- display: Server Version (2007, 2010, 2010_SP2, 2013, or 2016)
+ name: defaultServerVersion
+ defaultvalue: "2013"
+ type: 0
+ required: true
+- display: Authentication Type (NTLM, Basic, or Digest). For Office 365 use Basic.
+ name: authType
+ defaultvalue: Basic
+ type: 0
+ required: true
+- display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: "false"
+ type: 8
+ required: false
+- display: Has impersonation rights
+ name: impersonation
+ defaultvalue: "false"
+ type: 8
+ required: false
+- display: Sender Mailbox
+ name: mailbox
+ defaultvalue: ""
+ type: 0
+ required: false
+script:
+ script: ''
+ type: python
+ subtype: python2
+ commands:
+ - name: send-mail
+ arguments:
+ - name: to
+ required: true
+ description: A CSV list of email addresses for the 'to' field.
+ - name: cc
+ description: A CSV list of email addresses for the 'cc' field.
+ - name: bcc
+ description: A CSV list of email addresses for the 'bcc' field.
+ - name: subject
+ required: true
+ description: Subject for the email to be sent.
+ - name: replyTo
+ description: The email address specified in the 'reply to' field.
+ - name: body
+ description: The contents (body) of the email to send.
+ - name: htmlBody
+ description: Send an HTML formatted email. This argument overrides the "body" argument.
+ - name: attachIDs
+ description: 'A CSV list of War Room entry IDs that contain
+ files, and are used to attach files to the outgoing email. For example: attachIDs=15@8,19@8.'
+ isArray: true
+ - name: attachNames
+ description: A CSV list of names of attachments to send.
+ Should be the same number of elements as attachIDs.
+ isArray: true
+ - name: attachCIDs
+ description: 'A CSV list of CIDs to embed attachments within the email itself.'
+ isArray: true
+ description: Sends an email using EWS.
+ dockerimage: demisto/py-ews
+tests:
+ - "EWS Mail Sender Test"
+ - "EWS Mail Sender Test 2"
diff --git a/Integrations/EWSMailSender/EWSMailSender_description.md b/Integrations/EWSMailSender/EWSMailSender_description.md
new file mode 100644
index 000000000000..011351840854
--- /dev/null
+++ b/Integrations/EWSMailSender/EWSMailSender_description.md
@@ -0,0 +1,4 @@
+For Office 365, use https://outlook.office365.com/EWS/Exchange.asmx (default) as the Server URL.
+The default server version is used to determine the API version when accessing the EWS API.
+
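+For example, a minimal send from the War Room CLI (hypothetical values): `!send-mail to="user@example.com" subject="Test" body="Hello from the EWS Mail Sender"`.
+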
+To fetch emails from a specific folder, the folder path needs to be specified. Inbox is the default folder.
\ No newline at end of file
diff --git a/Integrations/EWSMailSender/EWSMailSender_image.png b/Integrations/EWSMailSender/EWSMailSender_image.png
new file mode 100644
index 000000000000..97612d231c0f
Binary files /dev/null and b/Integrations/EWSMailSender/EWSMailSender_image.png differ
diff --git a/Integrations/EWSMailSender/EWSMailSender_test.py b/Integrations/EWSMailSender/EWSMailSender_test.py
new file mode 100644
index 000000000000..e1eb35ccb45b
--- /dev/null
+++ b/Integrations/EWSMailSender/EWSMailSender_test.py
@@ -0,0 +1,13 @@
+import logging
+import EWSMailSender
+
+
+def test_prepare():
+ res = EWSMailSender.prepare()
+ assert res.protocol.server == 'outlook.office365.com'
+
+
+def test_start_logging():
+ EWSMailSender.start_logging()
+ logging.getLogger().debug("test this")
+ assert "test this" in EWSMailSender.log_stream.getvalue()
diff --git a/Integrations/EWSMailSender/Pipfile b/Integrations/EWSMailSender/Pipfile
new file mode 100644
index 000000000000..7d930214128c
--- /dev/null
+++ b/Integrations/EWSMailSender/Pipfile
@@ -0,0 +1,16 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest-mock = "*"
+autopep8 = "*"
+
+[packages]
+exchangelib = "==1.10.7"
+flake8 = "*"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/EWSMailSender/Pipfile.lock b/Integrations/EWSMailSender/Pipfile.lock
new file mode 100644
index 000000000000..7ac30d33b09d
--- /dev/null
+++ b/Integrations/EWSMailSender/Pipfile.lock
@@ -0,0 +1,565 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "c197c111817e626526c8b8c5d8750bae871fe8a8955188278b65e5c0b5523b2b"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "asn1crypto": {
+ "hashes": [
+ "sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87",
+ "sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
+ ],
+ "version": "==0.24.0"
+ },
+ "cached-property": {
+ "hashes": [
+ "sha256:3a026f1a54135677e7da5ce819b0c690f156f37976f3e30c5430740725203d7f",
+ "sha256:9217a59f14a5682da7c4b8829deadbfc194ac22e9908ccf7c8820234e80a1504"
+ ],
+ "version": "==1.5.1"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "cffi": {
+ "hashes": [
+ "sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774",
+ "sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d",
+ "sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90",
+ "sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b",
+ "sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63",
+ "sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45",
+ "sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25",
+ "sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3",
+ "sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b",
+ "sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647",
+ "sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016",
+ "sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4",
+ "sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb",
+ "sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753",
+ "sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7",
+ "sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9",
+ "sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f",
+ "sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8",
+ "sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f",
+ "sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc",
+ "sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42",
+ "sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3",
+ "sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909",
+ "sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45",
+ "sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d",
+ "sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512",
+ "sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff",
+ "sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201"
+ ],
+ "version": "==1.12.3"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==3.7.4"
+ },
+ "cryptography": {
+ "hashes": [
+ "sha256:24b61e5fcb506424d3ec4e18bca995833839bf13c59fc43e530e488f28d46b8c",
+ "sha256:25dd1581a183e9e7a806fe0543f485103232f940fcfc301db65e630512cce643",
+ "sha256:3452bba7c21c69f2df772762be0066c7ed5dc65df494a1d53a58b683a83e1216",
+ "sha256:41a0be220dd1ed9e998f5891948306eb8c812b512dc398e5a01846d855050799",
+ "sha256:5751d8a11b956fbfa314f6553d186b94aa70fdb03d8a4d4f1c82dcacf0cbe28a",
+ "sha256:5f61c7d749048fa6e3322258b4263463bfccefecb0dd731b6561cb617a1d9bb9",
+ "sha256:72e24c521fa2106f19623a3851e9f89ddfdeb9ac63871c7643790f872a305dfc",
+ "sha256:7b97ae6ef5cba2e3bb14256625423413d5ce8d1abb91d4f29b6d1a081da765f8",
+ "sha256:961e886d8a3590fd2c723cf07be14e2a91cf53c25f02435c04d39e90780e3b53",
+ "sha256:96d8473848e984184b6728e2c9d391482008646276c3ff084a1bd89e15ff53a1",
+ "sha256:ae536da50c7ad1e002c3eee101871d93abdc90d9c5f651818450a0d3af718609",
+ "sha256:b0db0cecf396033abb4a93c95d1602f268b3a68bb0a9cc06a7cff587bb9a7292",
+ "sha256:cfee9164954c186b191b91d4193989ca994703b2fff406f71cf454a2d3c7327e",
+ "sha256:e6347742ac8f35ded4a46ff835c60e68c22a536a8ae5c4422966d06946b6d4c6",
+ "sha256:f27d93f0139a3c056172ebb5d4f9056e770fdf0206c2f422ff2ebbad142e09ed",
+ "sha256:f57b76e46a58b63d1c6375017f4564a28f19a5ca912691fd2e4261b3414b618d"
+ ],
+ "version": "==2.7"
+ },
+ "dnspython": {
+ "hashes": [
+ "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01",
+ "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d"
+ ],
+ "version": "==1.16.0"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==1.1.6"
+ },
+ "exchangelib": {
+ "hashes": [
+ "sha256:9c1d36fc7c31a863ccf0ede851eb86fad6bf39c8c29316436efc76b8991b26aa",
+ "sha256:bbc1e3d536fb9b27041614a191613388ab66313332af4ccaf920c42530ba0822"
+ ],
+ "index": "pypi",
+ "version": "==1.10.7"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661",
+ "sha256:a796a115208f5c03b18f332f7c11729812c8c3ded6c46319c59b53efd3819da8"
+ ],
+ "index": "pypi",
+ "version": "==3.7.7"
+ },
+ "functools32": {
+ "hashes": [
+ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0",
+ "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.3.post2"
+ },
+ "future": {
+ "hashes": [
+ "sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8"
+ ],
+ "version": "==0.17.1"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "ipaddress": {
+ "hashes": [
+ "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794",
+ "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==1.0.22"
+ },
+ "lxml": {
+ "hashes": [
+ "sha256:06c7616601430aa140a69f97e3116308fffe0848f543b639a5ec2e8920ae72fd",
+ "sha256:177202792f9842374a8077735c69c41a4282183f7851443d2beb8ee310720819",
+ "sha256:19317ad721ceb9e39847d11131903931e2794e447d4751ebb0d9236f1b349ff2",
+ "sha256:36d206e62f3e5dbaafd4ec692b67157e271f5da7fd925fda8515da675eace50d",
+ "sha256:387115b066c797c85f9861a9613abf50046a15aac16759bc92d04f94acfad082",
+ "sha256:3ce1c49d4b4a7bc75fb12acb3a6247bb7a91fe420542e6d671ba9187d12a12c2",
+ "sha256:4d2a5a7d6b0dbb8c37dab66a8ce09a8761409c044017721c21718659fa3365a1",
+ "sha256:58d0a1b33364d1253a88d18df6c0b2676a1746d27c969dc9e32d143a3701dda5",
+ "sha256:62a651c618b846b88fdcae0533ec23f185bb322d6c1845733f3123e8980c1d1b",
+ "sha256:69ff21064e7debc9b1b1e2eee8c2d686d042d4257186d70b338206a80c5bc5ea",
+ "sha256:7060453eba9ba59d821625c6af6a266bd68277dce6577f754d1eb9116c094266",
+ "sha256:7d26b36a9c4bce53b9cfe42e67849ae3c5c23558bc08363e53ffd6d94f4ff4d2",
+ "sha256:83b427ad2bfa0b9705e02a83d8d607d2c2f01889eb138168e462a3a052c42368",
+ "sha256:923d03c84534078386cf50193057aae98fa94cace8ea7580b74754493fda73ad",
+ "sha256:b773715609649a1a180025213f67ffdeb5a4878c784293ada300ee95a1f3257b",
+ "sha256:baff149c174e9108d4a2fee192c496711be85534eab63adb122f93e70aa35431",
+ "sha256:bca9d118b1014b4c2d19319b10a3ebed508ff649396ce1855e1c96528d9b2fa9",
+ "sha256:ce580c28845581535dc6000fc7c35fdadf8bea7ccb57d6321b044508e9ba0685",
+ "sha256:d34923a569e70224d88e6682490e24c842907ba2c948c5fd26185413cbe0cd96",
+ "sha256:dd9f0e531a049d8b35ec5e6c68a37f1ba6ec3a591415e6804cbdf652793d15d7",
+ "sha256:ecb805cbfe9102f3fd3d2ef16dfe5ae9e2d7a7dfbba92f4ff1e16ac9784dbfb0",
+ "sha256:ede9aad2197a0202caff35d417b671f5f91a3631477441076082a17c94edd846",
+ "sha256:ef2d1fc370400e0aa755aab0b20cf4f1d0e934e7fd5244f3dd4869078e4942b9",
+ "sha256:f2fec194a49bfaef42a548ee657362af5c7a640da757f6f452a35da7dd9f923c"
+ ],
+ "version": "==4.3.4"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "ntlm-auth": {
+ "hashes": [
+ "sha256:bb2fd03c665f0f62c5f65695b62dcdb07fb7a45df6ebc86c770be2054d6902dd",
+ "sha256:ce5b4483ed761f341a538a426a71a52e5a9cf5fd834ebef1d2090f9eef14b3f8"
+ ],
+ "version": "==1.3.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pycparser": {
+ "hashes": [
+ "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
+ ],
+ "version": "==2.19"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pygments": {
+ "hashes": [
+ "sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127",
+ "sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297"
+ ],
+ "version": "==2.4.2"
+ },
+ "python-dateutil": {
+ "hashes": [
+ "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb",
+ "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"
+ ],
+ "version": "==2.8.0"
+ },
+ "pytz": {
+ "hashes": [
+ "sha256:303879e36b721603cc54604edcac9d20401bdbe31e1e4fdee5b9f98d5d31dfda",
+ "sha256:d747dd3d23d77ef44c6a3526e274af6efeb0a6f1afd5a69ba4d5be4098c8e141"
+ ],
+ "version": "==2019.1"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "version": "==2.22.0"
+ },
+ "requests-ntlm": {
+ "hashes": [
+ "sha256:1eb43d1026b64d431a8e0f1e8a8c8119ac698e72e9b95102018214411a8463ea",
+ "sha256:9189c92e8c61ae91402a64b972c4802b2457ce6a799d658256ebf084d5c7eb71"
+ ],
+ "version": "==1.1.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typing": {
+ "hashes": [
+ "sha256:4027c5f6127a6267a435201981ba156de91ad0d1d98e9ddc2aa173453453492d",
+ "sha256:57dcf675a99b74d64dacf6fba08fb17cf7e3d5fdff53d4a30ea2a5e7e52543d4",
+ "sha256:a4c8473ce11a65999c8f59cb093e70686b6c84c98df58c1dae9b3b196089858a"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==3.6.6"
+ },
+ "tzlocal": {
+ "hashes": [
+ "sha256:4ebeb848845ac898da6519b9b31879cf13b6626f7184c496037b818e238f2c4e"
+ ],
+ "version": "==1.5.1"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "autopep8": {
+ "hashes": [
+ "sha256:4d8eec30cc81bc5617dbf1218201d770dc35629363547f17577c61683ccfb3ee"
+ ],
+ "index": "pypi",
+ "version": "==1.4.4"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.5"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==3.7.4"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==1.1.6"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265",
+ "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.0"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7",
+ "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db"
+ ],
+ "version": "==0.18"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:c40744b6bc5162bbb39c1257fe298b7a393861d50978b565f3ccd9cb9de0182a",
+ "sha256:f57abacd059dc3bd666258d1efb0377510a89777fda3e3274e3c01f7c03ae22d"
+ ],
+ "version": "==4.3.20"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
+ "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
+ ],
+ "version": "==19.0"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742",
+ "sha256:5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7"
+ ],
+ "markers": "python_version == '3.4.*' or python_version < '3'",
+ "version": "==2.3.3"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:02c2b6d268695a8b64ad61847f92e611e6afcff33fd26c3a2125370c4662905d",
+ "sha256:ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93"
+ ],
+ "index": "pypi",
+ "version": "==1.9.4"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a",
+ "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03"
+ ],
+ "version": "==2.4.0"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:4a784f1d4f2ef198fe9b7aef793e9fa1a3b2f84e822d9b3a64a181293a572d45",
+ "sha256:926855726d8ae8371803f7b2e6ec0a69953d9c6311fa7c3b6c1b929ff92d27da"
+ ],
+ "version": "==4.6.3"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:8c1019c6aad13642199fbe458275ad6a84907634cc9f0989877ccc4a2840139d",
+ "sha256:ca943a7e809cc12257001ccfb99e3563da9af99d52f261725e96dfe0f9275bc3"
+ ],
+ "version": "==0.5.1"
+ }
+ }
+}
diff --git a/Integrations/EWSv2/CHANGELOG.md b/Integrations/EWSv2/CHANGELOG.md
new file mode 100644
index 000000000000..02c7ec2822eb
--- /dev/null
+++ b/Integrations/EWSv2/CHANGELOG.md
@@ -0,0 +1,20 @@
+## [Unreleased]
+- Improved implementation of the ***ews-search-mailbox*** command.
+- Added the ***ews-get-items-as-eml*** command.
+
+## [19.9.1] - 2019-09-18
+Improved handling of uploaded EML files.
+
+## [19.9.0] - 2019-09-04
+- Improved implementation of the ***ews-get-contacts*** command.
+- Improved security for the Exchange 365 Compliance search.
+- Added the *get-internal-items* argument to the ***ews-get-items-from-folder*** command, which enables you to retrieve EML and MSG file attachments.
+- Improved security within the Docker container.
+
+
+## [19.8.0] - 2019-08-06
+- Improved memory resource usage.
+- Added the ***ews-mark-items-as-read*** command.
+- Added the *Mark fetched emails as read* parameter to the integration instance configuration.
+- Improved integration documentation.
+
diff --git a/Integrations/EWSv2/EWSv2.py b/Integrations/EWSv2/EWSv2.py
new file mode 100644
index 000000000000..b2eea9619228
--- /dev/null
+++ b/Integrations/EWSv2/EWSv2.py
@@ -0,0 +1,2083 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import sys
+import traceback
+import json
+import os
+import hashlib
+from datetime import timedelta
+from cStringIO import StringIO
+import logging
+import warnings
+import subprocess
+import email
+from requests.exceptions import ConnectionError
+
+import exchangelib
+from exchangelib.errors import ErrorItemNotFound, ResponseMessageError, TransportError, RateLimitError, \
+ ErrorInvalidIdMalformed, \
+ ErrorFolderNotFound, ErrorToFolderNotFound, ErrorMailboxStoreUnavailable, ErrorMailboxMoveInProgress, \
+ AutoDiscoverFailed, ErrorNameResolutionNoResults, ErrorInvalidPropertyRequest
+from exchangelib.items import Item, Message, Contact
+from exchangelib.services import EWSService, EWSAccountService
+from exchangelib.util import create_element, add_xml_child
+from exchangelib import IMPERSONATION, DELEGATE, Account, Credentials, \
+ EWSDateTime, EWSTimeZone, Configuration, NTLM, DIGEST, BASIC, FileAttachment, \
+ Version, Folder, HTMLBody, Body, Build, ItemAttachment
+from exchangelib.version import EXCHANGE_2007, EXCHANGE_2010, EXCHANGE_2010_SP2, EXCHANGE_2013, EXCHANGE_2016
+from exchangelib.protocol import BaseProtocol, NoVerifyHTTPAdapter
+
+# Define utf8 as default encoding
+reload(sys)
+sys.setdefaultencoding('utf8') # pylint: disable=E1101
+
+# Ignore warnings printed to stdout
+warnings.filterwarnings("ignore")
+
+# Docker backward compatibility: exchangelib 1.12.0 exposes MNS/TNS in util; older versions expose them in transport
+MNS = None
+TNS = None
+if exchangelib.__version__ == "1.12.0":
+ MNS, TNS = exchangelib.util.MNS, exchangelib.util.TNS
+else:
+ MNS, TNS = exchangelib.transport.MNS, exchangelib.transport.TNS # pylint: disable=E1101
+
+# consts
+VERSIONS = {
+ '2007': EXCHANGE_2007,
+ '2010': EXCHANGE_2010,
+ '2010_SP2': EXCHANGE_2010_SP2,
+ '2013': EXCHANGE_2013,
+ '2016': EXCHANGE_2016
+}
+
+ATTACHMENT_ID = "attachmentId"
+ATTACHMENT_ORIGINAL_ITEM_ID = 'originalItemId'
+NEW_ITEM_ID = 'newItemId'
+MESSAGE_ID = "messageId"
+ITEM_ID = "itemId"
+ACTION = "action"
+MAILBOX = "mailbox"
+MAILBOX_ID = "mailboxId"
+FOLDER_ID = "id"
+
+MOVED_TO_MAILBOX = "movedToMailbox"
+MOVED_TO_FOLDER = "movedToFolder"
+
+FILE_ATTACHMENT_TYPE = 'FileAttachment'
+ITEM_ATTACHMENT_TYPE = 'ItemAttachment'
+ATTACHMENT_TYPE = 'attachmentType'
+
+TOIS_PATH = '/root/Top of Information Store/'
+
+ENTRY_CONTEXT = "EntryContext"
+CONTEXT_UPDATE_EWS_ITEM = "EWS.Items(val.{0} == obj.{0} || (val.{1} && obj.{1} && val.{1} == obj.{1}))".format(ITEM_ID,
+ MESSAGE_ID)
+CONTEXT_UPDATE_EWS_ITEM_FOR_ATTACHMENT = "EWS.Items(val.{0} == obj.{1})".format(ITEM_ID, ATTACHMENT_ORIGINAL_ITEM_ID)
+CONTEXT_UPDATE_ITEM_ATTACHMENT = ".ItemAttachments(val.{0} == obj.{0})".format(ATTACHMENT_ID)
+CONTEXT_UPDATE_FILE_ATTACHMENT = ".FileAttachments(val.{0} == obj.{0})".format(ATTACHMENT_ID)
+CONTEXT_UPDATE_FOLDER = "EWS.Folders(val.{0} == obj.{0})".format(FOLDER_ID)
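+# e.g. CONTEXT_UPDATE_EWS_ITEM expands to:
+# "EWS.Items(val.itemId == obj.itemId || (val.messageId && obj.messageId && val.messageId == obj.messageId))"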
+
+LAST_RUN_TIME = "lastRunTime"
+LAST_RUN_IDS = "ids"
+LAST_RUN_FOLDER = "folderName"
+ERROR_COUNTER = "errorCounter"
+
+ITEMS_RESULTS_HEADERS = ['sender', 'subject', 'hasAttachments', 'datetimeReceived', 'receivedBy', 'author',
+ 'toRecipients', ]
+
+# Load integration params from demisto
+USE_PROXY = demisto.params().get('proxy', False)
+NON_SECURE = demisto.params().get('insecure', True)
+AUTH_METHOD_STR = demisto.params().get('authType', '')
+AUTH_METHOD_STR = AUTH_METHOD_STR.lower() if AUTH_METHOD_STR else ''
+VERSION_STR = demisto.params().get('defaultServerVersion', None)
+MANUAL_USERNAME = demisto.params().get('domainAndUserman', '')
+FOLDER_NAME = demisto.params().get('folder', 'Inbox')
+IS_PUBLIC_FOLDER = demisto.params().get('isPublicFolder', False)
+ACCESS_TYPE = IMPERSONATION if demisto.params().get('impersonation', False) else DELEGATE
+FETCH_ALL_HISTORY = demisto.params().get('fetchAllHistory', False)
+IS_TEST_MODULE = False
+BaseProtocol.TIMEOUT = int(demisto.params().get('requestTimeout', 120))
+AUTO_DISCOVERY = False
+SERVER_BUILD = ""
+MARK_AS_READ = demisto.params().get('markAsRead', False)
+
+START_COMPLIANCE = """
+[CmdletBinding()]
+Param(
+[Parameter(Mandatory=$True)]
+[string]$username,
+
+[Parameter(Mandatory=$True)]
+[string]$query
+)
+
+$WarningPreference = "silentlyContinue"
+# Create Credential object
+$password = Read-Host
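+# the password is supplied via stdin (Read-Host) rather than as a script parameter,
+# presumably to keep it out of the process argument list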
+$secpasswd = ConvertTo-SecureString $password -AsPlainText -Force
+$UserCredential = New-Object System.Management.Automation.PSCredential ($username, $secpasswd)
+
+# Generate a unique search name
+$searchName = [guid]::NewGuid().ToString() -replace '[-]'
+$searchName = "DemistoSearch" + $searchName
+
+# open remote PS session to Office 365 Security & Compliance Center
+$session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri `
+https://ps.compliance.protection.outlook.com/powershell-liveid/ -Credential $UserCredential `
+-Authentication Basic -AllowRedirection
+
+if (!$session)
+{
+ "Failed to create remote PS session"
+ return
+}
+
+Import-PSSession $session -CommandName *Compliance* -AllowClobber -DisableNameChecking -Verbose:$false | Out-Null
+
+$compliance = New-ComplianceSearch -Name $searchName -ExchangeLocation All -ContentMatchQuery $query -Confirm:$false
+
+Start-ComplianceSearch -Identity $searchName
+
+$complianceSearchName = "Action status: " + $searchName
+
+$complianceSearchName | ConvertTo-Json
+
+# Close the session
+Remove-PSSession $session
+"""
+GET_COMPLIANCE = """[CmdletBinding()]
+Param(
+[Parameter(Mandatory=$True)]
+[string]$username,
+
+
+[Parameter(Mandatory=$True)]
+[string]$searchName
+)
+
+$WarningPreference = "silentlyContinue"
+# Create Credential object
+$password = Read-Host
+$secpasswd = ConvertTo-SecureString $password -AsPlainText -Force
+$UserCredential = New-Object System.Management.Automation.PSCredential ($username, $secpasswd)
+
+
+# open remote PS session to Office 365 Security & Compliance Center
+$session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri `
+https://ps.compliance.protection.outlook.com/powershell-liveid/ -Credential $UserCredential `
+-Authentication Basic -AllowRedirection
+
+if (!$session)
+{
+ "Failed to create remote PS session"
+ return
+}
+
+
+Import-PSSession $session -CommandName Get-ComplianceSearch -AllowClobber -DisableNameChecking -Verbose:$false | Out-Null
+
+
+
+$searchStatus = Get-ComplianceSearch $searchName
+#"Search status: " + $searchStatus.Status
+$searchStatus.Status
+if ($searchStatus.Status -eq "Completed")
+{
+ $searchStatus.SuccessResults | ConvertTo-Json
+}
+
+# Close the session
+Remove-PSSession $session
+"""
+PURGE_COMPLIANCE = """
+[CmdletBinding()]
+Param(
+[Parameter(Mandatory=$True)]
+[string]$username,
+
+[Parameter(Mandatory=$True)]
+[string]$searchName
+)
+
+$WarningPreference = "silentlyContinue"
+# Create Credential object
+$password = Read-Host
+$secpasswd = ConvertTo-SecureString $password -AsPlainText -Force
+$UserCredential = New-Object System.Management.Automation.PSCredential ($username, $secpasswd)
+
+# open remote PS session to Office 365 Security & Compliance Center
+$session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri `
+https://ps.compliance.protection.outlook.com/powershell-liveid/ -Credential $UserCredential `
+-Authentication Basic -AllowRedirection
+if (!$session)
+{
+ "Failed to create remote PS session"
+ return
+}
+
+
+Import-PSSession $session -CommandName *Compliance* -AllowClobber -DisableNameChecking -Verbose:$false | Out-Null
+
+# Delete mails based on an existing search criteria
+$newActionResult = New-ComplianceSearchAction -SearchName $searchName -Purge -PurgeType SoftDelete -Confirm:$false
+if (!$newActionResult)
+{
+ # Happens when there are no results from the search
+ "No action was created"
+}
+
+# Close the session
+Remove-PSSession $session
+return
+"""
+PURGE_STATUS_COMPLIANCE = """
+[CmdletBinding()]
+Param(
+[Parameter(Mandatory=$True)]
+[string]$username,
+
+[Parameter(Mandatory=$True)]
+[string]$searchName
+)
+
+$WarningPreference = "silentlyContinue"
+# Create Credential object
+$password = Read-Host
+$secpasswd = ConvertTo-SecureString $password -AsPlainText -Force
+$UserCredential = New-Object System.Management.Automation.PSCredential ($username, $secpasswd)
+
+# open remote PS session to Office 365 Security & Compliance Center
+$session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri `
+https://ps.compliance.protection.outlook.com/powershell-liveid/ -Credential $UserCredential `
+-Authentication Basic -AllowRedirection
+
+if (!$session)
+{
+ "Failed to create remote PS session"
+ return
+}
+
+
+Import-PSSession $session -CommandName *Compliance* -AllowClobber -DisableNameChecking -Verbose:$false | Out-Null
+
+$actionName = $searchName + "_Purge"
+$actionStatus = Get-ComplianceSearchAction $actionName
+""
+$actionStatus.Status
+
+# Close the session
+Remove-PSSession $session
+"""
+REMOVE_COMPLIANCE = """
+[CmdletBinding()]
+Param(
+[Parameter(Mandatory=$True)]
+[string]$username,
+
+[Parameter(Mandatory=$True)]
+[string]$searchName
+)
+
+$WarningPreference = "silentlyContinue"
+# Create Credential object
+$password = Read-Host
+$secpasswd = ConvertTo-SecureString $password -AsPlainText -Force
+$UserCredential = New-Object System.Management.Automation.PSCredential ($username, $secpasswd)
+
+
+# open remote PS session to Office 365 Security & Compliance Center
+
+$session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri `
+https://ps.compliance.protection.outlook.com/powershell-liveid/ -Credential $UserCredential `
+-Authentication Basic -AllowRedirection
+
+if (!$session)
+{
+ "Failed to create remote PS session"
+ return
+}
+
+
+Import-PSSession $session -CommandName *Compliance* -AllowClobber -DisableNameChecking -Verbose:$false | Out-Null
+
+# Remove the search
+Remove-ComplianceSearch $searchName -Confirm:$false
+
+# Close the session
+Remove-PSSession $session
+"""
+
+# initialized in main()
+EWS_SERVER = ''
+USERNAME = ''
+ACCOUNT_EMAIL = ''
+PASSWORD = ''
+config = None
+credentials = None
+
+PUBLIC_FOLDERS_ERROR = 'Please update your docker image to use public folders'
+if IS_PUBLIC_FOLDER and exchangelib.__version__ != "1.12.0":
+ if demisto.command() == 'test-module':
+ demisto.results(PUBLIC_FOLDERS_ERROR)
+ exit(3)
+ raise Exception(PUBLIC_FOLDERS_ERROR)
+
+
+# Prep Functions
+def get_auth_method(auth_method):
+ auth_method = auth_method.lower()
+ if auth_method == 'ntlm':
+ return NTLM
+ elif auth_method == 'basic':
+ return BASIC
+ elif auth_method == 'digest':
+ return DIGEST
+ raise Exception("%s auth method is not supported. Choose one of %s" % (auth_method, 'ntlm\\basic\\digest'))
+
+
+def get_build(version_str):
+ if version_str not in VERSIONS:
+ raise Exception("%s is unsupported version: %s. Choose one of" % (version_str, "\\".join(VERSIONS.keys())))
+ return VERSIONS[version_str]
+
+
+def get_build_autodiscover(context_dict):
+ build_params = context_dict["build"].split(".")
+ return Build(*build_params)
+
+
+def get_endpoint_autodiscover(context_dict):
+ return context_dict["service_endpoint"]
+
+
+def get_version(version_str):
+ if version_str not in VERSIONS:
+ raise Exception("%s is unsupported version: %s. Choose one of" % (version_str, "\\".join(VERSIONS.keys())))
+ return Version(VERSIONS[version_str])
+
+
+def create_context_dict(account):
+ return {
+ "auth_type": account.protocol.auth_type,
+ "service_endpoint": account.protocol.service_endpoint,
+ "build": str(account.protocol.version.build),
+ "api_version": account.protocol.version.api_version
+ }
+
+
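+# Autodiscovery is expensive, so the resolved endpoint and server build are cached in the
+# integration context and reused on subsequent runs.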
+def prepare_context(credentials):
+ context_dict = demisto.getIntegrationContext()
+ global SERVER_BUILD, EWS_SERVER
+ if not context_dict:
+ try:
+ account = Account(
+ primary_smtp_address=ACCOUNT_EMAIL, autodiscover=True,
+ access_type=ACCESS_TYPE, credentials=credentials,
+ )
+ EWS_SERVER = account.protocol.service_endpoint
+ if not USE_PROXY:
+ os.environ['NO_PROXY'] = EWS_SERVER
+ SERVER_BUILD = account.protocol.version.build
+ demisto.setIntegrationContext(create_context_dict(account))
+ except AutoDiscoverFailed:
+ return_error("Auto discovery failed. Check credentials or configure manually")
+ except Exception as e:
+ return_error(e.message)
+ else:
+ SERVER_BUILD = get_build_autodiscover(context_dict)
+ EWS_SERVER = get_endpoint_autodiscover(context_dict)
+
+
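+# Builds the exchangelib configuration: strips proxy environment variables when no proxy is
+# used, falls back to autodiscovery when no server is configured, and defaults Office 365
+# endpoints to Basic authentication with the 2016 API version.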
+def prepare():
+ if NON_SECURE:
+ BaseProtocol.HTTP_ADAPTER_CLS = NoVerifyHTTPAdapter
+
+ if not USE_PROXY:
+ def remove_from_dict(d, key):
+ if key in d:
+ del d[key]
+
+ remove_from_dict(os.environ, 'HTTP_PROXY')
+ remove_from_dict(os.environ, 'http_proxy')
+ remove_from_dict(os.environ, 'HTTPS_PROXY')
+ remove_from_dict(os.environ, 'https_proxy')
+ os.environ['NO_PROXY'] = EWS_SERVER or ""
+
+ global AUTO_DISCOVERY, VERSION_STR, AUTH_METHOD_STR, USERNAME
+ AUTO_DISCOVERY = not EWS_SERVER
+ if AUTO_DISCOVERY:
+ credentials = Credentials(username=USERNAME, password=PASSWORD)
+ prepare_context(credentials)
+ return None, credentials
+ else:
+ if 'outlook.office365.com' in EWS_SERVER.lower():
+ if not AUTH_METHOD_STR:
+ AUTH_METHOD_STR = 'Basic'
+ VERSION_STR = '2016'
+ else:
+ if MANUAL_USERNAME:
+ USERNAME = MANUAL_USERNAME
+ if not AUTH_METHOD_STR:
+ AUTH_METHOD_STR = 'ntlm'
+ if not VERSION_STR:
+ return_error('Exchange Server Version is required for on-premise Exchange Servers.')
+
+ version = get_version(VERSION_STR)
+ credentials = Credentials(username=USERNAME, password=PASSWORD)
+ config_args = {
+ 'credentials': credentials,
+ 'auth_type': get_auth_method(AUTH_METHOD_STR),
+ 'version': version
+ }
+ if not EWS_SERVER:
+ return_error("Exchange Server Hostname or IP Address is required for manual configuration.")
+ elif 'http' in EWS_SERVER.lower():
+ config_args['service_endpoint'] = EWS_SERVER
+ else:
+ config_args['server'] = EWS_SERVER
+
+ return Configuration(**config_args), None
+
+
+def construct_config_args(context_dict, credentials):
+ auth_type = context_dict["auth_type"]
+ api_version = context_dict["api_version"]
+ service_endpoint = context_dict["service_endpoint"]
+ version = Version(get_build_autodiscover(context_dict), api_version)
+
+ config_args = {
+ 'credentials': credentials,
+ 'auth_type': auth_type,
+ 'version': version,
+ 'service_endpoint': service_endpoint
+ }
+ return config_args
+
+
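+# Tries the cached autodiscovery context first and falls back to a full autodiscovery; if the
+# rediscovered configuration equals the cached one that just failed, the original error is
+# re-raised.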
+def get_account_autodiscover(account_email, access_type=ACCESS_TYPE):
+ account = None
+ original_exc = None # type: ignore
+ context_dict = demisto.getIntegrationContext()
+
+ if context_dict:
+ try:
+ config_args = construct_config_args(context_dict, credentials)
+ account = Account(
+ primary_smtp_address=account_email, autodiscover=False, config=Configuration(**config_args),
+ access_type=access_type,
+ )
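+            # touching the mailbox verifies that the cached configuration still works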
+ account.root.effective_rights.read # pylint: disable=E1101
+ return account
+ except Exception, original_exc:
+ pass
+
+ try:
+ account = Account(
+ primary_smtp_address=ACCOUNT_EMAIL, autodiscover=True, credentials=credentials, access_type=access_type,
+ )
+ except AutoDiscoverFailed:
+ return_error("Auto discovery failed. Check credentials or configure manually")
+
+ autodiscover_result = create_context_dict(account)
+ if autodiscover_result == context_dict:
+ raise original_exc # pylint: disable=E0702
+
+ if account_email == ACCOUNT_EMAIL:
+ demisto.setIntegrationContext(create_context_dict(account))
+ return account
+
+
+def get_account(account_email, access_type=ACCESS_TYPE):
+ if not AUTO_DISCOVERY:
+ return Account(
+ primary_smtp_address=account_email, autodiscover=False, config=config, access_type=access_type,
+ )
+ return get_account_autodiscover(account_email, access_type)
+
+
+# LOGGING
+log_stream = None
+log_handler = None
+
+
+def start_logging():
+ global log_stream
+ global log_handler
+ if log_stream is None:
+ log_stream = StringIO()
+ log_handler = logging.StreamHandler(stream=log_stream)
+ log_handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
+ logger = logging.getLogger()
+ logger.addHandler(log_handler)
+ logger.setLevel(logging.DEBUG)
+
+
+# Exchange 2010 Fixes
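+# Exchange 2010 SP2 and earlier do not support the text_body and unread_count fields, so they
+# are removed from the exchangelib field maps before any request is built.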
+def fix_2010():
+ version = SERVER_BUILD if SERVER_BUILD else get_build(VERSION_STR)
+ if version <= EXCHANGE_2010_SP2:
+ for m in (
+ Item, Message, exchangelib.items.CalendarItem, exchangelib.items.Contact,
+ exchangelib.items.DistributionList,
+ exchangelib.items.PostItem, exchangelib.items.Task, exchangelib.items.MeetingRequest,
+ exchangelib.items.MeetingResponse, exchangelib.items.MeetingCancellation):
+ for i, f in enumerate(m.FIELDS):
+ if f.name == 'text_body':
+ m.FIELDS.pop(i)
+ break
+ for m in (exchangelib.Folder, exchangelib.folders.Inbox):
+ for i, f in enumerate(m.FIELDS):
+ if f.name == 'unread_count':
+ m.FIELDS.pop(i)
+ break
+
+ def repr1(self):
+ return self.__class__.__name__ + repr((self.root, self.name, self.total_count, self.child_folder_count,
+ self.folder_class, self.id, self.changekey))
+
+ def repr2(self):
+ return self.__class__.__name__ + repr(
+ (self.root, self.name, self.total_count, self.child_folder_count, self.folder_class, self.changekey))
+
+ def repr3(self):
+ return self.__class__.__name__ + repr((self.account, '[self]', self.name, self.total_count,
+ self.child_folder_count, self.folder_class, self.changekey))
+
+ exchangelib.Folder.__repr__ = repr1
+ exchangelib.folders.Inbox.__repr__ = exchangelib.folders.JunkEmail.__repr__ = repr2
+ exchangelib.folders.Root.__repr__ = repr3
+
+ start_logging()
+
+
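+# Python 2 helper: recursively decodes str values to unicode so results serialize cleanly.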
+def str_to_unicode(obj):
+ if isinstance(obj, dict):
+ obj = {k: str_to_unicode(v) for k, v in obj.iteritems()}
+ elif isinstance(obj, list):
+ obj = map(str_to_unicode, obj)
+ elif isinstance(obj, str):
+ obj = unicode(obj, "utf-8")
+ return obj
+
+
+def filter_dict_null(d):
+ if isinstance(d, dict):
+ return dict((k, v) for k, v in d.items() if v is not None)
+ return d
+
+
+def get_attachment_name(attachment_name):
+ if attachment_name is None or attachment_name == "":
+ return 'demisto_untitled_attachment'
+ return attachment_name
+
+
+def get_entry_for_object(title, context_key, obj, headers=None):
+ if len(obj) == 0:
+ return "There is no output results"
+ obj = filter_dict_null(obj)
+ if isinstance(obj, list):
+ obj = map(filter_dict_null, obj)
+ if headers and isinstance(obj, dict):
+ headers = list(set(headers).intersection(set(obj.keys())))
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': obj,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, obj, headers),
+ ENTRY_CONTEXT: {
+ context_key: obj
+ }
+ }
+
+
+def get_items_from_mailbox(account, item_ids):
+ if type(item_ids) is not list:
+ item_ids = [item_ids]
+ items = map(lambda x: Item(item_id=x), item_ids)
+ result = list(account.fetch(ids=items))
+ result = [x for x in result if not isinstance(x, ErrorItemNotFound)]
+ if len(result) != len(item_ids):
+ raise Exception("One or more items were not found. Check the input item ids")
+ if exchangelib.__version__ != "1.12.0": # Docker BC
+ for item in result:
+ item.folder = Folder(account=account)
+ return result
+
+
+def get_item_from_mailbox(account, item_id):
+ result = get_items_from_mailbox(account, [item_id])
+ if len(result) == 0:
+ raise Exception("ItemId %s not found" % str(item_id))
+ return result[0]
+
+
+def is_default_folder(folder_path, is_public):
+ if exchangelib.__version__ != "1.12.0": # Docker BC
+ return False
+
+ if is_public is not None:
+ return is_public
+
+ if folder_path == FOLDER_NAME:
+ return IS_PUBLIC_FOLDER
+
+ return False
+
+
+def get_folder_by_path(account, path, is_public=False):
+    # a 120-character value is an Exchange folder ID rather than a folder path
+ if len(path) == 120:
+ folders_map = account.root._folders_map
+ if path in folders_map:
+ return account.root._folders_map[path]
+
+ if is_public:
+ folder_result = account.public_folders_root
+ elif path == u'AllItems':
+ folder_result = account.root
+ else:
+ folder_result = account.inbox.parent # Top of Information Store
+ path = path.replace("/", "\\")
+ path = path.split('\\')
+ for sub_folder_name in path:
+ folder_filter_by_name = [x for x in folder_result.children if x.name.lower() == sub_folder_name.lower()]
+ if len(folder_filter_by_name) == 0:
+ raise Exception("No such folder %s" % path)
+ folder_result = folder_filter_by_name[0]
+
+ return folder_result
+
+
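+# Custom EWS SOAP services. These operations are not exposed by the bundled exchangelib
+# version, so the request payloads are assembled manually from XML elements.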
+class MarkAsJunk(EWSAccountService):
+ SERVICE_NAME = 'MarkAsJunk'
+
+ def call(self, item_id, move_item):
+ elements = list(self._get_elements(payload=self.get_payload(item_id=item_id, move_item=move_item)))
+ for element in elements:
+ if isinstance(element, ResponseMessageError):
+ return element.message
+ return "Success"
+
+ def get_payload(self, item_id, move_item):
+ junk = create_element('m:%s' % self.SERVICE_NAME,
+ IsJunk="true",
+ MoveItem="true" if move_item else "false")
+
+ items_list = create_element('m:ItemIds')
+ item_element = create_element("t:ItemId", Id=item_id)
+ items_list.append(item_element)
+ junk.append(items_list)
+
+ return junk
+
+
+class GetSearchableMailboxes(EWSService):
+ SERVICE_NAME = 'GetSearchableMailboxes'
+ element_container_name = '{%s}SearchableMailboxes' % MNS
+
+ @staticmethod
+ def parse_element(element):
+ return {
+ MAILBOX: element.find("{%s}PrimarySmtpAddress" % TNS).text if element.find(
+ "{%s}PrimarySmtpAddress" % TNS) is not None else None,
+ MAILBOX_ID: element.find("{%s}ReferenceId" % TNS).text if element.find(
+ "{%s}ReferenceId" % TNS) is not None else None,
+ 'displayName': element.find("{%s}DisplayName" % TNS).text if element.find(
+ "{%s}DisplayName" % TNS) is not None else None,
+ 'isExternal': element.find("{%s}IsExternalMailbox" % TNS).text if element.find(
+ "{%s}IsExternalMailbox" % TNS) is not None else None,
+ 'externalEmailAddress': element.find("{%s}ExternalEmailAddress" % TNS).text if element.find(
+ "{%s}ExternalEmailAddress" % TNS) is not None else None
+ }
+
+ def call(self):
+ if self.protocol.version.build < EXCHANGE_2013:
+ raise NotImplementedError('%s is only supported for Exchange 2013 servers and later' % self.SERVICE_NAME)
+ elements = self._get_elements(payload=self.get_payload())
+ return map(lambda x: self.parse_element(x), elements)
+
+ def get_payload(self):
+ element = create_element(
+ 'm:%s' % self.SERVICE_NAME,
+ )
+ return element
+
+
+class SearchMailboxes(EWSService):
+ SERVICE_NAME = 'SearchMailboxes'
+ element_container_name = '{%s}SearchMailboxesResult/{%s}Items' % (MNS, TNS)
+
+ @staticmethod
+ def parse_element(element):
+ to_recipients = element.find('{%s}ToRecipients' % TNS)
+ if to_recipients:
+ to_recipients = map(lambda x: x.text if x is not None else None, to_recipients)
+
+ result = {
+ ITEM_ID: element.find('{%s}Id' % TNS).attrib['Id'] if element.find('{%s}Id' % TNS) is not None else None,
+ MAILBOX: element.find('{%s}Mailbox/{%s}PrimarySmtpAddress' % (TNS, TNS)).text if element.find(
+ '{%s}Mailbox/{%s}PrimarySmtpAddress' % (TNS, TNS)) is not None else None,
+ 'subject': element.find("{%s}Subject" % TNS).text if element.find(
+ "{%s}Subject" % TNS) is not None else None,
+ 'toRecipients': to_recipients,
+ 'sender': element.find("{%s}Sender" % TNS).text if element.find("{%s}Sender" % TNS) is not None else None,
+ 'hasAttachments': element.find("{%s}HasAttachment" % TNS).text if element.find(
+ "{%s}HasAttachment" % TNS) is not None else None,
+ 'datetimeSent': element.find("{%s}SentTime" % TNS).text if element.find(
+ "{%s}SentTime" % TNS) is not None else None,
+ 'datetimeReceived': element.find("{%s}ReceivedTime" % TNS).text if element.find(
+ "{%s}ReceivedTime" % TNS) is not None else None
+ }
+
+ return result
+
+ def call(self, query, mailboxes):
+ if self.protocol.version.build < EXCHANGE_2013:
+ raise NotImplementedError('%s is only supported for Exchange 2013 servers and later' % self.SERVICE_NAME)
+ elements = list(self._get_elements(payload=self.get_payload(query, mailboxes)))
+ return map(lambda x: self.parse_element(x), elements)
+
+ def get_payload(self, query, mailboxes):
+ def get_mailbox_search_scope(mailbox_id):
+ mailbox_search_scope = create_element("t:MailboxSearchScope")
+ add_xml_child(mailbox_search_scope, "t:Mailbox", mailbox_id)
+ add_xml_child(mailbox_search_scope, "t:SearchScope", "All")
+ return mailbox_search_scope
+
+ mailbox_query_element = create_element("t:MailboxQuery")
+ add_xml_child(mailbox_query_element, "t:Query", query)
+ mailboxes_scopes = []
+ for mailbox in mailboxes:
+ mailboxes_scopes.append(get_mailbox_search_scope(mailbox))
+ add_xml_child(mailbox_query_element, "t:MailboxSearchScopes", mailboxes_scopes)
+
+ element = create_element('m:%s' % self.SERVICE_NAME)
+ add_xml_child(element, "m:SearchQueries", mailbox_query_element)
+ add_xml_child(element, "m:ResultType", "PreviewOnly")
+
+ return element
+
+
+class ExpandGroup(EWSService):
+ SERVICE_NAME = 'ExpandDL'
+ element_container_name = '{%s}DLExpansion' % MNS
+
+ @staticmethod
+ def parse_element(element):
+ return {
+ MAILBOX: element.find("{%s}EmailAddress" % TNS).text if element.find(
+ "{%s}EmailAddress" % TNS) is not None else None,
+ 'displayName': element.find("{%s}Name" % TNS).text if element.find("{%s}Name" % TNS) is not None else None,
+ 'mailboxType': element.find("{%s}MailboxType" % TNS).text if element.find(
+ "{%s}MailboxType" % TNS) is not None else None
+ }
+
+ def call(self, email_address, recursive_expansion=False):
+ if self.protocol.version.build < EXCHANGE_2010:
+ raise NotImplementedError('%s is only supported for Exchange 2010 servers and later' % self.SERVICE_NAME)
+ try:
+ if recursive_expansion == 'True':
+ group_members = {} # type: dict
+ self.expand_group_recursive(email_address, group_members)
+ return group_members.values()
+ else:
+ return self.expand_group(email_address)
+ except ErrorNameResolutionNoResults:
+ demisto.results("No results were found.")
+ sys.exit()
+
+ def get_payload(self, email_address):
+ element = create_element('m:%s' % self.SERVICE_NAME, )
+ mailbox_element = create_element('m:Mailbox')
+ add_xml_child(mailbox_element, 't:EmailAddress', email_address)
+ element.append(mailbox_element)
+ return element
+
+ def expand_group(self, email_address):
+ elements = self._get_elements(payload=self.get_payload(email_address))
+ return map(lambda x: self.parse_element(x), elements)
+
+    def expand_group_recursive(self, email_address, non_dl_emails, dl_emails=None):
+        # use a fresh set per call instead of a shared mutable default argument
+        if dl_emails is None:
+            dl_emails = set()
+        if email_address in non_dl_emails or email_address in dl_emails:
+ return None
+ dl_emails.add(email_address)
+
+ for member in self.expand_group(email_address):
+ if member['mailboxType'] == 'PublicDL' or member['mailboxType'] == 'PrivateDL':
+ self.expand_group_recursive(member['mailbox'], non_dl_emails, dl_emails)
+ else:
+ if member['mailbox'] not in non_dl_emails:
+ non_dl_emails[member['mailbox']] = member
+
+
+def get_expanded_group(protocol, email_address, recursive_expansion=False):
+ group_members = ExpandGroup(protocol=protocol).call(email_address, recursive_expansion)
+ group_details = {
+ "name": email_address,
+ "members": group_members
+ }
+ entry_for_object = get_entry_for_object("Expanded group", 'EWS.ExpandGroup', group_details)
+ entry_for_object['HumanReadable'] = tableToMarkdown('Group Members', group_members)
+ return entry_for_object
+
+
+def get_searchable_mailboxes(protocol):
+ searchable_mailboxes = GetSearchableMailboxes(protocol=protocol).call()
+ return get_entry_for_object("Searchable mailboxes", 'EWS.Mailboxes', searchable_mailboxes)
+
+
+def search_mailboxes(protocol, filter, limit=100, mailbox_search_scope=None, email_addresses=None):
+ mailbox_ids = []
+ limit = int(limit)
+ if mailbox_search_scope is not None and email_addresses is not None:
+ raise Exception("Use one of the arguments - mailbox-search-scope or email-addresses, not both")
+ if email_addresses:
+ email_addresses = email_addresses.split(",")
+ all_mailboxes = get_searchable_mailboxes(protocol)[ENTRY_CONTEXT]['EWS.Mailboxes']
+ for email_address in email_addresses:
+ for mailbox in all_mailboxes:
+ if MAILBOX in mailbox and email_address.lower() == mailbox[MAILBOX].lower():
+ mailbox_ids.append(mailbox[MAILBOX_ID])
+ if len(mailbox_ids) == 0:
+ raise Exception("No searchable mailboxes were found for the provided email addresses.")
+ elif mailbox_search_scope:
+ mailbox_ids = mailbox_search_scope if type(mailbox_search_scope) is list else [mailbox_search_scope]
+ else:
+ entry = get_searchable_mailboxes(protocol)
+ mailboxes = [x for x in entry[ENTRY_CONTEXT]['EWS.Mailboxes'] if MAILBOX_ID in x.keys()]
+ mailbox_ids = map(lambda x: x[MAILBOX_ID], mailboxes)
+
+ try:
+ search_results = SearchMailboxes(protocol=protocol).call(filter, mailbox_ids)
+ search_results = search_results[:limit]
+ except TransportError, e:
+ if "ItemCount>0<" in str(e):
+ return "No results for search query: " + filter
+ else:
+ raise e
+
+ return get_entry_for_object("Search mailboxes results",
+ CONTEXT_UPDATE_EWS_ITEM,
+ search_results)
+
+
+def get_last_run():
+ last_run = demisto.getLastRun()
+ if not last_run or last_run.get(LAST_RUN_FOLDER) != FOLDER_NAME:
+ last_run = {
+ LAST_RUN_TIME: None,
+ LAST_RUN_FOLDER: FOLDER_NAME,
+ LAST_RUN_IDS: None
+ }
+ if LAST_RUN_TIME in last_run and last_run[LAST_RUN_TIME] is not None:
+ last_run[LAST_RUN_TIME] = EWSDateTime.from_string(last_run[LAST_RUN_TIME])
+
+ return last_run
+
+
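+# Pulls messages from the given folder. Without a stored last-run time, the window defaults
+# to the last 10 minutes unless fetchAllHistory is enabled.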
+def fetch_last_emails(account, folder_name='Inbox', since_datetime=None, exclude_ids=None):
+ qs = get_folder_by_path(account, folder_name, is_public=IS_PUBLIC_FOLDER)
+ if since_datetime:
+ qs = qs.filter(datetime_received__gte=since_datetime)
+ else:
+ if not FETCH_ALL_HISTORY:
+ last_10_min = EWSDateTime.now(tz=EWSTimeZone.timezone('UTC')) - timedelta(minutes=10)
+ qs = qs.filter(datetime_received__gte=last_10_min)
+ qs = qs.filter().only(*map(lambda x: x.name, Message.FIELDS))
+ result = qs.all()
+ result = [x for x in result if isinstance(x, Message)]
+ if exclude_ids and len(exclude_ids) > 0:
+ exclude_ids = set(exclude_ids)
+ result = [x for x in result if x.message_id not in exclude_ids]
+ return result
+
+
+def keys_to_camel_case(value):
+ def str_to_camel_case(snake_str):
+ components = snake_str.split('_')
+ return components[0] + "".join(x.title() for x in components[1:])
+
+ if value is None:
+ return None
+ if isinstance(value, (list, set)):
+ return map(keys_to_camel_case, value)
+ if isinstance(value, dict):
+ return dict((keys_to_camel_case(k),
+ keys_to_camel_case(v) if isinstance(v, (list, dict)) else v)
+ for (k, v) in value.items())
+
+ return str_to_camel_case(value)
+
+
+def parse_item_as_dict(item, email_address, camel_case=False, compact_fields=False):
+ def parse_object_as_dict(object):
+ raw_dict = {}
+ if object is not None:
+ for field in object.FIELDS:
+ raw_dict[field.name] = getattr(object, field.name, None)
+ return raw_dict
+
+ def parse_attachment_as_raw_json(attachment):
+ raw_dict = parse_object_as_dict(attachment)
+ if raw_dict['attachment_id']:
+ raw_dict['attachment_id'] = parse_object_as_dict(raw_dict['attachment_id'])
+ if raw_dict['last_modified_time']:
+ raw_dict['last_modified_time'] = raw_dict['last_modified_time'].ewsformat()
+ return raw_dict
+
+ def parse_folder_as_json(folder):
+ raw_dict = parse_object_as_dict(folder)
+ if 'parent_folder_id' in raw_dict:
+ raw_dict['parent_folder_id'] = parse_folder_as_json(raw_dict['parent_folder_id'])
+ if 'effective_rights' in raw_dict:
+ raw_dict['effective_rights'] = parse_object_as_dict(raw_dict['effective_rights'])
+ return raw_dict
+
+ raw_dict = {}
+ for field, value in item.__dict__.items():
+        if type(value) in [str, unicode, int, float, bool, Body, HTMLBody, type(None)]:
+ try:
+ if isinstance(value, basestring):
+ value.encode('utf-8') # type: ignore
+ raw_dict[field] = value
+ except Exception:
+ pass
+
+ if getattr(item, 'attachments', None):
+ raw_dict['attachments'] = map(lambda x: parse_attachment_as_dict(item.item_id, x), item.attachments)
+
+ for time_field in ['datetime_sent', 'datetime_created', 'datetime_received', 'last_modified_time',
+ 'reminder_due_by']:
+ value = getattr(item, time_field, None)
+ if value:
+ raw_dict[time_field] = value.ewsformat()
+
+ for dict_field in ['effective_rights', 'parent_folder_id', 'conversation_id', 'author',
+ 'extern_id', 'received_by', 'received_representing', 'reply_to', 'sender', 'folder']:
+ value = getattr(item, dict_field, None)
+ if value:
+ raw_dict[dict_field] = parse_object_as_dict(value)
+
+ for list_dict_field in ['headers', 'cc_recipients', 'to_recipients']:
+ value = getattr(item, list_dict_field, None)
+ if value:
+ raw_dict[list_dict_field] = map(lambda x: parse_object_as_dict(x), value)
+
+ if getattr(item, 'folder', None):
+ raw_dict['folder'] = parse_folder_as_json(item.folder)
+ folder_path = item.folder.absolute[len(TOIS_PATH):] if item.folder.absolute.startswith(
+ TOIS_PATH) else item.folder.absolute
+ raw_dict['folder_path'] = folder_path
+
+ if compact_fields:
+ new_dict = {}
+ fields_list = ['datetime_created', 'datetime_received', 'datetime_sent', 'sender',
+ 'has_attachments', 'importance', 'message_id', 'last_modified_time',
+ 'size', 'subject', 'text_body', 'headers', 'body', 'folder_path', 'is_read']
+
+ # Docker BC
+ if exchangelib.__version__ == "1.12.0":
+ if 'id' in raw_dict:
+ new_dict['item_id'] = raw_dict['id']
+ else:
+ fields_list.append('item_id')
+
+ for field in fields_list:
+ if field in raw_dict:
+ new_dict[field] = raw_dict.get(field)
+ for field in ['received_by', 'author', 'sender']:
+ if field in raw_dict:
+ new_dict[field] = raw_dict.get(field, {}).get('email_address')
+ for field in ['to_recipients']:
+ if field in raw_dict:
+ new_dict[field] = map(lambda x: x.get('email_address'), raw_dict[field])
+ attachments = raw_dict.get('attachments')
+ if attachments and len(attachments) > 0:
+ file_attachments = [x for x in attachments if x[ATTACHMENT_TYPE] == FILE_ATTACHMENT_TYPE]
+ if len(file_attachments) > 0:
+ new_dict['FileAttachments'] = file_attachments
+ item_attachments = [x for x in attachments if x[ATTACHMENT_TYPE] == ITEM_ATTACHMENT_TYPE]
+ if len(item_attachments) > 0:
+ new_dict['ItemAttachments'] = item_attachments
+
+ raw_dict = new_dict
+
+ if camel_case:
+ raw_dict = keys_to_camel_case(raw_dict)
+
+ if email_address:
+ raw_dict[MAILBOX] = email_address
+ return raw_dict
+
+
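+# Maps an EWS message to a Demisto incident: body, subject, recipients and headers become
+# labels, and attachments are uploaded to the war room and referenced from the incident.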
+def parse_incident_from_item(item, is_fetch):
+ incident = {}
+ labels = []
+
+ try:
+ incident['details'] = item.text_body or item.body
+ except AttributeError:
+ incident['details'] = item.body
+ incident['name'] = item.subject
+ labels.append({'type': 'Email/subject', 'value': item.subject})
+ incident['occurred'] = item.datetime_created.ewsformat()
+
+ # handle recipients
+ if item.to_recipients:
+ for recipient in item.to_recipients:
+ labels.append({'type': 'Email', 'value': recipient.email_address})
+
+ # handle cc
+ if item.cc_recipients:
+ for recipient in item.cc_recipients:
+ labels.append({'type': 'Email/cc', 'value': recipient.email_address})
+ # handle email from
+ if item.sender:
+ labels.append({'type': 'Email/from', 'value': item.sender.email_address})
+
+ # email format
+ email_format = ''
+ try:
+ if item.text_body:
+ labels.append({'type': 'Email/text', 'value': item.text_body})
+ email_format = 'text'
+ except AttributeError:
+ pass
+ if item.body:
+ labels.append({'type': 'Email/html', 'value': item.body})
+ email_format = 'HTML'
+ labels.append({'type': 'Email/format', 'value': email_format})
+
+ # handle attachments
+ if item.attachments:
+ incident['attachment'] = []
+ for attachment in item.attachments:
+ file_result = None
+ label_attachment_type = None
+ label_attachment_id_type = None
+ if isinstance(attachment, FileAttachment):
+ try:
+ if attachment.content:
+ # file attachment
+ label_attachment_type = 'attachments'
+ label_attachment_id_type = 'attachmentId'
+
+ # save the attachment
+ file_name = get_attachment_name(attachment.name)
+ file_result = fileResult(file_name, attachment.content)
+
+ # check for error
+ if file_result['Type'] == entryTypes['error']:
+ demisto.error(file_result['Contents'])
+ raise Exception(file_result['Contents'])
+
+ # save attachment to incident
+ incident['attachment'].append({
+ 'path': file_result['FileID'],
+ 'name': get_attachment_name(attachment.name)
+ })
+ except TypeError, e:
+ if e.message != "must be string or buffer, not None":
+ raise
+ continue
+ else:
+ # other item attachment
+ label_attachment_type = 'attachmentItems'
+ label_attachment_id_type = 'attachmentItemsId'
+
+ # save the attachment
+ if attachment.item.mime_content:
+ attached_email = email.message_from_string(attachment.item.mime_content)
+ if attachment.item.headers:
+ attached_email_headers = [(h, ' '.join(map(str.strip, v.split('\r\n')))) for (h, v) in
+ attached_email.items()]
+ for header in attachment.item.headers:
+ if (header.name, header.value) not in attached_email_headers \
+ and header.name != 'Content-Type':
+ attached_email.add_header(header.name, header.value)
+
+ file_result = fileResult(get_attachment_name(attachment.name) + ".eml", attached_email.as_string())
+
+ if file_result:
+ # check for error
+ if file_result['Type'] == entryTypes['error']:
+ demisto.error(file_result['Contents'])
+ raise Exception(file_result['Contents'])
+
+ # save attachment to incident
+ incident['attachment'].append({
+ 'path': file_result['FileID'],
+ 'name': get_attachment_name(attachment.name) + ".eml"
+ })
+
+ labels.append({'type': label_attachment_type, 'value': get_attachment_name(attachment.name)})
+ labels.append({'type': label_attachment_id_type, 'value': attachment.attachment_id.id})
+
+ # handle headers
+ if item.headers:
+ headers = []
+ for header in item.headers:
+ labels.append({'type': 'Email/Header/{}'.format(header.name), 'value': str(header.value)})
+ headers.append("{}: {}".format(header.name, header.value))
+ labels.append({'type': 'Email/headers', 'value': "\r\n".join(headers)})
+
+ # handle item id
+ if item.message_id:
+ labels.append({'type': 'Email/MessageId', 'value': str(item.message_id)})
+
+ if item.item_id:
+ labels.append({'type': 'Email/ID', 'value': item.item_id})
+ labels.append({'type': 'Email/itemId', 'value': item.item_id})
+
+    # handle conversation id
+ if item.conversation_id:
+ labels.append({'type': 'Email/ConversionID', 'value': item.conversation_id.id})
+
+ if MARK_AS_READ and is_fetch:
+ item.is_read = True
+ item.save()
+
+ incident['labels'] = labels
+ incident['rawJSON'] = json.dumps(parse_item_as_dict(item, None), ensure_ascii=False)
+
+ return incident
+
+
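+# Fetch loop: on RateLimitError the last run is preserved and an error counter is incremented;
+# the error is only re-raised after more than two consecutive failures.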
+def fetch_emails_as_incidents(account_email, folder_name):
+ start_time = EWSDateTime.now(tz=EWSTimeZone.timezone('UTC'))
+ last_run = get_last_run()
+
+ try:
+ account = get_account(account_email)
+ last_emails = fetch_last_emails(account, folder_name, last_run.get(LAST_RUN_TIME), last_run.get(LAST_RUN_IDS))
+
+ ids = []
+ incidents = []
+ for item in last_emails:
+ if item.message_id:
+ ids.append(item.message_id)
+ incident = parse_incident_from_item(item, True)
+ incidents.append(incident)
+
+ new_last_run = {
+ LAST_RUN_TIME: start_time.ewsformat(),
+ LAST_RUN_FOLDER: folder_name,
+ LAST_RUN_IDS: ids,
+ ERROR_COUNTER: 0
+ }
+
+ demisto.setLastRun(new_last_run)
+ return incidents
+
+ except RateLimitError:
+ if LAST_RUN_TIME in last_run:
+ last_run[LAST_RUN_TIME] = last_run[LAST_RUN_TIME].ewsformat()
+ if ERROR_COUNTER not in last_run:
+ last_run[ERROR_COUNTER] = 0
+ last_run[ERROR_COUNTER] += 1
+ demisto.setLastRun(last_run)
+ if last_run[ERROR_COUNTER] > 2:
+ raise
+ return []
+
+
+def get_entry_for_file_attachment(item_id, attachment):
+ entry = fileResult(get_attachment_name(attachment.name), attachment.content)
+ ec = {
+ CONTEXT_UPDATE_EWS_ITEM_FOR_ATTACHMENT + CONTEXT_UPDATE_FILE_ATTACHMENT: parse_attachment_as_dict(item_id,
+ attachment)
+ }
+ entry[ENTRY_CONTEXT] = filter_dict_null(ec)
+ return entry
+
+
+def parse_attachment_as_dict(item_id, attachment):
+ try:
+ attachment_content = attachment.content if isinstance(attachment,
+ FileAttachment) else attachment.item.mime_content
+ return {
+ ATTACHMENT_ORIGINAL_ITEM_ID: item_id,
+ ATTACHMENT_ID: attachment.attachment_id.id,
+ 'attachmentName': get_attachment_name(attachment.name),
+ 'attachmentSHA256': hashlib.sha256(attachment_content).hexdigest() if attachment_content else None,
+ 'attachmentContentType': attachment.content_type,
+ 'attachmentContentId': attachment.content_id,
+ 'attachmentContentLocation': attachment.content_location,
+ 'attachmentSize': attachment.size,
+ 'attachmentLastModifiedTime': attachment.last_modified_time.ewsformat(),
+ 'attachmentIsInline': attachment.is_inline,
+ ATTACHMENT_TYPE: FILE_ATTACHMENT_TYPE if isinstance(attachment, FileAttachment) else ITEM_ATTACHMENT_TYPE
+ }
+ except TypeError, e:
+ if e.message != "must be string or buffer, not None":
+ raise
+ return {
+ ATTACHMENT_ORIGINAL_ITEM_ID: item_id,
+ ATTACHMENT_ID: attachment.attachment_id.id,
+ 'attachmentName': get_attachment_name(attachment.name),
+ 'attachmentSHA256': None,
+ 'attachmentContentType': attachment.content_type,
+ 'attachmentContentId': attachment.content_id,
+ 'attachmentContentLocation': attachment.content_location,
+ 'attachmentSize': attachment.size,
+ 'attachmentLastModifiedTime': attachment.last_modified_time.ewsformat(),
+ 'attachmentIsInline': attachment.is_inline,
+ ATTACHMENT_TYPE: FILE_ATTACHMENT_TYPE if isinstance(attachment, FileAttachment) else ITEM_ATTACHMENT_TYPE
+ }
+
+
+def get_entry_for_item_attachment(item_id, attachment, target_email):
+ item = attachment.item
+ dict_result = parse_attachment_as_dict(item_id, attachment)
+ dict_result.update(parse_item_as_dict(item, target_email, camel_case=True, compact_fields=True))
+ title = 'EWS get attachment got item for "%s", "%s"' % (target_email, get_attachment_name(attachment.name))
+
+ return get_entry_for_object(title, CONTEXT_UPDATE_EWS_ITEM_FOR_ATTACHMENT + CONTEXT_UPDATE_ITEM_ATTACHMENT,
+ dict_result)
+
+
+def get_attachments_for_item(item_id, account, attachment_ids=None):
+ item = get_item_from_mailbox(account, item_id)
+ attachments = []
+ if attachment_ids and not isinstance(attachment_ids, list):
+ attachment_ids = attachment_ids.split(",")
+ if item:
+ if item.attachments:
+ for attachment in item.attachments:
+ if attachment_ids and attachment.attachment_id.id not in attachment_ids:
+ continue
+ attachments.append(attachment)
+
+ else:
+ raise Exception('Message item not found: ' + item_id)
+
+ if attachment_ids and len(attachments) < len(attachment_ids):
+        raise Exception('Some attachment ids were not found for message: ' + str(attachment_ids))
+
+ return attachments
+
+
+def delete_attachments_for_message(item_id, target_mailbox=None, attachment_ids=None):
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ attachments = get_attachments_for_item(item_id, account, attachment_ids)
+ deleted_file_attachments = []
+ deleted_item_attachments = [] # type: ignore
+ for attachment in attachments:
+ attachment_deleted_action = {
+ ATTACHMENT_ID: attachment.attachment_id.id,
+ ACTION: 'deleted'
+ }
+ if isinstance(attachment, FileAttachment):
+ deleted_file_attachments.append(attachment_deleted_action)
+ else:
+ deleted_item_attachments.append(attachment_deleted_action)
+ attachment.detach()
+
+ entries = []
+ if len(deleted_file_attachments) > 0:
+ entry = get_entry_for_object("Deleted file attachments",
+ "EWS.Items" + CONTEXT_UPDATE_FILE_ATTACHMENT,
+ deleted_file_attachments)
+ entries.append(entry)
+ if len(deleted_item_attachments) > 0:
+ entry = get_entry_for_object("Deleted item attachments",
+ "EWS.Items" + CONTEXT_UPDATE_ITEM_ATTACHMENT,
+ deleted_item_attachments)
+ entries.append(entry)
+
+ return entries
+
+
+def fetch_attachments_for_message(item_id, target_mailbox=None, attachment_ids=None):
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ attachments = get_attachments_for_item(item_id, account, attachment_ids)
+ entries = []
+ for attachment in attachments:
+ if isinstance(attachment, FileAttachment):
+ try:
+ if attachment.content:
+ entries.append(get_entry_for_file_attachment(item_id, attachment))
+ except TypeError, e:
+ if e.message != "must be string or buffer, not None":
+ raise
+ else:
+ entries.append(get_entry_for_item_attachment(item_id, attachment, account.primary_smtp_address))
+ if attachment.item.mime_content:
+ entries.append(fileResult(get_attachment_name(attachment.name) + ".eml", attachment.item.mime_content))
+
+ return entries
+
+
+def move_item_between_mailboxes(item_id, destination_mailbox, destination_folder_path, source_mailbox=None,
+ is_public=None):
+ source_account = get_account(source_mailbox or ACCOUNT_EMAIL)
+ destination_account = get_account(destination_mailbox or ACCOUNT_EMAIL)
+ is_public = is_default_folder(destination_folder_path, is_public)
+ destination_folder = get_folder_by_path(destination_account, destination_folder_path, is_public)
+ item = get_item_from_mailbox(source_account, item_id)
+ try:
+ source_account.bulk_move(ids=[item], to_folder=destination_folder)
+ except ErrorToFolderNotFound:
+ exported_items = source_account.export([item])
+ destination_account.upload([(destination_folder, exported_items[0])])
+ source_account.bulk_delete([item])
+
+ move_result = {
+ MOVED_TO_MAILBOX: destination_mailbox,
+ MOVED_TO_FOLDER: destination_folder_path,
+ }
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': "Item was moved successfully.",
+ 'ContentsFormat': formats['text'],
+ ENTRY_CONTEXT: {
+ "EWS.Items(val.itemId === '%s')" % (item_id,): move_result
+ }
+ }
+
+
+def move_item(item_id, target_folder_path, target_mailbox=None, is_public=None):
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ is_public = is_default_folder(target_folder_path, is_public)
+ target_folder = get_folder_by_path(account, target_folder_path, is_public)
+ item = get_item_from_mailbox(account, item_id)
+ if isinstance(item, ErrorInvalidIdMalformed):
+ raise Exception("Item not found")
+ item.move(target_folder)
+ move_result = {
+ NEW_ITEM_ID: item.item_id,
+ ITEM_ID: item_id,
+ MESSAGE_ID: item.message_id,
+ ACTION: 'moved'
+ }
+
+ return get_entry_for_object('Moved items',
+ CONTEXT_UPDATE_EWS_ITEM,
+ move_result)
+
+
+def delete_items(item_ids, delete_type, target_mailbox=None):
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ deleted_items = []
+ if type(item_ids) != list:
+ item_ids = item_ids.split(",")
+ items = get_items_from_mailbox(account, item_ids)
+ delete_type = delete_type.lower()
+
+ for item in items:
+ item_id = item.item_id
+ if delete_type == 'trash':
+ item.move_to_trash()
+ elif delete_type == 'soft':
+ item.soft_delete()
+ elif delete_type == 'hard':
+ item.delete()
+ else:
+            raise Exception('Invalid delete type: %s. Use "trash" \\ "soft" \\ "hard"' % delete_type)
+ deleted_items.append({
+ ITEM_ID: item_id,
+ MESSAGE_ID: item.message_id,
+ ACTION: '%s-deleted' % delete_type
+ })
+
+ return get_entry_for_object('Deleted items (%s delete type)' % delete_type,
+ CONTEXT_UPDATE_EWS_ITEM,
+ deleted_items)
+
+
+def prepare_args(d):
+ d = dict((k.replace("-", "_"), v) for k, v in d.items())
+ if 'is_public' in d:
+ if exchangelib.__version__ != "1.12.0": # Docker BC
+ raise Exception(PUBLIC_FOLDERS_ERROR)
+ else:
+ d['is_public'] = d['is_public'] == 'True'
+ return d
+
+
+def get_limited_number_of_messages_from_qs(qs, limit):
+ count = 0
+ results = []
+ for item in qs:
+ if count == limit:
+ break
+ if isinstance(item, Message):
+ count += 1
+ results.append(item)
+ return results
+
+
+def search_items_in_mailbox(query=None, message_id=None, folder_path='', limit=100, target_mailbox=None,
+ is_public=None, selected_fields='all'):
+ if not query and not message_id:
+ return_error("Missing required argument. Provide query or message-id")
+
+ if message_id and message_id[0] != '<' and message_id[-1] != '>':
+ message_id = '<{}>'.format(message_id)
+
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ limit = int(limit)
+ if folder_path.lower() == 'inbox':
+ folders = [account.inbox]
+ elif folder_path:
+ is_public = is_default_folder(folder_path, is_public)
+ folders = [get_folder_by_path(account, folder_path, is_public)]
+ else:
+ folders = account.inbox.parent.walk() # pylint: disable=E1101
+
+ items = [] # type: ignore
+ selected_all_fields = (selected_fields == 'all')
+
+ if selected_all_fields:
+ restricted_fields = list(map(lambda x: x.name, Message.FIELDS)) # type: ignore
+ else:
+ restricted_fields = set(argToList(selected_fields)) # type: ignore
+ restricted_fields.update(['id', 'message_id']) # type: ignore
+
+ for folder in folders:
+ if Message not in folder.supported_item_models:
+ continue
+ if query:
+ items_qs = folder.filter(query).only(*restricted_fields)
+ else:
+ items_qs = folder.filter(message_id=message_id).only(*restricted_fields)
+ items += get_limited_number_of_messages_from_qs(items_qs, limit)
+ if len(items) >= limit:
+ break
+
+ items = items[:limit]
+ searched_items_result = map(
+ lambda item: parse_item_as_dict(item, account.primary_smtp_address, camel_case=True,
+ compact_fields=selected_all_fields), items)
+
+ if not selected_all_fields:
+ searched_items_result = [
+ {k: v for (k, v) in i.iteritems()
+ if k in keys_to_camel_case(restricted_fields)} for i in searched_items_result]
+
+ for item in searched_items_result:
+ item['itemId'] = item.pop('id', '')
+
+ return get_entry_for_object('Searched items',
+ CONTEXT_UPDATE_EWS_ITEM,
+ searched_items_result,
+ headers=ITEMS_RESULTS_HEADERS if selected_all_fields else None)
+
+
+def get_out_of_office_state(target_mailbox=None):
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ oof = account.oof_settings
+ oof_dict = {
+ 'state': oof.state, # pylint: disable=E1101
+ 'externalAudience': getattr(oof, 'external_audience', None),
+ 'start': oof.start.ewsformat() if oof.start else None, # pylint: disable=E1101
+ 'end': oof.end.ewsformat() if oof.end else None, # pylint: disable=E1101
+        'internalReply': getattr(oof, 'internal_reply', None),
+        'externalReply': getattr(oof, 'external_reply', None),
+ MAILBOX: account.primary_smtp_address
+ }
+ return get_entry_for_object("Out of office state for %s" % account.primary_smtp_address,
+ 'Account.Email(val.Address == obj.{0}).OutOfOffice'.format(MAILBOX),
+ oof_dict)
+
+
+def recover_soft_delete_item(message_ids, target_folder_path="Inbox", target_mailbox=None, is_public=None):
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ is_public = is_default_folder(target_folder_path, is_public)
+ target_folder = get_folder_by_path(account, target_folder_path, is_public)
+ recovered_messages = []
+ if type(message_ids) != list:
+ message_ids = message_ids.split(",")
+ items_to_recover = account.recoverable_items_deletions.filter( # pylint: disable=E1101
+ message_id__in=message_ids).all() # pylint: disable=E1101
+ if len(items_to_recover) != len(message_ids):
+ raise Exception("Some message ids are missing in recoverable items directory")
+ for item in items_to_recover:
+ item.move(target_folder)
+ recovered_messages.append({
+ ITEM_ID: item.item_id,
+ MESSAGE_ID: item.message_id,
+ ACTION: 'recovered'
+ })
+ return get_entry_for_object("Recovered messages",
+ CONTEXT_UPDATE_EWS_ITEM,
+ recovered_messages)
+
+
+def get_contacts(limit, target_mailbox=None):
+ def parse_physical_address(address):
+ result = {}
+ for attr in ['city', 'country', 'label', 'state', 'street', 'zipcode']:
+ result[attr] = getattr(address, attr, None)
+ return result
+
+ def parse_phone_number(phone_number):
+ result = {}
+ for attr in ['label', 'phone_number']:
+ result[attr] = getattr(phone_number, attr, None)
+ return result
+
+ def parse_contact(contact):
+ contact_dict = dict((k, v if not isinstance(v, EWSDateTime) else v.ewsformat())
+ for k, v in contact.__dict__.items()
+ if isinstance(v, basestring) or isinstance(v, EWSDateTime))
+ if isinstance(contact, Contact) and contact.physical_addresses:
+ contact_dict['physical_addresses'] = map(parse_physical_address, contact.physical_addresses)
+ if isinstance(contact, Contact) and contact.phone_numbers:
+ contact_dict['phone_numbers'] = map(parse_phone_number, contact.phone_numbers)
+ if isinstance(contact, Contact) and contact.email_addresses and len(contact.email_addresses) > 0:
+ contact_dict['emailAddresses'] = map(lambda x: x.email, contact.email_addresses)
+ contact_dict = keys_to_camel_case(contact_dict)
+ contact_dict = dict((k, v) for k, v in contact_dict.items() if v)
+        contact_dict.pop('mimeContent', None)  # may already have been dropped as empty
+ contact_dict['originMailbox'] = target_mailbox
+ return contact_dict
+
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ contacts = []
+
+ for contact in account.contacts.all()[:int(limit)]: # pylint: disable=E1101
+ contacts.append(parse_contact(contact))
+ return get_entry_for_object('Email contacts for %s' % target_mailbox,
+ 'Account.Email(val.Address == obj.originMailbox).EwsContacts',
+ contacts)
+
+
+def create_folder(new_folder_name, folder_path, target_mailbox=None):
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ full_path = "%s\\%s" % (folder_path, new_folder_name)
+ try:
+ if get_folder_by_path(account, full_path):
+ return "Folder %s already exists" % full_path
+ except Exception:
+ pass
+ parent_folder = get_folder_by_path(account, folder_path)
+ f = Folder(parent=parent_folder, name=new_folder_name)
+ f.save()
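+    # resolving the full path verifies that the folder was actually created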
+ get_folder_by_path(account, full_path)
+ return "Folder %s created successfully" % full_path
+
+
+def find_folders(target_mailbox=None, is_public=None):
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ root = account.root
+ if exchangelib.__version__ == "1.12.0": # Docker BC
+ if is_public:
+ root = account.public_folders_root
+ folders = []
+ for f in root.walk(): # pylint: disable=E1101
+ folder = folder_to_context_entry(f)
+ folders.append(folder)
+ folders_tree = root.tree() # pylint: disable=E1101
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': folders,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': folders_tree,
+ ENTRY_CONTEXT: {
+ 'EWS.Folders(val.id == obj.id)': folders
+ }
+ }
+
+
+def mark_item_as_junk(item_id, move_items, target_mailbox=None):
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ move_items = (move_items.lower() == "yes")
+ ews_result = MarkAsJunk(account=account).call(item_id=item_id, move_item=move_items)
+ mark_as_junk_result = {
+ ITEM_ID: item_id,
+ }
+ if ews_result == "Success":
+ mark_as_junk_result[ACTION] = 'marked-as-junk'
+ else:
+ raise Exception("Failed mark-item-as-junk with error: " + ews_result)
+
+ return get_entry_for_object('Mark item as junk',
+ CONTEXT_UPDATE_EWS_ITEM,
+ mark_as_junk_result)
+
+
+def get_items_from_folder(folder_path, limit=100, target_mailbox=None, is_public=None, get_internal_item='no'):
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ limit = int(limit)
+ get_internal_item = (get_internal_item == 'yes')
+ is_public = is_default_folder(folder_path, is_public)
+ folder = get_folder_by_path(account, folder_path, is_public)
+ qs = folder.filter().order_by('-datetime_created')[:limit]
+ items = get_limited_number_of_messages_from_qs(qs, limit)
+ items_result = []
+
+ for item in items:
+ item_attachment = parse_item_as_dict(item, account.primary_smtp_address, camel_case=True,
+ compact_fields=True)
+ for attachment in item.attachments:
+ if get_internal_item and isinstance(attachment, ItemAttachment) and isinstance(attachment.item,
+ Message):
+            # if an item attachment is found - switch the item to the attachment
+ item_attachment = parse_item_as_dict(attachment.item, account.primary_smtp_address, camel_case=True,
+ compact_fields=True)
+ break
+ items_result.append(item_attachment)
+
+ hm_headers = ['sender', 'subject', 'hasAttachments', 'datetimeReceived',
+ 'receivedBy', 'author', 'toRecipients', ]
+ if exchangelib.__version__ == "1.12.0": # Docker BC
+ hm_headers.append('itemId')
+ return get_entry_for_object('Items in folder ' + folder_path,
+ CONTEXT_UPDATE_EWS_ITEM,
+ items_result,
+ headers=hm_headers)
+
+
+def get_items(item_ids, target_mailbox=None):
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ if type(item_ids) != list:
+ item_ids = item_ids.split(",")
+
+ items = get_items_from_mailbox(account, item_ids)
+ items = [x for x in items if isinstance(x, Message)]
+ items_as_incidents = map(lambda x: parse_incident_from_item(x, False), items)
+ items_to_context = map(lambda x: parse_item_as_dict(x, account.primary_smtp_address, True, True), items)
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': items_as_incidents,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Get items', items_to_context, ITEMS_RESULTS_HEADERS),
+ ENTRY_CONTEXT: {
+ CONTEXT_UPDATE_EWS_ITEM: items_to_context
+ }
+ }
+
+
+def get_folder(folder_path, target_mailbox=None, is_public=None):
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ is_public = is_default_folder(folder_path, is_public)
+ folder = folder_to_context_entry(get_folder_by_path(account, folder_path, is_public))
+ return get_entry_for_object("Folder %s" % (folder_path,), CONTEXT_UPDATE_FOLDER, folder)
+
+
+def folder_to_context_entry(f):
+ f_entry = {
+ 'name': f.name,
+ 'totalCount': f.total_count,
+ 'id': f.folder_id,
+ 'childrenFolderCount': f.child_folder_count,
+ 'changeKey': f.changekey
+ }
+
+ if 'unread_count' in map(lambda x: x.name, Folder.FIELDS):
+ f_entry['unreadCount'] = f.unread_count
+ return f_entry
+
+
+def check_cs_prereqs():
+ if 'outlook.office365.com' not in EWS_SERVER:
+ raise Exception("This command is only supported for Office 365")
+ if exchangelib.__version__ != "1.12.0":
+ raise Exception("Please update your docker image to use this command")
+
+
+def get_cs_error(stderr):
+ return {
+ "Type": entryTypes["error"],
+ "ContentsFormat": formats["text"],
+ "Contents": stderr
+ } if stderr else None
+
+
+def get_cs_status(search_name, status):
+ return {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Search {} status: {}'.format(search_name, status),
+ 'EntryContext': {
+ 'EWS.ComplianceSearch(val.Name === obj.Name)': {'Name': search_name, 'Status': status}
+ }
+ }
+
+
+def start_compliance_search(query):
+ check_cs_prereqs()
+ try:
+ with open("startcompliancesearch2.ps1", "w+") as f:
+ f.write(START_COMPLIANCE)
+
+ output = subprocess.Popen(["pwsh", "startcompliancesearch2.ps1", USERNAME, query],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ stdout, stderr = output.communicate(input=PASSWORD.encode())
+
+ finally:
+ os.remove("startcompliancesearch2.ps1")
+
+ if stderr:
+ return get_cs_error(stderr)
+
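+    # The script emits '"Action status: <name>"', where <name> is "DemistoSearch" (13 chars)
+    # followed by a GUID with its dashes stripped (32 chars) - 45 characters in total.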
+ prefix = '"Action status: '
+ pref_ind = stdout.find(prefix)
+ sub_start = pref_ind + len(prefix)
+ sub_end = sub_start + 45
+ search_name = stdout[sub_start:sub_end]
+
+ return {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Search started: {}'.format(search_name),
+ 'EntryContext': {
+ 'EWS.ComplianceSearch': {'Name': search_name, 'Status': 'Starting'}
+ }
+ }
+
+
+def get_compliance_search(search_name):
+ check_cs_prereqs()
+ try:
+ with open("getcompliancesearch2.ps1", "w+") as f:
+ f.write(GET_COMPLIANCE)
+
+ output = subprocess.Popen(["pwsh", "getcompliancesearch2.ps1", USERNAME, search_name],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = output.communicate(input=PASSWORD.encode())
+
+ finally:
+ os.remove("getcompliancesearch2.ps1")
+
+ if stderr:
+ return get_cs_error(stderr)
+
+ # Get search status
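+    # the password piped over stdin is echoed back at the start of stdout, so strip it first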
+ stdout = stdout[len(PASSWORD):]
+ stdout = stdout.split('\n', 1) # type: ignore
+ results = [get_cs_status(search_name, stdout[0])]
+
+ # Parse search results from script output if the search has completed. Output to warroom as table.
+ if stdout[0] == 'Completed':
+ res = list(r[:-1].split(', ') if r[-1] == ',' else r.split(', ') for r in stdout[1][2:-3].split(r'\r\n'))
+ res = map(lambda x: {k: v for k, v in (s.split(': ') for s in x)}, res)
+ results.append(
+ {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': stdout,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Office 365 Compliance search results', res,
+ ['Location', 'Item count', 'Total size'])
+ }
+ )
+
+ return results
+
+
+def purge_compliance_search(search_name):
+ check_cs_prereqs()
+ try:
+ with open("purgecompliancesearch2.ps1", "w+") as f:
+ f.write(PURGE_COMPLIANCE)
+
+ output = subprocess.Popen(["pwsh", "purgecompliancesearch2.ps1", USERNAME, search_name],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ _, stderr = output.communicate(input=PASSWORD.encode())
+
+ finally:
+ os.remove("purgecompliancesearch2.ps1")
+
+ if stderr:
+ return get_cs_error(stderr)
+
+ return get_cs_status(search_name, 'Purging')
+
+
+def check_purge_compliance_search(search_name):
+ check_cs_prereqs()
+ try:
+ with open("purgestatuscompliancesearch2.ps1", "w+") as f:
+ f.write(PURGE_STATUS_COMPLIANCE)
+
+ output = subprocess.Popen(["pwsh", "purgestatuscompliancesearch2.ps1", USERNAME, search_name],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = output.communicate(input=PASSWORD.encode())
+
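+        # drop the password echoed back from stdin before inspecting the status output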
+ stdout = stdout[len(PASSWORD):]
+
+ finally:
+ os.remove("purgestatuscompliancesearch2.ps1")
+
+ if stderr:
+ return get_cs_error(stderr)
+
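+    # the action status is the last line printed; the trailing newline leaves it at index -2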
+ return get_cs_status(search_name, 'Purged' if stdout.split('\n')[-2] == 'Completed' else 'Purging')
+
+
+def remove_compliance_search(search_name):
+ check_cs_prereqs()
+ try:
+ with open("removecompliance2.ps1", "w+") as f:
+ f.write(REMOVE_COMPLIANCE)
+
+ output = subprocess.Popen(
+ ["pwsh", "removecompliance2.ps1", USERNAME, search_name],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = output.communicate(input=PASSWORD.encode())
+
+ finally:
+ os.remove("removecompliance2.ps1")
+
+ if stderr:
+ return get_cs_error(stderr)
+
+ return get_cs_status(search_name, 'Removed')
+
+
+def get_autodiscovery_config():
+ config_dict = demisto.getIntegrationContext()
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': config_dict,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Auto-Discovery Exchange Configuration', config_dict)
+ }
+
+
+def mark_item_as_read(item_ids, operation='read', target_mailbox=None):
+ marked_items = []
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ item_ids = argToList(item_ids)
+ items = get_items_from_mailbox(account, item_ids)
+ items = [x for x in items if isinstance(x, Message)]
+
+ for item in items:
+ item.is_read = (operation == 'read')
+ item.save()
+
+ marked_items.append({
+ ITEM_ID: item.item_id,
+ MESSAGE_ID: item.message_id,
+ ACTION: 'marked-as-{}'.format(operation)
+ })
+
+ return get_entry_for_object('Marked items ({} marked operation)'.format(operation),
+ CONTEXT_UPDATE_EWS_ITEM,
+ marked_items)
+
+
+def get_item_as_eml(item_id, target_mailbox=None):
+ account = get_account(target_mailbox or ACCOUNT_EMAIL)
+ item = get_item_from_mailbox(account, item_id)
+
+ if item.mime_content:
+ email_content = email.message_from_string(item.mime_content)
+ if item.headers:
+ attached_email_headers = [(h, ' '.join(map(str.strip, v.split('\r\n')))) for (h, v) in
+ email_content.items()]
+ for header in item.headers:
+ if (header.name, header.value) not in attached_email_headers \
+ and header.name != 'Content-Type':
+ email_content.add_header(header.name, header.value)
+
+ eml_name = item.subject if item.subject else 'demisto_untitled_eml'
+ file_result = fileResult(eml_name + ".eml", email_content.as_string())
+ file_result = file_result if file_result else "Failed uploading eml file to war room"
+
+ return file_result
+
+
+def test_module():
+ try:
+ global IS_TEST_MODULE
+ IS_TEST_MODULE = True
+ account = get_account(ACCOUNT_EMAIL)
+ if not account.root.effective_rights.read: # pylint: disable=E1101
+ raise Exception("Success to authenticate, but user has no permissions to read from the mailbox. "
+ "Need to delegate the user permissions to the mailbox - "
+ "please read integration documentation and follow the instructions")
+ get_folder_by_path(account, FOLDER_NAME, IS_PUBLIC_FOLDER).test_access()
+ except ErrorFolderNotFound as e:
+ if "Top of Information Store" in e.message:
+ raise Exception(
+ "Authentication succeeded, but the user probably has no permission to read from the specified folder. "
+ "Check the user permissions. You can try the !ews-find-folders command to "
+ "get the full folder structure that the user has permission to read")
+
+ demisto.results('ok')
+
+
+def get_protocol():
+ if AUTO_DISCOVERY:
+ protocol = get_account_autodiscover(ACCOUNT_EMAIL).protocol
+ else:
+ protocol = config.protocol # type: ignore
+ return protocol
+
+
+def encode_and_submit_results(obj):
+ demisto.results(str_to_unicode(obj))
+
+
+def main():
+ global EWS_SERVER, USERNAME, ACCOUNT_EMAIL, PASSWORD
+ global config, credentials
+ EWS_SERVER = demisto.params()['ewsServer']
+ USERNAME = demisto.params()['credentials']['identifier']
+ ACCOUNT_EMAIL = demisto.params()['defaultTargetMailbox']
+ PASSWORD = demisto.params()['credentials']['password']
+ config, credentials = prepare()
+ args = prepare_args(demisto.args())
+ fix_2010()
+ try:
+ protocol = get_protocol()
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() == 'fetch-incidents':
+ incidents = fetch_emails_as_incidents(ACCOUNT_EMAIL, FOLDER_NAME)
+ demisto.incidents(str_to_unicode(incidents))
+ elif demisto.command() == 'ews-get-attachment':
+ encode_and_submit_results(fetch_attachments_for_message(**args))
+ elif demisto.command() == 'ews-delete-attachment':
+ encode_and_submit_results(delete_attachments_for_message(**args))
+ elif demisto.command() == 'ews-get-searchable-mailboxes':
+ encode_and_submit_results(get_searchable_mailboxes(protocol))
+ elif demisto.command() == 'ews-search-mailboxes':
+ encode_and_submit_results(search_mailboxes(protocol, **args))
+ elif demisto.command() == 'ews-move-item-between-mailboxes':
+ encode_and_submit_results(move_item_between_mailboxes(**args))
+ elif demisto.command() == 'ews-move-item':
+ encode_and_submit_results(move_item(**args))
+ elif demisto.command() == 'ews-delete-items':
+ encode_and_submit_results(delete_items(**args))
+ elif demisto.command() == 'ews-search-mailbox':
+ encode_and_submit_results(search_items_in_mailbox(**args))
+ elif demisto.command() == 'ews-get-contacts':
+ encode_and_submit_results(get_contacts(**args))
+ elif demisto.command() == 'ews-get-out-of-office':
+ encode_and_submit_results(get_out_of_office_state(**args))
+ elif demisto.command() == 'ews-recover-messages':
+ encode_and_submit_results(recover_soft_delete_item(**args))
+ elif demisto.command() == 'ews-create-folder':
+ encode_and_submit_results(create_folder(**args))
+ elif demisto.command() == 'ews-mark-item-as-junk':
+ encode_and_submit_results(mark_item_as_junk(**args))
+ elif demisto.command() == 'ews-find-folders':
+ encode_and_submit_results(find_folders(**args))
+ elif demisto.command() == 'ews-get-items-from-folder':
+ encode_and_submit_results(get_items_from_folder(**args))
+ elif demisto.command() == 'ews-get-items':
+ encode_and_submit_results(get_items(**args))
+ elif demisto.command() == 'ews-get-folder':
+ encode_and_submit_results(get_folder(**args))
+ elif demisto.command() == 'ews-o365-start-compliance-search':
+ encode_and_submit_results(start_compliance_search(**args))
+ elif demisto.command() == 'ews-o365-get-compliance-search':
+ encode_and_submit_results(get_compliance_search(**args))
+ elif demisto.command() == 'ews-o365-purge-compliance-search-results':
+ encode_and_submit_results(purge_compliance_search(**args))
+ elif demisto.command() == 'ews-o365-get-compliance-search-purge-status':
+ encode_and_submit_results(check_purge_compliance_search(**args))
+ elif demisto.command() == 'ews-o365-remove-compliance-search':
+ encode_and_submit_results(remove_compliance_search(**args))
+ elif demisto.command() == 'ews-get-autodiscovery-config':
+ encode_and_submit_results(get_autodiscovery_config())
+ elif demisto.command() == 'ews-expand-group':
+ encode_and_submit_results(get_expanded_group(protocol, **args))
+ elif demisto.command() == 'ews-mark-items-as-read':
+ encode_and_submit_results(mark_item_as_read(**args))
+ elif demisto.command() == 'ews-get-items-as-eml':
+ encode_and_submit_results(get_item_as_eml(**args))
+
+ except Exception as e:
+ import time
+
+ time.sleep(2)
+ start_logging()
+ debug_log = log_stream.getvalue() # type: ignore
+ error_message_simple = ""
+ error_message = ""
+
+ # Office365 regular maintenance case
+ if (isinstance(e, ErrorMailboxStoreUnavailable) or isinstance(e, ErrorMailboxMoveInProgress)) \
+ and 'outlook.office365.com' in EWS_SERVER:
+ log_message = "Office365 is undergoing load balancing operations. " \
+ "As a result, the service is temporarily unavailable."
+ if demisto.command() == 'fetch-incidents':
+ demisto.info(log_message)
+ demisto.incidents([])
+ sys.exit(0)
+ if IS_TEST_MODULE:
+ demisto.results(log_message + " Please retry the instance configuration test.")
+ sys.exit(0)
+ error_message_simple = log_message + " Please retry your request."
+
+ # Other exception handling
+ if isinstance(e.message, Exception):
+ e.message = str(e.message)
+
+ if isinstance(e, ConnectionError):
+ error_message_simple = "Could not connect to the server.\n" \
+ "Verify that the Hostname or IP address is correct.\n\n" \
+ "Additional information: {}".format(e.message)
+ if isinstance(e, ErrorInvalidPropertyRequest):
+ error_message_simple = "Verify that the Exchange version is correct."
+ elif exchangelib.__version__ == "1.12.0":
+ from exchangelib.errors import MalformedResponseError
+
+ if IS_TEST_MODULE and isinstance(e, MalformedResponseError):
+ error_message_simple = "Got invalid response from the server.\n" \
+ "Verify that the Hostname or IP address is is correct."
+
+ # Legacy error handling
+ if "Status code: 401" in debug_log:
+ error_message_simple = "Got unauthorized from the server. " \
+ "Check credentials are correct and authentication method are supported. "
+
+ error_message_simple += "You can try using 'domain\\username' as username for authentication. " \
+ if AUTH_METHOD_STR.lower() == 'ntlm' else ''
+ if "Status code: 503" in debug_log:
+ error_message_simple = "Got timeout from the server. " \
+ "Probably the server is not reachable with the current settings. " \
+ "Check proxy parameter. If you are using server URL - change to server IP address. "
+
+ if not error_message_simple:
+ error_message = error_message_simple = str(e.message)
+ else:
+ error_message = error_message_simple + "\n" + str(e.message)
+
+ stacktrace = traceback.format_exc()
+ if stacktrace:
+ error_message += "\nFull stacktrace:\n" + stacktrace
+
+ if debug_log:
+ error_message += "\nFull debug log:\n" + debug_log
+
+ if demisto.command() == 'fetch-incidents':
+ raise
+ if demisto.command() == 'ews-search-mailbox' and isinstance(e, ValueError):
+ return_error(message="An invalid field was selected. Please specify a valid field name.", error=e)
+ if IS_TEST_MODULE:
+ demisto.results(error_message_simple)
+ else:
+ demisto.results(
+ {"Type": entryTypes["error"], "ContentsFormat": formats["text"], "Contents": error_message_simple})
+ demisto.error("%s: %s" % (e.__class__.__name__, error_message))
+ finally:
+ if log_stream:
+ try:
+ logging.getLogger().removeHandler(log_handler) # type: ignore
+ log_stream.close()
+ except Exception as ex:
+ demisto.error("EWS: unexpected exception when trying to remove log handler: {}".format(ex))
+
+
+# python2 uses __builtin__ and python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/EWSv2/EWSv2.yml b/Integrations/EWSv2/EWSv2.yml
new file mode 100644
index 000000000000..97697efa148e
--- /dev/null
+++ b/Integrations/EWSv2/EWSv2.yml
@@ -0,0 +1,1180 @@
+category: Messaging
+commonfields:
+ id: EWS v2
+ version: -1
+configuration:
+- display: Email address
+ name: credentials
+ required: true
+ type: 9
+- display: Email address from which to fetch incidents
+ name: defaultTargetMailbox
+ required: true
+ type: 0
+- defaultvalue: Inbox
+ display: Name of the folder from which to fetch incidents (supports Exchange Folder
+ ID and sub-folders, e.g., Inbox/Phishing)
+ name: folder
+ required: true
+ type: 0
+- defaultvalue: 'false'
+ display: Public Folder
+ name: isPublicFolder
+ required: false
+ type: 8
+- defaultvalue: 'false'
+ display: Has impersonation rights
+ name: impersonation
+ required: false
+ type: 8
+- defaultvalue: 'false'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Mark fetched emails as read
+ name: markAsRead
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- display: |-
+ ┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉
+ ‎ Manual Mode
+ Exchange Server Hostname or IP address
+ name: ewsServer
+ required: false
+ type: 0
+- display: DOMAIN\USERNAME (e.g. DEMISTO.INT\admin)
+ name: domainAndUserman
+ required: false
+ type: 0
+- display: 'Exchange Server Version (On-Premise only. Supported versions: 2007, 2010,
+ 2010_SP2, 2013, and 2016)'
+ name: defaultServerVersion
+ required: false
+ type: 0
+- defaultvalue: ''
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: |-
+ ┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉
+ ‎ Advanced Mode
+ Override Authentication Type (NTLM, Basic, or Digest).
+ name: authType
+ required: false
+ type: 0
+- defaultvalue: '120'
+ display: Timeout (in seconds) for HTTP requests to Exchange Server
+ name: requestTimeout
+ required: false
+ type: 0
+description: Exchange Web Services and Office 365 (mail)
+display: EWS v2
+name: EWS v2
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The ID of the email message for which to get the attachments.
+ isArray: false
+ name: item-id
+ required: true
+ secret: false
+ - default: false
+ description: The mailbox in which this attachment was found. If empty, the default
+ mailbox is used. Otherwise the user might require impersonation rights to
+ this mailbox.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ - default: false
+ description: The IDs of the attachments to get. If empty, all attachments are retrieved
+ from the message. Supports multiple attachments as a comma-separated list or
+ an array.
+ isArray: true
+ name: attachment-ids
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves the actual attachments from an item (email message). To
+ get all attachments for a message, only specify the item-id argument.
+ execution: false
+ name: ews-get-attachment
+ outputs:
+ - contextPath: EWS.Items.FileAttachments.attachmentId
+ description: The attachment ID. Used for file attachments only.
+ type: string
+ - contextPath: EWS.Items.FileAttachments.attachmentName
+ description: The attachment name. Used for file attachments only.
+ type: string
+ - contextPath: EWS.Items.FileAttachments.attachmentSHA256
+ description: The SHA256 hash of the attached file.
+ type: string
+ - contextPath: EWS.Items.FileAttachments.attachmentLastModifiedTime
+ description: The attachment last modified time. Used for file attachments only.
+ type: date
+ - contextPath: EWS.Items.ItemAttachments.datetimeCreated
+ description: The created time of the attached email.
+ type: date
+ - contextPath: EWS.Items.ItemAttachments.datetimeReceived
+ description: The received time of the attached email.
+ type: date
+ - contextPath: EWS.Items.ItemAttachments.datetimeSent
+ description: The sent time of the attached email.
+ type: date
+ - contextPath: EWS.Items.ItemAttachments.receivedBy
+ description: The received by address of the attached email.
+ type: string
+ - contextPath: EWS.Items.ItemAttachments.subject
+ description: The subject of the attached email.
+ type: string
+ - contextPath: EWS.Items.ItemAttachments.textBody
+ description: The body of the attached email (as text).
+ type: string
+ - contextPath: EWS.Items.ItemAttachments.headers
+ description: The headers of the attached email.
+ type: Unknown
+ - contextPath: EWS.Items.ItemAttachments.hasAttachments
+ description: Whether the attached email has attachments.
+ type: boolean
+ - contextPath: EWS.Items.ItemAttachments.itemId
+ description: The attached email item ID.
+ type: string
+ - contextPath: EWS.Items.ItemAttachments.toRecipients
+ description: A list of recipient email addresses for the attached email.
+ type: Unknown
+ - contextPath: EWS.Items.ItemAttachments.body
+ description: The body of the attached email (as HTML).
+ type: string
+ - contextPath: EWS.Items.ItemAttachments.attachmentSHA256
+ description: The SHA256 hash of the attached email (as EML file).
+ type: string
+ - contextPath: EWS.Items.ItemAttachments.FileAttachments.attachmentSHA256
+ description: SHA256 hash of the attached files inside of the attached email.
+ type: string
+ - contextPath: EWS.Items.ItemAttachments.ItemAttachments.attachmentSHA256
+ description: SHA256 hash of the attached emails inside of the attached email.
+ type: string
+ - contextPath: EWS.Items.ItemAttachments.isRead
+ description: The read status of the attachment.
+ type: String
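+ # Example usage (item and attachment IDs below are hypothetical placeholders):
+ #   !ews-get-attachment item-id="AAMkAGM2..." attachment-ids="attachId1,attachId2"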
+ - arguments:
+ - default: false
+ description: The ID of the email message for which to delete attachments.
+ isArray: false
+ name: item-id
+ required: true
+ secret: false
+ - default: false
+ description: The mailbox in which this attachment was found. If empty, the default
+ mailbox is used. Otherwise the user might require impersonation rights to
+ this mailbox.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ - default: false
+ description: A CSV list (or array) of attachment IDs to delete. If empty, all
+ attachments will be deleted from the message.
+ isArray: true
+ name: attachment-ids
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes the attachments of an item (email message).
+ execution: false
+ name: ews-delete-attachment
+ outputs:
+ - contextPath: EWS.Items.FileAttachments.attachmentId
+ description: The ID of the deleted attachment, in case of file attachment.
+ type: string
+ - contextPath: EWS.Items.ItemAttachments.attachmentId
+ description: The ID of the deleted attachment, in case of other attachment (for
+ example, "email").
+ type: string
+ - contextPath: EWS.Items.FileAttachments.action
+ description: 'The deletion action in case of file attachment. This is a constant
+ value: ''deleted''.'
+ type: string
+ - contextPath: EWS.Items.ItemAttachments.action
+ description: 'The deletion action in case of other attachment (for example,
+ "email"). This is a constant value: ''deleted''.'
+ type: string
+ - deprecated: false
+ description: Returns a list of searchable mailboxes. This command requires eDiscovery
+ permissions to the Exchange Server. For more information, see the EWSv2 integration
+ documentation.
+ execution: false
+ name: ews-get-searchable-mailboxes
+ outputs:
+ - contextPath: EWS.Mailboxes.mailbox
+ description: Addresses of the searchable mailboxes.
+ type: string
+ - contextPath: EWS.Mailboxes.mailboxId
+ description: IDs of the searchable mailboxes.
+ type: string
+ - contextPath: EWS.Mailboxes.displayName
+ description: The email display name.
+ type: string
+ - contextPath: EWS.Mailboxes.isExternal
+ description: Whether the mailbox is external.
+ type: boolean
+ - contextPath: EWS.Mailboxes.externalEmailAddress
+ description: The external email address.
+ type: string
+ - arguments:
+ - default: false
+ description: The filter query to search.
+ isArray: false
+ name: filter
+ required: true
+ secret: false
+ - default: false
+ description: The mailbox IDs to search. If empty, all mailboxes are searched.
+ isArray: true
+ name: mailbox-search-scope
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '250'
+ description: Maximum number of results to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: CSV list or array of email addresses.
+ isArray: true
+ name: email_addresses
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches over multiple mailboxes or all Exchange mailboxes. Use either
+ the mailbox-search-scope argument or the email_addresses argument to search specific
+ mailboxes. This command requires eDiscovery permissions to the Exchange Server.
+ For more information, see the EWS v2 integration documentation.
+ execution: false
+ name: ews-search-mailboxes
+ outputs:
+ - contextPath: EWS.Items.itemId
+ description: The item ID.
+ type: string
+ - contextPath: EWS.Items.mailbox
+ description: The mailbox address where the item was found.
+ type: string
+ - contextPath: EWS.Items.subject
+ description: The subject of the email.
+ type: string
+ - contextPath: EWS.Items.toRecipients
+ description: List of recipient email addresses.
+ type: Unknown
+ - contextPath: EWS.Items.sender
+ description: Sender email address.
+ type: string
+ - contextPath: EWS.Items.hasAttachments
+ description: Whether the email has attachments.
+ type: boolean
+ - contextPath: EWS.Items.datetimeSent
+ description: Sent time of the email.
+ type: date
+ - contextPath: EWS.Items.datetimeReceived
+ description: Received time of the email.
+ type: date
+ - arguments:
+ - default: false
+ description: The ID of the item to move.
+ isArray: false
+ name: item-id
+ required: true
+ secret: false
+ - default: false
+ description: The path to the folder to which to move the item. Complex paths
+ are supported, for example, "Inbox\Phishing".
+ isArray: false
+ name: target-folder-path
+ required: true
+ secret: false
+ - default: false
+ description: The mailbox on which to run the command.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether the target folder is a public folder.
+ isArray: false
+ name: is-public
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Moves an item to a different folder in the mailbox.
+ execution: false
+ name: ews-move-item
+ outputs:
+ - contextPath: EWS.Items.newItemID
+ description: The item ID after move.
+ type: string
+ - contextPath: EWS.Items.messageID
+ description: The item message ID.
+ type: string
+ - contextPath: EWS.Items.itemId
+ description: The original item ID.
+ type: string
+ - contextPath: EWS.Items.action
+ description: The action taken. The value will be "moved".
+ type: string
+ - arguments:
+ - default: false
+ description: The item IDs to delete.
+ isArray: false
+ name: item-ids
+ required: true
+ secret: false
+ - default: false
+ defaultValue: soft
+ description: Deletion type. Can be "trash", "soft", or "hard".
+ isArray: false
+ name: delete-type
+ required: true
+ secret: false
+ - default: false
+ description: The mailbox on which to run the command.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes items from a mailbox.
+ execution: false
+ name: ews-delete-items
+ outputs:
+ - contextPath: EWS.Items.itemId
+ description: The deleted item ID.
+ type: string
+ - contextPath: EWS.Items.messageId
+ description: The deleted message ID.
+ type: string
+ - contextPath: EWS.Items.action
+ description: The deletion action. Can be 'trash-deleted', 'soft-deleted', or
+ 'hard-deleted'.
+ type: string
+ - arguments:
+ - default: false
+ description: 'The search query string. For more information about the query
+ syntax, see the Microsoft documentation: https://msdn.microsoft.com/en-us/library/ee693615.aspx'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: The folder path in which to search. If empty, searches all the
+ folders in the mailbox.
+ isArray: false
+ name: folder-path
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: Maximum number of results to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: The mailbox on which to apply the search.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether the folder is a Public Folder.
+ isArray: false
+ name: is-public
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: The message ID of the email. This will be ignored if a query argument
+ is provided.
+ isArray: false
+ name: message-id
+ required: false
+ secret: false
+ - default: false
+ defaultValue: all
+ description: A CSV list of fields to retrieve.
+ isArray: true
+ name: selected-fields
+ predefined:
+ - ''
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for items in the specified mailbox. Specific permissions
+ are needed for this operation to search in a target mailbox other than the default.
+ execution: false
+ name: ews-search-mailbox
+ outputs:
+ - contextPath: EWS.Items.itemId
+ description: The email item ID.
+ type: string
+ - contextPath: EWS.Items.hasAttachments
+ description: Whether the email has attachments.
+ type: boolean
+ - contextPath: EWS.Items.datetimeReceived
+ description: Received time of the email.
+ type: date
+ - contextPath: EWS.Items.datetimeSent
+ description: Sent time of the email.
+ type: date
+ - contextPath: EWS.Items.headers
+ description: Email headers (list).
+ type: Unknown
+ - contextPath: EWS.Items.sender
+ description: Sender email address of the email.
+ type: string
+ - contextPath: EWS.Items.subject
+ description: Subject of the email.
+ type: string
+ - contextPath: EWS.Items.textBody
+ description: Body of the email (as text).
+ type: string
+ - contextPath: EWS.Items.size
+ description: Email size.
+ type: number
+ - contextPath: EWS.Items.toRecipients
+ description: List of email recipients addresses.
+ type: Unknown
+ - contextPath: EWS.Items.receivedBy
+ description: Email received by address.
+ type: Unknown
+ - contextPath: EWS.Items.messageId
+ description: Email message ID.
+ type: string
+ - contextPath: EWS.Items.body
+ description: Body of the email (as HTML).
+ type: string
+ - contextPath: EWS.Items.FileAttachments.attachmentId
+ description: Attachment ID of the file attachment.
+ type: unknown
+ - contextPath: EWS.Items.ItemAttachments.attachmentId
+ description: Attachment ID of the item attachment.
+ type: unknown
+ - contextPath: EWS.Items.FileAttachments.attachmentName
+ description: Attachment name of the file attachment.
+ type: unknown
+ - contextPath: EWS.Items.ItemAttachments.attachmentName
+ description: Attachment name of the item attachment.
+ type: unknown
+ - contextPath: EWS.Items.isRead
+ description: The read status of the email.
+ type: String
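+ # Example usage (query follows the syntax documented in the query argument above):
+ #   !ews-search-mailbox query="subject:Phishing" folder-path="Inbox" limit="50"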
+ - arguments:
+ - default: false
+ description: The mailbox for which to retrieve the contacts.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: Maximum number of results to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves contacts for a specified mailbox.
+ execution: false
+ name: ews-get-contacts
+ outputs:
+ - contextPath: Account.Email.EwsContacts.displayName
+ description: The contact name.
+ type: Unknown
+ - contextPath: Account.Email.EwsContacts.lastModifiedTime
+ description: The time that the contact was last modified.
+ type: Unknown
+ - contextPath: Account.Email.EwsContacts.emailAddresses
+ description: Email addresses of the contact.
+ type: Unknown
+ - contextPath: Account.Email.EwsContacts.physicalAddresses
+ description: Physical addresses of the contact.
+ type: Unknown
+ - contextPath: Account.Email.EwsContacts.phoneNumbers.phoneNumber
+ description: Phone numbers of the contact.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The mailbox for which to get the out-of-office status.
+ isArray: false
+ name: target-mailbox
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the out-of-office status for a specified mailbox.
+ execution: false
+ name: ews-get-out-of-office
+ outputs:
+ - contextPath: Account.Email.OutOfOffice.state
+ description: 'Out-of-office state. Can be "Enabled", "Scheduled", or "Disabled".'
+ type: Unknown
+ - contextPath: Account.Email.OutOfOffice.externalAudience
+ description: Out-of-office external audience. Can be "None", "Known", or "All".
+ type: Unknown
+ - contextPath: Account.Email.OutOfOffice.start
+ description: Out-of-office start date.
+ type: Unknown
+ - contextPath: Account.Email.OutOfOffice.end
+ description: Out-of-office end date.
+ type: Unknown
+ - contextPath: Account.Email.OutOfOffice.internalReply
+ description: Out-of-office internal reply.
+ type: Unknown
+ - contextPath: Account.Email.OutOfOffice.externalReply
+ description: Out-of-office external reply.
+ type: Unknown
+ - contextPath: Account.Email.OutOfOffice.mailbox
+ description: Out-of-office mailbox.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: A CSV list of message IDs. Run the ews-delete-items command
+ to retrieve the message IDs.
+ isArray: false
+ name: message-ids
+ required: true
+ secret: false
+ - default: false
+ defaultValue: Inbox
+ description: The folder path to recover the messages to.
+ isArray: false
+ name: target-folder-path
+ required: true
+ secret: false
+ - default: false
+ description: The mailbox in which the messages were found. If empty, the default
+ mailbox is used. If you specify a different mailbox, you might need impersonation
+ rights to the mailbox.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether the target folder is a Public Folder.
+ isArray: false
+ name: is-public
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Recovers messages that were soft-deleted.
+ execution: false
+ name: ews-recover-messages
+ outputs:
+ - contextPath: EWS.Items.itemId
+ description: The item ID of the recovered item.
+ type: Unknown
+ - contextPath: EWS.Items.messageId
+ description: The message ID of the recovered item.
+ type: Unknown
+ - contextPath: EWS.Items.action
+ description: The action taken on the item. The value will be 'recovered'.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The name of the new folder.
+ isArray: false
+ name: new-folder-name
+ required: true
+ secret: false
+ - default: false
+ defaultValue: Inbox
+ description: Path to locate the new folder. Exchange folder ID is also supported.
+ isArray: false
+ name: folder-path
+ required: true
+ secret: false
+ - default: false
+ description: The mailbox in which to create the folder.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new folder in a specified mailbox.
+ execution: false
+ name: ews-create-folder
+ - arguments:
+ - default: false
+ description: The item ID to mark as junk.
+ isArray: false
+ name: item-id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'yes'
+ description: Whether to move the item from the original folder to the junk folder.
+ isArray: false
+ name: move-items
+ predefined:
+ - 'yes'
+ - 'no'
+ required: false
+ secret: false
+ - default: false
+ description: If empty, the default mailbox is used. If you specify a different
+ mailbox, you might need impersonation rights to the mailbox.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Marks an item as junk. This is commonly used to block an email address.
+ For more information, see the Microsoft documentation: https://msdn.microsoft.com/en-us/library/office/dn481311(v=exchg.150).aspx'
+ execution: false
+ name: ews-mark-item-as-junk
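+ # Example usage (item ID below is a hypothetical placeholder):
+ #   !ews-mark-item-as-junk item-id="AAMkAGM2..." move-items="yes"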
+ - arguments:
+ - default: false
+ description: The mailbox on which to apply the command.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether to find Public Folders.
+ isArray: false
+ name: is-public
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves information about the folders of a specified mailbox. Only
+ folders with read permissions are returned. The mailbox's visible folders, such
+ as "Inbox", reside under the "Top of Information Store" folder.
+ execution: false
+ name: ews-find-folders
+ outputs:
+ - contextPath: EWS.Folders.name
+ description: Folder name.
+ type: string
+ - contextPath: EWS.Folders.id
+ description: Folder ID.
+ type: string
+ - contextPath: EWS.Folders.totalCount
+ description: Number of items in folder.
+ type: Unknown
+ - contextPath: EWS.Folders.unreadCount
+ description: Number of unread items in the folder.
+ type: number
+ - contextPath: EWS.Folders.changeKey
+ description: Folder change key.
+ type: number
+ - contextPath: EWS.Folders.childrenFolderCount
+ description: Number of sub-folders.
+ type: number
+ - arguments:
+ - default: false
+ description: The folder path from which to get the items.
+ isArray: false
+ name: folder-path
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: Maximum number of items to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: The mailbox on which to apply the command.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether the folder is a Public Folder. Default is 'False'.
+ isArray: false
+ name: is-public
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'no'
+ description: If the email item contains another email as an attachment (EML
+ or MSG file), whether to retrieve the EML/MSG file attachment. Can be "yes"
+ or "no". Default is "no".
+ isArray: false
+ name: get-internal-item
+ predefined:
+ - 'yes'
+ - 'no'
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves items from a specified folder in a mailbox. Items are
+ ordered by creation time, most recent first.
+ execution: false
+ name: ews-get-items-from-folder
+ outputs:
+ - contextPath: EWS.Items.itemId
+ description: The item ID of the email.
+ type: string
+ - contextPath: EWS.Items.hasAttachments
+ description: Whether the email has attachments.
+ type: boolean
+ - contextPath: EWS.Items.datetimeReceived
+ description: Received time of the email.
+ type: date
+ - contextPath: EWS.Items.datetimeSent
+ description: Sent time of the email.
+ type: date
+ - contextPath: EWS.Items.headers
+ description: Email headers (list).
+ type: Unknown
+ - contextPath: EWS.Items.sender
+ description: Sender mail address of the email.
+ type: string
+ - contextPath: EWS.Items.subject
+ description: Subject of the email.
+ type: string
+ - contextPath: EWS.Items.textBody
+ description: Body of the email (as text).
+ type: string
+ - contextPath: EWS.Items.size
+ description: Email size.
+ type: number
+ - contextPath: EWS.Items.toRecipients
+ description: Email recipients addresses (list).
+ type: Unknown
+ - contextPath: EWS.Items.receivedBy
+ description: Received by address of the email.
+ type: Unknown
+ - contextPath: EWS.Items.messageId
+ description: Email message ID.
+ type: string
+ - contextPath: EWS.Items.body
+ description: Body of the email (as HTML).
+ type: string
+ - contextPath: EWS.Items.FileAttachments.attachmentId
+ description: Attachment ID of file attachment.
+ type: unknown
+ - contextPath: EWS.Items.ItemAttachments.attachmentId
+ description: Attachment ID of the item attachment.
+ type: unknown
+ - contextPath: EWS.Items.FileAttachments.attachmentName
+ description: Attachment name of the file attachment.
+ type: unknown
+ - contextPath: EWS.Items.ItemAttachments.attachmentName
+ description: Attachment name of the item attachment.
+ type: unknown
+ - contextPath: EWS.Items.isRead
+ description: The read status of the email.
+ type: String
+ - arguments:
+ - default: false
+ description: A CSV list of item IDs.
+ isArray: true
+ name: item-ids
+ required: true
+ secret: false
+ - default: false
+ description: The mailbox on which to run the command.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves items by item ID.
+ execution: false
+ name: ews-get-items
+ outputs:
+ - contextPath: EWS.Items.itemId
+ description: The email item ID.
+ type: string
+ - contextPath: EWS.Items.hasAttachments
+ description: Whether the email has attachments.
+ type: boolean
+ - contextPath: EWS.Items.datetimeReceived
+ description: Received time of the email.
+ type: date
+ - contextPath: EWS.Items.datetimeSent
+ description: Sent time of the email.
+ type: date
+ - contextPath: EWS.Items.headers
+ description: Email headers (list).
+ type: Unknown
+ - contextPath: EWS.Items.sender
+ description: Sender mail address of the email.
+ type: string
+ - contextPath: EWS.Items.subject
+ description: Subject of the email.
+ type: string
+ - contextPath: EWS.Items.textBody
+ description: Body of the email (as text).
+ type: string
+ - contextPath: EWS.Items.size
+ description: Email size.
+ type: number
+ - contextPath: EWS.Items.toRecipients
+ description: Email recipients addresses (list).
+ type: Unknown
+ - contextPath: EWS.Items.receivedBy
+ description: Received by address of the email.
+ type: Unknown
+ - contextPath: EWS.Items.messageId
+ description: Email message ID.
+ type: string
+ - contextPath: EWS.Items.body
+ description: Body of the email (as HTML).
+ type: string
+ - contextPath: EWS.Items.FileAttachments.attachmentId
+ description: Attachment ID of the file attachment.
+ type: unknown
+ - contextPath: EWS.Items.ItemAttachments.attachmentId
+ description: Attachment ID of the item attachment.
+ type: unknown
+ - contextPath: EWS.Items.FileAttachments.attachmentName
+ description: Attachment name of the file attachment.
+ type: unknown
+ - contextPath: EWS.Items.ItemAttachments.attachmentName
+ description: Attachment name of the item attachment.
+ type: unknown
+ - contextPath: EWS.Items.isRead
+ description: The read status of the email.
+ type: String
+ - arguments:
+ - default: false
+ description: The item ID to move.
+ isArray: false
+ name: item-id
+ required: true
+ secret: false
+ - default: false
+ description: The folder in the destination mailbox to which to move the item.
+ You can specify a complex path, for example, "Inbox\Phishing".
+ isArray: false
+ name: destination-folder-path
+ required: true
+ secret: false
+ - default: false
+ description: The mailbox to which to move the item.
+ isArray: false
+ name: destination-mailbox
+ required: true
+ secret: false
+ - default: false
+ description: The mailbox from which to move the item (conventionally called
+ the "target-mailbox", the target mailbox on which to run the command).
+ isArray: false
+ name: source-mailbox
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether the destination folder is a Public Folder. Default is "False".
+ isArray: false
+ name: is-public
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Moves an item from one mailbox to a different mailbox.
+ execution: false
+ name: ews-move-item-between-mailboxes
+ outputs:
+ - contextPath: EWS.Items.movedToMailbox
+ description: The mailbox to which the item was moved.
+ type: string
+ - contextPath: EWS.Items.movedToFolder
+ description: The folder to which the item was moved.
+ type: string
+ - contextPath: EWS.Items.action
+ description: The action taken on the item. The value will be "moved".
+ type: string
+ - arguments:
+ - default: false
+ description: The mailbox on which to apply the search.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ - default: true
+ defaultValue: AllItems
+ description: The path of the folder to retrieve. If empty, the "AllItems" folder
+ is retrieved.
+ isArray: false
+ name: folder-path
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether the folder is a Public Folder. Default is "False".
+ isArray: false
+ name: is-public
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a single folder.
+ execution: false
+ name: ews-get-folder
+ outputs:
+ - contextPath: EWS.Folders.id
+ description: Folder ID.
+ type: string
+ - contextPath: EWS.Folders.name
+ description: Folder name.
+ type: string
+ - contextPath: EWS.Folders.changeKey
+ description: Folder change key.
+ type: string
+ - contextPath: EWS.Folders.totalCount
+ description: Total number of emails in the folder.
+ type: number
+ - contextPath: EWS.Folders.childrenFolderCount
+ description: Number of sub-folders.
+ type: number
+ - contextPath: EWS.Folders.unreadCount
+ description: Number of unread emails in the folder.
+ type: number
+ - arguments:
+ - default: true
+ description: Query to use to find emails.
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ deprecated: false
+ description: Starts a compliance search.
+ execution: false
+ name: ews-o365-start-compliance-search
+ outputs:
+ - contextPath: EWS.ComplianceSearch.Name
+ description: The name of the compliance search.
+ type: string
+ - contextPath: EWS.ComplianceSearch.Status
+ description: The status of the compliance search.
+ type: string
+ - arguments:
+ - default: true
+ description: The name of the compliance search.
+ isArray: false
+ name: search-name
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the status and results of a compliance search.
+ execution: false
+ name: ews-o365-get-compliance-search
+ outputs:
+ - contextPath: EWS.ComplianceSearch.Status
+ description: The status of the compliance search.
+ type: Unknown
+ - arguments:
+ - default: true
+ description: The name of the compliance search.
+ isArray: false
+ name: search-name
+ required: true
+ secret: false
+ deprecated: false
+ description: Purges the results found in the compliance search.
+ execution: false
+ name: ews-o365-purge-compliance-search-results
+ outputs:
+ - contextPath: EWS.ComplianceSearch.Status
+ description: The status of the compliance search.
+ type: string
+ - arguments:
+ - default: true
+ description: The name of the compliance search.
+ isArray: false
+ name: search-name
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes the compliance search.
+ execution: false
+ name: ews-o365-remove-compliance-search
+ outputs:
+ - contextPath: EWS.ComplianceSearch.Status
+ description: The status of the compliance search.
+ type: string
+ - arguments:
+ - default: true
+ description: The name of the compliance search.
+ isArray: false
+ name: search-name
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks the status of the purge operation on the compliance search.
+ execution: false
+ name: ews-o365-get-compliance-search-purge-status
+ outputs:
+ - contextPath: EWS.ComplianceSearch.Status
+ description: The status of the compliance search.
+ type: Unknown
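+ # The five ews-o365-* commands above form a single compliance-search workflow
+ # (search-name is the Name returned by the start command; values are placeholders):
+ #   !ews-o365-start-compliance-search query="subject:Phishing"
+ #   !ews-o365-get-compliance-search search-name="SearchName"
+ #   !ews-o365-purge-compliance-search-results search-name="SearchName"
+ #   !ews-o365-get-compliance-search-purge-status search-name="SearchName"
+ #   !ews-o365-remove-compliance-search search-name="SearchName"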
+ - deprecated: false
+ description: Returns the auto-discovery information. Can be used to manually configure
+ the Exchange Server.
+ execution: false
+ name: ews-get-autodiscovery-config
+ - arguments:
+ - default: false
+ description: Email address of the group to expand.
+ isArray: false
+ name: email-address
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: Whether to enable recursive expansion. Default is "False".
+ isArray: false
+ name: recursive-expansion
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Expands a distribution list to display all members. By default, expands
+ only the first layer of the distribution list. If recursive-expansion is "True",
+ the command expands nested distribution lists and returns all members.
+ execution: false
+ name: ews-expand-group
+ - arguments:
+ - default: false
+ description: A CSV list of item IDs.
+ isArray: true
+ name: item-ids
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: read
+ description: How to mark the item. Can be "read" or "unread". Default is "read".
+ isArray: false
+ name: operation
+ predefined:
+ - read
+ - unread
+ required: false
+ secret: false
+ - default: false
+ description: The mailbox on which to run the command. If empty, the command
+ will be applied on the default mailbox.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ deprecated: false
+ description: Marks items as read or unread.
+ execution: false
+ name: ews-mark-items-as-read
+ outputs:
+ - contextPath: EWS.Items.action
+ description: The action that was performed on the item.
+ type: String
+ - contextPath: EWS.Items.itemId
+ description: The ID of the item.
+ type: String
+ - contextPath: EWS.Items.messageId
+ description: The message ID of the item.
+ type: String
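+ # Example usage (item IDs below are hypothetical placeholders):
+ #   !ews-mark-items-as-read item-ids="itemId1,itemId2" operation="unread"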
+ - arguments:
+ - default: false
+ description: The item ID of the item to upload as an EML file.
+ isArray: false
+ name: item-id
+ required: true
+ secret: false
+ - default: false
+ description: The mailbox in which this email was found. If empty, the default
+ mailbox is used. Otherwise the user might require impersonation rights to
+ this mailbox.
+ isArray: false
+ name: target-mailbox
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves an item by item ID and uploads its content as an EML file.
+ execution: false
+ name: ews-get-items-as-eml
+ outputs:
+ - contextPath: File.Size
+ description: The size of the file.
+ type: String
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: String
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - contextPath: File.SHA512
+ description: The SHA512 hash of the file.
+ type: String
+ - contextPath: File.Name
+ description: The name of the file.
+ type: String
+ - contextPath: File.SSDeep
+ description: The SSDeep hash of the file.
+ type: String
+ - contextPath: File.EntryID
+ description: The entry ID of the file.
+ type: String
+ - contextPath: File.Info
+ description: Information about the file.
+ type: String
+ - contextPath: File.Type
+ description: The file type.
+ type: String
+ - contextPath: File.MD5
+ description: The MD5 hash of the file.
+ type: String
+ - contextPath: File.Extension
+ description: The extension of the file.
+ type: String
+ dockerimage: demisto/py-ews:2.0
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- pyEWS_Test
+- EWS search-mailbox test
diff --git a/Integrations/EWSv2/EWSv2_description.md b/Integrations/EWSv2/EWSv2_description.md
new file mode 100644
index 000000000000..71f41a89cac2
--- /dev/null
+++ b/Integrations/EWSv2/EWSv2_description.md
@@ -0,0 +1,2 @@
+For additional information go to:
+[https://support.demisto.com/hc/en-us/articles/360002253814-EWSv2](https://support.demisto.com/hc/en-us/articles/360002253814-EWSv2)
\ No newline at end of file
diff --git a/Integrations/EWSv2/EWSv2_image.png b/Integrations/EWSv2/EWSv2_image.png
new file mode 100644
index 000000000000..97612d231c0f
Binary files /dev/null and b/Integrations/EWSv2/EWSv2_image.png differ
diff --git a/Integrations/EWSv2/EWSv2_test.py b/Integrations/EWSv2/EWSv2_test.py
new file mode 100644
index 000000000000..5e2cccf9f072
--- /dev/null
+++ b/Integrations/EWSv2/EWSv2_test.py
@@ -0,0 +1,20 @@
+import EWSv2
+import logging
+
+
+def test_keys_to_camel_case():
+ assert EWSv2.keys_to_camel_case('this_is_a_test') == 'thisIsATest'
+ # assert keys_to_camel_case(('this_is_a_test', 'another_one')) == ('thisIsATest', 'anotherOne')
+ obj = {}
+ obj['this_is_a_value'] = 'the_value'
+ obj['this_is_a_list'] = []
+ obj['this_is_a_list'].append('list_value')
+ res = EWSv2.keys_to_camel_case(obj)
+ assert res['thisIsAValue'] == 'the_value'
+ assert res['thisIsAList'][0] == 'listValue'
+
+
+def test_start_logging():
+ EWSv2.start_logging()
+ logging.getLogger().debug("test this")
+ assert "test this" in EWSv2.log_stream.getvalue()
diff --git a/Integrations/EWSv2/Pipfile b/Integrations/EWSv2/Pipfile
new file mode 100644
index 000000000000..780863586f82
--- /dev/null
+++ b/Integrations/EWSv2/Pipfile
@@ -0,0 +1,44 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+flake8 = "*"
+autopep8 = "*"
+
+[packages]
+asn1crypto = "==0.24.0"
+cached-property = "==1.4.2"
+certifi = "==2018.4.16"
+cffi = "==1.11.5"
+chardet = "==3.0.4"
+cryptography = "==2.3.1"
+defusedxml = "==0.5.0"
+dnspython = "==1.15.0"
+enum34 = "==1.1.6"
+exchangelib = "==1.12.0"
+future = "==0.16.0"
+idna = "==2.6"
+ipaddress = "==1.0.22"
+isodate = "==0.6.0"
+lxml = "==4.2.1"
+ntlm-auth = "==1.1.0"
+pycparser = "==2.18"
+pykerberos = "==1.2.1"
+python-dateutil = "==2.7.3"
+pytz = "==2018.4"
+requests = "==2.18.4"
+requests-kerberos = "==0.12.0"
+six = "==1.11.0"
+tzlocal = "==1.5.1"
+urllib3 = "==1.22"
+virtualenv = "==15.0.3"
+Pygments = "==2.2.0"
+requests_ntlm = "==1.1.0"
+
+[requires]
+python_version = "2.7"
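+
+# To recreate this environment locally (assuming pipenv is available):
+#   pipenv install --dev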
diff --git a/Integrations/EWSv2/Pipfile.lock b/Integrations/EWSv2/Pipfile.lock
new file mode 100644
index 000000000000..8ebf8685c102
--- /dev/null
+++ b/Integrations/EWSv2/Pipfile.lock
@@ -0,0 +1,615 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "6bc6e504fce21b4170e02b247af3c5a24ca063d701a8a99f7dc5cf54b8ff5f09"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "asn1crypto": {
+ "hashes": [
+ "sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87",
+ "sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
+ ],
+ "index": "pypi",
+ "version": "==0.24.0"
+ },
+ "cached-property": {
+ "hashes": [
+ "sha256:67acb3ee8234245e8aea3784a492272239d9c4b487eba2fdcce9d75460d34520",
+ "sha256:bf093e640b7294303c7cc7ba3212f00b7a07d0416c1d923465995c9ef860a139"
+ ],
+ "index": "pypi",
+ "version": "==1.4.2"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7",
+ "sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0"
+ ],
+ "index": "pypi",
+ "version": "==2018.4.16"
+ },
+ "cffi": {
+ "hashes": [
+ "sha256:151b7eefd035c56b2b2e1eb9963c90c6302dc15fbd8c1c0a83a163ff2c7d7743",
+ "sha256:1553d1e99f035ace1c0544050622b7bc963374a00c467edafac50ad7bd276aef",
+ "sha256:1b0493c091a1898f1136e3f4f991a784437fac3673780ff9de3bcf46c80b6b50",
+ "sha256:2ba8a45822b7aee805ab49abfe7eec16b90587f7f26df20c71dd89e45a97076f",
+ "sha256:3bb6bd7266598f318063e584378b8e27c67de998a43362e8fce664c54ee52d30",
+ "sha256:3c85641778460581c42924384f5e68076d724ceac0f267d66c757f7535069c93",
+ "sha256:3eb6434197633b7748cea30bf0ba9f66727cdce45117a712b29a443943733257",
+ "sha256:495c5c2d43bf6cebe0178eb3e88f9c4aa48d8934aa6e3cddb865c058da76756b",
+ "sha256:4c91af6e967c2015729d3e69c2e51d92f9898c330d6a851bf8f121236f3defd3",
+ "sha256:57b2533356cb2d8fac1555815929f7f5f14d68ac77b085d2326b571310f34f6e",
+ "sha256:770f3782b31f50b68627e22f91cb182c48c47c02eb405fd689472aa7b7aa16dc",
+ "sha256:79f9b6f7c46ae1f8ded75f68cf8ad50e5729ed4d590c74840471fc2823457d04",
+ "sha256:7a33145e04d44ce95bcd71e522b478d282ad0eafaf34fe1ec5bbd73e662f22b6",
+ "sha256:857959354ae3a6fa3da6651b966d13b0a8bed6bbc87a0de7b38a549db1d2a359",
+ "sha256:87f37fe5130574ff76c17cab61e7d2538a16f843bb7bca8ebbc4b12de3078596",
+ "sha256:95d5251e4b5ca00061f9d9f3d6fe537247e145a8524ae9fd30a2f8fbce993b5b",
+ "sha256:9d1d3e63a4afdc29bd76ce6aa9d58c771cd1599fbba8cf5057e7860b203710dd",
+ "sha256:a36c5c154f9d42ec176e6e620cb0dd275744aa1d804786a71ac37dc3661a5e95",
+ "sha256:a6a5cb8809091ec9ac03edde9304b3ad82ad4466333432b16d78ef40e0cce0d5",
+ "sha256:ae5e35a2c189d397b91034642cb0eab0e346f776ec2eb44a49a459e6615d6e2e",
+ "sha256:b0f7d4a3df8f06cf49f9f121bead236e328074de6449866515cea4907bbc63d6",
+ "sha256:b75110fb114fa366b29a027d0c9be3709579602ae111ff61674d28c93606acca",
+ "sha256:ba5e697569f84b13640c9e193170e89c13c6244c24400fc57e88724ef610cd31",
+ "sha256:be2a9b390f77fd7676d80bc3cdc4f8edb940d8c198ed2d8c0be1319018c778e1",
+ "sha256:ca1bd81f40adc59011f58159e4aa6445fc585a32bb8ac9badf7a2c1aa23822f2",
+ "sha256:d5d8555d9bfc3f02385c1c37e9f998e2011f0db4f90e250e5bc0c0a85a813085",
+ "sha256:e55e22ac0a30023426564b1059b035973ec82186ddddbac867078435801c7801",
+ "sha256:e90f17980e6ab0f3c2f3730e56d1fe9bcba1891eeea58966e89d352492cc74f4",
+ "sha256:ecbb7b01409e9b782df5ded849c178a0aa7c906cf8c5a67368047daab282b184",
+ "sha256:ed01918d545a38998bfa5902c7c00e0fee90e957ce036a4000a88e3fe2264917",
+ "sha256:edabd457cd23a02965166026fd9bfd196f4324fe6032e866d0f3bd0301cd486f",
+ "sha256:fdf1c1dc5bafc32bc5d08b054f94d659422b05aba244d6be4ddc1c72d9aa70fb"
+ ],
+ "index": "pypi",
+ "version": "==1.11.5"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "cryptography": {
+ "hashes": [
+ "sha256:02602e1672b62e803e08617ec286041cc453e8d43f093a5f4162095506bc0beb",
+ "sha256:10b48e848e1edb93c1d3b797c83c72b4c387ab0eb4330aaa26da8049a6cbede0",
+ "sha256:17db09db9d7c5de130023657be42689d1a5f60502a14f6f745f6f65a6b8195c0",
+ "sha256:227da3a896df1106b1a69b1e319dce218fa04395e8cc78be7e31ca94c21254bc",
+ "sha256:2cbaa03ac677db6c821dac3f4cdfd1461a32d0615847eedbb0df54bb7802e1f7",
+ "sha256:31db8febfc768e4b4bd826750a70c79c99ea423f4697d1dab764eb9f9f849519",
+ "sha256:4a510d268e55e2e067715d728e4ca6cd26a8e9f1f3d174faf88e6f2cb6b6c395",
+ "sha256:6a88d9004310a198c474d8a822ee96a6dd6c01efe66facdf17cb692512ae5bc0",
+ "sha256:76936ec70a9b72eb8c58314c38c55a0336a2b36de0c7ee8fb874a4547cadbd39",
+ "sha256:7e3b4aecc4040928efa8a7cdaf074e868af32c58ffc9bb77e7bf2c1a16783286",
+ "sha256:8168bcb08403ef144ff1fb880d416f49e2728101d02aaadfe9645883222c0aa5",
+ "sha256:8229ceb79a1792823d87779959184a1bf95768e9248c93ae9f97c7a2f60376a1",
+ "sha256:8a19e9f2fe69f6a44a5c156968d9fc8df56d09798d0c6a34ccc373bb186cee86",
+ "sha256:8d10113ca826a4c29d5b85b2c4e045ffa8bad74fb525ee0eceb1d38d4c70dfd6",
+ "sha256:be495b8ec5a939a7605274b6e59fbc35e76f5ad814ae010eb679529671c9e119",
+ "sha256:dc2d3f3b1548f4d11786616cf0f4415e25b0fbecb8a1d2cd8c07568f13fdde38",
+ "sha256:e4aecdd9d5a3d06c337894c9a6e2961898d3f64fe54ca920a72234a3de0f9cb3",
+ "sha256:e79ab4485b99eacb2166f3212218dd858258f374855e1568f728462b0e6ee0d9",
+ "sha256:f995d3667301e1754c57b04e0bae6f0fa9d710697a9f8d6712e8cca02550910f"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "defusedxml": {
+ "hashes": [
+ "sha256:24d7f2f94f7f3cb6061acb215685e5125fbcdc40a857eff9de22518820b0a4f4",
+ "sha256:702a91ade2968a82beb0db1e0766a6a273f33d4616a6ce8cde475d8e09853b20"
+ ],
+ "index": "pypi",
+ "version": "==0.5.0"
+ },
+ "dnspython": {
+ "hashes": [
+ "sha256:40f563e1f7a7b80dc5a4e76ad75c23da53d62f1e15e6e517293b04e1f84ead7c",
+ "sha256:861e6e58faa730f9845aaaa9c6c832851fbf89382ac52915a51f89c71accdd31"
+ ],
+ "index": "pypi",
+ "version": "==1.15.0"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "index": "pypi",
+ "version": "==1.1.6"
+ },
+ "exchangelib": {
+ "hashes": [
+ "sha256:091be9f1fdc3187925875001b09f87c8c1d640242a797868e9e5e31bb9e5e54a",
+ "sha256:3dff2d2100e73d9b92a436b81fb6b4eb06ea557e6d4e21638b13111d5361e540"
+ ],
+ "index": "pypi",
+ "version": "==1.12.0"
+ },
+ "future": {
+ "hashes": [
+ "sha256:e39ced1ab767b5936646cedba8bcce582398233d6a627067d4c6a454c90cfedb"
+ ],
+ "index": "pypi",
+ "version": "==0.16.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "ipaddress": {
+ "hashes": [
+ "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794",
+ "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c"
+ ],
+ "index": "pypi",
+ "version": "==1.0.22"
+ },
+ "isodate": {
+ "hashes": [
+ "sha256:2e364a3d5759479cdb2d37cce6b9376ea504db2ff90252a2e5b7cc89cc9ff2d8",
+ "sha256:aa4d33c06640f5352aca96e4b81afd8ab3b47337cc12089822d6f322ac772c81"
+ ],
+ "index": "pypi",
+ "version": "==0.6.0"
+ },
+ "lxml": {
+ "hashes": [
+ "sha256:01c45df6d90497c20aa2a07789a41941f9a1029faa30bf725fc7f6d515b1afe9",
+ "sha256:0c9fef4f8d444e337df96c54544aeb85b7215b2ed7483bb6c35de97ac99f1bcd",
+ "sha256:0e3cd94c95d30ba9ca3cff40e9b2a14e1a10a4fd8131105b86c6b61648f57e4b",
+ "sha256:0e7996e9b46b4d8b4ac1c329a00e2d10edcd8380b95d2a676fccabf4c1dd0512",
+ "sha256:1858b1933d483ec5727549d3fe166eeb54229fbd6a9d3d7ea26d2c8a28048058",
+ "sha256:1b164bba1320b14905dcff77da10d5ce9c411ac4acc4fb4ed9a2a4d10fae38c9",
+ "sha256:1b46f37927fa6cd1f3fe34b54f1a23bd5bea1d905657289e08e1297069a1a597",
+ "sha256:231047b05907315ae9a9b6925751f9fd2c479cf7b100fff62485a25e382ca0d4",
+ "sha256:28f0c6652c1b130f1e576b60532f84b19379485eb8da6185c29bd8c9c9bc97bf",
+ "sha256:34d49d0f72dd82b9530322c48b70ac78cca0911275da741c3b1d2f3603c5f295",
+ "sha256:3682a17fbf72d56d7e46db2e80ca23850b79c28cfe75dcd9b82f58808f730909",
+ "sha256:3cf2830b9a6ad7f6e965fa53a768d4d2372a7856f20ffa6ce43d2fe9c0d34b19",
+ "sha256:5b653c9379ce29ce271fbe1010c5396670f018e78b643e21beefbb3dc6d291de",
+ "sha256:65a272821d5d8194358d6b46f3ca727fa56a6b63981606eac737c86d27309cdd",
+ "sha256:691f2cd97cf026c611df1ea5055755eec7f878f2d4f4330dc8686583de6fc5fd",
+ "sha256:6b6379495d3baacf7ed755ac68547c8dff6ce5d37bf370f0b7678888dc1283f9",
+ "sha256:75322a531504d4f383264391d89993a42e286da8821ddc5ac315e57305cb84f0",
+ "sha256:7f457cbda964257f443bac861d3a36732dcba8183149e7818ee2fb7c86901b94",
+ "sha256:7ff1fc76d8804e0f870c343a72007ff587090c218b0f92d8ee784ac2b6eaf5b9",
+ "sha256:8523fbde9c2216f3f2b950cb01ebe52e785eaa8a07ffeb456dd3576ca1b4fb9b",
+ "sha256:8f37627f16e026523fca326f1b5c9a43534862fede6c3e99c2ba6a776d75c1ab",
+ "sha256:a7182ea298cc3555ea56ffbb0748fe0d5e0d81451e2bc16d7f4645cd01b1ca70",
+ "sha256:abbd2fb4a5a04c11b5e04eb146659a0cf67bb237dd3d7ca3b9994d3a9f826e55",
+ "sha256:accc9f6b77bed0a6f267b4fae120f6008a951193d548cdbe9b61fc98a08b1cf8",
+ "sha256:bd88c8ce0d1504fdfd96a35911dd4f3edfb2e560d7cfdb5a3d09aa571ae5fbae",
+ "sha256:c557ad647facb3c0027a9d0af58853f905e85a0a2f04dcb73f8e665272fcdc3a",
+ "sha256:defabb7fbb99f9f7b3e0b24b286a46855caef4776495211b066e9e6592d12b04",
+ "sha256:e2629cdbcad82b83922a3488937632a4983ecc0fed3e5cfbf430d069382eeb9b"
+ ],
+ "index": "pypi",
+ "version": "==4.2.1"
+ },
+ "ntlm-auth": {
+ "hashes": [
+ "sha256:3e5c0d07652e81f14b9627139949fd7052a04178b314b57577db957855d3b989",
+ "sha256:d4b21b85cbbf53ec1f16e435898eb9ab80f1e8f1571f0b7e2eb038c0517dd47a"
+ ],
+ "index": "pypi",
+ "version": "==1.1.0"
+ },
+ "pycparser": {
+ "hashes": [
+ "sha256:99a8ca03e29851d96616ad0404b4aad7d9ee16f25c9f9708a11faf2810f7b226"
+ ],
+ "index": "pypi",
+ "version": "==2.18"
+ },
+ "pygments": {
+ "hashes": [
+ "sha256:78f3f434bcc5d6ee09020f92ba487f95ba50f1e3ef83ae96b9d5ffa1bab25c5d",
+ "sha256:dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc"
+ ],
+ "index": "pypi",
+ "version": "==2.2.0"
+ },
+ "pykerberos": {
+ "hashes": [
+ "sha256:4f2dca8df5f84a3be039c026893850d731a8bb38395292e1610ffb0a08ba876c"
+ ],
+ "index": "pypi",
+ "version": "==1.2.1"
+ },
+ "python-dateutil": {
+ "hashes": [
+ "sha256:1adb80e7a782c12e52ef9a8182bebeb73f1d7e24e374397af06fb4956c8dc5c0",
+ "sha256:e27001de32f627c22380a688bcc43ce83504a7bc5da472209b4c70f02829f0b8"
+ ],
+ "index": "pypi",
+ "version": "==2.7.3"
+ },
+ "pytz": {
+ "hashes": [
+ "sha256:65ae0c8101309c45772196b21b74c46b2e5d11b6275c45d251b150d5da334555",
+ "sha256:c06425302f2cf668f1bba7a0a03f3c1d34d4ebeef2c72003da308b3947c7f749"
+ ],
+ "index": "pypi",
+ "version": "==2018.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "requests-kerberos": {
+ "hashes": [
+ "sha256:5733abc0b6524815f6fc72d5c0ec9f3fb89137b852adea2a461c45931f5675e0",
+ "sha256:9d21f15241c53c2ad47e813138b9aee4b9acdd04b82048c4388ade15f40a52fd"
+ ],
+ "index": "pypi",
+ "version": "==0.12.0"
+ },
+ "requests-ntlm": {
+ "hashes": [
+ "sha256:1eb43d1026b64d431a8e0f1e8a8c8119ac698e72e9b95102018214411a8463ea",
+ "sha256:9189c92e8c61ae91402a64b972c4802b2457ce6a799d658256ebf084d5c7eb71"
+ ],
+ "index": "pypi",
+ "version": "==1.1.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9",
+ "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb"
+ ],
+ "index": "pypi",
+ "version": "==1.11.0"
+ },
+ "tzlocal": {
+ "hashes": [
+ "sha256:4ebeb848845ac898da6519b9b31879cf13b6626f7184c496037b818e238f2c4e"
+ ],
+ "index": "pypi",
+ "version": "==1.5.1"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ },
+ "virtualenv": {
+ "hashes": [
+ "sha256:6d9c760d3fc5fa0894b0f99b9de82a4647e1164f0b700a7f99055034bf548b1d",
+ "sha256:cc8164362fc9611d478f784bbc066f3ee74526c50336ec61a6e75d5af97926c8"
+ ],
+ "index": "pypi",
+ "version": "==15.0.3"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "autopep8": {
+ "hashes": [
+ "sha256:4d8eec30cc81bc5617dbf1218201d770dc35629363547f17577c61683ccfb3ee"
+ ],
+ "index": "pypi",
+ "version": "==1.4.4"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.5"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.7.4"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "index": "pypi",
+ "version": "==1.1.6"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548",
+ "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696"
+ ],
+ "index": "pypi",
+ "version": "==3.7.8"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.3'",
+ "version": "==1.0.2"
+ },
+ "functools32": {
+ "hashes": [
+ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0",
+ "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.3.post2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16",
+ "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.3.0"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7",
+ "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db"
+ ],
+ "version": "==0.18"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
+ "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
+ ],
+ "version": "==19.0"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42",
+ "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300"
+ ],
+ "index": "pypi",
+ "version": "==1.9.5"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a",
+ "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03"
+ ],
+ "version": "==2.4.0"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae",
+ "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6"
+ ],
+ "index": "pypi",
+ "version": "==4.6.4"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9",
+ "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb"
+ ],
+ "index": "pypi",
+ "version": "==1.11.0"
+ },
+ "typing": {
+ "hashes": [
+ "sha256:38566c558a0a94d6531012c8e917b1b8518a41e418f7f15f00e129cc80162ad3",
+ "sha256:53765ec4f83a2b720214727e319607879fec4acde22c4fbb54fa2604e79e44ce",
+ "sha256:84698954b4e6719e912ef9a42a2431407fe3755590831699debda6fba92aac55"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==3.7.4"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/EclecticIQ/EclecticIQ.py b/Integrations/EclecticIQ/EclecticIQ.py
new file mode 100644
index 000000000000..8218b05aa7b0
--- /dev/null
+++ b/Integrations/EclecticIQ/EclecticIQ.py
@@ -0,0 +1,790 @@
+''' IMPORTS '''
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import json
+import requests
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+USERNAME = demisto.params().get('credentials').get('identifier')
+PASSWORD = demisto.params().get('credentials').get('password')
+URL = demisto.params()['url']
+SERVER = URL[:-1] if (URL and URL.endswith('/')) else URL
+USE_SSL = not demisto.params().get('insecure', False)
+HEADERS = {} # type: Dict[str, str]
+IP_THRESHOLD = demisto.params().get('ip_threshold').lower()
+URL_THRESHOLD = demisto.params().get('url_threshold').lower()
+FILE_THRESHOLD = demisto.params().get('file_threshold').lower()
+EMAIL_THRESHOLD = demisto.params().get('email_threshold').lower()
+DOMAIN_THRESHOLD = demisto.params().get('domain_threshold').lower()
+
+if not demisto.params().get('proxy'):
+ # use pop() so a missing environment variable does not raise a KeyError
+ os.environ.pop('HTTP_PROXY', None)
+ os.environ.pop('HTTPS_PROXY', None)
+ os.environ.pop('http_proxy', None)
+ os.environ.pop('https_proxy', None)
+ PROXIES = {
+ 'http': None,
+ 'https': None
+ } # type: Dict[str, Optional[str]]
+else:
+ PROXIES = {
+ 'http': os.environ.get('http_proxy') or os.environ.get('HTTP_PROXY'),
+ 'https': os.environ.get('https_proxy') or os.environ.get('HTTPS_PROXY')
+ }
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, headers=HEADERS, cmd_json=None):
+
+ res = requests.request(
+ method,
+ SERVER + url_suffix,
+ headers=headers,
+ json=cmd_json,
+ proxies=PROXIES,
+ verify=USE_SSL
+ )
+
+ if res.status_code not in {200}:
+ if res.status_code == 405:
+ return_error(
+ 'Error in API call to EclecticIQ Integration: [405] - Not Allowed - this might occur '
+ 'because of an invalid URL.'
+ )
+ try: # Parse the error message
+ errors = json.loads(res.text).get('errors', {})[0]
+ title = errors.get('title', '')
+ detail = errors.get('detail', '')
+ return_error(
+ 'Error in API call to EclecticIQ Integration: [%d] - %s - %s' % (res.status_code, title, detail)
+ )
+ except Exception: # In case error message is not in expected format
+ return_error(res.content)
+
+ try: # Verify we can generate json from the response
+ return res.json()
+ except ValueError:
+ return_error(res)
+
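+# The error body this parser expects from EclecticIQ looks roughly like the
+# following (hypothetical example):
+#   {"errors": [{"title": "Unauthorized", "detail": "Invalid credentials"}]}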
+
+def maliciousness_to_dbotscore(maliciousness, threshold):
+
+ """
+
+ Translates an EclecticIQ observable maliciousness confidence level to a DBot score, based on the given threshold
+
+ Parameters
+ ----------
+ maliciousness : str
+ EclecticIQ observable maliciousness confidence level.
+ threshold : str
+ Minimum maliciousness confidence level to consider the IOC malicious.
+
+ Returns
+ -------
+ number
+ Translated DBot Score
+
+ """
+ maliciousness_list = ['unknown', 'safe', 'low', 'medium', 'high']
+
+ maliciousness_dictionary = {
+ 'unknown': 0,
+ 'safe': 1,
+ 'low': 2,
+ 'medium': 2,
+ 'high': 3
+ }
+
+ for i in maliciousness_list[maliciousness_list.index(threshold):]:
+ maliciousness_dictionary[i] = 3
+
+ return maliciousness_dictionary[maliciousness]
+
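+# A quick sanity check of the mapping above (hypothetical calls): with a
+# threshold of 'medium', every level from 'medium' up is raised to 3:
+#   maliciousness_to_dbotscore('low', 'medium')     -> 2 (suspicious)
+#   maliciousness_to_dbotscore('medium', 'medium')  -> 3 (malicious)
+#   maliciousness_to_dbotscore('unknown', 'medium') -> 0 (unknown)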
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+
+ The function which runs when clicking on Test in integration settings
+
+
+ Returns
+ -------
+ str
+ 'ok' if the observable was fetched successfully
+
+ """
+
+ get_observable('8.8.8.8')
+ demisto.results('ok')
+
+
+def login():
+
+ """
+
+ Logs in to the EclecticIQ API with the given credentials and sets the returned token in the request headers
+
+ """
+
+ cmd_url = '/api/auth'
+ cmd_json = {
+ 'password': PASSWORD,
+ 'username': USERNAME
+ }
+ response = http_request('POST', cmd_url, cmd_json=cmd_json)
+ if 'token' in response:
+ token = response['token']
+ else:
+ return_error('Failed to retrieve token')
+ HEADERS['Authorization'] = 'Bearer {}'.format(token)
+
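+# Note: login() mutates the shared HEADERS dict, so every subsequent
+# http_request() call is sent with the Authorization: Bearer <token> header.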
+
+def ip_command():
+
+ """
+
+ Gets reputation of an EclecticIQ IPv4 observable
+
+ Parameters
+ ----------
+ ip : str
+ IPv4 to get reputation of
+
+ Returns
+ -------
+ entry
+ Reputation of given IPv4
+
+ """
+
+ ip = demisto.args()['ip']
+
+ response = get_observable(ip)
+
+ # default output; overwritten below when observables are returned
+ human_readable = 'No results found.'
+
+ integration_outputs = []
+ standard_ip_outputs = []
+
+ observables = response.get('data')
+
+ score = 0
+
+ for observable in observables:
+ meta = observable.get('meta', {})
+ maliciousness = meta.get('maliciousness')
+ score = maliciousness_to_dbotscore(maliciousness, IP_THRESHOLD)
+
+ integration_outputs.append({
+ 'Address': ip,
+ 'Created': observable.get('created_at'),
+ 'LastUpdated': observable.get('last_updated_at'),
+ 'ID': observable.get('id'),
+ 'Maliciousness': maliciousness
+ })
+
+ standard_ip_output = {
+ 'Address': ip
+ }
+ if score == 3:
+ standard_ip_output['Malicious'] = {
+ 'Vendor': 'EclecticIQ',
+ 'Description': 'EclecticIQ maliciousness confidence level: ' + maliciousness
+ }
+
+ standard_ip_outputs.append(standard_ip_output)
+
+ dbot_output = {
+ 'Type': 'ip',
+ 'Indicator': ip,
+ 'Vendor': 'EclecticIQ',
+ 'Score': score
+ }
+
+ context = {
+ 'DBotScore': dbot_output
+ } # type: dict
+
+ if observables:
+ human_readable_title = 'EclecticIQ IP reputation - {}'.format(ip)
+ human_readable = tableToMarkdown(human_readable_title, integration_outputs)
+ context['EclecticIQ.IP'] = createContext(data=integration_outputs, id='ID', removeNull=True)
+ context[outputPaths['ip']] = standard_ip_outputs
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': context
+ })
+
+
+def url_command():
+
+ """
+
+ Gets reputation of an EclecticIQ URI observable
+
+ Parameters
+ ----------
+ url : str
+ URL to get reputation of
+
+ Returns
+ -------
+ entry
+ Reputation of given URL
+
+ """
+
+ url = demisto.args()['url']
+
+ response = get_observable(url)
+
+ # default output; overwritten below when observables are returned
+ human_readable = 'No results found.'
+
+ integration_outputs = []
+ standard_url_outputs = []
+
+ observables = response.get('data')
+
+ score = 0
+
+ for observable in observables:
+ meta = observable.get('meta', {})
+ maliciousness = meta.get('maliciousness')
+ score = maliciousness_to_dbotscore(maliciousness, URL_THRESHOLD)
+
+ integration_outputs.append({
+ 'Data': url,
+ 'Created': observable.get('created_at'),
+ 'LastUpdated': observable.get('last_updated_at'),
+ 'ID': observable.get('id'),
+ 'Maliciousness': maliciousness
+ })
+
+ standard_url_output = {
+ 'Data': url
+ }
+ if score == 3:
+ standard_url_output['Malicious'] = {
+ 'Vendor': 'EclecticIQ',
+ 'Description': 'EclecticIQ maliciousness confidence level: ' + maliciousness
+ }
+
+ standard_url_outputs.append(standard_url_output)
+
+ dbot_output = {
+ 'Type': 'url',
+ 'Indicator': url,
+ 'Vendor': 'EclecticIQ',
+ 'Score': score
+ }
+
+ context = {
+ 'DBotScore': dbot_output
+ } # type: dict
+
+ if observables:
+ human_readable_title = 'EclecticIQ URL reputation - {}'.format(url)
+ human_readable = tableToMarkdown(human_readable_title, integration_outputs)
+ context['EclecticIQ.URL'] = createContext(data=integration_outputs, id='ID', removeNull=True)
+ context[outputPaths['url']] = standard_url_outputs
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': context
+ })
+
+
+def file_command():
+
+ """
+
+ Gets reputation of an EclecticIQ hash observable
+
+ Parameters
+ ----------
+ file : str
+ File hash to get reputation of
+
+ Returns
+ -------
+ entry
+ Reputation of given file hash
+
+ """
+
+ file = demisto.args()['file']
+
+ hash_type = get_hash_type(file).upper()
+
+ response = get_observable(file)
+
+ # default output; overwritten below when observables are returned
+ human_readable = 'No results found.'
+
+ integration_outputs = []
+ standard_file_outputs = []
+
+ observables = response.get('data')
+
+ score = 0
+
+ for observable in observables:
+ meta = observable.get('meta', {})
+ maliciousness = meta.get('maliciousness')
+ score = maliciousness_to_dbotscore(maliciousness, FILE_THRESHOLD)
+
+ integration_outputs.append({
+ hash_type: file,
+ 'Created': observable.get('created_at'),
+ 'LastUpdated': observable.get('last_updated_at'),
+ 'ID': observable.get('id'),
+ 'Maliciousness': maliciousness
+ })
+
+ standard_file_output = {
+ hash_type: file
+ }
+ if score == 3:
+ standard_file_output['Malicious'] = {
+ 'Vendor': 'EclecticIQ',
+ 'Description': 'EclecticIQ maliciousness confidence level: ' + maliciousness
+ }
+
+ standard_file_outputs.append(standard_file_output)
+
+ dbot_output = {
+ 'Type': 'file',
+ 'Indicator': file,
+ 'Vendor': 'EclecticIQ',
+ 'Score': score
+ }
+
+ context = {
+ 'DBotScore': dbot_output
+ } # type: dict
+
+ if observables:
+ human_readable_title = 'EclecticIQ File reputation - {}'.format(file)
+ human_readable = tableToMarkdown(human_readable_title, integration_outputs)
+ context['EclecticIQ.File'] = createContext(data=integration_outputs, id='ID', removeNull=True)
+ context[outputPaths['file']] = standard_file_outputs
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': context
+ })
+
+
+def email_command():
+
+ """
+
+ Gets reputation of an EclecticIQ email address observable
+
+ Parameters
+ ----------
+ email : str
+ Email address to get reputation of
+
+ Returns
+ -------
+ entry
+ Reputation of given email address
+
+ """
+
+ email = demisto.args()['email']
+
+ response = get_observable(email)
+
+ # default output; overwritten below when observables are returned
+ human_readable = 'No results found.'
+
+ integration_outputs = []
+ standard_email_outputs = []
+
+ observables = response.get('data')
+
+ score = 0
+
+ for observable in observables:
+ meta = observable.get('meta', {})
+ maliciousness = meta.get('maliciousness')
+ score = maliciousness_to_dbotscore(maliciousness, EMAIL_THRESHOLD)
+
+ integration_outputs.append({
+ 'Address': email,
+ 'Created': observable.get('created_at'),
+ 'LastUpdated': observable.get('last_updated_at'),
+ 'ID': observable.get('id'),
+ 'Maliciousness': maliciousness
+ })
+
+ standard_email_output = {
+ 'Address': email
+ }
+ if score == 3:
+ standard_email_output['Malicious'] = {
+ 'Vendor': 'EclecticIQ',
+ 'Description': 'EclecticIQ maliciousness confidence level: ' + maliciousness
+ }
+
+ standard_email_outputs.append(standard_email_output)
+
+ dbot_output = {
+ 'Type': 'email',
+ 'Indicator': email,
+ 'Vendor': 'EclecticIQ',
+ 'Score': score
+ }
+
+ context = {
+ 'DBotScore': dbot_output
+ } # type: dict
+
+ if observables:
+ human_readable_title = 'EclecticIQ Email reputation - {}'.format(email)
+ human_readable = tableToMarkdown(human_readable_title, integration_outputs)
+ context['EclecticIQ.Email'] = createContext(data=integration_outputs, id='ID', removeNull=True)
+ context[outputPaths['email']] = standard_email_outputs
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': context
+ })
+
+
+def domain_command():
+
+ """
+
+ Gets reputation of an EclecticIQ domain observable
+
+ Parameters
+ ----------
+ domain : str
+ Domain address to get reputation of
+
+ Returns
+ -------
+ entry
+ Reputation of given domain address
+
+ """
+
+ domain = demisto.args()['domain']
+
+ response = get_observable(domain)
+
+ # default output; overwritten below when observables are returned
+ human_readable = 'No results found.'
+
+ integration_outputs = []
+ standard_domain_outputs = []
+
+ observables = response.get('data')
+
+ score = 0
+
+ for observable in observables:
+ meta = observable.get('meta', {})
+ maliciousness = meta.get('maliciousness')
+ score = maliciousness_to_dbotscore(maliciousness, DOMAIN_THRESHOLD)
+
+ integration_outputs.append({
+ 'Name': domain,
+ 'Created': observable.get('created_at'),
+ 'LastUpdated': observable.get('last_updated_at'),
+ 'ID': observable.get('id'),
+ 'Maliciousness': maliciousness
+ })
+
+ standard_domain_output = {
+ 'Name': domain
+ }
+ if score == 3:
+ standard_domain_output['Malicious'] = {
+ 'Vendor': 'EclecticIQ',
+ 'Description': 'EclecticIQ maliciousness confidence level: ' + maliciousness
+ }
+
+ standard_domain_outputs.append(standard_domain_output)
+
+ dbot_output = {
+ 'Type': 'domain',
+ 'Indicator': domain,
+ 'Vendor': 'EclecticIQ',
+ 'Score': score
+ }
+
+ context = {
+ 'DBotScore': dbot_output
+ } # type: dict
+
+ if observables:
+ human_readable_title = 'EclecticIQ Domain reputation - {}'.format(domain)
+ human_readable = tableToMarkdown(human_readable_title, integration_outputs)
+ context['EclecticIQ.Domain'] = createContext(data=integration_outputs, id='ID', removeNull=True)
+ context[outputPaths['domain']] = standard_domain_outputs
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': context
+ })
+
+
+def get_observable(ioc):
+
+ """
+
+ Send API query to EclecticIQ to get reputation of an observable
+
+ Parameters
+ ----------
+ ioc : str
+ IOC to get reputation of
+
+ Returns
+ -------
+ response
+ Python requests response object
+
+ """
+
+ cmd_url = '/api/observables?filter[value]={}'.format(ioc)
+ response = http_request('GET', cmd_url)
+ return response
+
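+# For example, get_observable('8.8.8.8') issues
+#   GET {SERVER}/api/observables?filter[value]=8.8.8.8
+# and returns the parsed JSON body, expected to hold 'total_count' and a
+# 'data' list of observables.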
+
+def get_observable_related_entity_command():
+
+ """
+
+ Get EclecticIQ related entities to an observable
+
+ Parameters
+ ----------
+ observable_id : str
+ EclecticIQ observable ID to get related entities of
+
+ Returns
+ -------
+ entry
+ Observable related entities data
+
+ """
+
+ observable_id = demisto.args()['observable_id']
+
+ processed_extract_response = processed_extract(observable_id)
+
+ original_extract_response = original_extract(observable_id)
+
+ response = dict(processed_extract_response)
+ response['data'].extend(original_extract_response['data'])
+ response['total_count'] += original_extract_response['total_count']
+
+ if 'total_count' in response and response['total_count'] == 0:
+ demisto.results('No results found')
+ return
+
+ context_outputs = []
+ human_readable = ''
+
+ entities = response.get('data')
+
+ for entity in entities: # type: ignore
+
+ entity_data = entity.get('data', {})
+ test_mechanisms = entity_data.get('test_mechanisms', {})
+ entity_meta = entity.get('meta', {})
+
+ context_output = {
+ 'Title': entity_data.get('title'),
+ 'ID': entity.get('id'),
+ 'Analysis': entity_data.get('description'),
+ 'EstimatedStartTime': entity_meta.get('estimated_threat_start_time'),
+ 'EstimatedObservedTime': entity_meta.get('estimated_observed_time'),
+ 'HalfLife': entity_meta.get('half_life')
+ }
+
+ if context_output['Analysis']:
+ # Removing unnecessary whitespaces from the string
+ context_output['Analysis'] = ' '.join(context_output['Analysis'].split())
+
+ if context_output['HalfLife']:
+ # API returns a number, we add the time format to it
+ context_output['HalfLife'] = str(context_output['HalfLife']) + ' Days'
+
+ human_readable += tableToMarkdown('Observable ID {} related entities'.format(observable_id), context_output)
+
+ test_mechanisms_output = []
+
+ for mechanism in test_mechanisms:
+
+ mechanism_output = {
+ 'Type': mechanism.get('test_mechanism_type')
+ }
+
+ mechanism_rules = mechanism.get('rules')
+
+ mechanism_rules_outputs = []
+
+ for rule in mechanism_rules:
+
+ mechanism_rules_outputs.append(rule.get('value'))
+
+ mechanism_output['Rule'] = mechanism_rules_outputs
+
+ test_mechanisms_output.append(mechanism_output)
+
+ if test_mechanisms_output:
+
+ context_output['TestMechanism'] = test_mechanisms_output
+ human_readable += tableToMarkdown('Test mechanisms', test_mechanisms_output, removeNull=True)
+
+ sources = entity.get('sources')
+
+ sources_output = []
+
+ for source in sources:
+
+ sources_output.append({
+ 'Name': source.get('name'),
+ 'Type': source.get('source_type'),
+ 'Reliability': source.get('source_reliability')
+ })
+
+ if sources_output:
+ context_output['Source'] = sources_output
+ human_readable += tableToMarkdown('Sources', sources_output, removeNull=True)
+
+ exposure = entity.get('exposure')
+
+ exposure_output = {
+ 'Exposed': exposure.get('exposed') is True,
+ 'Detection': exposure.get('detect_feed') is True,
+ 'Prevention': exposure.get('prevent_feed') is True,
+ 'Community': exposure.get('community_feed') is True,
+ 'Sighting': exposure.get('sighted') is True
+ }
+
+ context_output['Exposure'] = exposure_output
+ human_readable += tableToMarkdown('Exposure', exposure_output, removeNull=True)
+
+ context_outputs.append(context_output)
+
+ context = {
+ 'EclecticIQ.Entity': createContext(data=context_outputs, id='ID', removeNull=True)
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': context
+ })
+
+
+def processed_extract(observable_id):
+
+ """
+
+ Send API query to EclecticIQ to get extracted processed data of an observable
+
+ Parameters
+ ----------
+ observable_id : str
+ EclecticIQ observable ID to get extracted processed data of
+
+ Returns
+ -------
+ response
+ Python requests response object
+
+ """
+
+ cmd_url = '/private/entities/processed-extract/{}'.format(observable_id)
+ response = http_request('GET', cmd_url)
+ return response
+
+
+def original_extract(observable_id):
+
+ """
+
+ Send API query to EclecticIQ to get extracted original data of an observable
+
+ Parameters
+ ----------
+ observable_id : str
+ EclecticIQ observable ID to get extracted original data of
+
+ Returns
+ -------
+ response
+ Python requests response object
+
+ """
+
+ cmd_url = '/private/entities/original-extract/{}'.format(observable_id)
+ response = http_request('GET', cmd_url)
+ return response
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+login()
+
+COMMANDS = {
+ 'test-module': test_module,
+ 'url': url_command,
+ 'ip': ip_command,
+ 'email': email_command,
+ 'file': file_command,
+ 'domain': domain_command,
+ 'eclecticiq-get-observable-related-entity': get_observable_related_entity_command
+}
+
+try:
+ LOG('Command being called is {}'.format(demisto.command()))
+ command_func = COMMANDS.get(demisto.command())
+ if command_func is not None:
+ command_func()
+except Exception as e:
+ # str(e) is used because e.message is not defined for most exception types
+ return_error('An error occurred in the EclecticIQ integration: {}\n{}'.format(type(e), str(e)))
diff --git a/Integrations/EclecticIQ/EclecticIQ.yml b/Integrations/EclecticIQ/EclecticIQ.yml
new file mode 100644
index 000000000000..4a395b842034
--- /dev/null
+++ b/Integrations/EclecticIQ/EclecticIQ.yml
@@ -0,0 +1,356 @@
+commonfields:
+ id: EclecticIQ Platform
+ version: -1
+name: EclecticIQ Platform
+display: EclecticIQ Platform
+category: Data Enrichment & Threat Intelligence
+description: Threat Intelligence Platform that connects and interprets intelligence
+ data from open sources, commercial suppliers and industry partnerships.
+configuration:
+- display: Server URL (e.g. https://192.168.0.1)
+ name: url
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Username
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: true
+- display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: "true"
+ type: 8
+ required: false
+- display: Use system proxy
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: 'IP threshold. Minimum maliciousness confidence level to consider the IP
+ address malicious: High, Medium, Low, Safe, Unknown'
+ name: ip_threshold
+ defaultvalue: High
+ type: 0
+ required: false
+- display: 'URL threshold. Minimum maliciousness confidence level to consider the
+ URL malicious: High, Medium, Low, Safe, Unknown'
+ name: url_threshold
+ defaultvalue: High
+ type: 0
+ required: false
+- display: 'File threshold. Minimum maliciousness confidence level to consider the
+ file malicious: High, Medium, Low, Safe, Unknown'
+ name: file_threshold
+ defaultvalue: High
+ type: 0
+ required: false
+- display: 'Email threshold. Minimum maliciousness confidence level to consider the
+ email address malicious: High, Medium, Low, Safe, Unknown'
+ name: email_threshold
+ defaultvalue: High
+ type: 0
+ required: false
+- display: 'Domain threshold. Minimum maliciousness confidence level to consider the
+ domain malicious: High, Medium, Low, Safe, Unknown'
+ name: domain_threshold
+ defaultvalue: High
+ type: 0
+ required: false
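+# The five thresholds above drive maliciousness_to_dbotscore() in EclecticIQ.py:
+# any maliciousness level at or above the threshold yields a DBot score of 3.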
+script:
+ script: ''
+ type: python
+ commands:
+ - name: ip
+ arguments:
+ - name: ip
+ required: true
+ default: true
+ description: IPv4 to get the reputation of
+ outputs:
+ - contextPath: EclecticIQ.IP.Address
+ description: IP address that was tested
+ type: String
+ - contextPath: EclecticIQ.IP.Created
+ description: Observable creation date
+ type: Date
+ - contextPath: EclecticIQ.IP.LastUpdated
+ description: Observable last updated date
+ type: Date
+ - contextPath: EclecticIQ.IP.ID
+ description: Observable ID
+ type: Number
+ - contextPath: EclecticIQ.IP.Maliciousness
+ description: Maliciousness confidence level
+ type: String
+ - contextPath: IP.Address
+ description: IP address that was tested
+ type: String
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IPs, the vendor that made the decision
+ type: String
+ - contextPath: IP.Malicious.Description
+ description: For malicious IPs, the reason that the vendor made the decision
+ type: String
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: Number
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: String
+ description: Get reputation of IP address observable
+ - name: url
+ arguments:
+ - name: url
+ required: true
+ default: true
+ description: URL observable to get the reputation of
+ outputs:
+ - contextPath: EclecticIQ.URL.Data
+ description: URL that was tested
+ type: String
+ - contextPath: EclecticIQ.URL.Created
+ description: Observable creation date
+ type: Date
+ - contextPath: EclecticIQ.URL.LastUpdated
+ description: Observable last updated date
+ type: Date
+ - contextPath: EclecticIQ.URL.ID
+ description: Observable ID
+ type: Number
+ - contextPath: EclecticIQ.URL.Maliciousness
+ description: Maliciousness confidence level
+ type: String
+ - contextPath: URL.Data
+ description: URL that was tested
+ type: String
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision
+ type: String
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the reason that the vendor made the decision
+ type: String
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: Number
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: String
+ description: Gets the reputation of a URL observable.
+ - name: file
+ arguments:
+ - name: file
+ required: true
+ default: true
+ description: File hash observable to get the reputation of
+ outputs:
+ - contextPath: EclecticIQ.File.MD5
+ description: File MD5 hash that was tested
+ type: String
+ - contextPath: EclecticIQ.File.SHA1
+ description: File SHA-1 hash that was tested
+ type: String
+ - contextPath: EclecticIQ.File.SHA256
+ description: File SHA-256 hash that was tested
+ type: String
+ - contextPath: EclecticIQ.File.SHA512
+ description: File SHA-512 hash that was tested
+ type: String
+ - contextPath: EclecticIQ.File.Created
+ description: Observable creation date
+ type: Date
+ - contextPath: EclecticIQ.File.LastUpdated
+ description: Observable last updated date
+ type: Date
+ - contextPath: EclecticIQ.File.ID
+ description: Observable ID
+ type: Number
+ - contextPath: EclecticIQ.File.Maliciousness
+ description: Maliciousness confidence level
+ type: String
+ - contextPath: File.MD5
+ description: File MD5 hash that was tested
+ type: String
+ - contextPath: File.SHA1
+ description: File SHA-1 hash that was tested
+ type: String
+ - contextPath: File.SHA256
+ description: File SHA-256 hash that was tested
+ type: String
+ - contextPath: File.SHA512
+ description: File SHA-512 hash that was tested
+ type: String
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision
+ type: String
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the reason that the vendor made the decision
+ type: String
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: Number
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: String
+ description: Gets the reputation of a file hash observable.
+ - name: eclecticiq-get-observable-related-entity
+ arguments:
+ - name: observable_id
+ required: true
+ default: true
+ description: Observable ID for which to get entity information (can be retrieved
+ from one of the IOC commands)
+ outputs:
+ - contextPath: EclecticIQ.Entity.Analysis
+ description: Entity analysis description
+ type: String
+ - contextPath: EclecticIQ.Entity.EstimatedObservedTime
+ description: Entity estimated observed time
+ type: Date
+ - contextPath: EclecticIQ.Entity.EstimatedStartTime
+ description: Entity estimated start time
+ type: Date
+ - contextPath: EclecticIQ.Entity.Exposure.Community
+ description: Is entity in the community feed
+ type: Boolean
+ - contextPath: EclecticIQ.Entity.Exposure.Detection
+ description: Is entity detected
+ type: Boolean
+ - contextPath: EclecticIQ.Entity.Exposure.Exposed
+ description: Is entity exposed
+ type: Boolean
+ - contextPath: EclecticIQ.Entity.Exposure.Prevention
+ description: Is entity in prevented feed
+ type: Boolean
+ - contextPath: EclecticIQ.Entity.Exposure.Sighting
+ description: Is entity sighted
+ type: Boolean
+ - contextPath: EclecticIQ.Entity.HalfLife
+ description: The time it takes an entity to decay in intelligence value, expressed
+ in the number of days until a 50% decay
+ type: String
+ - contextPath: EclecticIQ.Entity.ID
+ description: Entity ID
+ type: String
+ - contextPath: EclecticIQ.Entity.Source.Name
+ description: Entity source name
+ type: String
+ - contextPath: EclecticIQ.Entity.Source.Reliability
+ description: Entity source reliability
+ type: String
+ - contextPath: EclecticIQ.Entity.Title
+ description: Entity title
+ type: String
+ - contextPath: EclecticIQ.Entity.Source.Type
+ description: Entity source type
+ type: string
+ description: Returns related entities of a single observable.
+ - name: email
+ arguments:
+ - name: email
+ required: true
+ default: true
+ description: Email address observable to get the reputation of
+ outputs:
+ - contextPath: EclecticIQ.Email.Address
+ description: Email that was tested
+ type: String
+ - contextPath: EclecticIQ.Email.Created
+ description: Observable creation date
+ type: Date
+ - contextPath: EclecticIQ.Email.LastUpdated
+ description: Observable last updated date
+ type: Date
+ - contextPath: EclecticIQ.Email.ID
+ description: Observable ID
+ type: Number
+ - contextPath: EclecticIQ.Email.Maliciousness
+ description: Maliciousness confidence level
+ type: String
+ - contextPath: Account.Email.Address
+ description: Email that was tested
+ type: String
+ - contextPath: Account.Email.Malicious.Vendor
+ description: For malicious email addresses, the vendor that made the decision
+ type: String
+ - contextPath: Account.Email.Malicious.Description
+ description: For malicious email addresses, the reason that the vendor made the decision
+ type: String
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: Number
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: String
+ description: Gets the reputation of an email address observable.
+ - name: domain
+ arguments:
+ - name: domain
+ required: true
+ default: true
+ description: Domain observable to get the reputation of
+ outputs:
+ - contextPath: EclecticIQ.Domain.Name
+ description: Domain name that was tested
+ type: String
+ - contextPath: EclecticIQ.Domain.Created
+ description: Observable creation date
+ type: Date
+ - contextPath: EclecticIQ.Domain.LastUpdated
+ description: Observable last updated date
+ type: Date
+ - contextPath: EclecticIQ.Domain.ID
+ description: Observable ID
+ type: Number
+ - contextPath: EclecticIQ.Domain.Maliciousness
+ description: Maliciousness confidence level
+ type: String
+ - contextPath: Domain.Name
+ description: Domain name that was tested
+ type: String
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision
+ type: String
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the reason that the vendor made the decision
+ type: String
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: Number
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: String
+ description: Gets the reputation of a domain observable.
+ runonce: false
+tests:
+ - No test - instance issues
\ No newline at end of file
diff --git a/Integrations/EclecticIQ/EclecticIQ_description.md b/Integrations/EclecticIQ/EclecticIQ_description.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Integrations/EclecticIQ/EclecticIQ_image.png b/Integrations/EclecticIQ/EclecticIQ_image.png
new file mode 100644
index 000000000000..ee6a665fcbaa
Binary files /dev/null and b/Integrations/EclecticIQ/EclecticIQ_image.png differ
diff --git a/Integrations/Elasticsearch_v2/CHANGELOG.md b/Integrations/Elasticsearch_v2/CHANGELOG.md
new file mode 100644
index 000000000000..4e47f75d58f2
--- /dev/null
+++ b/Integrations/Elasticsearch_v2/CHANGELOG.md
@@ -0,0 +1,3 @@
+## [Unreleased]
+New Elasticsearch v2 integration.
+Supports Elasticsearch 6.0.0 and later.
diff --git a/Integrations/Elasticsearch_v2/Elasticsearch_v2.py b/Integrations/Elasticsearch_v2/Elasticsearch_v2.py
new file mode 100644
index 000000000000..d6d6160612c6
--- /dev/null
+++ b/Integrations/Elasticsearch_v2/Elasticsearch_v2.py
@@ -0,0 +1,348 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+'''IMPORTS'''
+from typing import List
+from elasticsearch import Elasticsearch, RequestsHttpConnection, NotFoundError
+from elasticsearch_dsl import Search
+from elasticsearch_dsl.query import QueryString
+from datetime import datetime
+import json
+import requests
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+SERVER = demisto.params().get('url', '').rstrip('/')
+USERNAME = demisto.params().get('credentials', {}).get('identifier')
+PASSWORD = demisto.params().get('credentials', {}).get('password')
+PROXY = demisto.params().get('proxy')
+HTTP_ERRORS = {
+ 400: '400 Bad Request - Incorrect or invalid parameters',
+ 401: '401 Unauthorized - Incorrect or invalid username or password',
+ 403: '403 Forbidden - The account does not support performing this task',
+ 404: '404 Not Found - Elasticsearch server was not found',
+ 408: '408 Timeout - Check port number or Elasticsearch server credentials',
+ 410: '410 Gone - Elasticsearch server no longer exists in the service',
+ 500: '500 Internal Server Error - Internal error',
+ 503: '503 Service Unavailable'
+}
+
+
+'''VARIABLES FOR FETCH INCIDENTS'''
+TIME_FIELD = demisto.params().get('fetch_time_field', '')
+TIME_FORMAT = demisto.params().get('fetch_time_format', '')
+FETCH_INDEX = demisto.params().get('fetch_index', '')
+FETCH_QUERY = demisto.params().get('fetch_query', '')
+FETCH_TIME = demisto.params().get('fetch_time', '3 days')
+FETCH_SIZE = int(demisto.params().get('fetch_size', 50))
+INSECURE = not demisto.params().get('insecure', False)  # despite its name, this holds the verify_certs flag
+
+
+def elasticsearch_builder():
+ # build the client once, adding auth and proxy settings only when configured
+ kwargs = {
+ 'hosts': [SERVER],
+ 'connection_class': RequestsHttpConnection,
+ 'verify_certs': INSECURE
+ }
+ if USERNAME:
+ kwargs['http_auth'] = (USERNAME, PASSWORD)
+ if PROXY:
+ kwargs['proxies'] = handle_proxy()
+ return Elasticsearch(**kwargs)
+
+
+def get_hit_table(hit):
+ table_context = {
+ '_index': hit.get('_index'),
+ '_id': hit.get('_id'),
+ '_type': hit.get('_type'),
+ '_score': hit.get('_score'),
+ }
+ headers = ['_index', '_id', '_type', '_score']
+ if hit.get('_source') is not None:
+ for source_field in hit.get('_source').keys():
+ table_context[str(source_field)] = hit.get('_source').get(str(source_field))
+ headers.append(source_field)
+
+ return table_context, headers
+
+
+def results_to_context(index, query, base_page, size, total_dict, response):
+ search_context = {
+ 'Server': SERVER,
+ 'Index': index,
+ 'Query': query,
+ 'Page': base_page,
+ 'Size': size,
+ 'total': total_dict,
+ 'max_score': response.get('hits').get('max_score'),
+ 'took': response.get('took'),
+ 'timed_out': response.get('timed_out')
+ }
+
+ hit_headers = [] # type: List
+ hit_tables = []
+ if total_dict.get('value') > 0:
+ for hit in response.get('hits').get('hits'):
+ single_hit_table, single_header = get_hit_table(hit)
+ hit_tables.append(single_hit_table)
+ hit_headers = list(set(single_header + hit_headers) - {'_id', '_type', '_index', '_score'})
+ hit_headers = ['_id', '_index', '_type', '_score'] + hit_headers
+
+ search_context['Results'] = response.get('hits').get('hits')
+ meta_headers = ['Query', 'took', 'timed_out', 'total', 'max_score', 'Server', 'Page', 'Size']
+ return search_context, meta_headers, hit_tables, hit_headers
+
+
+def get_total_results(response_dict):
+ total_results = response_dict.get('hits', {}).get('total')
+ if not str(total_results).isdigit():
+ # in Elasticsearch 7 and later, hits.total is a dict that holds a 'value' field
+ total_results = total_results.get('value')
+ total_dict = response_dict.get('hits').get('total')
+
+ else:
+ total_dict = {
+ 'value': total_results,
+ }
+
+ return total_dict, total_results
+
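+# Example of the two response shapes normalized above (illustrative values):
+#   Elasticsearch 6: {"hits": {"total": 17}}                               -> ({'value': 17}, 17)
+#   Elasticsearch 7: {"hits": {"total": {"value": 17, "relation": "eq"}}}  -> (that dict, 17)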
+
+def search_command():
+ index = demisto.args().get('index')
+ query = demisto.args().get('query')
+ fields = demisto.args().get('fields') # fields to display
+ explain = 'true' == demisto.args().get('explain')
+ base_page = int(demisto.args().get('page'))
+ size = int(demisto.args().get('size'))
+ sort_field = demisto.args().get('sort-field')
+ sort_order = demisto.args().get('sort-order')
+
+ es = elasticsearch_builder()
+
+ que = QueryString(query=query)
+ search = Search(using=es, index=index).query(que)[base_page:base_page + size]
+ if explain:
+ # if the 'explain' parameter is set to 'true', add an explanation section to the search results
+ search = search.extra(explain=True)
+
+ if fields is not None:
+ fields = fields.split(',')
+ search = search.source(fields)
+
+ if sort_field is not None:
+ search = search.sort({sort_field: {'order': sort_order}})
+
+ response = search.execute().to_dict()
+
+ total_dict, total_results = get_total_results(response)
+ search_context, meta_headers, hit_tables, hit_headers = results_to_context(index, query, base_page,
+ size, total_dict, response)
+ search_human_readable = tableToMarkdown('Search Metadata:', search_context, meta_headers, removeNull=True)
+ hits_human_readable = tableToMarkdown('Hits:', hit_tables, hit_headers, removeNull=True)
+ total_human_readable = search_human_readable + '\n' + hits_human_readable
+ full_context = {
+ 'Elasticsearch.Search(val.Query == obj.Query && val.Index == obj.Index '
+ '&& val.Server == obj.Server && val.Page == obj.Page && val.Size == obj.Size)': search_context
+ }
+
+ return_outputs(total_human_readable, full_context, response)
+
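+# Example CLI usage (hypothetical index and query):
+#   !es-search index="logs" query="status:404" page="0" size="10" sort-field="timestamp" sort-order="desc"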
+
+def fetch_params_check():
+ str_error = [] # type:List
+ if TIME_FIELD == '' or TIME_FIELD is None:
+ str_error.append("Index time field is not configured.")
+
+ if FETCH_INDEX == '' or FETCH_INDEX is None:
+ str_error.append("Index is not configured.")
+
+ if FETCH_QUERY == '' or FETCH_QUERY is None:
+ str_error.append("Query by which to fetch incidents is not configured.")
+
+ if TIME_FORMAT == '' or TIME_FORMAT is None:
+ str_error.append("Time format is not configured.")
+
+ if len(str_error) > 0:
+ return_error("Got the following errors in test:\nFetch incidents is enabled.\n" + '\n'.join(str_error))
+
+
+def test_general_query(es):
+ try:
+ query = QueryString(query='*')
+ search = Search(using=es, index=FETCH_INDEX).query(query)[0:1]
+ response = search.execute().to_dict()
+ _, total_results = get_total_results(response)
+
+ except NotFoundError as e:
+ # the configured index was not found; surface only the reason portion of the error message
+ return_error("Fetch incidents test failed.\nError message: {}.".format(str(e).split(',')[2][2:-1]))
+
+
+def test_time_field_query(es):
+ query = QueryString(query=TIME_FIELD + ':*')
+ search = Search(using=es, index=FETCH_INDEX).query(query)[0:1]
+ response = search.execute().to_dict()
+ _, total_results = get_total_results(response)
+
+ if total_results == 0:
+ # failed in getting the TIME_FIELD
+ return_error("Fetch incidents test failed.\nDate field value incorrect [{}].".format(TIME_FIELD))
+
+ else:
+ return response
+
+
+def test_fetch_query(es):
+ query = QueryString(query=str(TIME_FIELD) + ":* AND " + FETCH_QUERY)
+ search = Search(using=es, index=FETCH_INDEX).query(query)[0:1]
+ response = search.execute().to_dict()
+ _, total_results = get_total_results(response)
+
+ if total_results > 0:
+ return response
+
+ else:
+ # failed to get the TIME_FIELD with the FETCH_QUERY
+ # this can happen and not be an error if the FETCH_QUERY doesn't have results yet.
+ # Thus this does not return an error message
+ return None
+
+
+def test_func():
+ headers = {
+ 'Content-Type': "application/json"
+ }
+
+ try:
+ if USERNAME:
+ res = requests.get(SERVER, auth=(USERNAME, PASSWORD), verify=INSECURE, headers=headers)
+
+ else:
+ res = requests.get(SERVER, verify=INSECURE, headers=headers)
+
+ if res.status_code >= 400:
+ try:
+ res.raise_for_status()
+
+ except requests.exceptions.HTTPError as e:
+ if HTTP_ERRORS.get(res.status_code) is not None:
+ # if it is a known HTTP error, get the message from the preset messages
+ return_error("Failed to connect. "
+ "The following error occurred: {}".format(HTTP_ERRORS.get(res.status_code)))
+
+ else:
+ # if it is an unknown error, get the message from the error itself
+ return_error("Failed to connect. The following error occurred: {}".format(str(e)))
+
+ except requests.exceptions.RequestException as e:
+ return_error("Failed to connect. Check Server URL field and port number.\nError message: " + str(e))
+
+ if demisto.params().get('isFetch'):
+ # check the existence of all necessary fields for fetch
+ fetch_params_check()
+
+ try:
+ # build general Elasticsearch class
+ es = elasticsearch_builder()
+
+ # test if FETCH_INDEX exists
+ test_general_query(es)
+
+ # test if TIME_FIELD in index exists
+ response = test_time_field_query(es)
+
+ # try to get response from FETCH_QUERY - if exists check the time field from that query
+ temp = test_fetch_query(es)
+ if temp:
+ response = temp
+
+ hit_date = str(response.get('hits', {}).get('hits')[0].get('_source').get(str(TIME_FIELD)))
+ datetime.strptime(hit_date, TIME_FORMAT)
+
+ except ValueError as e:
+ return_error("The configured time format is incorrect.\n" + str(e) + '\n' + TIME_FIELD + ' fetched: ' + hit_date)
+
+ demisto.results('ok')
+
+
+def incident_label_maker(source):
+ labels = []
+ for field in source.keys():
+ labels.append({'type': str(field), 'value': str(source.get(field))})
+
+ return labels
+
+
+def results_to_incidents(response, current_fetch, last_fetch):
+ incidents = []
+ for hit in response.get('hits', {}).get('hits'):
+ if hit.get('_source') is not None and hit.get('_source').get(str(TIME_FIELD)) is not None:
+ hit_date = datetime.strptime(str(hit.get('_source')[str(TIME_FIELD)]), TIME_FORMAT)
+ # update last run
+ if hit_date > last_fetch:
+ last_fetch = hit_date
+
+ # avoid duplication due to weak time query
+ if hit_date > current_fetch:
+ inc = {
+ 'name': 'Elasticsearch: Index: ' + str(hit.get('_index')) + ", ID: " + str(hit.get('_id')),
+ 'rawJSON': json.dumps(hit),
+ 'labels': incident_label_maker(hit.get('_source')),
+ 'occurred': hit_date.isoformat() + 'Z'
+ }
+ incidents.append(inc)
+
+ return incidents, last_fetch
+
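+# An incident produced for a hit looks roughly like this (hypothetical values):
+#   {'name': 'Elasticsearch: Index: logs, ID: AbC123',
+#    'rawJSON': '<the full hit as JSON>',
+#    'labels': [{'type': 'message', 'value': '...'}],
+#    'occurred': '2019-08-29T14:02:45Z'}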
+
+def fetch_incidents():
+ last_run = demisto.getLastRun()
+ last_fetch = last_run.get('time')
+
+ # handle first time fetch
+ if last_fetch is None:
+ last_fetch, _ = parse_date_range(date_range=FETCH_TIME, date_format=TIME_FORMAT, utc=False, to_timestamp=False)
+ last_fetch = datetime.strptime(str(last_fetch), TIME_FORMAT)
+
+ else:
+ last_fetch = datetime.strptime(last_fetch, TIME_FORMAT)
+
+ current_fetch = last_fetch
+
+ es = elasticsearch_builder()
+
+ query = QueryString(query=FETCH_QUERY + " AND " + TIME_FIELD + ":*")
+ search = Search(using=es, index=FETCH_INDEX).filter({'range': {TIME_FIELD: {'gt': last_fetch}}})
+ search = search.sort({TIME_FIELD: {'order': 'asc'}})[0:FETCH_SIZE].query(query)
+ response = search.execute().to_dict()
+ _, total_results = get_total_results(response)
+
+ incidents = [] # type: List
+ if total_results > 0:
+ incidents, last_fetch = results_to_incidents(response, current_fetch, last_fetch)
+
+ demisto.info('extracted {} incidents'.format(len(incidents)))
+ demisto.setLastRun({'time': datetime.strftime(last_fetch, TIME_FORMAT)})
+
+ demisto.incidents(incidents)
+
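+# Note: the strict hit_date > current_fetch check in results_to_incidents() is
+# what prevents hits with exactly the previously saved timestamp from being
+# ingested twice on the next run.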
+
+try:
+ LOG('command is %s' % (demisto.command(),))
+ if demisto.command() == 'test-module':
+ test_func()
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+ elif demisto.command() in ['search', 'es-search']:
+ search_command()
+except Exception as e:
+ return_error("Failed executing {}.\nError message: {}".format(demisto.command(), str(e)), error=e)
diff --git a/Integrations/Elasticsearch_v2/Elasticsearch_v2.yml b/Integrations/Elasticsearch_v2/Elasticsearch_v2.yml
new file mode 100644
index 000000000000..c87fa4fcb8d8
--- /dev/null
+++ b/Integrations/Elasticsearch_v2/Elasticsearch_v2.yml
@@ -0,0 +1,292 @@
+category: Database
+commonfields:
+ id: Elasticsearch v2
+ version: -1
+configuration:
+- display: Server URL
+ name: url
+ required: true
+ type: 0
+- display: Username for server login
+ name: credentials
+ required: false
+ type: 9
+- defaultvalue: 'false'
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Index from which to fetch incidents (CSV)
+ name: fetch_index
+ required: false
+ type: 0
+- display: Query by which to fetch incidents (Lucene syntax)
+ name: fetch_query
+ required: false
+ type: 0
+- display: Index time field (used for sorting and limiting data)
+ name: fetch_time_field
+ required: false
+ type: 0
+- defaultvalue: '%Y-%m-%dT%H:%M:%SZ'
+ display: Time format as kept in Elasticsearch
+ name: fetch_time_format
+ required: false
+ type: 0
+- defaultvalue: 3 days
+ display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days)
+ name: fetch_time
+ required: false
+ type: 0
+- defaultvalue: '50'
+ display: The number of results returned in each fetch
+ name: fetch_size
+ required: false
+ type: 0
+description: "Search & Analyze Data in Real Time. \n Supports version 6 and up."
+display: Elasticsearch v2
+name: Elasticsearch v2
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The index in which to perform a search.
+ isArray: false
+ name: index
+ required: true
+ secret: false
+ - default: false
+ description: The string to query. Strings are queried using the Lucene syntax.
+ isArray: false
+ name: query
+ predefined:
+ - ''
+ required: true
+ secret: false
+ - default: false
+ description: A CSV list of the fields of a document to fetch. Leaving the fields
+ empty fetches the entire document.
+ isArray: true
+ name: fields
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+    description: Whether to calculate an explanation of the score for each query
+      result, for example, "value:1.6943597". Default is "false".
+ isArray: false
+ name: explain
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: The number of the page from which to start a search. The default
+ is "0".
+ isArray: false
+ name: page
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: The number of documents displayed per page. Can be "1" to "10,000".
+ The default is "100".
+ isArray: false
+ name: size
+ required: false
+ secret: false
+ - default: false
+ description: The field by which to sort the results table. The supported result
+ types are boolean, numeric, date, and keyword fields. Keyword fields require
+      the doc_values parameter to be set to "true" on the Elasticsearch server.
+ isArray: false
+ name: sort-field
+ predefined:
+ - ''
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: asc
+    description: The order by which to sort the results table. The results table
+      can be sorted only if a sort-field is defined.
+ isArray: false
+ name: sort-order
+ predefined:
+ - asc
+ - desc
+ required: false
+ secret: false
+ deprecated: false
+ description: Queries an index.
+ execution: false
+ name: es-search
+ outputs:
+ - contextPath: Elasticsearch.Search.Results._index
+ description: The index to which the document belongs.
+ type: String
+ - contextPath: Elasticsearch.Search.Results._id
+ description: The ID of the document.
+ type: String
+ - contextPath: Elasticsearch.Search.Results._type
+ description: The mapping type of the document.
+ type: String
+ - contextPath: Elasticsearch.Search.max_score
+ description: The maximum relevance score of a query.
+ type: Number
+ - contextPath: Elasticsearch.Search.Query
+ description: The query performed in the search.
+ type: String
+ - contextPath: Elasticsearch.Search.total.value
+ description: The number of search results.
+ type: Number
+ - contextPath: Elasticsearch.Search.Results._score
+ description: The relevance score of the search result.
+ type: Number
+ - contextPath: Elasticsearch.Search.Index
+ description: The index in which the search was performed.
+ type: String
+ - contextPath: Elasticsearch.Search.Server
+ description: The server on which the search was performed.
+ type: String
+ - contextPath: Elasticsearch.Search.timed_out
+ description: Whether the search stopped due to a time-out.
+ type: Boolean
+ - contextPath: Elasticsearch.Search.took
+ description: The time in milliseconds taken for the search to complete.
+ type: Number
+ - contextPath: Elasticsearch.Search.Page
+ description: The number of the page from which the search started.
+ type: Number
+ - contextPath: Elasticsearch.Search.Size
+      description: The maximum number of results that the search can return.
+ type: Number
+ - arguments:
+ - default: false
+ description: The index in which to perform a search.
+ isArray: false
+ name: index
+ required: true
+ secret: false
+ - default: false
+ description: The string to query. Strings are queried using the Lucene syntax.
+ isArray: false
+ name: query
+ predefined:
+ - ''
+ required: true
+ secret: false
+ - default: false
+ description: A CSV list of the fields of a document to fetch. Leaving the fields
+ empty fetches the entire document.
+ isArray: true
+ name: fields
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+    description: Whether to calculate an explanation of the score for each query
+      result, for example, "value:1.6943597". Default is "false".
+ isArray: false
+ name: explain
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: The number of the page from which to start a search. The default
+ is "0".
+ isArray: false
+ name: page
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: The number of documents displayed per page. Can be "1" to "10,000".
+ The default is "100".
+ isArray: false
+ name: size
+ required: false
+ secret: false
+ - default: false
+ description: The field by which to sort the results table. The supported result
+ types are boolean, numeric, date, and keyword fields. Keyword fields require
+      the doc_values parameter to be set to "true" on the Elasticsearch server.
+ isArray: false
+ name: sort-field
+ predefined:
+ - ''
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: asc
+    description: The order by which to sort the results table. The results table
+      can be sorted only if a sort-field is defined.
+ isArray: false
+ name: sort-order
+ predefined:
+ - asc
+ - desc
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches an index.
+ execution: false
+ name: search
+ outputs:
+ - contextPath: Elasticsearch.Search.Results._index
+ description: The index to which the document belongs.
+ type: String
+ - contextPath: Elasticsearch.Search.Results._id
+ description: The ID of the document.
+ type: String
+ - contextPath: Elasticsearch.Search.Results._type
+ description: The mapping type of the document.
+ type: String
+ - contextPath: Elasticsearch.Search.max_score
+ description: The maximum relevance score of a query.
+ type: Number
+ - contextPath: Elasticsearch.Search.Query
+ description: The query performed in the search.
+ type: String
+ - contextPath: Elasticsearch.Search.total.value
+ description: The number of search results.
+ type: Number
+ - contextPath: Elasticsearch.Search.Results._score
+ description: The relevance score of the search result.
+ type: Number
+ - contextPath: Elasticsearch.Search.Index
+ description: The index in which the search was performed.
+ type: String
+ - contextPath: Elasticsearch.Search.Server
+ description: The server on which the search was performed.
+ type: String
+ - contextPath: Elasticsearch.Search.timed_out
+ description: Whether the search stopped due to a time-out.
+ type: Boolean
+ - contextPath: Elasticsearch.Search.took
+ description: The time in milliseconds taken for the search to complete.
+ type: Number
+ - contextPath: Elasticsearch.Search.Page
+ description: The number of the page from which the search started.
+ type: Number
+ - contextPath: Elasticsearch.Search.Size
+      description: The maximum number of results that the search can return.
+ type: Number
+ dockerimage: demisto/elasticsearch:1.0.0.1795
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
diff --git a/Integrations/Elasticsearch_v2/Elasticsearch_v2_description.md b/Integrations/Elasticsearch_v2/Elasticsearch_v2_description.md
new file mode 100644
index 000000000000..d8fad18e3bde
--- /dev/null
+++ b/Integrations/Elasticsearch_v2/Elasticsearch_v2_description.md
@@ -0,0 +1,9 @@
+The Elasticsearch v2 integration supports Elasticsearch 6.0.0 and later.
+
+Strings are queried using the Lucene syntax. For more information about the Lucene syntax, see: https://www.elastic.co/guide/en/elasticsearch/reference/7.3/query-dsl-query-string-query.html#query-string-syntax
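+
+For example, a query combining a term match and a date range (the field names here are illustrative only):
+
+`status:failed AND user:"jdoe" AND timestamp:[2019-08-01 TO 2019-08-31]`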
+
+For further information about request response fields, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-body.html#request-body-search-explain
+
+For further information about type mapping, see: https://www.elastic.co/guide/en/elasticsearch/reference/7.x/mapping.html#mapping-type
+
+Note: Not all fields in Elasticsearch can be sorted. Sorting the results table is supported only for boolean, numeric, date, and keyword fields.
diff --git a/Integrations/Elasticsearch_v2/Elasticsearch_v2_image.png b/Integrations/Elasticsearch_v2/Elasticsearch_v2_image.png
new file mode 100644
index 000000000000..52b2573c7abb
Binary files /dev/null and b/Integrations/Elasticsearch_v2/Elasticsearch_v2_image.png differ
diff --git a/Integrations/Elasticsearch_v2/Elasticsearch_v2_test.py b/Integrations/Elasticsearch_v2/Elasticsearch_v2_test.py
new file mode 100644
index 000000000000..323cb23b4b05
--- /dev/null
+++ b/Integrations/Elasticsearch_v2/Elasticsearch_v2_test.py
@@ -0,0 +1,303 @@
+from datetime import datetime
+from unittest.mock import patch
+
+"""MOCKED RESPONSES"""
+
+ES_V6_RESPONSE = {
+ 'took': 1,
+ 'timed_out': False,
+ '_shards': {
+ 'total': 5,
+ 'successful': 5,
+ 'skipped': 0,
+ 'failed': 0
+ },
+ 'hits': {
+ 'total': 17,
+ 'max_score': 1.3862944,
+ 'hits': [
+ {
+ '_index': 'users',
+ '_type': '_doc',
+ '_id': '123',
+ '_score': 1.3862944,
+ '_source': {
+ 'Date': '2019-08-29T14:45:00Z'
+ }
+ }, {
+ '_index': 'users',
+ '_type': '_doc',
+ '_id': '456',
+ '_score': 0.9517491,
+ '_source': {
+ 'Date': '2019-08-29T14:46:00Z'
+ }
+ }
+ ]
+ }
+}
+
+
+ES_V7_RESPONSE = {
+ 'took': 1,
+ 'timed_out': False,
+ '_shards': {
+ 'total': 1,
+ 'successful': 1,
+ 'skipped': 0,
+ 'failed': 0
+ },
+ 'hits': {
+ 'total': {
+ 'value': 9,
+ 'relation': 'eq'
+ },
+ 'max_score': 0.6814878,
+ 'hits': [
+ {
+ '_index': 'customer',
+ '_type': 'doc',
+ '_id': '123',
+ '_score': 0.6814878,
+ '_source': {
+ 'Date': '2019-08-27T18:00:00Z'
+ }
+ }, {
+ '_index': 'customer',
+ '_type': 'doc',
+ '_id': '456',
+ '_score': 0.6814878,
+ '_source': {
+ 'Date': '2019-08-27T18:01:00Z'
+ }
+ }
+ ]
+ }
+}
+
+MOCK_ES7_SEARCH_CONTEXT = str({
+ 'Server': '',
+ 'Index': 'customer',
+ 'Query': 'check',
+ 'Page': 0,
+ 'Size': 2,
+ 'total': {
+ 'value': 9,
+ 'relation': 'eq'
+ },
+ 'max_score': 0.6814878,
+ 'took': 1,
+ 'timed_out': False,
+ 'Results': [
+ {
+ '_index': 'customer',
+ '_type': 'doc',
+ '_id': '123',
+ '_score': 0.6814878,
+ '_source': {'Date': '2019-08-27T18:00:00Z'}
+ },
+ {
+ '_index': 'customer',
+ '_type': 'doc',
+ '_id': '456',
+ '_score': 0.6814878,
+ '_source': {'Date': '2019-08-27T18:01:00Z'}
+ }
+ ]
+})
+
+MOCK_ES7_HIT_CONTEXT = str([
+ {
+ '_index': 'customer',
+ '_id': '123',
+ '_type': 'doc',
+ '_score': 0.6814878,
+ 'Date': '2019-08-27T18:00:00Z'
+ },
+ {
+ '_index': 'customer',
+ '_id': '456',
+ '_type': 'doc',
+ '_score': 0.6814878,
+ 'Date': '2019-08-27T18:01:00Z'
+ }
+])
+
+MOCK_ES6_SEARCH_CONTEXT = str({
+ 'Server': '',
+ 'Index': 'users',
+ 'Query': 'incident',
+ 'Page': 0,
+ 'Size': 2,
+ 'total': {
+ 'value': 17
+ },
+ 'max_score': 1.3862944,
+ 'took': 1,
+ 'timed_out': False,
+ 'Results': [
+ {
+ '_index': 'users',
+ '_type': '_doc',
+ '_id': '123',
+ '_score': 1.3862944,
+ '_source': {'Date': '2019-08-29T14:45:00Z'}
+ },
+ {
+ '_index': 'users',
+ '_type': '_doc',
+ '_id': '456',
+ '_score': 0.9517491,
+ '_source': {'Date': '2019-08-29T14:46:00Z'}
+ }
+ ]
+})
+
+MOCK_ES6_HIT_CONTEXT = str([
+ {
+ '_index': 'users',
+ '_id': '123',
+ '_type': '_doc',
+ '_score': 1.3862944,
+ 'Date': '2019-08-29T14:45:00Z'
+ },
+ {
+ '_index': 'users',
+ '_id': '456',
+ '_type': '_doc',
+ '_score': 0.9517491,
+ 'Date': '2019-08-29T14:46:00Z'
+ }
+])
+
+MOCK_ES7_INCIDENTS = str([
+ {
+ 'name': 'Elasticsearch: Index: customer, ID: 123',
+ 'rawJSON': '{'
+ '"_index": "customer", '
+ '"_type": "doc", '
+ '"_id": "123", '
+ '"_score": 0.6814878, '
+ '"_source": {"Date": "2019-08-27T18:00:00Z"}'
+ '}',
+ 'labels': [
+ {
+ 'type': 'Date',
+ 'value': '2019-08-27T18:00:00Z'
+ }
+ ],
+ 'occurred': '2019-08-27T18:00:00Z'
+ }, {
+ 'name': 'Elasticsearch: Index: customer, ID: 456',
+ 'rawJSON': '{'
+ '"_index": "customer", '
+ '"_type": "doc", '
+ '"_id": "456", '
+ '"_score": 0.6814878, '
+ '"_source": {"Date": "2019-08-27T18:01:00Z"}'
+ '}',
+ 'labels': [
+ {
+ 'type': 'Date',
+ 'value': '2019-08-27T18:01:00Z'
+ }
+ ],
+ 'occurred': '2019-08-27T18:01:00Z'
+ }
+])
+
+MOCK_ES6_INCIDENTS = str([
+ {
+ 'name': 'Elasticsearch: Index: users, ID: 123',
+ 'rawJSON': '{'
+ '"_index": "users", '
+ '"_type": "_doc", '
+ '"_id": "123", '
+ '"_score": 1.3862944, '
+ '"_source": {"Date": "2019-08-29T14:45:00Z"}'
+ '}',
+ 'labels':
+ [
+ {
+ 'type': 'Date',
+ 'value': '2019-08-29T14:45:00Z'
+ }
+ ],
+ 'occurred': '2019-08-29T14:45:00Z'
+ }, {
+ 'name': 'Elasticsearch: Index: users, ID: 456',
+ 'rawJSON': '{'
+ '"_index": "users", '
+ '"_type": "_doc", '
+ '"_id": "456", '
+ '"_score": 0.9517491, '
+ '"_source": {"Date": "2019-08-29T14:46:00Z"}'
+ '}',
+ 'labels':
+ [
+ {
+ 'type': 'Date',
+ 'value': '2019-08-29T14:46:00Z'
+ }
+ ],
+ 'occurred': '2019-08-29T14:46:00Z'
+ }
+])
+
+
+def test_context_creation_es7():
+ from Elasticsearch_v2 import results_to_context, get_total_results
+
+ base_page = 0
+ size = 2
+ total_dict, total_results = get_total_results(ES_V7_RESPONSE)
+ query = 'check'
+ index = 'customer'
+ search_context, meta_headers, hit_tables, hit_headers = results_to_context(index, query, base_page,
+ size, total_dict, ES_V7_RESPONSE)
+
+ assert str(search_context) == MOCK_ES7_SEARCH_CONTEXT
+ assert str(meta_headers) == "['Query', 'took', 'timed_out', 'total', 'max_score', 'Server', 'Page', 'Size']"
+ assert str(hit_tables) == MOCK_ES7_HIT_CONTEXT
+ assert str(hit_headers) == "['_id', '_index', '_type', '_score', 'Date']"
+
+
+def test_context_creation_es6():
+ from Elasticsearch_v2 import results_to_context, get_total_results
+
+ base_page = 0
+ size = 2
+ total_dict, total_results = get_total_results(ES_V6_RESPONSE)
+ query = 'incident'
+ index = 'users'
+ search_context, meta_headers, hit_tables, hit_headers = results_to_context(index, query, base_page,
+ size, total_dict, ES_V6_RESPONSE)
+
+ assert str(search_context) == MOCK_ES6_SEARCH_CONTEXT
+ assert str(meta_headers) == "['Query', 'took', 'timed_out', 'total', 'max_score', 'Server', 'Page', 'Size']"
+ assert str(hit_tables) == MOCK_ES6_HIT_CONTEXT
+ assert str(hit_headers) == "['_id', '_index', '_type', '_score', 'Date']"
+
+
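+# TIME_FORMAT, TIME_FIELD, and FETCH_INDEX are module-level globals normally populated
+# from the integration parameters; they are patched here so the tests run standalone.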
+@patch("Elasticsearch_v2.TIME_FORMAT", '%Y-%m-%dT%H:%M:%SZ')
+@patch("Elasticsearch_v2.TIME_FIELD", 'Date')
+@patch("Elasticsearch_v2.FETCH_INDEX", "users")
+def test_incident_creation_es6():
+ from Elasticsearch_v2 import results_to_incidents
+ last_fetch = datetime.strptime('2019-08-29T14:44:00Z', '%Y-%m-%dT%H:%M:%SZ')
+ incidents, last_fetch2 = results_to_incidents(ES_V6_RESPONSE, last_fetch, last_fetch)
+
+ assert str(last_fetch2) == '2019-08-29 14:46:00'
+    assert str(incidents) == MOCK_ES6_INCIDENTS
+
+
+@patch("Elasticsearch_v2.TIME_FORMAT", '%Y-%m-%dT%H:%M:%SZ')
+@patch("Elasticsearch_v2.TIME_FIELD", 'Date')
+@patch("Elasticsearch_v2.FETCH_INDEX", "customer")
+def test_incident_creation_es7():
+ from Elasticsearch_v2 import results_to_incidents
+ last_fetch = datetime.strptime('2019-08-27T17:59:00Z', '%Y-%m-%dT%H:%M:%SZ')
+ incidents, last_fetch2 = results_to_incidents(ES_V7_RESPONSE, last_fetch, last_fetch)
+
+ assert str(last_fetch2) == '2019-08-27 18:01:00'
+ assert str(incidents) == MOCK_ES7_INCIDENTS
diff --git a/Integrations/Elasticsearch_v2/Pipfile b/Integrations/Elasticsearch_v2/Pipfile
new file mode 100644
index 000000000000..5ae3da54e6d4
--- /dev/null
+++ b/Integrations/Elasticsearch_v2/Pipfile
@@ -0,0 +1,19 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+pytest-asyncio = "*"
+
+[packages]
+elasticsearch = "*"
+elasticsearch-dsl = "*"
+pytest = "*"
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/Elasticsearch_v2/Pipfile.lock b/Integrations/Elasticsearch_v2/Pipfile.lock
new file mode 100644
index 000000000000..e09eb7ef42fc
--- /dev/null
+++ b/Integrations/Elasticsearch_v2/Pipfile.lock
@@ -0,0 +1,362 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "48171d2185333f85388f3aa0ab137665922d56d20a75cd31d99b95167227a02f"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "elasticsearch": {
+ "hashes": [
+ "sha256:1ec2ed8ff9994a0b0961081f053c90819696c0d03da204b61b79d416606dd055",
+ "sha256:d9eda8d9696f55d7d394ade625a262985d7af762c0b9305b73d421dace41c4e7"
+ ],
+ "index": "pypi",
+ "version": "==7.0.4"
+ },
+ "elasticsearch-dsl": {
+ "hashes": [
+ "sha256:2aedc2a4dbba9870249a57d1798ec29e44404619bded66ac920f5d6a1cbb6f22",
+ "sha256:763fb28add254f2c3a1d071cd114466d8a27f640e02a874afba7b8a04147c094"
+ ],
+ "index": "pypi",
+ "version": "==7.0.0"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:0c505102757e7fa28b9f0958d8bc81301159dea16e2649858c92edc158b78a83",
+ "sha256:9a9f75ce32e78170905888acbf2376a81d3f21ecb3bb4867050413411d3ca7a9"
+ ],
+ "markers": "python_version < '3.8'",
+ "version": "==0.21"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832",
+ "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"
+ ],
+ "version": "==7.2.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
+ "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
+ ],
+ "version": "==19.1"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6",
+ "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34"
+ ],
+ "version": "==0.13.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:95d13143cc14174ca1a01ec68e84d76ba5d9d493ac02716fd9706c949a505210",
+ "sha256:b78fe2881323bd44fd9bd76e5317173d4316577e7b1cddebae9136a4495ec865"
+ ],
+ "index": "pypi",
+ "version": "==5.1.2"
+ },
+ "python-dateutil": {
+ "hashes": [
+ "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb",
+ "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"
+ ],
+ "version": "==2.8.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
+ "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
+ ],
+ "version": "==0.6.0"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:0c505102757e7fa28b9f0958d8bc81301159dea16e2649858c92edc158b78a83",
+ "sha256:9a9f75ce32e78170905888acbf2376a81d3f21ecb3bb4867050413411d3ca7a9"
+ ],
+ "markers": "python_version < '3.8'",
+ "version": "==0.21"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:02b260c8deb80db09325b99edf62ae344ce9bc64d68b7a634410b8e9a568edbf",
+ "sha256:18f9c401083a4ba6e162355873f906315332ea7035803d0fd8166051e3d402e3",
+ "sha256:1f2c6209a8917c525c1e2b55a716135ca4658a3042b5122d4e3413a4030c26ce",
+ "sha256:2f06d97f0ca0f414f6b707c974aaf8829c2292c1c497642f63824119d770226f",
+ "sha256:616c94f8176808f4018b39f9638080ed86f96b55370b5a9463b2ee5c926f6c5f",
+ "sha256:63b91e30ef47ef68a30f0c3c278fbfe9822319c15f34b7538a829515b84ca2a0",
+ "sha256:77b454f03860b844f758c5d5c6e5f18d27de899a3db367f4af06bec2e6013a8e",
+ "sha256:83fe27ba321e4cfac466178606147d3c0aa18e8087507caec78ed5a966a64905",
+ "sha256:84742532d39f72df959d237912344d8a1764c2d03fe58beba96a87bfa11a76d8",
+ "sha256:874ebf3caaf55a020aeb08acead813baf5a305927a71ce88c9377970fe7ad3c2",
+ "sha256:9f5caf2c7436d44f3cec97c2fa7791f8a675170badbfa86e1992ca1b84c37009",
+ "sha256:a0c8758d01fcdfe7ae8e4b4017b13552efa7f1197dd7358dc9da0576f9d0328a",
+ "sha256:a4def978d9d28cda2d960c279318d46b327632686d82b4917516c36d4c274512",
+ "sha256:ad4f4be843dace866af5fc142509e9b9817ca0c59342fdb176ab6ad552c927f5",
+ "sha256:ae33dd198f772f714420c5ab698ff05ff900150486c648d29951e9c70694338e",
+ "sha256:b4a2b782b8a8c5522ad35c93e04d60e2ba7f7dcb9271ec8e8c3e08239be6c7b4",
+ "sha256:c462eb33f6abca3b34cdedbe84d761f31a60b814e173b98ede3c81bb48967c4f",
+ "sha256:fd135b8d35dfdcdb984828c84d695937e58cc5f49e1c854eb311c4d6aa03f4f1"
+ ],
+ "version": "==1.4.2"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832",
+ "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"
+ ],
+ "version": "==7.2.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
+ "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
+ ],
+ "version": "==19.1"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6",
+ "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34"
+ ],
+ "version": "==0.13.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:95d13143cc14174ca1a01ec68e84d76ba5d9d493ac02716fd9706c949a505210",
+ "sha256:b78fe2881323bd44fd9bd76e5317173d4316577e7b1cddebae9136a4495ec865"
+ ],
+ "index": "pypi",
+ "version": "==5.1.2"
+ },
+ "pytest-asyncio": {
+ "hashes": [
+ "sha256:9fac5100fd716cbecf6ef89233e8590a4ad61d729d1732e0a96b84182df1daaf",
+ "sha256:d734718e25cfc32d2bf78d346e99d33724deeba774cc4afdf491530c6184b63b"
+ ],
+ "index": "pypi",
+ "version": "==0.10.0"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "version": "==2.22.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:510df890afe08d36eca5bb16b4aa6308a6f85e3159ad3013bac8b9de7bd5a010",
+ "sha256:88d3402dd8b3c69a9e4f9d3a73ad11b15920c6efd36bc27bf1f701cf4a8e4646"
+ ],
+ "index": "pypi",
+ "version": "==1.7.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
+ "sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e",
+ "sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0",
+ "sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c",
+ "sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631",
+ "sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4",
+ "sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34",
+ "sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b",
+ "sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a",
+ "sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233",
+ "sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1",
+ "sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36",
+ "sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d",
+ "sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a",
+ "sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.4.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
+ "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
+ ],
+ "version": "==0.6.0"
+ }
+ }
+}
diff --git a/Integrations/Exabeam/CHANGELOG.md b/Integrations/Exabeam/CHANGELOG.md
new file mode 100644
index 000000000000..91aec1a8dba1
--- /dev/null
+++ b/Integrations/Exabeam/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+ - The Exabeam Security Management Platform provides end-to-end detection, User Event Behavioral Analytics, and SOAR.
\ No newline at end of file
diff --git a/Integrations/Exabeam/Exabeam.py b/Integrations/Exabeam/Exabeam.py
new file mode 100644
index 000000000000..7a33f5b12913
--- /dev/null
+++ b/Integrations/Exabeam/Exabeam.py
@@ -0,0 +1,407 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+
+import requests
+from typing import Dict, Optional, MutableMapping
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' HELPERS '''
+
+
+def convert_unix_to_date(d):
+ """Convert unix timestamp to datetime in iso format"""
+ return datetime.fromtimestamp(int(d) / 1000).isoformat()
+
+
+class Client:
+ def __init__(self, exabeam_url: str, username: str, password: str, verify: bool,
+ proxies: Optional[MutableMapping[str, str]], headers):
+ self.server = exabeam_url.rstrip('/')
+ self.base_url = f'{self.server}/uba/api/'
+ self.username = username
+ self.password = password
+ self.verify = verify
+ self.proxies = proxies
+ self.headers = headers
+ self.session = requests.Session()
+ self.session.headers = headers
+ self._login()
+
+ def __del__(self):
+ self._logout()
+
+ def _http_request(self, method: str, suffix_url: str = None, params: dict = None, data: dict = None,
+ full_url: str = None, resp_type: str = 'json'):
+ full_url = full_url if full_url else f'{self.base_url}{suffix_url}'
+ try:
+ res = self.session.request(
+ method,
+ full_url,
+ verify=self.verify,
+ data=data,
+ proxies=self.proxies,
+ params=params
+ )
+ if not res.ok:
+ raise ValueError(f'Error in API call to Exabeam {res.status_code}. Reason: {res.text}')
+
+ try:
+ if resp_type == 'json':
+ return res.json()
+ else:
+ return res.text
+ except Exception:
+ raise ValueError(
+ f'Failed to parse http response to JSON format. Original response body: \n{res.text}')
+
+ except requests.exceptions.ConnectTimeout as exception:
+ err_msg = 'Connection Timeout Error - potential reasons might be that the Server URL parameter' \
+ ' is incorrect or that the Server is not accessible from your host.'
+ raise DemistoException(err_msg, exception)
+
+ except requests.exceptions.SSLError as exception:
+ err_msg = 'SSL Certificate Verification Failed - try selecting \'Trust any certificate\' checkbox in' \
+ ' the integration configuration.'
+ raise DemistoException(err_msg, exception)
+
+ except requests.exceptions.ProxyError as exception:
+ err_msg = 'Proxy Error - if the \'Use system proxy\' checkbox in the integration configuration is' \
+ ' selected, try clearing the checkbox.'
+ raise DemistoException(err_msg, exception)
+
+ except requests.exceptions.ConnectionError as exception:
+ # Get originating Exception in Exception chain
+ error_class = str(exception.__class__)
+ err_type = '<' + error_class[error_class.find('\'') + 1: error_class.rfind('\'')] + '>'
+ err_msg = '\nError Type: {}\nError Number: [{}]\nMessage: {}\n' \
+ 'Verify that the server URL parameter' \
+ ' is correct and that you have access to the server from your host.' \
+ .format(err_type, exception.errno, exception.strerror)
+ raise DemistoException(err_msg, exception)
+
+ def _login(self):
+ """ Login using the credentials and store the cookie """
+ self._http_request('POST', full_url=f'{self.server}/api/auth/login', data={
+ 'username': self.username,
+ 'password': self.password
+ })
+
+ def _logout(self):
+ """ Logout from the session """
+        self._http_request('GET', full_url=f'{self.server}/api/auth/logout')
+
+ def test_module_request(self):
+ """
+ Performs basic get request to check if the server is reachable.
+ """
+ suffix_url = 'ping'
+ return self._http_request('GET', suffix_url, resp_type='text')
+
+ def get_notable_users_request(self, api_unit: str = None, num: str = None, limit: int = None):
+
+ suffix_url = 'users/notable'
+
+ params = {
+ 'unit': api_unit,
+ 'num': num,
+ 'numberOfResults': limit
+ }
+ response = self._http_request('GET', suffix_url, params)
+ return response
+
+ def get_user_info_request(self, username: str):
+
+ suffix_url = f'user/{username}/info'
+ response = self._http_request('GET', suffix_url)
+ return response
+
+ def get_watchlist_request(self):
+
+ suffix_url = 'watchlist'
+ response = self._http_request('GET', suffix_url)
+
+ return response
+
+ def get_peergroups_request(self):
+
+ suffix_url = 'peerGroup'
+
+ response = self._http_request('GET', suffix_url)
+ return response
+
+ def get_user_labels_request(self):
+
+ suffix_url = 'userLabel'
+ response = self._http_request('GET', suffix_url)
+
+ return response
+
+ def user_sequence_request(self, username: str = None, parse_start_time=None, parse_end_time=None):
+
+ suffix_url = f'user/{username}/sequences'
+ params = {
+ 'username': username,
+ 'startTime': parse_start_time,
+ 'endTime': parse_end_time
+ }
+
+ response = self._http_request('GET', suffix_url, params)
+ return response
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module(client: Client, *_):
+
+ client.test_module_request()
+ demisto.results('ok')
+ return '', None, None
+
+
+def get_notable_users(client: Client, args: Dict):
+ """ Get notable users in a period of time
+
+ Args:
+ client: Client
+ args: Dict
+
+ """
+ limit: int = args.get('limit', 10)
+ time_period: str = args.get('time_period', '')
+ time_ = time_period.split(' ')
+ if not len(time_) == 2:
+        return_error('Invalid time period. Enter a number followed by a time unit, e.g., "3 months".')
+ num: str = time_[0]
+ unit: str = time_[1]
+ api_unit = unit[0]
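+    # The API expects single-letter time units ('h', 'd', 'M', 'y'); a leading
+    # lowercase 'm' (months) is uppercased to match that convention.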
+ if api_unit == 'm':
+ api_unit = api_unit.upper()
+
+ if api_unit not in {'d', 'y', 'M', 'h'}:
+        return_error('Invalid time unit. Supported units are hours, days, months, and years.')
+
+ contents = []
+ headers = ['UserFullName', 'UserName', 'Title', 'Department', 'RiskScore', 'Labels', 'NotableSessionIds',
+ 'EmployeeType', 'FirstSeen', 'LastSeen', 'LastActivity', 'Location']
+ users = client.get_notable_users_request(api_unit, num, limit).get('users', [])
+ if not users:
+ return 'No users were found in this period of time.', {}, {}
+ else:
+ for user in users:
+ user_ = user.get('user', {})
+ user_info = user_.get('info', {})
+ contents.append({
+ 'UserName': user_.get('username'),
+ 'RiskScore': round(user_.get('riskScore')),
+ 'FirstSeen': convert_unix_to_date(user_.get('firstSeen')),
+ 'LastSeen': convert_unix_to_date(user_.get('lastSeen')),
+ 'LastActivity': user_.get('lastActivityType'),
+ 'Labels': user_.get('labels'),
+ 'UserFullName': user.get('userFullName'),
+            'Location': user_info.get('location'),
+ 'NotableSessionIds': user.get('notableSessionIds'),
+ 'NotableUser': True,
+ 'HighestRiskSession': user.get('highestRiskSession'),
+ 'EmployeeType': user_info.get('employeeType'),
+ 'Department': user_info.get('department'),
+ 'Title': user_info.get('title')
+ })
+
+ context = {
+ 'Exabeam.User(val.UserName && val.UserName === obj.UserName)': contents
+ }
+
+ human_readable = tableToMarkdown('Exabeam Notable Users', contents, headers, removeNull=True)
+ return human_readable, context, users
+
+
+def get_user_info(client: Client, args: Dict):
+ """ Returns User info data for the given username
+ Args:
+ client: Client
+ args: Dict
+
+ """
+ username: str = args.get('username', '')
+    headers = ['Username', 'RiskScore', 'AverageRiskScore', 'LastSessionID', 'Label', 'FirstSeen',
+ 'LastSeen', 'LastActivityType', 'AccountNames', 'PeerGroupFieldName', 'PeerGroupFieldValue',
+ 'PeerGroupDisplayName', 'PeerGroupType']
+ user = client.get_user_info_request(username)
+ user_info = user.get('userInfo', {})
+ contents = {
+ 'Username': user.get('username'),
+ 'RiskScore': round(user_info.get('riskScore')),
+ 'AverageRiskScore': user_info.get('averageRiskScore'),
+ 'LastSessionID': user_info.get('lastSessionId'),
+ 'FirstSeen': convert_unix_to_date(user_info.get('firstSeen')),
+ 'LastSeen': convert_unix_to_date(user_info.get('lastSeen')),
+ 'LastActivityType': user_info.get('lastActivityType'),
+ 'Label': user_info.get('labels'),
+ 'AccountNames': user.get('accountNames'),
+ 'PeerGroupFieldName': user.get('peerGroupFieldName'),
+ 'PeerGroupFieldValue': user.get('peerGroupFieldValue'),
+ 'PeerGroupDisplayName': user.get('peerGroupDisplayName'),
+ 'PeerGroupType': user.get('peerGroupType')
+ }
+
+ context = {
+ 'Exabeam.User(val.UserName && val.UserName === obj.UserName)': contents
+ }
+
+ if not user_info.get('firstSeen'):
+ return f'The user {username} was not found', {}, {}
+ else:
+ human_readable = tableToMarkdown(f'User {username} information', contents, headers, removeNull=True)
+ return human_readable, context, user
+
+
+def get_user_sessions(client: Client, args: Dict):
+ """ Returns sessions for the given username and time range
+
+ Args:
+ client: Client
+ args: Dict
+
+ """
+ username = args.get('username')
+ start_time = args.get('start_time', datetime.now() - timedelta(days=30))
+ end_time = args.get('end_time', datetime.now())
+ parse_start_time = date_to_timestamp(start_time)
+ parse_end_time = date_to_timestamp(end_time)
+ contents = []
+ headers = ['SessionID', 'RiskScore', 'InitialRiskScore', 'StartTime', 'EndTime', 'LoginHost', 'Label']
+
+ user = client.user_sequence_request(username, parse_start_time, parse_end_time)
+    session = user.get('sessions', [])
+ for session_ in session:
+ contents.append({
+ 'SessionID': session_.get('sessionId'),
+ 'StartTime': convert_unix_to_date(session_.get('startTime')),
+ 'EndTime': convert_unix_to_date(session_.get('endTime')),
+ 'InitialRiskScore': session_.get('initialRiskScore'),
+ 'RiskScore': round(session_.get('riskScore')),
+ 'LoginHost': session_.get('loginHost'),
+ 'Label': session_.get('label')
+ })
+
+ context = {
+ 'Exabeam.User(val.SessionID && val.SessionID === obj.SessionID)': {
+ 'Username': username,
+ 'Session': contents
+ }
+ }
+
+ if session:
+ human_readable = tableToMarkdown(f'User {username} sessions information', contents, headers, removeNull=True)
+ return human_readable, context, user
+ else:
+ return f'The user {username} was not found', {}, {}
+
+
+def get_watchlist(client: Client, *_):
+ """ Returns all watchlist ids and titles.
+
+ Args:
+ client: Client
+
+ """
+
+ watchlist = client.get_watchlist_request()
+ contents = []
+ headers = ['WatchlistID', 'Title', 'Category']
+ for list_ in watchlist:
+ contents.append({
+ 'WatchlistID': list_.get('watchlistId'),
+ 'Title': list_.get('title'),
+ 'Category': list_.get('category')
+ })
+
+ context = {
+ 'Exabeam.Watchlist(val.WatchlistID && val.WatchlistID === obj.WatchlistID)': contents
+ }
+
+ human_readable = tableToMarkdown('Exabeam Watchlists', contents, headers)
+ return human_readable, context, watchlist
+
+
+def get_peer_groups(client: Client, *_):
+ """ Returns all peer groups
+
+ Args:
+ client: Client
+
+ """
+ groups = client.get_peergroups_request()
+ contents = []
+ for group in groups:
+ contents.append({
+ 'Name': group
+ })
+
+ context = {
+ 'Exabeam.PeerGroup(val.Name && val.Name === obj.Name)': contents
+ }
+
+ human_readable = tableToMarkdown('Exabeam Peer Groups', contents)
+ return human_readable, context, groups
+
+
+def get_user_labels(client: Client, *_):
+ """ Returns all user Labels
+
+ Args:
+ client: Client
+
+ """
+ labels = client.get_user_labels_request()
+ contents = []
+ for label in labels:
+ contents.append({
+ 'Label': label
+ })
+
+ context = {
+ 'Exabeam.UserLabel(val.Label && val.Label === obj.Label)': contents
+ }
+
+ human_readable = tableToMarkdown('Exabeam User Labels', contents)
+ return human_readable, context, labels
+
+
+def main():
+ username = demisto.params().get('credentials').get('identifier')
+ password = demisto.params().get('credentials').get('password')
+ server_url = demisto.params().get('url')
+ verify_certificate = not demisto.params().get('insecure', False)
+ headers = {
+ 'Accept': 'application/json'
+ }
+ proxies = handle_proxy()
+ client = Client(server_url, verify=verify_certificate, username=username, password=password, proxies=proxies,
+ headers=headers)
+    LOG(f'Command being called is {demisto.command()}')
+ commands = {
+ 'test-module': test_module,
+ 'get-notable-users': get_notable_users,
+ 'get-watchlists': get_watchlist,
+ 'get-peer-groups': get_peer_groups,
+ 'get-user-info': get_user_info,
+ 'get-user-labels': get_user_labels,
+ 'get-user-sessions': get_user_sessions
+ }
+ try:
+ command = demisto.command()
+ if command in commands:
+ return_outputs(*commands[command](client, demisto.args())) # type: ignore
+
+ except Exception as e:
+ return_error(str(e))
+
+
+if __name__ in ['__main__', 'builtin', 'builtins']:
+ main()
diff --git a/Integrations/Exabeam/Exabeam.yml b/Integrations/Exabeam/Exabeam.yml
new file mode 100644
index 000000000000..7f5917cfcf4d
--- /dev/null
+++ b/Integrations/Exabeam/Exabeam.yml
@@ -0,0 +1,221 @@
+category: Analytics & SIEM
+commonfields:
+ id: Exabeam
+ version: -1
+configuration:
+- display: Server URL (e.g., https://100.24.16.156:8484) # disable-secrets-detection
+ name: url
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: The Exabeam Security Management Platform provides end-to-end detection,
+ User Event Behavioral Analytics, and SOAR.
+display: Exabeam
+name: Exabeam
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The time period for which to fetch notable users, such as 3 months, 2 days, 4 hours, 1 year, and so on.
+ isArray: true
+ name: time_period
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: The maximum number of returned results.
+ isArray: false
+ name: limit
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns notable users in a period of time.
+ execution: false
+ name: get-notable-users
+ outputs:
+ - contextPath: Exabeam.User.RiskScore
+ description: The risk score of the notable user.
+ type: Number
+ - contextPath: Exabeam.User.UserFullName
+ description: The full name of the user.
+ type: String
+ - contextPath: Exabeam.User.AverageRiskScore
+ description: The average risk score of the user.
+ type: Number
+ - contextPath: Exabeam.User.FirstSeen
+ description: The date the user was first seen.
+ type: Date
+ - contextPath: Exabeam.User.NotableSessionIds
+ description: The ID of the notable session.
+ type: String
+ - contextPath: Exabeam.User.AccountsNumber
+ description: The number of accounts.
+ type: Number
+ - contextPath: Exabeam.User.LastSeen
+ description: The date the user was last seen.
+ type: Date
+ - contextPath: Exabeam.User.Location
+ description: The location of the user.
+ type: String
+ - contextPath: Exabeam.User.UserName
+ description: The name of the user.
+ type: String
+ - contextPath: Exabeam.User.Labels
+ description: The labels of the user.
+ type: String
+ - contextPath: Exabeam.User.LastActivityType
+ description: The last activity type of the user.
+ type: String
+ - contextPath: Exabeam.User.NotableUser
+ description: Whether the user is a notable user.
+ type: Boolean
+ - deprecated: false
+ description: Returns all watchlist IDs and titles.
+ execution: false
+ name: get-watchlists
+ outputs:
+ - contextPath: Exabeam.Watchlist.Category
+ description: The watchlist category.
+ type: String
+ - contextPath: Exabeam.Watchlist.Title
+ description: The watchlist title.
+ type: String
+ - contextPath: Exabeam.Watchlist.WatchlistID
+ description: The watchlist ID.
+ type: String
+ - deprecated: false
+ description: Returns all peer groups.
+ execution: false
+ name: get-peer-groups
+ outputs:
+ - contextPath: Exabeam.PeerGroup.Name
+ description: The name of the peer group.
+ type: String
+ - arguments:
+ - default: false
+ description: The username of the user to fetch.
+ isArray: false
+ name: username
+ required: true
+ secret: false
+ deprecated: false
+  description: Returns user information for the specified username.
+ execution: false
+ name: get-user-info
+ outputs:
+ - contextPath: Exabeam.User.RiskScore
+ description: The risk score of the user.
+ type: Number
+ - contextPath: Exabeam.User.AverageRiskScore
+ description: The average risk score.
+ type: Number
+ - contextPath: Exabeam.User.PeerGroupFieldName
+ description: The field name of the peer group.
+ type: String
+ - contextPath: Exabeam.User.FirstSeen
+ description: The date when the user was first seen.
+ type: Date
+ - contextPath: Exabeam.User.PeerGroupDisplayName
+ description: The display name of the Peer group.
+ type: String
+ - contextPath: Exabeam.User.LastSeen
+ description: The date the user was last seen.
+ type: Date
+ - contextPath: Exabeam.User.PeerGroupFieldValue
+ description: The field value of the peer group.
+ type: String
+ - contextPath: Exabeam.User.Label
+ description: The labels of the user.
+ type: String
+ - contextPath: Exabeam.User.Username
+ description: The name of the user.
+ type: String
+ - contextPath: Exabeam.User.PeerGroupType
+ description: The type of the peer group.
+ type: String
+ - contextPath: Exabeam.User.LastSessionID
+ description: The last session ID of the user.
+ type: String
+ - contextPath: Exabeam.User.LastActivityType
+ description: The last activity type of the user.
+ type: String
+ - contextPath: Exabeam.User.AccountNames
+ description: The account name of the user.
+ type: String
+ - deprecated: false
+ description: Returns all labels of the user.
+ execution: false
+ name: get-user-labels
+ outputs:
+ - contextPath: Exabeam.UserLabel.Label
+ description: The label of the user.
+ type: String
+ - arguments:
+ - default: false
+ description: The username for which to fetch data.
+ isArray: false
+ name: username
+ required: true
+ secret: false
+ - default: false
+    description: The start time of the time range. For example, 2018-08-01T11:50:16.
+ isArray: false
+ name: start_time
+ required: false
+ secret: false
+ - default: false
+ description: The end time of the time range. For example, 2018-08-01T11:50:16.
+ isArray: false
+ name: end_time
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns sessions for the given username and time range.
+ execution: false
+ name: get-user-sessions
+ outputs:
+ - contextPath: Exabeam.User.Session.EndTime
+ description: The end time of the session.
+ type: Date
+ - contextPath: Exabeam.User.Session.InitialRiskScore
+ description: The initial risk score of the session.
+ type: Number
+ - contextPath: Exabeam.User.Session.Label
+ description: The label of the session.
+ type: String
+ - contextPath: Exabeam.User.Session.LoginHost
+ description: The login host.
+ type: String
+ - contextPath: Exabeam.User.Session.RiskScore
+ description: The risk score of the session.
+ type: Number
+ - contextPath: Exabeam.User.Session.SessionID
+ description: The ID of the session.
+ type: String
+ - contextPath: Exabeam.User.Session.StartTime
+ description: The start time of the session.
+ type: Date
+ - contextPath: Exabeam.User.Username
+ description: The username of the session.
+ type: String
+ dockerimage: demisto/python3:3.7.4.1900
+ isfetch: false
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+ - Exabeam - Test
diff --git a/Integrations/Exabeam/Exabeam_description.md b/Integrations/Exabeam/Exabeam_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/Exabeam/Exabeam_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/Exabeam/Exabeam_image.png b/Integrations/Exabeam/Exabeam_image.png
new file mode 100644
index 000000000000..9642aa150340
Binary files /dev/null and b/Integrations/Exabeam/Exabeam_image.png differ
diff --git a/Integrations/Exchange2016_Compliance/CHANGELOG.md b/Integrations/Exchange2016_Compliance/CHANGELOG.md
new file mode 100644
index 000000000000..648a4f934369
--- /dev/null
+++ b/Integrations/Exchange2016_Compliance/CHANGELOG.md
@@ -0,0 +1,2 @@
+## [Unreleased]
+Improved security inside the Docker container.
diff --git a/Integrations/Exchange2016_Compliance/Exchange2016_Compliance.py b/Integrations/Exchange2016_Compliance/Exchange2016_Compliance.py
new file mode 100644
index 000000000000..335b63d075d7
--- /dev/null
+++ b/Integrations/Exchange2016_Compliance/Exchange2016_Compliance.py
@@ -0,0 +1,422 @@
+import demistomock as demisto
+from CommonServerPython import *
+import subprocess
+import uuid
+
+USERNAME = demisto.params()['credentials']['identifier'].replace("'", "''")
+PASSWORD = demisto.params()['credentials']['password'].replace("'", "''")
+EXCHANGE_FQDN = demisto.params()['exchangeFQDN'].replace("'", "''")
+UNSECURE = demisto.params()['insecure']
+
+STARTCS = '''
+[CmdletBinding()]
+Param(
+[Parameter(Mandatory=$True)]
+[string]$username,
+[Parameter(Mandatory=$True)]
+[string]$query,
+[Parameter(Mandatory=$True)]
+[string]$server,
+[Parameter(Mandatory=$True)]
+[bool]$unsecure
+)
+$WarningPreference = "silentlyContinue"
+$password = Read-Host
+$secpasswd = ConvertTo-SecureString $password -AsPlainText -Force
+$UserCredential = New-Object System.Management.Automation.PSCredential ($username, $secpasswd)
+$searchName = [guid]::NewGuid().ToString() -replace '[-]'
+$searchName = "DemistoSearch" + $searchName
+if($unsecure){
+ $url = "http://" + $server + "/PowerShell"
+ $session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri $url `
+ -Credential $UserCredential -Authentication Kerberos
+}else{
+ $url = "https://" + $server + "/PowerShell"
+ $session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri $url `
+ -Credential $UserCredential -Authentication Basic -AllowRedirection
+}
+if (!$session)
+{
+ "Failed to create remote PS session"
+ return
+}
+Import-PSSession $session -CommandName *Compliance* -AllowClobber -DisableNameChecking -Verbose:$false | Out-Null
+$compliance = New-ComplianceSearch -Name $searchName -ExchangeLocation All -ContentMatchQuery $query -Confirm:$false
+Start-ComplianceSearch -Identity $searchName
+$complianceSearchName = "Action status: " + $searchName
+$complianceSearchName | ConvertTo-Json
+Remove-PSSession $session
+'''
+
+GETCS = '''
+[CmdletBinding()]
+Param(
+[Parameter(Mandatory=$True)]
+[string]$username,
+[Parameter(Mandatory=$True)]
+[string]$searchName,
+[Parameter(Mandatory=$True)]
+[string]$server,
+[Parameter(Mandatory=$True)]
+[bool]$unsecure
+)
+$WarningPreference = "silentlyContinue"
+$password = Read-Host
+$secpasswd = ConvertTo-SecureString $password -AsPlainText -Force
+$UserCredential = New-Object System.Management.Automation.PSCredential ($username, $secpasswd)
+if($unsecure){
+ $url = "http://" + $server + "/PowerShell"
+ $session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri $url `
+ -Credential $UserCredential -Authentication Kerberos
+}else{
+ $url = "https://" + $server + "/PowerShell"
+ $session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri $url `
+ -Credential $UserCredential -Authentication Basic -AllowRedirection
+}
+if (!$session)
+{
+ "Failed to create remote PS session"
+ return
+}
+Import-PSSession $session -CommandName Get-ComplianceSearch -AllowClobber `
+-DisableNameChecking -Verbose:$false | Out-Null
+$searchStatus = Get-ComplianceSearch $searchName
+$searchStatus.Status
+if ($searchStatus.Status -eq "Completed")
+{
+ $searchStatus.SuccessResults | ConvertTo-Json
+}
+Remove-PSSession $session
+'''
+
+REMOVECS = '''
+[CmdletBinding()]
+Param(
+[Parameter(Mandatory=$True)]
+[string]$username,
+[Parameter(Mandatory=$True)]
+[string]$searchName,
+[Parameter(Mandatory=$True)]
+[string]$server,
+[Parameter(Mandatory=$True)]
+[bool]$unsecure
+)
+$WarningPreference = "silentlyContinue"
+$password = Read-Host
+$secpasswd = ConvertTo-SecureString $password -AsPlainText -Force
+$UserCredential = New-Object System.Management.Automation.PSCredential ($username, $secpasswd)
+if($unsecure){
+ $url = "http://" + $server + "/PowerShell"
+ $session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri $url `
+ -Credential $UserCredential -Authentication Kerberos
+}else{
+ $url = "https://" + $server + "/PowerShell"
+ $session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri $url `
+ -Credential $UserCredential -Authentication Basic -AllowRedirection
+}
+if (!$session)
+{
+ "Failed to create remote PS session"
+ return
+}
+Import-PSSession $session -CommandName *Compliance* -AllowClobber -DisableNameChecking -Verbose:$false | Out-Null
+Remove-ComplianceSearch $searchName -Confirm:$false
+Remove-PSSession $session
+'''
+
+STARTPURGE = '''
+[CmdletBinding()]
+Param(
+[Parameter(Mandatory=$True)]
+[string]$username,
+[Parameter(Mandatory=$True)]
+[string]$searchName,
+[Parameter(Mandatory=$True)]
+[string]$server,
+[Parameter(Mandatory=$True)]
+[bool]$unsecure
+)
+$WarningPreference = "silentlyContinue"
+$password = Read-Host
+$secpasswd = ConvertTo-SecureString $password -AsPlainText -Force
+$UserCredential = New-Object System.Management.Automation.PSCredential ($username, $secpasswd)
+if($unsecure){
+ $url = "http://" + $server + "/PowerShell"
+ $session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri $url `
+ -Credential $UserCredential -Authentication Kerberos
+}else{
+ $url = "https://" + $server + "/PowerShell"
+ $session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri $url `
+ -Credential $UserCredential -Authentication Basic -AllowRedirection
+}
+if (!$session)
+{
+ "Failed to create remote PS session"
+ return
+}
+Import-PSSession $session -CommandName *Compliance* -AllowClobber -DisableNameChecking -Verbose:$false | Out-Null
+$newActionResult = New-ComplianceSearchAction -SearchName $searchName -Purge -PurgeType SoftDelete -Confirm:$false
+if (!$newActionResult)
+{
+ "No action was created"
+}
+Remove-PSSession $session
+return
+'''
+
+CHECKPURGE = '''
+[CmdletBinding()]
+Param(
+[Parameter(Mandatory=$True)]
+[string]$username,
+[Parameter(Mandatory=$True)]
+[string]$searchName,
+[Parameter(Mandatory=$True)]
+[string]$server,
+[Parameter(Mandatory=$True)]
+[bool]$unsecure
+)
+$WarningPreference = "silentlyContinue"
+$password = Read-Host
+$secpasswd = ConvertTo-SecureString $password -AsPlainText -Force
+$UserCredential = New-Object System.Management.Automation.PSCredential ($username, $secpasswd)
+if($unsecure){
+ $url = "http://" + $server + "/PowerShell"
+ $session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri $url `
+ -Credential $UserCredential -Authentication Kerberos
+}else{
+ $url = "https://" + $server + "/PowerShell"
+ $session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri $url `
+ -Credential $UserCredential -Authentication Basic -AllowRedirection
+}
+if (!$session)
+{
+ "Failed to create remote PS session"
+ return
+}
+Import-PSSession $session -CommandName *Compliance* -AllowClobber -DisableNameChecking -Verbose:$false | Out-Null
+$actionName = $searchName + "_Purge"
+$actionStatus = Get-ComplianceSearchAction $actionName
+""
+$actionStatus.Status
+Remove-PSSession $session
+'''
+
+TESTCON = '''
+[CmdletBinding()]
+Param(
+[Parameter(Mandatory=$True)]
+[string]$username,
+[Parameter(Mandatory=$True)]
+[string]$server,
+[Parameter(Mandatory=$True)]
+[bool]$unsecure
+)
+$errorActionPreference = 'Stop'
+$WarningPreference = "silentlyContinue"
+$password = Read-Host
+$secpasswd = ConvertTo-SecureString $password -AsPlainText -Force
+$UserCredential = New-Object System.Management.Automation.PSCredential ($username, $secpasswd)
+try{
+ if($unsecure){
+ $url = "http://" + $server + "/PowerShell"
+ $session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri $url `
+ -Credential $UserCredential -Authentication Kerberos
+ }else{
+ $url = "https://" + $server + "/PowerShell"
+ $session = New-PSSession -ConfigurationName Microsoft.Exchange -ConnectionUri $url `
+ -Credential $UserCredential -Authentication Basic -AllowRedirection
+ }
+ echo "successful connection"
+} catch {
+ $e = $_.Exception
+ echo $e.Message
+} finally {
+ Remove-PSSession $session
+}
+'''
+
+
+def prepare_args(d):
+ return dict((k.replace("-", "_"), v) for k, v in d.items())
+
+
+def str_to_unicode(obj):
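+    # Recursively decode byte strings to unicode (Python 2) so entry results render correctly.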
+ if isinstance(obj, dict):
+ obj = {k: str_to_unicode(v) for k, v in obj.iteritems()}
+ elif isinstance(obj, list):
+ obj = map(str_to_unicode, obj)
+ elif isinstance(obj, str):
+ obj = unicode(obj, "utf-8")
+ return obj
+
+
+def encode_and_submit_results(obj):
+ demisto.results(str_to_unicode(obj))
+
+
+def get_cs_status(search_name, status):
+ return {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Search {} status: {}'.format(search_name, status),
+ 'EntryContext': {
+ 'EWS.ComplianceSearch(val.Name === obj.Name)': {'Name': search_name, 'Status': status}
+ }
+ }
+
+
+def create_ps_file(ps_name, ps_content):
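+    # Write the PowerShell source to a script file under %TEMP% on the engine machine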
+ temp_path = os.getenv('TEMP')
+ if not temp_path:
+ return_error("Check that the integration is using single engine without docker."
+ " If so, add TEMP variable to the enviroment varibes.")
+
+ ps_path = temp_path + '\\' + ps_name # type: ignore
+ with open(ps_path, 'w+') as file:
+ file.write(ps_content)
+ return ps_path
+
+
+def delete_ps_file(ps_path):
+ if os.path.exists(ps_path):
+ os.remove(ps_path)
+
+
+def start_compliance_search(query):
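+    # Each command writes its script to a unique temp file, runs it with powershell.exe,
+    # and feeds the password over stdin so it never appears on the command line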
+ try:
+ ps_path = create_ps_file('startcs_' + str(uuid.uuid4()).replace('-', '') + '.ps1', STARTCS)
+ output = subprocess.Popen(["powershell.exe", ps_path, "'" + USERNAME + "'",
+ "'" + str(query).replace("'", "''") + "'", "'" + EXCHANGE_FQDN + "'", "$" + str(UNSECURE)],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = output.communicate(input=PASSWORD.encode())
+ finally:
+ delete_ps_file(ps_path)
+
+ if stderr:
+ return_error(stderr)
+ prefix = '"Action status: '
+ pref_ind = stdout.find(prefix)
+ sub_start = pref_ind + len(prefix)
+ sub_end = sub_start + 45
+ search_name = stdout[sub_start:sub_end]
+ return {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Search started: {}'.format(search_name),
+ 'EntryContext': {
+ 'EWS.ComplianceSearch': {'Name': search_name, 'Status': 'Starting'}
+ }
+ }
+
+
+def get_compliance_search(search_name):
+ try:
+ ps_path = create_ps_file('getcs_' + search_name + '.ps1', GETCS)
+ output = subprocess.Popen(["powershell.exe", ps_path, "'" + USERNAME + "'",
+ "'" + search_name + "'", "'" + EXCHANGE_FQDN + "'", "$" + str(UNSECURE)],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = output.communicate(input=PASSWORD.encode())
+ finally:
+ delete_ps_file(ps_path)
+
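+    # Strip the password that Read-Host echoes back at the start of the captured output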
+ stdout = stdout[len(PASSWORD):]
+
+ if stderr:
+ return_error(stderr)
+ stdout = stdout.split('\n', 1)
+ status = stdout[0].strip()
+ results = [get_cs_status(search_name, status)]
+
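+    # Parse the raw table rows into dicts keyed by 'Location', 'Item count', and 'Total size'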
+ if status == 'Completed' and len(stdout[1].strip()) > 4:
+ res = list(r[:-1].split(', ') if r[-1] == ',' else r.split(', ') for r in stdout[1][2:-4].split(r'\r\n'))
+ res = map(lambda x: {k: v for k, v in (s.split(': ') for s in x)}, res)
+ results.append(
+ {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': stdout,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Exchange 2016 Compliance search results',
+ res, ['Location', 'Item count', 'Total size'])
+ }
+ )
+ return results
+
+
+def remove_compliance_search(search_name):
+ try:
+ ps_path = create_ps_file('removecs_' + search_name + '.ps1', REMOVECS)
+ output = subprocess.Popen(["powershell.exe", ps_path, "'" + USERNAME + "'",
+ "'" + search_name + "'", "'" + EXCHANGE_FQDN + "'", "$" + str(UNSECURE)],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = output.communicate(input=PASSWORD.encode())
+ finally:
+ delete_ps_file(ps_path)
+
+ return return_error(stderr) if stderr else get_cs_status(search_name, 'Removed')
+
+
+def purge_compliance_search(search_name):
+ try:
+ ps_path = create_ps_file('startpurge_' + search_name + '.ps1', STARTPURGE)
+ output = subprocess.Popen(["powershell.exe", ps_path, "'" + USERNAME + "'",
+ "'" + search_name + "'", "'" + EXCHANGE_FQDN + "'", "$" + str(UNSECURE)],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = output.communicate(input=PASSWORD.encode())
+ finally:
+ delete_ps_file(ps_path)
+ return return_error(stderr) if stderr else get_cs_status(search_name, 'Purging')
+
+
+def check_purge_compliance_search(search_name):
+ try:
+ ps_path = create_ps_file('checkpurge_' + search_name + '.ps1', CHECKPURGE)
+ output = subprocess.Popen(["powershell.exe", ps_path, "'" + USERNAME + "'",
+ "'" + search_name + "'", "'" + EXCHANGE_FQDN + "'", "$" + str(UNSECURE)],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = output.communicate(input=PASSWORD.encode())
+ finally:
+ delete_ps_file(ps_path)
+
+ return return_error(stderr) if stderr else get_cs_status(search_name,
+ 'Purged' if stdout.strip() == 'Completed' else 'Purging')
+
+
+def test_module():
+ try:
+ ps_path = create_ps_file('testcon_' + str(uuid.uuid4()).replace('-', '') + '.ps1', TESTCON)
+ output = subprocess.Popen(["powershell.exe", ps_path, "'" + USERNAME + "'",
+ "'" + EXCHANGE_FQDN + "'", "$" + str(UNSECURE)],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout = output.communicate(input=PASSWORD.encode())[0].strip()
+ finally:
+ delete_ps_file(ps_path)
+
+ stdout = stdout[len(PASSWORD):]
+
+ if stdout == "successful connection":
+ demisto.results('ok')
+ else:
+ return_error(stdout)
+
+
+args = prepare_args(demisto.args())
+try:
+ if demisto.command() == 'exchange2016-start-compliance-search':
+ encode_and_submit_results(start_compliance_search(**args))
+ elif demisto.command() == 'exchange2016-get-compliance-search':
+ encode_and_submit_results(get_compliance_search(**args))
+ elif demisto.command() == 'exchange2016-remove-compliance-search':
+ encode_and_submit_results(remove_compliance_search(**args))
+ elif demisto.command() == 'exchange2016-purge-compliance-search-results':
+ encode_and_submit_results(purge_compliance_search(**args))
+ elif demisto.command() == 'exchange2016-get-compliance-search-purge-status':
+ encode_and_submit_results(check_purge_compliance_search(**args))
+ elif demisto.command() == 'test-module':
+ test_module()
+except Exception as e:
+ if isinstance(e, WindowsError): # pylint: disable=undefined-variable
+ return_error("Could not open powershell on the target engine.")
+ else:
+ return_error(e)
diff --git a/Integrations/Exchange2016_Compliance/Exchange2016_Compliance.yml b/Integrations/Exchange2016_Compliance/Exchange2016_Compliance.yml
new file mode 100644
index 000000000000..bd9cebcb830a
--- /dev/null
+++ b/Integrations/Exchange2016_Compliance/Exchange2016_Compliance.yml
@@ -0,0 +1,81 @@
+commonfields:
+ id: Exchange 2016 Compliance Search
+ version: -1
+name: Exchange 2016 Compliance Search
+display: Exchange 2016 Compliance Search
+category: Messaging
+description: Exchange Server 2016 Compliance Search enables you to search for and
+ delete an email message from all mailboxes in your organization.
+configuration:
+- display: DOMAIN\USERNAME (e.g., DEMISTO.INT\admin)
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: true
+- display: Exchange Server fully qualified domain name (FQDN)
+ name: exchangeFQDN
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Trust any certificate (insecure)
+ name: insecure
+ defaultvalue: ""
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ commands:
+ - name: exchange2016-start-compliance-search
+ arguments:
+ - name: query
+ required: true
+ default: true
+ description: Query for finding mail messages
+ outputs:
+ - contextPath: EWS.ComplianceSearch.Name
+ description: The name of the compliance search
+ type: string
+ - contextPath: EWS.ComplianceSearch.Status
+ description: The status of the compliance search
+ type: string
+ description: Initiates a compliance search.
+ - name: exchange2016-get-compliance-search
+ arguments:
+ - name: search-name
+ required: true
+ default: true
+ description: Name of the compliance search
+ outputs:
+ - contextPath: EWS.ComplianceSearch.Status
+ description: The status of the compliance search
+ type: string
+ description: Gets the status and results of a compliance search.
+ - name: exchange2016-remove-compliance-search
+ arguments:
+ - name: search-name
+ required: true
+ default: true
+ description: Name of the compliance search
+ outputs:
+ - contextPath: EWS.ComplianceSearch.Status
+ description: The status of the compliance search
+ type: string
+ description: Removes the compliance search from the Exchange Server.
+ - name: exchange2016-purge-compliance-search-results
+ arguments:
+ - name: search-name
+ required: true
+ default: true
+ description: Name of the compliance search
+ description: Purges the results found during the compliance search.
+ - name: exchange2016-get-compliance-search-purge-status
+ arguments:
+ - name: search-name
+ required: true
+ default: true
+ description: Name of the compliance search
+ description: Checks the status of the purge operation on the compliance search.
+ runonce: false
+tests:
+- No test
diff --git a/Integrations/Exchange2016_Compliance/Exchange2016_Compliance_description.md b/Integrations/Exchange2016_Compliance/Exchange2016_Compliance_description.md
new file mode 100644
index 000000000000..a63dfba139f0
--- /dev/null
+++ b/Integrations/Exchange2016_Compliance/Exchange2016_Compliance_description.md
@@ -0,0 +1,2 @@
+The Compliance Search feature in Exchange Server 2016 enables you to search all mailboxes in your organization. This integration must run on an engine that is installed on a target machine, which is part of a domain.
+The user must be assigned the required permissions before running the integration's commands.
\ No newline at end of file
diff --git a/Integrations/Exchange2016_Compliance/Exchange2016_Compliance_image.png b/Integrations/Exchange2016_Compliance/Exchange2016_Compliance_image.png
new file mode 100644
index 000000000000..97612d231c0f
Binary files /dev/null and b/Integrations/Exchange2016_Compliance/Exchange2016_Compliance_image.png differ
diff --git a/Integrations/Exchange2016_Compliance/Pipfile b/Integrations/Exchange2016_Compliance/Pipfile
new file mode 100644
index 000000000000..66ad1243db8b
--- /dev/null
+++ b/Integrations/Exchange2016_Compliance/Pipfile
@@ -0,0 +1,22 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+
+[packages]
+certifi = "==2017.11.5"
+chardet = "==3.0.4"
+idna = "==2.6"
+olefile = "==0.44"
+requests = "==2.18.4"
+urllib3 = "==1.22"
+PyYAML = "==3.12"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/Exchange2016_Compliance/Pipfile.lock b/Integrations/Exchange2016_Compliance/Pipfile.lock
new file mode 100644
index 000000000000..5b7eff8d3e08
--- /dev/null
+++ b/Integrations/Exchange2016_Compliance/Pipfile.lock
@@ -0,0 +1,376 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "dbc7e9dc0a5be3767de3b107d1afe7a3e3b6c57f7cb8a820195e76b8ee681d40"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694",
+ "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
+ ],
+ "index": "pypi",
+ "version": "==2017.11.5"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "olefile": {
+ "hashes": [
+ "sha256:61f2ca0cd0aa77279eb943c07f607438edf374096b66332fae1ee64a6f0f73ad"
+ ],
+ "index": "pypi",
+ "version": "==0.44"
+ },
+ "pyyaml": {
+ "hashes": [
+ "sha256:16b20e970597e051997d90dc2cddc713a2876c47e3d92d59ee198700c5427736",
+ "sha256:3262c96a1ca437e7e4763e2843746588a965426550f3797a79fca9c6199c431f",
+ "sha256:592766c6303207a20efc445587778322d7f73b161bd994f227adaa341ba212ab",
+ "sha256:5ac82e411044fb129bae5cfbeb3ba626acb2af31a8d17d175004b70862a741a7",
+ "sha256:827dc04b8fa7d07c44de11fabbc888e627fa8293b695e0f99cb544fdfa1bf0d1",
+ "sha256:bc6bced57f826ca7cb5125a10b23fd0f2fff3b7c4701d64c439a300ce665fff8",
+ "sha256:c01b880ec30b5a6e6aa67b09a2fe3fb30473008c85cd6a67359a1b15ed6d83a4",
+ "sha256:e863072cdf4c72eebf179342c94e6989c67185842d9997960b3e69290b2fa269"
+ ],
+ "index": "pypi",
+ "version": "==3.12"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694",
+ "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
+ ],
+ "index": "pypi",
+ "version": "==2017.11.5"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==3.7.4"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.3'",
+ "version": "==1.0.2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16",
+ "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.3.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8",
+ "sha256:80d2de76188eabfbfcf27e6a37342c2827801e59c4cc14b0371c56fed43820e3"
+ ],
+ "version": "==0.19"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
+ "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
+ ],
+ "version": "==19.0"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version == '3.4.*' or python_version < '3'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42",
+ "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300"
+ ],
+ "index": "pypi",
+ "version": "==1.9.5"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:43c5486cefefa536c9aab528881c992328f020eefe4f6d06332449c365218580",
+ "sha256:d6c5ffe9d0305b9b977f7a642d36b9370954d1da7ada4c62393382cbadad4265"
+ ],
+ "version": "==2.4.1.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae",
+ "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6"
+ ],
+ "index": "pypi",
+ "version": "==4.6.4"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1",
+ "sha256:6cbdf72f089581983cd395d20290714610182eaedcf7410f655711e710064a4e"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/ExtraHop/ExtraHop.py b/Integrations/ExtraHop/ExtraHop.py
new file mode 100644
index 000000000000..b42dd13e7a73
--- /dev/null
+++ b/Integrations/ExtraHop/ExtraHop.py
@@ -0,0 +1,385 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import json
+import os
+import requests
+from distutils.util import strtobool
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+APIKEY = demisto.params().get('apikey')
+SERVER = demisto.params()['url'][:-1] if (demisto.params()['url'] and demisto.params()['url'].endswith('/')) else \
+ demisto.params()['url']
+USE_SSL = not demisto.params().get('insecure', False)
+BASE_URL = SERVER + '/api/v1/'
+HEADERS = {
+ 'Accept': 'application/json',
+ 'Authorization': 'ExtraHop apikey={key}'.format(key=APIKEY)
+}
+if not demisto.params().get('proxy'):
+    # pop() with a default avoids a KeyError when a proxy variable is not set
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+# 'response' is a container for paginated results
+response = []
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, data=None, payload=None):
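+    # Serialize the body to JSON and return the raw requests.Response;
+    # callers are expected to call .json() on it themselves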
+ data = json.dumps(data)
+ try:
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ data=data,
+ headers=HEADERS,
+ params=payload
+ )
+    except requests.exceptions.RequestException:  # connection-level failure (DNS, refused, timeout)
+ return_error('Failed to connect to - {url} - Please check the URL'.format(url=BASE_URL))
+    # 204 signals a successful modification; any status outside {200, 201, 204} is an error
+ if res.status_code == 204:
+ return demisto.results('Successful Modification')
+ elif res.status_code not in {200, 204, 201}:
+ return_error('Error in API call to ExtraHop {code} - {reason}'.format(code=res.status_code, reason=res.reason))
+ return res
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+    Performs a basic GET request to check the ExtraHop version
+ """
+ test_result = http_request('GET', 'extrahop')
+ return test_result
+
+
+def get_alerts():
+ res_raw = http_request('GET', 'alerts')
+ res = res_raw.json()
+ return res
+
+
+def paginate(can_paginate, cursor):
+    # Advance the cursor on each iteration and stop once the response
+    # no longer returns one
+    while can_paginate:
+        body = {
+            "cursor": cursor,
+            "context_ttl": 400000
+        }
+        res_raw = http_request('POST', 'records/cursor', body)
+        res = res_raw.json()
+        response.append(res)
+        if 'cursor' in res:
+            cursor = res['cursor']
+        else:
+            break
+    return response
+
+
+def query_records(field, value, operator, query_from, limit):
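+    # Build a single-filter record search; the results, plus any cursor-paginated
+    # follow-ups, accumulate in the module-level 'response' list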
+ data = {
+ "filter": {
+ "field": str(field),
+ "operand": str(value),
+ "operator": str(operator)
+ },
+ "from": int(query_from),
+ "limit": int(limit)
+ }
+ res_raw = http_request('POST', 'records/search', data)
+ res = res_raw.json()
+ response.append(res)
+ if 'cursor' in res:
+ response.append(paginate(True, res['cursor']))
+ return response
+
+
+def devices():
+ active_from = demisto.args().get('active_from')
+ active_until = demisto.args().get('active_until')
+ search_type = demisto.args().get('search_type')
+ limit = demisto.args().get('limit')
+ payload = {}
+ if active_from:
+ payload['active_from'] = active_from
+ if active_until:
+ payload['active_until'] = active_until
+ if limit:
+ payload['limit'] = limit
+ payload['search_type'] = search_type
+ res_raw = http_request('GET', 'devices', data=None, payload=payload)
+ res = res_raw.json()
+ return res
+
+
+def format_alerts(alerts):
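+    # Render one markdown table per alert and mirror each alert into context
+    # under ExtraHop.Alert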
+ hr = ''
+ ec = {
+ "ExtraHop": {
+ "Alert": []
+ }
+ } # type: dict
+ for alert in alerts:
+ hr += tableToMarkdown('Found Alert', alert, headerTransform=string_to_table_header, removeNull=True)
+ ec['ExtraHop']['Alert'].append(createContext(alert, keyTransform=string_to_context_key, removeNull=True))
+ if len(alerts) == 0:
+ demisto.results('No results were found')
+ else:
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': alerts,
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
+
+def format_device_results(data):
+ hr_table = []
+ ec = {
+ "ExtraHop": {
+ "Device": []
+ }
+ } # type: dict
+ for device in data:
+ hr = {}
+ if 'id' in device:
+ hr['ID'] = device.get('id')
+ if 'display_name' in device:
+ hr['Display Name'] = device.get('display_name')
+ if 'ipaddr4' in device:
+ hr['IP Address'] = device.get('ipaddr4')
+ if 'macaddr' in device:
+ hr['MAC Address'] = device.get('macaddr')
+ if 'vendor' in device:
+ hr['Vendor'] = device.get('vendor')
+ hr_table.append(hr)
+ ec['ExtraHop']['Device'].append(createContext(device, keyTransform=string_to_context_key, removeNull=True))
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': data,
+ 'HumanReadable': tableToMarkdown('Devices Found', hr_table),
+ 'EntryContext': ec
+ })
+
+
+def whitelist_modify(add, remove):
+ assignments = {}
+ if add:
+ add_items = add.split(',')
+ add_items = list(map(int, add_items))
+ assignments['assign'] = add_items
+ if remove:
+ remove_items = remove.split(',')
+ remove_items = list(map(int, remove_items))
+ assignments['unassign'] = remove_items
+ res = http_request('POST', 'whitelist/devices', data=assignments)
+ return res
+
+
+def whitelist_retrieve():
+ res_raw = http_request('GET', 'whitelist/devices')
+ res = res_raw.json()
+ return res
+
+
+def add_alert(apply_all, disabled, name, notify_snmp, refire_interval, severity, alert_type, object_type,
+ protocols, field_name, stat_name, units, interval_length, operand, operator, field_name2, field_op,
+ param, param2, alert_id=None):
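+    # 'detection' alerts use object_type/protocols; 'threshold' alerts use the metric
+    # fields below. A non-empty alert_id switches from POST (create) to PATCH (update).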
+ data = {
+ "apply_all": apply_all,
+ "disabled": disabled,
+ "name": name,
+ "notify_snmp": notify_snmp,
+ "refire_interval": int(refire_interval),
+ "severity": int(severity),
+ "type": alert_type
+ }
+ if alert_type == 'detection':
+ data['object_type'] = object_type
+ data['protocols'] = [str(protocols)]
+ elif alert_type == 'threshold':
+ data['field_name'] = field_name
+ data['stat_name'] = stat_name
+ data['units'] = units
+ data['interval_length'] = int(interval_length)
+ data['operand'] = operand
+ data['operator'] = operator
+ if demisto.args().get('field_name2'):
+ data['field_name2'] = field_name2
+ if demisto.args().get('field_op'):
+ data['field_op'] = field_op
+ if demisto.args().get('param'):
+ data['param'] = param
+ if demisto.args().get('param2'):
+ data['param2'] = param2
+ if alert_id:
+ method = 'PATCH'
+ url_suffix = 'alerts/{alert_id}'.format(alert_id=alert_id)
+ else:
+ method = 'POST'
+ url_suffix = 'alerts'
+ data = json.dumps(data)
+ try:
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ data=data,
+ headers=HEADERS
+ )
+    except requests.exceptions.RequestException:  # connection-level failure (DNS, refused, timeout)
+ return_error('Failed to connect to - {url} - Please check the URL'.format(url=BASE_URL))
+    # 204 = modified, 201 = created, 400 = malformed request; any other non-2xx status is an error
+ if res.status_code == 204:
+ return demisto.results('Successful Modification')
+ if res.status_code == 400:
+ resp = res.json()
+ return_error('Error in request format - {message}'.format(message=resp['error_message']))
+ if res.status_code == 201:
+ return demisto.results('Alert successfully added')
+ elif res.status_code not in {200, 204, 201}:
+ return_error('Error in API call to ExtraHop {code} - {reason}'.format(code=res.status_code, reason=res.reason))
+ return res
+
+
+def add_alert_command():
+ apply_all = bool(strtobool(demisto.args().get('apply_all', False)))
+ disabled = bool(strtobool(demisto.args().get('disabled', False)))
+ name = demisto.args().get('name')
+ notify_snmp = bool(strtobool(demisto.args().get('notify_snmp', False)))
+ field_name = demisto.args().get('field_name')
+ stat_name = demisto.args().get('stat_name')
+ units = demisto.args().get('units')
+ interval_length = demisto.args().get('interval_length')
+ operand = demisto.args().get('operand')
+ refire_interval = demisto.args().get('refire_interval')
+ severity = demisto.args().get('severity')
+ alert_type = demisto.args().get('type')
+ object_type = demisto.args().get('object_type')
+ protocols = demisto.args().get('protocols')
+ operator = demisto.args().get('operator')
+ field_name2 = demisto.args().get('field_name2')
+ field_op = demisto.args().get('field_op')
+ param = demisto.args().get('param')
+ param2 = demisto.args().get('param2')
+ add_alert(apply_all, disabled, name, notify_snmp, refire_interval, severity, alert_type, object_type,
+ protocols, field_name, stat_name, units, interval_length, operand, operator, field_name2, field_op,
+ param, param2)
+
+
+def modify_alert_command():
+ alert_id = demisto.args().get('alert_id')
+ apply_all = bool(strtobool(demisto.args().get('apply_all', False)))
+ disabled = bool(strtobool(demisto.args().get('disabled', False)))
+ name = demisto.args().get('name')
+ notify_snmp = bool(strtobool(demisto.args().get('notify_snmp', False)))
+ field_name = demisto.args().get('field_name')
+ stat_name = demisto.args().get('stat_name')
+ units = demisto.args().get('units')
+ interval_length = demisto.args().get('interval_length')
+ operand = demisto.args().get('operand')
+ refire_interval = demisto.args().get('refire_interval')
+ severity = demisto.args().get('severity')
+ alert_type = demisto.args().get('type')
+ object_type = demisto.args().get('object_type')
+ protocols = demisto.args().get('protocols')
+ operator = demisto.args().get('operator')
+ field_name2 = demisto.args().get('field_name2')
+ field_op = demisto.args().get('field_op')
+ param = demisto.args().get('param')
+ param2 = demisto.args().get('param2')
+ add_alert(apply_all, disabled, name, notify_snmp, refire_interval, severity, alert_type, object_type,
+ protocols, field_name, stat_name, units, interval_length, operand, operator, field_name2, field_op,
+ param, param2, alert_id)
+
+
+def get_alerts_command():
+ res = get_alerts()
+ format_alerts(res)
+
+
+def whitelist_modify_command():
+ add = demisto.args().get('add')
+ remove = demisto.args().get('remove')
+ whitelist_modify(add, remove)
+
+
+def query_records_command():
+ field = demisto.args().get('field')
+ value = demisto.args().get('value')
+ operator = demisto.args().get('operator')
+ query_from = demisto.args().get('query_from')
+ limit = demisto.args().get('limit')
+ res = query_records(field, value, operator, query_from, limit)
+ source = res[0]['records']
+ hr = ''
+ ec = {
+ "ExtraHop": {
+ "Query": []
+ }
+ } # type: dict
+ for record in source:
+ hr += tableToMarkdown('Incident result for ID {id}'.format(id=record['_id']), record['_source'])
+ ec['ExtraHop']['Query'].append(createContext(record, keyTransform=string_to_context_key, removeNull=True))
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': source,
+ 'HumanReadable': hr,
+ 'EntryContext': createContext(ec, removeNull=True)
+ })
+
+
+def whitelist_retrieve_command():
+ res = whitelist_retrieve()
+ if len(res) == 0:
+ demisto.results('No devices found in whitelist')
+ elif len(res) > 0:
+ format_device_results(res)
+
+
+def devices_command():
+ found_devices = devices()
+ format_device_results(found_devices)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+LOG('Command being called is {command}'.format(command=demisto.command()))
+try:
+ if demisto.command() == 'test-module':
+ test_module()
+ demisto.results('ok')
+ elif demisto.command() == 'extrahop-get-alert-rules':
+ get_alerts_command()
+ elif demisto.command() == 'extrahop-query':
+ query_records_command()
+ elif demisto.command() == 'extrahop-devices':
+ devices_command()
+ elif demisto.command() == 'extrahop-whitelist-modify':
+ whitelist_modify_command()
+ elif demisto.command() == 'extrahop-whitelist-retrieve':
+ whitelist_retrieve_command()
+ elif demisto.command() == 'extrahop-add-alert-rule':
+ add_alert_command()
+ elif demisto.command() == 'extrahop-modify-alert-rule':
+ modify_alert_command()
+
+# Log exceptions
+except Exception as e:
+ LOG(str(e))
+ LOG.print_log()
+ raise
diff --git a/Integrations/ExtraHop/ExtraHop.yml b/Integrations/ExtraHop/ExtraHop.yml
new file mode 100644
index 000000000000..c2688cf58b8e
--- /dev/null
+++ b/Integrations/ExtraHop/ExtraHop.yml
@@ -0,0 +1,892 @@
+category: Network Security
+commonfields:
+ id: ExtraHop
+ version: -1
+configuration:
+- display: API Key for instance
+ name: apikey
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: URL for the instance
+ name: url
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Trust any certificate (Not Secure)
+ name: insecure
+ defaultvalue: "false"
+ type: 8
+ required: false
+- display: Use System Proxy
+ name: proxy
+ defaultvalue: "false"
+ type: 8
+ required: false
+description: ExtraHop performs real-time stream analysis of the packets that carry
+ data across a network.
+display: ExtraHop
+name: ExtraHop
+script:
+ commands:
+ - deprecated: false
+    description: Gets alert rules from ExtraHop.
+ execution: false
+ name: extrahop-get-alert-rules
+ outputs:
+ - contextPath: Extrahop.Alert.Operator
+ description: The logical operator applied when comparing the value of the operand
+ field to alert conditions.
+ type: String
+ - contextPath: Extrahop.Alert.FieldName
+ description: The name of the monitored metric.
+ type: String
+ - contextPath: Extrahop.Alert.NotifySnmp
+ description: 'Indicates whether to send an SNMP trap when an alert is generated. '
+ type: Boolean
+ - contextPath: Extrahop.Alert.Operand
+ description: The value to compare against alert conditions.
+ type: String
+ - contextPath: Extrahop.Alert.IntervalLength
+ description: The length of the alert interval, expressed in seconds.
+ type: Number
+ - contextPath: Extrahop.Alert.Author
+ description: 'The name of the user that created the alert. '
+ type: String
+ - contextPath: Extrahop.Alert.Name
+ description: The unique, friendly name for the alert.
+ type: String
+ - contextPath: Extrahop.Alert.FieldName2
+ description: The second monitored metric when applying a ratio.
+ type: String
+ - contextPath: Extrahop.Alert.RefireInterval
+ description: The time interval in which alert conditions are monitored, expressed
+ in seconds.
+ type: Number
+ - contextPath: Extrahop.Alert.ModTime
+ description: 'The time of the most recent update, expressed in milliseconds
+ since the epoch. '
+ type: Number
+ - contextPath: Extrahop.Alert.Units
+ description: The interval in which to evaluate the alert condition.
+ type: String
+ - contextPath: Extrahop.Alert.ApplyAll
+ description: Indicates whether the alert is assigned to all available data sources.
+ type: Boolean
+ - contextPath: Extrahop.Alert.Type
+ description: The type of alert.
+ type: String
+ - contextPath: Extrahop.Alert.FieldOp
+ description: The type of comparison between the "field_name" and "field_name2" fields
+ when applying a ratio.
+ type: String
+ - contextPath: Extrahop.Alert.Id
+ description: The unique identifier for the alert.
+ type: Number
+ - contextPath: Extrahop.Alert.Disabled
+ description: Indicates whether the alert is disabled.
+ type: Boolean
+ - contextPath: Extrahop.Alert.Description
+ description: An optional description for the alert.
+ type: String
+ - contextPath: Extrahop.Alert.Severity
+ description: The severity level of the alert.
+ type: Number
+ - contextPath: Extrahop.Alert.StatName
+ description: The statistic name for the alert.
+ type: String
+ - arguments:
+ - default: false
+ description: The name of the field in the record to filter.
+ isArray: false
+ name: field
+ required: false
+ secret: false
+ - default: false
+ description: The value that the query attempts to match.
+ isArray: false
+ name: value
+ required: false
+ secret: false
+ - default: false
+ description: The compare method applied when matching the operand value against
+ the field contents.
+ isArray: false
+ name: operator
+ required: true
+ secret: false
+ - default: false
+ description: The beginning timestamp of the time range the query will search,
+ expressed in milliseconds since the epoch. A negative value specifies that
+ the search will begin with records created at a time in the past. For example,
+ specify -600000ms to begin the search with records created 10 minutes before
+ the time of the request.
+ isArray: false
+ name: query_from
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '20'
+ description: The maximum number of entries to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Queries records from ExtraHop.
+ execution: false
+ name: extrahop-query
+ outputs:
+ - contextPath: ExtraHop.Query.Id
+ description: The ID of the queried item.
+ type: String
+ - contextPath: ExtraHop.Query.Index
+ description: The index of the queried item.
+ type: String
+ - contextPath: ExtraHop.Query.Sort
+ description: The sort type for the queried item.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.roundTripTime
+ description: The roundtrip time of the item.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.processingTime
+ description: The processing time of the item.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.method
+ description: The method of the item.
+ type: String
+ - contextPath: ExtraHop.Query.Source.serverZeroWnd
+ description: The Server Zero Wnd.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.server.type
+ description: The type of item found.
+ type: String
+ - contextPath: ExtraHop.Query.Source.server.value
+    description: The ID of the server found.
+    type: String
+  - contextPath: ExtraHop.Query.Source.serverPort
+    description: The server port of the item.
+    type: Number
+  - contextPath: ExtraHop.Query.Source.reqTimeToLastByte
+    description: The request time to last byte.
+    type: Number
+  - contextPath: ExtraHop.Query.Source.isSQLi
+    description: Whether the request contains SQL injection (SQLi).
+ type: Boolean
+ - contextPath: ExtraHop.Query.Source.isRspCompressed
+ description: Whether the response is compressed.
+ type: Boolean
+ - contextPath: ExtraHop.Query.Source.rspTimeToFirstHeader
+ description: The response time of the first header.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.reqL2Bytes
+ description: Request L2 Bytes.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.isRspChunked
+ description: Whether the response is chunked.
+ type: Boolean
+ - contextPath: ExtraHop.Query.Source.query
+ description: The query of the item.
+ type: String
+ - contextPath: ExtraHop.Query.Source.host
+    description: The host of the item.
+ type: String
+ - contextPath: ExtraHop.Query.Source.rspPkts
+ description: The number of response packets found.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.flowId
+ description: The flow ID.
+ type: String
+ - contextPath: ExtraHop.Query.Source.rspVersion
+ description: The response version.
+ type: String
+ - contextPath: ExtraHop.Query.Source.isRspAborted
+ description: Whether the response was aborted.
+ type: Boolean
+ - contextPath: ExtraHop.Query.Source.client.type
+ description: The type of client.
+ type: String
+ - contextPath: ExtraHop.Query.Source.client.value
+    description: The ID of the client.
+ type: String
+ - contextPath: ExtraHop.Query.Source.uri
+ description: The URI of the item.
+ type: String
+ - contextPath: ExtraHop.Query.Source.clientPort
+ description: The client port.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.reqBytes
+ description: The request size (in bytes).
+ type: Number
+ - contextPath: ExtraHop.Query.Source.isXSS
+    description: Whether the request contains cross-site scripting (XSS).
+ type: Boolean
+ - contextPath: ExtraHop.Query.Source.reqRTO
+ description: The request RTO.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.rspSize
+ description: The response size.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.ex.isSuspicious
+    description: Whether the item is suspicious.
+ type: Boolean
+ - contextPath: ExtraHop.Query.Source.contentType
+ description: The content type.
+ type: String
+ - contextPath: ExtraHop.Query.Source.serverAddr.type
+ description: The server address type.
+ type: String
+ - contextPath: ExtraHop.Query.Source.serverAddr.value
+ description: The server address value.
+ type: String
+ - contextPath: ExtraHop.Query.Source.rspTimeToLastByte
+ description: The response time to last byte.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.rspTimeToFirstPayload
+ description: The response time to first payload.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.timestamp
+ description: The timestamp of the item.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.rspL2Bytes
+ description: The Response L2 Bytes.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.reqPkts
+ description: The request packets.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.isPipelined
+    description: Whether the request is pipelined.
+ type: Boolean
+ - contextPath: ExtraHop.Query.Source.clientZeroWnd
+    description: The Client Zero Wnd.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.isReqAborted
+ description: Whether the request was aborted.
+ type: Boolean
+ - contextPath: ExtraHop.Query.Source.clientAddr.type
+ description: The client address type.
+ type: String
+ - contextPath: ExtraHop.Query.Source.clientAddr.value
+ description: The client address value.
+ type: String
+ - contextPath: ExtraHop.Query.Source.rspBytes
+ description: The response bytes.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.rspRTO
+ description: The response RTO.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.statusCode
+ description: The status code.
+ type: Number
+ - contextPath: ExtraHop.Query.Source.reqSize
+ description: The request size.
+ type: Number
+ - contextPath: ExtraHop.Query.Type
+ description: The type of query.
+ type: String
+ - arguments:
+ - default: false
+ description: The beginning timestamp for the request. Returns only devices active
+ after this time. Time is expressed in milliseconds since the epoch.
+ isArray: false
+ name: active_from
+ required: false
+ secret: false
+ - default: false
+ description: The ending timestamp for the request.
+ isArray: false
+ name: active_until
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: any
+ description: Indicates the field to search.
+ isArray: false
+ name: search_type
+ predefined:
+ - any
+ - name
+ - discovery_id
+ - ip address
+ - mac address
+ - vendor
+ - type
+ - tag
+ - activity
+ - node
+ - vlan
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: The maximum number of devices to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+  description: Views devices from ExtraHop.
+ execution: false
+ name: extrahop-devices
+ outputs:
+ - contextPath: ExtraHop.Device.Macaddr
+ description: The MAC Address of the device.
+ type: String
+ - contextPath: ExtraHop.Device.DeviceClass
+ description: The class of the device.
+ type: String
+ - contextPath: ExtraHop.Device.UserModTime
+ description: The time of the most recent update, expressed in milliseconds since
+ the epoch.
+ type: Number
+ - contextPath: ExtraHop.Device.AutoRole
+ description: The role given automatically to the device.
+ type: String
+ - contextPath: ExtraHop.Device.ParentId
+ description: The ID of the parent device.
+ type: Number
+ - contextPath: ExtraHop.Device.Vendor
+ description: The device vendor.
+ type: String
+ - contextPath: ExtraHop.Device.AnalysisLevel
+ description: The analysis level of the device.
+ type: Number
+ - contextPath: ExtraHop.Device.DiscoveryId
+ description: The ID given by Discovery appliance.
+ type: String
+ - contextPath: ExtraHop.Device.DefaultName
+ description: The default name of the device.
+ type: String
+ - contextPath: ExtraHop.Device.DisplayName
+ description: The display name of device.
+ type: String
+ - contextPath: ExtraHop.Device.OnWatchlist
+    description: Whether the device is on the watchlist.
+ type: Boolean
+ - contextPath: ExtraHop.Device.ModTime
+ description: The time of the most recent update, expressed in milliseconds since
+ the epoch.
+ type: Number
+ - contextPath: ExtraHop.Device.IsL3
+ description: Indicates whether the device is an L3 device.
+ type: Boolean
+ - contextPath: ExtraHop.Device.ExtrahopId
+ description: The ID given by ExtraHop.
+ type: String
+ - contextPath: ExtraHop.Device.Role
+ description: The role of the device.
+ type: String
+ - contextPath: ExtraHop.Device.DiscoverTime
+ description: The time that the device was discovered.
+ type: Number
+ - contextPath: ExtraHop.Device.Id
+ description: The ID of the device.
+ type: Number
+ - contextPath: ExtraHop.Device.Ipaddr4
+ description: The IPv4 address of the device.
+ type: String
+ - contextPath: ExtraHop.Device.Analysis
+ description: The analysis level of the device.
+ type: String
+ - contextPath: ExtraHop.Device.Vlanid
+ description: The ID of VLan.
+ type: Number
+ - arguments:
+ - default: false
+ description: The IDs of the resources to assign.
+ isArray: true
+ name: add
+ required: false
+ secret: false
+ - default: false
+ description: The IDs of the resources to unassign.
+ isArray: true
+ name: remove
+ required: false
+ secret: false
+ deprecated: false
+ description: Modifies a whitelist from ExtraHop.
+ execution: false
+ name: extrahop-whitelist-modify
+ - deprecated: false
+ description: Retrieves the whitelist from ExtraHop.
+ execution: false
+ name: extrahop-whitelist-retrieve
+ outputs:
+ - contextPath: Extrahop.Device.Macaddr
+ description: 'The MAC address for this device. '
+ type: String
+ - contextPath: Extrahop.Device.DeviceClass
+ description: 'The class of this device. '
+ type: String
+ - contextPath: Extrahop.Device.UserModTime
+ description: 'The time of the most recent update made by the user, expressed
+ in milliseconds since the epoch. '
+ type: Number
+ - contextPath: Extrahop.Device.AutoRole
+ description: 'The role automatically detected by the ExtraHop. '
+ type: String
+ - contextPath: Extrahop.Device.ParentId
+ description: 'The unique identifier for the parent device object. '
+ type: Number
+ - contextPath: Extrahop.Device.Vendor
+ description: The name of the vendor that created the device.
+ type: String
+ - contextPath: Extrahop.Device.AnalysisLevel
+ description: (Deprecated) Replaced by the analysis field.
+ type: Number
+ - contextPath: Extrahop.Device.DiscoveryId
+ description: The unique identifier for the device, which is displayed as part
+ of the device URL in the ExtraHop Web UI.
+ type: String
+ - contextPath: Extrahop.Device.DefaultName
+ description: The default name for this device.
+ type: String
+ - contextPath: Extrahop.Device.DisplayName
+ description: 'The display name chosen based on the running configuration settings. '
+ type: String
+ - contextPath: Extrahop.Device.OnWatchlist
+ description: Indicates whether the device is on the watchlist.
+ type: Boolean
+ - contextPath: Extrahop.Device.ModTime
+ description: 'The time of the most recent update, expressed in milliseconds
+ since the epoch. '
+ type: Number
+ - contextPath: Extrahop.Device.IsL3
+ description: 'Indicates whether the device is an L3 device. '
+ type: Boolean
+ - contextPath: Extrahop.Device.ExtrahopId
+ description: (Deprecated) Use the discover_id field instead.
+ type: String
+ - contextPath: Extrahop.Device.Role
+ description: 'The role of the device. '
+ type: String
+ - contextPath: Extrahop.Device.DiscoverTime
+ description: 'The time when the device was first discovered on the network,
+ expressed in milliseconds since the epoch. '
+ type: Number
+ - contextPath: Extrahop.Device.Id
+ description: The unique identifier for the device, which is displayed as the
+      API ID on the device page in the ExtraHop Web UI.
+ type: Number
+ - contextPath: Extrahop.Device.Ipaddr4
+ description: The IPv4 address for this device.
+ type: String
+ - contextPath: Extrahop.Device.DhcpName
+ description: 'The DHCP name for this device. '
+ type: String
+ - contextPath: Extrahop.Device.Analysis
+ description: 'The analysis level of the device. '
+ type: String
+ - contextPath: Extrahop.Device.Vlanid
+ description: The unique identifier for the VLAN this device is associated with.
+ type: Number
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether the alert is assigned to all available data sources.
+ isArray: false
+ name: apply_all
+ predefined:
+ - 'true'
+ - 'false'
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether the alert is disabled.
+ isArray: false
+ name: disabled
+ predefined:
+ - 'true'
+ - 'false'
+ required: true
+ secret: false
+ - default: false
+ description: The unique, friendly name for the alert.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether to send an SNMP trap when an alert is generated.
+ isArray: false
+ name: notify_snmp
+ predefined:
+ - 'true'
+ - 'false'
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The time interval in which alert conditions are monitored, expressed
+ in seconds. '
+ isArray: false
+ name: refire_interval
+ predefined:
+ - '300'
+ - '600'
+ - '900'
+ - '1800'
+ - '3600'
+ - '7200'
+ - '14400'
+ required: true
+ secret: false
+ - default: false
+ description: 'The severity level of the alert, which is displayed in the Alert
+ History, email notifications, and SNMP traps. Supported values: 0, 1, 2, 3,
+ 4, 5, 6, 7'
+ isArray: false
+ name: severity
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of alert. '
+ isArray: false
+ name: type
+ predefined:
+ - detection
+ - threshold
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of metric source monitored by the alert configuration.
+ Only applicable to detection alerts. '
+ isArray: false
+ name: object_type
+ predefined:
+ - application
+ - device
+ required: false
+ secret: false
+ - default: false
+ description: The list of monitored protocols. Only applicable to detection alerts.
+ isArray: false
+ name: protocols
+ required: false
+ secret: false
+ - default: false
+ description: The name of the monitored metric. Only applicable to threshold
+ alerts.
+ isArray: false
+ name: field_name
+ required: false
+ secret: false
+ - default: false
+ description: The second monitored metric when applying a ratio. Only applicable
+ to threshold alerts.
+ isArray: false
+ name: field_name2
+ required: false
+ secret: false
+ - default: false
+ description: The statistic name for the alert. Only applicable to threshold
+ alerts.
+ isArray: false
+ name: stat_name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: "The interval in which to evaluate the alert condition. Only applicable\
+ \ to threshold alerts. \nSupported values: \"none\", \"period\", \"1 sec\"\
+ , \"1 min\", \"1 hr\""
+ isArray: false
+ name: units
+ predefined:
+ - none
+ - period
+ - 1 sec
+ - 1 min
+ - 1 hr
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: '30'
+ description: "The length of the alert interval, expressed in seconds. Only applicable\
+ \ to threshold alerts. \nSupported values: 30, 60, 120, 300, 600, 900, 1200,\
+ \ 1800"
+ isArray: false
+ name: interval_length
+ predefined:
+ - '30'
+ - '60'
+ - '120'
+ - '300'
+ - '600'
+ - '900'
+ - '1200'
+ - '1800'
+ required: false
+ secret: false
+ - default: false
+ description: The value to compare against alert conditions. The compare method
+ is specified by the value of the operator field. Only applicable to threshold
+ alerts.
+ isArray: false
+ name: operand
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The logical operator applied when comparing the value of the operand
+ field to alert conditions. Only applicable to threshold alerts.
+ isArray: false
+ name: operator
+ predefined:
+ - ==
+ - '>'
+ - <
+ - '>='
+ - <=
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The type of comparison between the field_name and field_name2 fields
+ when applying a ratio. Only applicable to threshold alerts.
+ isArray: false
+ name: field_op
+ predefined:
+ - /
+ - 'null'
+ required: false
+ secret: false
+ - default: false
+ description: The first alert parameter, which is either a key pattern or a data
+ point. Only applicable to threshold alerts.
+ isArray: false
+ name: param
+ required: false
+ secret: false
+ - default: false
+ description: The second alert parameter, which is either a key pattern or a
+ data point. Only applicable to threshold alerts.
+ isArray: false
+ name: param2
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds an alert to ExtraHop.
+ execution: false
+ name: extrahop-add-alert-rule
+ - arguments:
+ - default: false
+ description: The unique identifier for the alert.
+ isArray: false
+ name: alert_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether the alert is assigned to all available data sources.
+ isArray: false
+ name: apply_all
+ predefined:
+ - 'true'
+ - 'false'
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether the alert is disabled.
+ isArray: false
+ name: disabled
+ predefined:
+ - 'true'
+ - 'false'
+ required: true
+ secret: false
+ - default: false
+ description: The unique, friendly name for the alert.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether to send an SNMP trap when an alert is generated.
+ isArray: false
+ name: notify_snmp
+ predefined:
+ - 'true'
+ - 'false'
+ required: true
+ secret: false
+ - default: false
+ description: The name of the monitored metric. Only applicable to threshold
+ alerts.
+ isArray: false
+ name: field_name
+ required: false
+ secret: false
+ - default: false
+ description: The statistic name for the alert. Only applicable to threshold
+ alerts.
+ isArray: false
+ name: stat_name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The interval in which to evaluate the alert condition. Only applicable
+ to threshold alerts.
+ isArray: false
+ name: units
+ predefined:
+ - none
+ - period
+ - 1 sec
+ - 1 min
+ - 1 hr
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The length of the alert interval, expressed in seconds. Only applicable
+ to threshold alerts.
+ isArray: false
+ name: interval_length
+ predefined:
+ - '30'
+ - '60'
+ - '120'
+ - '300'
+ - '600'
+ - '900'
+ - '1200'
+ - '1800'
+ required: false
+ secret: false
+ - default: false
+ description: The value to compare against alert conditions. The compare method
+ is specified by the value of the operator field. Only applicable to threshold
+ alerts.
+ isArray: false
+ name: operand
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The time interval in which alert conditions are monitored, expressed
+ in seconds.
+ isArray: false
+ name: refire_interval
+ predefined:
+ - '300'
+ - '600'
+ - '900'
+ - '1800'
+ - '3600'
+ - '7200'
+ - '14400'
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The severity level of the alert, which is displayed in the Alert
+ History, email notifications, and SNMP traps.
+ isArray: false
+ name: severity
+ predefined:
+ - '0'
+ - '1'
+ - '2'
+ - '3'
+ - '4'
+ - '5'
+ - '6'
+ - '7'
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of alert.'
+ isArray: false
+ name: type
+ predefined:
+ - detection
+ - threshold
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The type of metric source monitored by the alert configuration.
+ Only applicable to detection alerts.
+ isArray: false
+ name: object_type
+ predefined:
+ - application
+ - device
+ required: false
+ secret: false
+ - default: false
+ description: The list of monitored protocols. Only applicable to detection alerts.
+ isArray: false
+ name: protocols
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The logical operator applied when comparing the value of the operand
+ field to alert conditions. Only applicable to threshold alerts.
+ isArray: false
+ name: operator
+ predefined:
+ - ==
+ - '>'
+ - <
+ - '>='
+ - <=
+ required: false
+ secret: false
+ - default: false
+ description: The second monitored metric when applying a ratio. Only applicable
+ to threshold alerts.
+ isArray: false
+ name: field_name2
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The type of comparison between the field_name and field_name2 fields
+ when applying a ratio. Only applicable to threshold alerts.
+ isArray: false
+ name: field_op
+ predefined:
+ - /
+ - 'null'
+ required: false
+ secret: false
+ - default: false
+ description: The first alert parameter, which is either a key pattern or a data
+ point. Only applicable to threshold alerts.
+ isArray: false
+ name: param
+ required: false
+ secret: false
+ - default: false
+ description: The second alert parameter, which is either a key pattern or a
+ data point. Only applicable to threshold alerts.
+ isArray: false
+ name: param2
+ required: false
+ secret: false
+ deprecated: false
+ description: Modifies an alert in ExtraHop.
+ execution: false
+ name: extrahop-modify-alert-rule
+ dockerimage: demisto/python3:3.7.2.200
+ isfetch: false
+ runonce: false
+ script: ''
+ type: python
+ subtype: python3
+tests:
+- ExtraHop-Test
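+# Example (hypothetical values) of a ratio-based threshold alert using the command
+# defined above: field_op "/" divides field_name by field_name2, and the
+# operator/operand pair sets the firing condition checked each interval_length seconds.
+#   !extrahop-modify-alert-rule alert_id=12 name="HTTP error ratio" apply_all=true
+#     disabled=false notify_snmp=false refire_interval=300 severity=3 type=threshold
+#     field_name=rsp_error field_name2=rsp field_op=/ stat_name=http.server
+#     units=period interval_length=30 operator=">" operand=0.1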
diff --git a/Integrations/ExtraHop/ExtraHop_description.md b/Integrations/ExtraHop/ExtraHop_description.md
new file mode 100644
index 000000000000..eeb31de547de
--- /dev/null
+++ b/Integrations/ExtraHop/ExtraHop_description.md
@@ -0,0 +1 @@
+ExtraHop performs real-time stream analysis of the packets that carry data across a network.
\ No newline at end of file
diff --git a/Integrations/ExtraHop/ExtraHop_image.png b/Integrations/ExtraHop/ExtraHop_image.png
new file mode 100644
index 000000000000..0dfdfaabdd3d
Binary files /dev/null and b/Integrations/ExtraHop/ExtraHop_image.png differ
diff --git a/Integrations/FidelisElevateNetwork/CHANGELOG.md b/Integrations/FidelisElevateNetwork/CHANGELOG.md
new file mode 100644
index 000000000000..8dba824fda0b
--- /dev/null
+++ b/Integrations/FidelisElevateNetwork/CHANGELOG.md
@@ -0,0 +1,10 @@
+## [Unreleased]
+
+
+## [19.8.0] - 2019-08-06
+ - Added 5 new commands.
+ - ***list-metadata***
+ - ***get-alert-by-uuid***
+    - ***list-alerts-by-ip***
+ - ***download-malware-file***
+ - ***download-pcap-file***
diff --git a/Integrations/FidelisElevateNetwork/FidelisElevateNetwork.py b/Integrations/FidelisElevateNetwork/FidelisElevateNetwork.py
new file mode 100644
index 000000000000..23a02605351f
--- /dev/null
+++ b/Integrations/FidelisElevateNetwork/FidelisElevateNetwork.py
@@ -0,0 +1,857 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+import json
+import os
+import shutil
+import requests
+import random
+import urllib
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+
+''' GLOBALS / PARAMS '''
+IS_FETCH = demisto.params().get('isFetch')
+SERVER_URL = demisto.params().get('server_url', '')
+CREDENTIALS = demisto.params().get('credentials')
+INSECURE = demisto.params().get('unsecure')
+PROXY = demisto.params().get('proxy')
+FETCH_TIME = demisto.params().get('fetch_time', '3 days')
+SESSION_ID = None
+ALERT_UUID_REGEX = re.compile('[a-zA-Z0-9]{8}-[a-zA-Z0-9]{4}-[a-zA-Z0-9]{4}-[a-zA-Z0-9]{4}-[a-zA-Z0-9]{12}')
+
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, params=None, data=None, files=None, is_json=True):
+    # wrapper around the requests library: sends the request and normalizes error handling
+ headers = {} # type: Dict[str, str]
+ if SESSION_ID is not None:
+ headers['x-uid'] = SESSION_ID
+ if files is None:
+ headers['Content-Type'] = 'application/json'
+
+ res = requests.request(
+ method,
+ SERVER_URL + url_suffix,
+ data=None if data is None else json.dumps(data),
+ headers=headers,
+ params=params,
+ files=files,
+ verify=not INSECURE,
+ )
+ # Handle error responses gracefully
+ if res.status_code not in {200, 201}:
+ if res.status_code == 500:
+ try:
+ error = res.json().get('detailMessage', res.content)
+
+ except: # noqa
+ error = res.content
+
+ raise Exception('Error in API call to Fidelis Integration [%d] - %s' % (res.status_code, error))
+ else:
+ raise Exception('Error in API call to Fidelis Integration [%d] - %s' % (res.status_code, res.reason))
+
+ if is_json:
+ try:
+ return res.json()
+
+ except ValueError:
+ return_error('failed to parse json object from response: {}'.format(res.content))
+
+ else:
+ return res.content
+
+
+@logger
+def login():
+ global SESSION_ID
+
+ data = {
+ 'user': CREDENTIALS.get('identifier'),
+ 'password': CREDENTIALS.get('password')
+ }
+
+ if SESSION_ID is None:
+ url = '/j/rest/v1/access/login/json/'
+ try:
+ res = http_request('POST', url, data=data)
+ if res.get('error') is not None:
+ raise requests.HTTPError('Failed to login: {}'.format(res['error']))
+ SESSION_ID = res.get('uid')
+ except requests.exceptions.RequestException as e: # noqa
+            return_error('Demisto has encountered a connection error, '
+                         'please check the server_url and credentials parameters')
+
+
+def logout():
+ global SESSION_ID
+ if SESSION_ID is not None:
+ try:
+ url = '/j/rest/v1/access/logout/{}/'.format(SESSION_ID)
+ http_request('GET', url)
+ SESSION_ID = None
+
+ except: # noqa
+ pass
+
+
+def generate_pagination():
+ return {
+ 'getLast': False,
+ 'page': 1,
+ 'referenceTime': '',
+ 'size': 200,
+ 'supportPaging': True,
+ }
+
+
+def get_ioc_filter(ioc):
+ if re.match(ipv4Regex, ioc):
+ return {'simple': {'column': 'ANY_IP', 'operator': '=', 'value': ioc}}
+ elif md5Regex.match(ioc):
+ return {'simple': {'column': 'MD5', 'operator': '=', 'value': ioc}}
+ elif sha256Regex.match(ioc):
+ return {'simple': {'column': 'SHA256', 'operator': '=', 'value': ioc}}
+ elif sha1Regex.match(ioc):
+ return {'simple': {'column': 'SHA1_HASH', 'operator': '=', 'value': ioc}}
+ elif ALERT_UUID_REGEX.match(ioc):
+ return {'simple': {'column': 'UUID', 'operator': '=', 'value': ioc}}
+ else:
+ return {'simple': {'column': 'ANY_STRING', 'operator': '=~', 'value': ioc}}
+
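+# Illustrative examples of the dispatch above (values are made up):
+#   get_ioc_filter('10.1.1.1')                         -> {'simple': {'column': 'ANY_IP', 'operator': '=', ...}}
+#   get_ioc_filter('c9a31ea148232b201fe7cb7db5c75f5e') -> {'simple': {'column': 'MD5', 'operator': '=', ...}}
+#   get_ioc_filter('phishing.example.com')             -> {'simple': {'column': 'ANY_STRING', 'operator': '=~', ...}}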
+
+def to_fidelis_time_format(t):
+ if isinstance(t, STRING_TYPES):
+ try:
+ t = datetime.strptime(t, '%Y-%m-%dT%H:%M:%SZ')
+ except ValueError:
+ t = datetime.strptime(t, '%Y-%m-%dT%H:%M:%S')
+
+ return datetime.strftime(t, '%Y-%m-%d %H:%M:%S')
+
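+# Illustrative conversions performed by the helper above (values are made up);
+# both string inputs and datetime objects normalize to the Fidelis time format:
+#   to_fidelis_time_format('2019-12-01T05:40:10Z')           -> '2019-12-01 05:40:10'
+#   to_fidelis_time_format(datetime(2019, 12, 1, 5, 40, 10)) -> '2019-12-01 05:40:10'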
+
+def generate_time_settings(time_frame=None, start_time=None, end_time=None):
+ # default value
+ settings = {
+ 'from': '',
+ 'to': '',
+ 'key': 'all',
+ 'value': '',
+ }
+
+ if time_frame is None:
+ return settings
+ elif time_frame in ['Today', 'Yesterday']:
+ settings['key'] = time_frame.lower()
+ elif 'Last' in time_frame:
+ settings['key'] = 'last'
+ if time_frame == 'Last 7 Days':
+ settings['value'] = '7:00:00:00'
+ elif time_frame == 'Last 30 Days':
+ settings['value'] = '30:00:00:00'
+ elif time_frame == 'Last Hour':
+ settings['value'] = '1:00:00'
+ elif time_frame == 'Last 24 Hours':
+ settings['value'] = '24:00:00'
+ elif time_frame == 'Last 48 Hours':
+ settings['value'] = '48:00:00'
+ else:
+ raise ValueError('Could not parse time frame: {}'.format(time_frame))
+
+ elif time_frame == 'Custom':
+ settings['key'] = 'custom'
+ if start_time is None and end_time is None:
+ raise ValueError('invalid custom time frame: need to specify one of start_time, end_time')
+ if start_time is not None:
+ settings['from'] = to_fidelis_time_format(start_time)
+ if end_time is not None:
+ settings['to'] = to_fidelis_time_format(end_time)
+
+ return settings
+
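+# Illustrative outputs of the helper above (values are made up):
+#   generate_time_settings('Last 7 Days')
+#       -> {'from': '', 'to': '', 'key': 'last', 'value': '7:00:00:00'}
+#   generate_time_settings('Custom', start_time='2019-12-01T05:40:10')
+#       -> {'from': '2019-12-01 05:40:10', 'to': '', 'key': 'custom', 'value': ''}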
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def get_alert_command():
+ args = demisto.args()
+ alert_id = args['alert_id']
+
+ alert = get_alert(alert_id)
+
+ output = {
+ 'ID': alert['alertId'],
+ 'ThreatScore': alert['fidelisScore'],
+ 'Time': alert['time'],
+ 'RuleID': alert['ruleId'],
+ 'RuleName': alert['rule'],
+ 'Summary': alert['summary'],
+ 'PolicyName': alert['policy'],
+ 'Severity': alert['severity'],
+ 'Protocol': alert['protocol'],
+ 'Type': alert['alertType'],
+ 'AlertUUID': alert['alertUUID'],
+ 'AssignedUser': alert['ticket']['assignedUserId'] if alert['ticket'] is not None else None,
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': alert,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Alert {}'.format(alert_id), output, headerTransform=pascalToSpace,
+ removeNull=True),
+ 'EntryContext': {
+ 'Fidelis.Alert(val.ID && val.ID == obj.ID)': output,
+ },
+ })
+
+
+@logger
+def get_alert(alert_id):
+ return http_request('GET', '/j/rest/v1/alert/info/{}/'.format(alert_id))
+
+
+def delete_alert_command():
+ args = demisto.args()
+ alert_id = args['alert_id'].split(',')
+
+ delete_alert(alert_id)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': '\n'.join('Alert ({}) deleted successfully!'.format(_id) for _id in alert_id),
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': '\n'.join('Alert ({}) deleted successfully!'.format(_id) for _id in alert_id),
+ })
+
+
+@logger
+def delete_alert(alert_id):
+ data = {
+ 'type': 'byAlertID',
+ 'alertIds': alert_id,
+ }
+ result = http_request('POST', '/j/rest/v1/alert/delete/', data=data)
+
+ return result
+
+
+def get_malware_data_command():
+ args = demisto.args()
+ alert_id = args['alert_id']
+
+ result = get_malware_data(alert_id)
+
+ output = {
+ 'ID': alert_id,
+ 'Malware': {
+ 'Name': result['malwareName'],
+ 'Behavior': result['malwareBehavior'],
+ 'Description': result['malwareDescription'],
+ 'DetailName': result['malwareDetailName'],
+ 'Platform': result['malwarePlatform'],
+ 'Type': result['malwareType'],
+ 'Variant': result['malwareVariant'],
+ }
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Alert {} Malware:'.format(alert_id), result, headerTransform=pascalToSpace),
+ 'EntryContext': {
+ 'Fidelis.Alert(val.ID && val.ID == obj.ID)': output,
+ },
+ })
+
+
+@logger
+def get_malware_data(alert_id):
+ result = http_request('GET', '/j/rest/v1/alert/malware/{}/'.format(alert_id))
+
+ return result
+
+
+def get_alert_pcap_command():
+ args = demisto.args()
+ alert_id = args['alert_id']
+
+ results = get_alert_pcap(alert_id)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': results,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('', results),
+ })
+
+
+@logger
+def get_alert_pcap(alert_id):
+ # result = http_request('GET', '/j/rest/v1/alert/pcap/{}/'.format(alert_id), is_json=False)
+ # return result
+ raise NotImplementedError()
+
+
+def get_alert_report_command():
+ args = demisto.args()
+ alert_id = int(args['alert_id'])
+
+ pdf_content = get_alert_report(alert_id)
+
+ demisto.results(fileResult(
+ 'Alert_Details_{}.pdf'.format(alert_id),
+ pdf_content,
+ file_type=entryTypes['entryInfoFile']
+ ))
+
+
+@logger
+def get_alert_report(alert_id):
+ result = http_request(
+ 'GET',
+ '/j/rest/v1/alert/export/alertdetails/pdf',
+ params={'alertIds': alert_id},
+ is_json=False)
+
+ return result
+
+
+def sandbox_upload_command():
+ args = demisto.args()
+ upload_item = args['upload_item']
+
+ results = sandbox_upload(upload_item)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': results,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('', results),
+ # 'EntryContext': create_context([indicator]),
+ })
+
+
+@logger
+def sandbox_upload(upload_item):
+ raise NotImplementedError()
+
+
+def list_alerts_command():
+ args = demisto.args()
+ time_frame = args.get('time_frame')
+ start_time = args.get('start_time')
+ end_time = args.get('end_time')
+ severity = args.get('severity')
+ _type = args.get('type')
+ threat_score = args.get('threat_score')
+ ioc = args.get('ioc')
+
+ results = list_alerts(time_frame=time_frame, start_time=start_time, end_time=end_time, severity=severity,
+ _type=_type, threat_score=threat_score, ioc=ioc)
+ output = [{
+ 'ID': alert['ALERT_ID'],
+ 'Time': alert['ALERT_TIME'],
+ 'Summary': alert['SUMMARY'],
+ 'Severity': alert['SEVERITY'],
+ 'Type': alert['ALERT_TYPE'],
+ } for alert in results]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': results,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Found {} Alerts:'.format(len(output)), output),
+ 'EntryContext': {
+ 'Fidelis.Alert(val.ID && val.ID == obj.ID)': output,
+ },
+ })
+
+
+@logger
+def list_alerts(time_frame=None, start_time=None, end_time=None, severity=None, _type=None,
+ threat_score=None, ioc=None, additional_columns=None):
+ columns = additional_columns if additional_columns is not None else []
+
+ filters = [{'simple': {'column': 'ACTION', 'operator': '=', 'value': 'alert'}}]
+ if severity is not None:
+ filters.append({'simple': {'column': 'SEVERITY', 'operator': 'IN', 'value': severity}})
+ if _type is not None:
+ filters.append({'simple': {'column': 'ALERT_TYPE', 'operator': 'IN', 'value': _type}})
+ if threat_score is not None:
+ filters.append({'simple': {'column': 'FIDELIS_SCORE', 'operator': '>', 'value': threat_score}})
+ if ioc is not None:
+ filters.append(get_ioc_filter(ioc))
+
+ data = {
+ 'columns': columns + ['ALERT_ID', 'ALERT_TIME', 'SUMMARY', 'SEVERITY', 'ALERT_TYPE', ],
+ 'filter': {
+ 'composite': {
+ 'logic': 'and',
+ 'filters': filters,
+ }
+ },
+ 'order': [{'column': 'ALERT_TIME', 'direction': 'DESC'}],
+ 'pagination': generate_pagination(),
+ 'timeSettings': generate_time_settings(time_frame, start_time, end_time)
+ }
+ res = http_request('POST', '/j/rest/v1/alert/search/', data=data)
+
+ return res['aaData']
+
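+# For example (made-up values), list_alerts(severity='High', ioc='10.1.1.1') ANDs
+# three simple filters in the search body -- ACTION = alert, SEVERITY IN High and
+# ANY_IP = 10.1.1.1 -- ordered by ALERT_TIME descending.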
+
+def list_alerts_by_ip_request(time_frame=None, start_time=None, end_time=None, src_ip=None, dest_ip=None):
+
+ filters = []
+ if src_ip is not None:
+ filters.append({'simple': {'column': 'SRC_IP', 'operator': 'IN', 'value': src_ip}})
+ if dest_ip is not None:
+ filters.append({'simple': {'column': 'DEST_IP', 'operator': 'IN', 'value': dest_ip}})
+
+ data = {
+ 'commandPosts': [],
+ 'filter': {
+ 'composite': {
+ 'logic': 'or',
+ 'filters': filters
+ }
+ },
+ 'order': [
+ {
+ 'column': 'ALERT_TIME',
+ 'direction': 'DESC'
+ }
+ ],
+ 'pagination': {
+ 'page': 1,
+ 'size': 100
+ },
+ 'columns': ['ALERT_TIME', 'UUID', 'ALERT_ID', 'DISTRIBUTED_ALERT_ID', 'USER_RATING', 'HOST_IP', 'ASSET_ID',
+ 'ALERT_TYPE', 'DEST_COUNTRY_NAME', 'SRC_COUNTRY_NAME', 'DEST_IP', 'SRC_IP'],
+
+ 'timeSettings': generate_time_settings(time_frame, start_time, end_time)
+ }
+
+ res = http_request('POST', '/j/rest/v1/alert/search/', data=data)
+ return res['aaData']
+
+
+def list_alerts_by_ip():
+ """
+ List alerts by the source IP or destination IP
+ """
+ args = demisto.args()
+ time_frame = args.get('time_frame')
+ start_time = args.get('start_time')
+ end_time = args.get('end_time')
+ src_ip = args.get('src_ip')
+ dest_ip = args.get('dest_ip')
+ headers = ['Time', 'AlertUUID', 'ID', 'DistributedAlertID', 'UserRating', 'HostIP', 'AssetID',
+ 'Type', 'DestinationCountry', 'SourceCountry', 'DestinationIP', 'SourceIP']
+ results = list_alerts_by_ip_request(time_frame=time_frame, start_time=start_time, end_time=end_time, src_ip=src_ip,
+ dest_ip=dest_ip)
+ output = [{
+ 'ID': alert.get('ALERT_ID'),
+ 'Time': alert.get('ALERT_TIME'),
+ 'AlertUUID': alert.get('UUID'),
+ 'DistributedAlertID': alert.get('DISTRIBUTED_ALERT_ID'),
+ 'Type': alert.get('ALERT_TYPE'),
+ 'UserRating': alert.get('USER_RATING'),
+ 'HostIP': alert.get('HOST_IP'),
+ 'AssetID': alert.get('ASSET_ID'),
+ 'DestinationCountry': alert.get('DEST_COUNTRY_NAME'),
+ 'SourceCountry': alert.get('SRC_COUNTRY_NAME'),
+ 'DestinationIP': alert.get('DEST_IP'),
+ 'SourceIP': alert.get('SRC_IP')
+ } for alert in results]
+
+ context = {
+ 'Fidelis.Alert(val.ID && val.ID == obj.ID)': output
+ }
+
+ return_outputs(tableToMarkdown('Found {} Alerts:'.format(len(output)), output, headers), context, results)
+
+
+def get_alert_by_uuid():
+
+ alert_uuid = demisto.args().get('alert_uuid')
+
+ results = list_alerts(ioc=alert_uuid)
+
+ output = [{
+ 'ID': alert['ALERT_ID'],
+ 'Time': alert['ALERT_TIME'],
+ 'Summary': alert['SUMMARY'],
+ 'Severity': alert['SEVERITY'],
+ 'Type': alert['ALERT_TYPE']
+ } for alert in results]
+
+ context = {
+ 'Fidelis.Alert(val.ID && val.ID == obj.ID)': output
+ }
+
+ return_outputs(tableToMarkdown('Found {} Alerts:'.format(len(output)), output), context, results)
+
+
+def upload_pcap_command():
+ args = demisto.args()
+ component_ip = args['component_ip']
+ entry_id = args['entry_id']
+
+ upload_pcap(component_ip, entry_id)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Pcap file uploaded successfully.',
+ })
+
+
+@logger
+def upload_pcap(component_ip, entry_id):
+ file_info = demisto.getFilePath(entry_id)
+ shutil.copy(file_info['path'], file_info['name'])
+
+ try:
+ with open(file_info['name'], 'rb') as f:
+ http_request('POST', '/j/rest/policy/pcap/upload/{}/'.format(component_ip),
+ files={'uploadFile': f}, is_json=False)
+    finally:
+        # the copy is a regular file, not a directory, so os.remove is the right cleanup
+        if os.path.exists(file_info['name']):
+            os.remove(file_info['name'])
+
+
+def run_pcap_command():
+ args = demisto.args()
+ component_ip = args['component_ip']
+ file_names = args['files'].split(',')
+
+ run_pcap(component_ip, file_names)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Pcap file run submitted.',
+ })
+
+
+@logger
+def run_pcap(component_ip, file_names):
+ data = {
+ 'component': component_ip,
+ 'files': file_names
+ }
+ res = http_request('POST', '/j/rest/policy/pcap/run/', data=data) # noqa
+
+
+def list_pcap_components_command():
+ results = list_pcap_components()
+ output = [{
+ 'IP': r['ip'],
+ 'Name': r['name'],
+ } for r in results]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': results,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('PCAP Components', output, headers=['Name', 'IP']),
+ 'EntryContext': {'Fidelis.Component(val.Name && val.Name == obj.Name)': output},
+ })
+
+
+@logger
+def list_pcap_components():
+ res = http_request('GET', '/j/rest/policy/pcap/components/')
+
+ return res
+
+
+def list_metadata_request(time_frame=None, start_time=None, end_time=None, client_ip=None, server_ip=None,
+ request_direction=None):
+
+ filters = []
+ if client_ip is not None:
+ filters.append({'simple': {'column': 'ClientIP', 'operator': '=', 'value': client_ip}})
+ if server_ip is not None:
+ filters.append({'simple': {'column': 'ServerIP', 'operator': '=', 'value': server_ip}})
+ if request_direction is not None:
+ filters.append({'simple': {'column': 'Direction', 'operator': '=', 'value': request_direction}})
+    # build a random 8-digit search ID string, e.g. '38271956'
+    search_id = ''.join(str(random.randint(1, 9)) for _ in range(8))
+
+ data = {
+ 'collectors': [],
+ 'action': 'new',
+ 'allCollectors': True,
+ 'timeSettings': generate_time_settings(time_frame, start_time, end_time),
+ 'displaySettings': {
+ 'pageSize': 1000,
+ 'currentPage': 1,
+ 'pageNavigation': "",
+ 'sorting': {
+ 'column': 'Timestamp',
+ 'sortingOrder': 'D'
+ }
+ },
+ 'dataSettings': {
+ 'composite': {
+ 'logic': 'and',
+ 'filters': filters
+ }
+ },
+ 'searchId': search_id
+ }
+ res = http_request('POST', '/j/rest/metadata/search/', data=data)
+
+ return res.get('aaData')
+
+
+def list_metadata():
+
+ args = demisto.args()
+ time_frame = args.get('time_frame')
+ start_time = args.get('start_time')
+ end_time = args.get('end_time')
+ client_ip = args.get('client_ip')
+ server_ip = args.get('server_ip')
+ request_direction = args.get('request_direction')
+
+ data = []
+ event_context = []
+
+ results = list_metadata_request(time_frame=time_frame, start_time=start_time, end_time=end_time,
+ client_ip=client_ip, server_ip=server_ip, request_direction=request_direction)
+ for event in results:
+ data.append({
+ 'Timestamp': event.get('Timestamp'),
+ 'ServerIP': event.get('ServerIP'),
+ 'ServerPort': event.get('ServerPort'),
+ 'ClientIP': event.get('ClientIP'),
+ 'ClientPort': event.get('ClientPort')
+ })
+
+ event_context.append({
+ 'Timestamp': event.get('Timestamp'),
+ 'ServerIP': event.get('ServerIP'),
+ 'ServerPort': event.get('ServerPort'),
+ 'ServerCountry': event.get('ServerCountry'),
+ 'ClientIP': event.get('ClientIP'),
+ 'ClientPort': event.get('ClientPort'),
+ 'ClientCountry': event.get('ClientCountry'),
+ 'Type': event.get('Type'),
+ 'SensorUUID': event.get('SensorUUID'),
+ 'SessionStart': event.get('SessionStart'),
+ 'SessionDuration': event.get('SessionDuration'),
+ 'Protocol': event.get('Protocol'),
+ 'URL': event.get('URL'),
+ 'RequestDirection': event.get('RequestDirection'),
+ 'UserAgent': event.get('UserAgent'),
+ 'FileName': event.get('FileName'),
+ 'FileType': event.get('FileType'),
+ 'FileSize': event.get('FileSize'),
+ 'MD5': event.get('MD5'),
+ 'SHA256': event.get('SHA256'),
+ 'MalwareName': event.get('MalwareName'),
+ 'MalwareType': event.get('MalwareType'),
+ 'MalwareSeverity': event.get('MalwareSeverity'),
+ 'PcapFilename': event.get('PcapFilename'),
+ 'PcapTimestamp': event.get('PcapTimestamp')
+
+ })
+ context = {
+ 'Fidelis.Metadata(val.ID && val.ID == obj.ID)': event_context
+ }
+
+ return_outputs(tableToMarkdown('Found {} Metadata:'.format(len(data)), data), context, results)
+
+
+def request_dpath(alert_id):
+
+ res = http_request('GET', '/j/rest/v1/alert/dpath/{}/'.format(alert_id))
+ dpath = res.get('decodingPaths')[0]
+ link_path = dpath.get('linkPath')
+ encoded_path = urllib.quote(link_path)
+ return encoded_path
+
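+# The decoding path is URL-encoded so it can safely be passed as the 'params'
+# query argument of the file-download request below, e.g. (made-up value):
+#   linkPath 'a path/with spaces' -> 'a%20path/with%20spaces'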
+
+def download_malware_file_request(alert_id):
+
+ dpath = request_dpath(alert_id)
+ query_params = {
+ 'uid': SESSION_ID,
+ 'alert_id': alert_id,
+ 'type': '1',
+ 'params': dpath
+ }
+ res = http_request(
+ 'GET',
+ '/query/tcpses_getfile.cgi',
+ params=query_params,
+ is_json=False)
+
+ return res
+
+
+def download_malware_file():
+ """
+ Download specific malware from the alert
+ """
+ alert_id = demisto.args().get('alert_id')
+ file_name = request_dpath(alert_id)
+ decoded_file_name = urllib.unquote(file_name)
+ results = download_malware_file_request(alert_id)
+
+ demisto.results(fileResult(
+ decoded_file_name + '.zip',
+ results,
+ file_type=entryTypes['file']))
+
+
+def download_pcap_request(alert_id):
+
+ query_params = {
+ 'uid': SESSION_ID,
+ 'alert_id': alert_id,
+ 'commandpost': '127.0.0.1',
+ }
+
+ results = http_request(
+ 'GET',
+ '/e.cgi',
+ params=query_params,
+ is_json=False
+ )
+
+ return results
+
+
+def download_pcap_file():
+ """
+ Download PCAP from an alert
+ """
+ alert_id = demisto.args().get('alert_id')
+
+ results = download_pcap_request(alert_id)
+ demisto.results(fileResult(
+ 'Alert ID_' + alert_id + '.pcap',
+ results,
+ file_type=entryTypes['file']))
+
+
+def test_integration():
+ # the login is executed in the switch panel code
+ if IS_FETCH:
+ # just check the correctness of the parameter
+ parse_date_range(FETCH_TIME)
+ list_pcap_components()
+ demisto.results('ok')
+
+
+def fetch_incidents():
+ last_run = demisto.getLastRun()
+ # Get the last fetch time, if exists
+ last_fetch = last_run.get('time')
+
+ # Handle first time fetch, fetch incidents retroactively
+ if last_fetch is None:
+ last_fetch, _ = parse_date_range(FETCH_TIME, date_format='%Y-%m-%dT%H:%M:%S')
+
+ latest = datetime.strptime(last_fetch, '%Y-%m-%dT%H:%M:%S')
+
+ demisto.debug('getting alarms since {}'.format(last_fetch))
+ incidents = []
+ items = list_alerts(time_frame='Custom', start_time=last_fetch)
+ demisto.debug('got {} new alarms'.format(len(items)))
+ for item in items:
+ incident_date = datetime.strptime(item['ALERT_TIME'], '%Y-%m-%d %H:%M:%S')
+ incident = {
+ 'Type': 'Fidelis',
+ 'name': '{} {}'.format(item['ALERT_ID'], item['SUMMARY']),
+ 'occurred': incident_date.strftime('%Y-%m-%dT%H:%M:%SZ'),
+ 'rawJSON': json.dumps(item),
+ }
+ latest = max(latest, incident_date)
+ incidents.append(incident)
+
+    # 'latest' is a datetime while 'last_fetch' is a string, so comparing them
+    # directly is always True; only advance the last run when new alerts arrived
+    if incidents:
+        last_fetch = (latest + timedelta(seconds=1)).strftime('%Y-%m-%dT%H:%M:%S')
+        demisto.setLastRun({'time': last_fetch})
+
+ demisto.incidents(incidents)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+
+def main():
+ try:
+ handle_proxy()
+ command = demisto.command()
+ LOG('Command being called is {}'.format(command))
+ login()
+ if command == 'test-module':
+ test_integration()
+ elif command == 'fetch-incidents':
+ fetch_incidents()
+
+ elif command == 'fidelis-get-alert':
+ get_alert_command()
+
+ elif command == 'fidelis-delete-alert':
+ delete_alert_command()
+
+ elif command == 'fidelis-get-malware-data':
+ get_malware_data_command()
+
+ elif command == 'fidelis-get-alert-pcap':
+ get_alert_pcap_command()
+
+ elif command == 'fidelis-get-alert-report':
+ get_alert_report_command()
+
+ elif command == 'fidelis-sandbox-upload':
+ sandbox_upload_command()
+
+ elif command == 'fidelis-list-alerts':
+ list_alerts_command()
+
+ elif command == 'fidelis-upload-pcap':
+ upload_pcap_command()
+
+ elif command == 'fidelis-run-pcap':
+ run_pcap_command()
+
+ elif command == 'fidelis-list-pcap-components':
+ list_pcap_components_command()
+ elif command == 'fidelis-get-alert-by-uuid':
+ get_alert_by_uuid()
+ elif command == 'fidelis-list-metadata':
+ list_metadata()
+ elif command == 'fidelis-list-alerts-by-ip':
+ list_alerts_by_ip()
+ elif command == 'fidelis-download-malware-file':
+ download_malware_file()
+ elif command == 'fidelis-download-pcap-file':
+ download_pcap_file()
+
+ except Exception as e:
+        return_error('An error has occurred: {}'.format(str(e)))
+
+ finally:
+ logout()
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/FidelisElevateNetwork/FidelisElevateNetwork.yml b/Integrations/FidelisElevateNetwork/FidelisElevateNetwork.yml
new file mode 100644
index 000000000000..4875a9bd1c9e
--- /dev/null
+++ b/Integrations/FidelisElevateNetwork/FidelisElevateNetwork.yml
@@ -0,0 +1,636 @@
+category: Network Security
+commonfields:
+ id: Fidelis Elevate Network
+ version: -1
+configuration:
+- display: Server URL
+ name: server_url
+ required: true
+ type: 0
+- display: Credentials
+ name: credentials
+ required: true
+ type: 9
+- defaultvalue: 'True'
+ display: Trust any certificate (unsecure)
+ name: unsecure
+ required: false
+ type: 8
+- defaultvalue: 'True'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: 3 days
+  display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days, 3 months,
+ 1 year)
+ name: fetch_time
+ required: false
+ type: 0
+description: Automate detection and response to network threats and data leakage
+  in your organization with the Fidelis Elevate Network integration.
+display: Fidelis Elevate Network
+name: Fidelis Elevate Network
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Alert ID
+ isArray: false
+ name: alert_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets alert details from Fidelis Elevate.
+ execution: false
+ name: fidelis-get-alert
+ outputs:
+ - contextPath: Fidelis.Alert.ID
+ description: Alert ID.
+ type: string
+ - contextPath: Fidelis.Alert.ThreatScore
+ description: Alert threat score.
+ type: number
+ - contextPath: Fidelis.Alert.Time
+ description: Alert time.
+ type: date
+ - contextPath: Fidelis.Alert.RuleID
+ description: Related rule ID.
+ type: string
+ - contextPath: Fidelis.Alert.RuleName
+ description: Related rule name.
+ type: string
+ - contextPath: Fidelis.Alert.Summary
+ description: Alert summary.
+ type: string
+ - contextPath: Fidelis.Alert.PolicyName
+ description: Related policy name.
+ type: string
+ - contextPath: Fidelis.Alert.Severity
+ description: Alert severity.
+ type: string
+ - contextPath: Fidelis.Alert.Protocol
+ description: Protocol involved in the alert.
+ type: string
+ - contextPath: Fidelis.Alert.Type
+ description: Alert type.
+ type: string
+ - contextPath: Fidelis.Alert.AssignedUser
+ description: Assigned user ID.
+ type: string
+ - arguments:
+ - default: false
+ description: ID of the alert to delete.
+ isArray: true
+ name: alert_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes an alert from Fidelis Elevate.
+ execution: false
+ name: fidelis-delete-alert
+ - arguments:
+ - default: false
+ description: Alert ID.
+ isArray: false
+ name: alert_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves malware data related to a "Malware" type alert.
+ execution: false
+ name: fidelis-get-malware-data
+ outputs:
+ - contextPath: Fidelis.Alert.ID
+ description: Alert ID.
+ type: string
+ - contextPath: Fidelis.Alert.Malware.Name
+ description: Malware name.
+ type: string
+ - contextPath: Fidelis.Alert.Malware.Type
+ description: Malware type.
+ type: string
+ - contextPath: Fidelis.Alert.Malware.Behavior
+ description: Malware behavior.
+ type: string
+ - contextPath: Fidelis.Alert.Malware.Platform
+ description: Malware platform.
+ type: string
+ - contextPath: Fidelis.Alert.Malware.DetailName
+ description: Malware detail name from Fidelis Elevate.
+ type: string
+ - contextPath: Fidelis.Alert.Malware.Variant
+ description: Malware variant.
+ type: string
+ - contextPath: Fidelis.Alert.Malware.Description
+ description: Malware description from Fidelis Elevate.
+ type: string
+ - arguments:
+ - default: false
+ description: ID of the alert for which to get the PCAP file.
+ isArray: false
+ name: alert_id
+ required: true
+ secret: false
+ deprecated: true
+ description: Retrieves the alert PCAP from Fidelis Elevate.
+ execution: false
+ name: fidelis-get-alert-pcap
+ - arguments:
+ - default: false
+ description: Alert ID of the alert for which to download a PDF report.
+ isArray: false
+ name: alert_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Downloads a PDF report for a specified alert.
+ execution: false
+ name: fidelis-get-alert-report
+ - arguments:
+ - default: false
+ description: The file or URL to upload to Fidelis Elevate for analysis.
+ isArray: false
+ name: upload_item
+ required: true
+ secret: false
+ deprecated: false
+ description: Uploads a file or a URL for sandbox analysis in Fidelis Elevate.
+ execution: false
+ name: fidelis-sandbox-upload
+ outputs:
+ - contextPath: Fidelis.Alert.ID
+ description: Alert ID generated from the upload.
+ type: string
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Filter alerts by time frame, for example, Last 48 Hours.
+ isArray: false
+ name: time_frame
+ predefined:
+ - Today
+ - Yesterday
+ - Last 7 Days
+ - Last Hour
+ - Last 24 Hours
+ - Last 48 Hours
+ - Last 30 Days
+ - Custom
+ required: false
+ secret: false
+ - default: false
+ description: If the time_frame value is Custom, specify the start time for the time range,
+ for example, 2017-06-01T12:48:16.734.
+ isArray: false
+ name: start_time
+ required: false
+ secret: false
+ - default: false
+ description: If the time_frame value is Custom, specify the end time for the time range,
+ for example, 2017-06-01T12:48:16.734.
+ isArray: false
+ name: end_time
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Filter alerts by alert severity.
+ isArray: false
+ name: severity
+ predefined:
+ - Low
+ - Medium
+ - High
+ - Critical
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Filter alerts by alert type.
+ isArray: false
+ name: type
+ predefined:
+ - Endpoint
+ - DSI
+ - DPI
+ - DNS
+ - Malware
+ - Mail
+ - File Upload
+ - Collector Feed
+ - Analytics
+ required: false
+ secret: false
+ - default: false
+ description: Filter alerts by alert threat score threshold (higher than).
+ isArray: false
+ name: threat_score
+ required: false
+ secret: false
+ - default: false
+ description: Filter alerts that are related to a specified IOC.
+ isArray: false
+ name: ioc
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of open alerts from Fidelis Elevate.
+ execution: false
+ name: fidelis-list-alerts
+ outputs:
+ - contextPath: Fidelis.Alert.ID
+ description: Alert ID.
+ type: string
+ - contextPath: Fidelis.Alert.Time
+ description: Alert time.
+ type: date
+ - contextPath: Fidelis.Alert.Summary
+ description: Alert summary.
+ type: string
+ - contextPath: Fidelis.Alert.Severity
+ description: Alert severity.
+ type: string
+ - contextPath: Fidelis.Alert.Type
+ description: Alert type.
+ type: string
+ - arguments:
+ - default: false
+ description: Component IP address.
+ isArray: false
+ name: component_ip
+ required: true
+ secret: false
+ - default: false
+ description: War Room entry ID of the PCAP file, for example, "3245@6".
+ isArray: false
+ name: entry_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Uploads a PCAP file to Fidelis Elevate for analysis.
+ execution: false
+ name: fidelis-upload-pcap
+ - deprecated: false
+ description: Gets PCAP components.
+ execution: false
+ name: fidelis-list-pcap-components
+ outputs:
+ - contextPath: Fidelis.Component.Name
+ description: Component name.
+ type: string
+ - contextPath: Fidelis.Component.IP
+ description: Component IP address.
+ type: string
+ - arguments:
+ - default: false
+ description: Component IP address. Run the 'fidelis-list-pcap-components' command to get this value.
+ isArray: false
+ name: component_ip
+ required: true
+ secret: false
+ - default: false
+ description: CSV list of PCAP file names in Fidelis Elevate.
+ isArray: false
+ name: files
+ required: true
+ secret: false
+ deprecated: false
+ description: Runs PCAP file analysis in Fidelis Elevate.
+ execution: false
+ name: fidelis-run-pcap
+ - arguments:
+ - default: false
+ description: The UUID of the alert.
+ isArray: false
+ name: alert_uuid
+ required: true
+ secret: false
+ deprecated: false
+    description: Returns an alert by UUID.
+ execution: false
+ name: fidelis-get-alert-by-uuid
+ outputs:
+ - contextPath: Fidelis.Alert.ID
+ description: Alert ID.
+ type: Number
+ - contextPath: Fidelis.Alert.Severity
+ description: Alert severity.
+ type: String
+ - contextPath: Fidelis.Alert.Summary
+ description: Alert summary.
+ type: String
+ - contextPath: Fidelis.Alert.Time
+ description: Alert time.
+ type: Date
+ - contextPath: Fidelis.Alert.Type
+ description: Alert type.
+ type: String
+ - contextPath: Fidelis.Alert.UUID
+ description: Alert UUID.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Filter alerts by time frame, for example, Last 48 Hours.
+ isArray: false
+ name: time_frame
+ predefined:
+ - Today
+ - Yesterday
+ - Last 7 Days
+ - Last Hour
+ - Last 24 Hours
+ - Last 48 Hours
+ - Last 30 Days
+ - Custom
+ required: false
+ secret: false
+ - default: false
+ description: If the time_frame value is Custom, specify the start time for the time range,
+ for example, 2017-06-01T12:48:16.734.
+ isArray: false
+ name: start_time
+ required: false
+ secret: false
+ - default: false
+ description: If the time_frame value is Custom, specify the end time for the time range,
+      for example, 2017-06-01T12:48:16.734.
+ isArray: false
+ name: end_time
+ required: false
+ secret: false
+ - default: false
+ description: Filter alerts by client IP.
+ isArray: false
+ name: client_ip
+ required: false
+ secret: false
+ - default: false
+ description: Filter alerts by server IP address.
+ isArray: false
+ name: server_ip
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Direction of the request. Can be "s2c" (server to client) or "c2s" (client to server).
+ isArray: false
+ name: request_direction
+ predefined:
+ - s2c
+ - c2s
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a metadata list.
+ execution: false
+ name: fidelis-list-metadata
+ outputs:
+ - contextPath: Fidelis.Metadata.MalwareName
+ description: Malware name.
+ type: String
+ - contextPath: Fidelis.Metadata.ServerPort
+ description: Server port number.
+ type: Number
+ - contextPath: Fidelis.Metadata.SHA256
+ description: SHA256 hash of the file.
+ type: String
+ - contextPath: Fidelis.Metadata.FileName
+ description: File name.
+ type: String
+ - contextPath: Fidelis.Metadata.PcapFilename
+ description: PCAP file name.
+ type: String
+ - contextPath: Fidelis.Metadata.SessionDuration
+ description: The event session duration.
+ type: String
+ - contextPath: Fidelis.Metadata.ServerIP
+ description: The server IP address.
+ type: String
+ - contextPath: Fidelis.Metadata.ClientCountry
+ description: The client country.
+ type: String
+ - contextPath: Fidelis.Metadata.ClientPort
+ description: The client port number.
+ type: Number
+ - contextPath: Fidelis.Metadata.SessionStart
+ description: The date/time that the session started.
+ type: Date
+ - contextPath: Fidelis.Metadata.MalwareType
+ description: The malware type.
+ type: String
+ - contextPath: Fidelis.Metadata.URL
+ description: Request URL.
+ type: String
+ - contextPath: Fidelis.Metadata.RequestDirection
+    description: Request direction (s2c or c2s).
+ type: String
+ - contextPath: Fidelis.Metadata.MalwareSeverity
+ description: The severity of the malware.
+ type: String
+ - contextPath: Fidelis.Metadata.ClientIP
+ description: The client IP address.
+ type: String
+ - contextPath: Fidelis.Metadata.ServerCountry
+ description: The country of the server.
+ type: String
+ - contextPath: Fidelis.Metadata.PcapTimestamp
+ description: PCAP timestamp.
+ type: Date
+ - contextPath: Fidelis.Metadata.SensorUUID
+ description: Sensor UUID.
+ type: String
+ - contextPath: Fidelis.Metadata.Timestamp
+ description: Timestamp of the event.
+ type: Date
+ - contextPath: Fidelis.Metadata.FileType
+ description: File type.
+ type: String
+ - contextPath: Fidelis.Metadata.Protocol
+ description: Event protocol.
+ type: String
+ - contextPath: Fidelis.Metadata.UserAgent
+ description: User agent of the request.
+ type: String
+ - contextPath: Fidelis.Metadata.Type
+ description: Type of the event.
+ type: String
+ - contextPath: Fidelis.Metadata.FileSize
+ description: The size of the file.
+ type: Number
+ - contextPath: Fidelis.Metadata.MD5
+ description: MD5 hash of the file.
+ type: String
+ - arguments:
+ - default: false
+    description: 'Filter alerts by time frame. Can be: Today, Yesterday, Last 7 Days,
+      Last Hour, Last 24 Hours, Last 48 Hours, Last 30 Days, or Custom.'
+ isArray: false
+ name: time_frame
+ required: false
+ secret: false
+ - default: false
+ description: If the time_frame value is Custom, specify the start time for the time range,
+ for example, 2017-06-01T12:48:16.734.
+ isArray: false
+ name: start_time
+ required: false
+ secret: false
+ - default: false
+    description: If the time_frame value is Custom, specify the end time for the time range,
+ for example, 2017-06-01T12:48:16.734.
+ isArray: false
+ name: end_time
+ required: false
+ secret: false
+ - default: false
+ description: Filter alerts by the source IP.
+ isArray: false
+ name: src_ip
+ required: false
+ secret: false
+ - default: false
+ description: Filter alerts by the destination IP address.
+ isArray: false
+ name: dest_ip
+ required: false
+ secret: false
+ deprecated: false
+    description: Returns a list of alerts by source IP address or destination IP address.
+ execution: false
+ name: fidelis-list-alerts-by-ip
+ outputs:
+ - contextPath: Fidelis.Alert.SourceIP
+    description: The alert source IP address.
+ type: String
+ - contextPath: Fidelis.Alert.UserRating
+ description: User rating.
+ type: String
+ - contextPath: Fidelis.Alert.DestinationCountry
+ description: Destination country of the alert.
+ type: String
+ - contextPath: Fidelis.Alert.AssetID
+ description: The ID of the asset.
+ type: Number
+ - contextPath: Fidelis.Alert.Time
+ description: Date/time that the alert started.
+ type: Date
+ - contextPath: Fidelis.Alert.HostIP
+ description: The host IP address of the alert.
+ type: String
+ - contextPath: Fidelis.Alert.DistributedAlertID
+ description: Alert distributed ID.
+ type: String
+ - contextPath: Fidelis.Alert.DestinationIP
+ description: Alert destination IP address.
+ type: String
+ - contextPath: Fidelis.Alert.AlertUUID
+ description: The alert UUID.
+ type: String
+ - contextPath: Fidelis.Alert.Type
+ description: The alert type.
+ type: String
+ - contextPath: Fidelis.Alert.ID
+ description: Alert ID.
+ type: Number
+ - contextPath: Fidelis.Alert.SourceCountry
+    description: Alert source country.
+ type: String
+ - arguments:
+ - default: false
+ description: ID of the alert from which to download the file.
+ isArray: false
+ name: alert_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Downloads a malware file from a specified alert.
+ execution: false
+ name: fidelis-download-malware-file
+ outputs:
+ - contextPath: File.Size
+ description: The size of the file.
+ type: Number
+ - contextPath: File.Extension
+    description: The file extension.
+ type: String
+ - contextPath: File.Info
+ description: Information about the file.
+ type: String
+ - contextPath: File.Name
+ description: The name of the file.
+ type: String
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file.
+ type: String
+ - contextPath: File.Type
+ description: The file type.
+ type: String
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file.
+ type: String
+ - contextPath: File.SSDeep
+ description: SSDeep hash of the file.
+ type: String
+ - contextPath: File.EntryID
+ description: File entry ID.
+ type: String
+ - contextPath: File.MD5
+ description: MD5 hash of the file.
+ type: String
+ - arguments:
+ - default: false
+ description: The ID of the alert from which to download the file.
+ isArray: false
+ name: alert_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Downloads the PCAP file from a specified alert.
+ execution: false
+ name: fidelis-download-pcap-file
+ outputs:
+ - contextPath: File.EntryID
+ description: The entry ID of the file.
+ type: String
+ - contextPath: File.Info
+ description: File information.
+ type: String
+ - contextPath: File.Name
+ description: Name of the file.
+ type: String
+ - contextPath: File.Size
+    description: File size.
+ type: Number
+ - contextPath: File.Type
+ description: File type.
+ type: String
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file.
+ type: String
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file.
+ type: String
+ - contextPath: File.SSDeep
+ description: SSDeep hash of the file.
+ type: String
+ - contextPath: File.MD5
+ description: MD5 hash of the file.
+ type: String
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- Fidelis-Test
diff --git a/Integrations/FidelisElevateNetwork/FidelisElevateNetwork_description.md b/Integrations/FidelisElevateNetwork/FidelisElevateNetwork_description.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Integrations/FidelisElevateNetwork/FidelisElevateNetwork_image.png b/Integrations/FidelisElevateNetwork/FidelisElevateNetwork_image.png
new file mode 100644
index 000000000000..15e2bbcedb94
Binary files /dev/null and b/Integrations/FidelisElevateNetwork/FidelisElevateNetwork_image.png differ
diff --git a/Integrations/FidelisElevateNetwork/FidelisElevateNetwork_test.py b/Integrations/FidelisElevateNetwork/FidelisElevateNetwork_test.py
new file mode 100644
index 000000000000..df78b89d5d74
--- /dev/null
+++ b/Integrations/FidelisElevateNetwork/FidelisElevateNetwork_test.py
@@ -0,0 +1,31 @@
+import re
+from datetime import datetime
+
+
+def test_get_ioc_filter():
+ from FidelisElevateNetwork import get_ioc_filter
+ f = get_ioc_filter('192.168.19.1') # disable-secrets-detection
+ assert f.get('simple', {}).get('column') == 'ANY_IP'
+
+ f = get_ioc_filter('c9a31ea148232b201fe7cb7db5c75f5e')
+ assert f.get('simple', {}).get('column') == 'MD5'
+
+ f = get_ioc_filter('2F6C57D8CB43AA5C0153CD3A06E4A783B5BB7BC1')
+ assert f.get('simple', {}).get('column') == 'SHA1_HASH'
+
+ f = get_ioc_filter('9d88425e266b3a74045186837fbd71de657b47d11efefcf8b3cd185a884b5306')
+ assert f.get('simple', {}).get('column') == 'SHA256'
+
+ f = get_ioc_filter('some ioc')
+ assert f.get('simple', {}).get('column') == 'ANY_STRING'
+
+
+def test_to_fidelis_time_format():
+ from FidelisElevateNetwork import to_fidelis_time_format
+ fidelis_time = re.compile(r'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}')
+
+ dt = datetime.now()
+ assert fidelis_time.match(to_fidelis_time_format(dt)) is not None
+ assert fidelis_time.match(to_fidelis_time_format('2019-12-01T05:40:10')) is not None
+ assert fidelis_time.match(to_fidelis_time_format('2019-12-01T05:40:1')) is not None
+ assert fidelis_time.match(to_fidelis_time_format('2019-12-01T05:40:10Z')) is not None
diff --git a/Integrations/FireEyeETP/FireEyeETP.py b/Integrations/FireEyeETP/FireEyeETP.py
new file mode 100644
index 000000000000..2409087d8894
--- /dev/null
+++ b/Integrations/FireEyeETP/FireEyeETP.py
@@ -0,0 +1,650 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+
+'''
+IMPORTS
+'''
+
+from datetime import timedelta, datetime
+import requests
+import os
+import re
+import copy
+import json
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+
+'''
+GLOBAL VARS
+'''
+
+API_KEY = demisto.params().get('api_key')
+BASE_PATH = '{}/api/v1'.format(demisto.params().get('server'))
+HTTP_HEADERS = {
+ 'Content-Type': 'application/json'
+}
+USE_SSL = not demisto.params().get('unsecure')
+MESSAGE_STATUS = demisto.params().get('message_status')
+
+
+'''
+SEARCH ATTRIBUTES VALID VALUES
+'''
+
+REJECTION_REASONS = ['ETP102', 'ETP103', 'ETP104', 'ETP200', 'ETP201', 'ETP203', 'ETP204', 'ETP205',
+ 'ETP300', 'ETP301', 'ETP302', 'ETP401', 'ETP402', 'ETP403', 'ETP404', 'ETP405']
+
+STATUS_VALUES = ["accepted", "deleted", "delivered", "delivered (retroactive)", "dropped",
+ "dropped oob", "dropped (oob retroactive)", "permanent failure", "processing",
+ "quarantined", "rejected", "temporary failure"]
+
+
+'''
+BASIC FUNCTIONS
+'''
+
+
+def set_proxies():
+
+    # drop the proxy environment variables when the proxy option is off;
+    # pop() with a default avoids a KeyError when a variable is not set
+    if not demisto.params().get('proxy', False):
+        os.environ.pop('HTTP_PROXY', None)
+        os.environ.pop('HTTPS_PROXY', None)
+        os.environ.pop('http_proxy', None)
+        os.environ.pop('https_proxy', None)
+
+
+def listify(comma_separated_list):
+
+ if isinstance(comma_separated_list, list):
+ return comma_separated_list
+ return comma_separated_list.split(',')
+
+
+def http_request(method, url, body=None, headers=None, url_params=None):
+
+    '''
+    Returns the HTTP response.
+    '''
+    # avoid mutating a shared default dict between calls
+    headers = headers if headers is not None else {}
+
+ # add API key to headers
+ headers['x-fireeye-api-key'] = API_KEY
+
+ request_kwargs = {
+ 'headers': headers,
+ 'verify': USE_SSL
+ }
+
+ # add optional arguments if specified
+ if body is not None:
+ request_kwargs['data'] = json.dumps(body)
+ if url_params is not None:
+        # requests expects a dict here; JSON-encoding the params would corrupt the query string
+        request_kwargs['params'] = url_params
+
+ LOG('attempting {} request sent to {} with body:\n{}'.format(method, url, json.dumps(body, indent=4)))
+ response = requests.request(
+ method,
+ url,
+ **request_kwargs
+ )
+ # handle request failure
+ if response.status_code not in range(200, 205):
+ raise ValueError('Request failed with status code {}\n{}'.format(response.status_code, response.text))
+ return response.json()
+
+
+def return_error_entry(message):
+
+ entry = {
+ 'Type': entryTypes['error'],
+ 'Contents': message,
+ 'ContentsFormat': formats['text'],
+ }
+ demisto.results(entry)
+
+
+def to_search_attribute_object(value, filter=None, is_list=False, valid_values=None):
+
+ values = listify(value) if is_list else value
+ if valid_values:
+ for val in values:
+ if val not in valid_values:
+ raise ValueError('{} is not a valid value'.format(val))
+
+ attribute = {
+ 'value': values
+ }
+ if filter:
+ attribute['filter'] = filter
+ return attribute
+
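+# Illustrative output of the helper above (made-up values):
+#   to_search_attribute_object('a@example.com,b@example.com', filter='in', is_list=True)
+#       -> {'value': ['a@example.com', 'b@example.com'], 'filter': 'in'}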
+
+def format_search_attributes(from_email=None, from_email_not_in=None, recipients=None,
+ recipients_not_in=None, subject=None, from_accepted_date_time=None,
+ to_accepted_date_time=None, rejection_reason=None, sender_ip=None, status=None,
+ status_not_in=None, last_modified_date_time=None, domains=None):
+
+ search_attributes = {} # type: Dict
+
+ # handle from_email attribute
+ if from_email and from_email_not_in:
+ raise ValueError('Only one of the followings can be specified: from_email, from_email_not_in')
+ if from_email:
+ search_attributes['fromEmail'] = to_search_attribute_object(from_email, filter='in', is_list=True)
+ elif from_email_not_in:
+ search_attributes['fromEmail'] = to_search_attribute_object(from_email_not_in, filter='not in', is_list=True)
+
+ # handle recipients attributes
+ if recipients and recipients_not_in:
+ raise ValueError('Only one of the followings can be specified: recipients, recipients_not_in')
+ if recipients:
+ search_attributes['recipients'] = to_search_attribute_object(recipients, filter='in', is_list=True)
+ elif recipients_not_in:
+ search_attributes['recipients'] = to_search_attribute_object(recipients_not_in, filter='not in', is_list=True)
+
+ # handle status attributes
+ if status and status_not_in:
+ raise ValueError('Only one of the followings can be specified: status, status_not_in')
+ if status:
+ search_attributes['status'] = to_search_attribute_object(status, filter='in', is_list=True, valid_values=STATUS_VALUES)
+ elif status_not_in:
+        search_attributes['status'] = to_search_attribute_object(status_not_in, filter='not in', is_list=True,
+                                                                 valid_values=STATUS_VALUES)
+
+ if subject:
+ search_attributes['subject'] = to_search_attribute_object(subject, filter='in', is_list=True)
+ if rejection_reason:
+ search_attributes['rejectionReason'] = to_search_attribute_object(rejection_reason, is_list=True,
+ valid_values=REJECTION_REASONS)
+ if sender_ip:
+ search_attributes['senderIP'] = to_search_attribute_object(sender_ip, filter='in', is_list=True)
+ if domains:
+ search_attributes['domains'] = to_search_attribute_object(domains, is_list=True)
+ if from_accepted_date_time and to_accepted_date_time:
+ search_attributes['period'] = {
+ 'range': {
+ 'fromAcceptedDateTime': from_accepted_date_time,
+ 'toAcceptedDateTime': to_accepted_date_time
+ }
+ }
+    if last_modified_date_time:
+        # try to parse '>timestamp' | '>=timestamp' | '<timestamp' | '<=timestamp' | 'timestamp'
+        match = re.match(r'(>=|<=|>|<)?(.*)', last_modified_date_time)
+        if match:
+            search_attributes['lastModifiedDateTime'] = {
+                'value': match.group(2),
+                'filter': match.group(1) or '='
+            }
+
+    return search_attributes
+
+
+def readable_message_data(message):
+
+    # field names follow the message attributes used by the search body above
+    return {
+        'Message ID': message.get('id'),
+        'Accepted Time': message.get('acceptedDateTime'),
+        'From': message.get('from'),
+        'Recipients': message.get('recipients'),
+        'Subject': message.get('subject'),
+        'Message Status': message.get('status')
+    }
+
+
+def message_context_data(message):
+
+    context_data = copy.deepcopy(message)
+    # remove 'attributes' level
+    context_data.update(context_data.pop('attributes', {}))
+
+    # extract the address from a 'Display Name <address>' style sender header
+    match = re.search('<(.*)>', context_data['senderHeader'].replace('\\"', ''))
+    context_data['from'] = match.group() if match else context_data['senderHeader']
+
+ if context_data.get('recipientHeader') is None:
+ context_data['recipients'] = []
+ return context_data
+
+ recipients = []
+ for recipient_header in context_data.get('recipientHeader', []):
+ match = re.search('<(.*)>', recipient_header)
+ recipient_address = match.group() if match else recipient_header
+ recipients.append(recipient_address)
+ context_data['recipients'] = ','.join(recipients)
+
+ return context_data
+
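+# Illustrative parsing performed above (made-up values):
+#   senderHeader '"Alice" <alice@example.com>'  -> from '<alice@example.com>'
+#   recipientHeader ['Bob <bob@example.com>']   -> recipients '<bob@example.com>'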
+
+def search_messages_request(attributes={}, has_attachments=None, max_message_size=None):
+
+ url = '{}/messages/trace'.format(BASE_PATH)
+ body = {
+ 'attributes': attributes,
+ 'type': 'MessageAttributes',
+ 'size': max_message_size or 20
+ }
+ if has_attachments is not None:
+ body['hasAttachments'] = has_attachments
+ response = http_request(
+ 'POST',
+ url,
+ body=body,
+ headers=HTTP_HEADERS
+ )
+ # no results
+ if response['meta']['total'] == 0:
+ return []
+ return response['data']
+
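+# The trace endpoint is queried with a body of roughly this shape (made-up values);
+# 'hasAttachments' is only included when explicitly requested:
+#   {
+#       'attributes': {'fromEmail': {'value': ['a@example.com'], 'filter': 'in'}},
+#       'type': 'MessageAttributes',
+#       'size': 20
+#   }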
+
+def search_messages_command():
+
+ args = demisto.args()
+ if 'size' in args.keys():
+ # parse to int
+ args['size'] = int(args['size'])
+ if args.get('has_attachments') is not None:
+ # parse to boolean
+        args['hasAttachments'] = args['has_attachments'] == 'true'
+
+ search_attributes = format_search_attributes(
+ from_email=args.get('from_email'),
+ from_email_not_in=args.get('from_email_not_in'),
+ recipients=args.get('recipients'),
+ recipients_not_in=args.get('recipients_not_in'),
+ subject=args.get('subject'),
+ from_accepted_date_time=args.get('from_accepted_date_time'),
+ to_accepted_date_time=args.get('to_accepted_date_time'),
+ rejection_reason=args.get('rejection_reason'),
+ sender_ip=args.get('sender_ip'),
+ status=args.get('status'),
+ status_not_in=args.get('status_not_in'),
+ last_modified_date_time=args.get('last_modified_date_time'),
+ domains=args.get('domains')
+ )
+
+ # raw data
+ messages_raw = search_messages_request(search_attributes, args.get('hasAttachments'), args.get('size'))
+
+ # create context data
+ messages_context = [message_context_data(message) for message in messages_raw]
+
+ # create readable data
+ messages_readable_data = [readable_message_data(message) for message in messages_context]
+ messages_md_headers = [
+ 'Message ID',
+ 'Accepted Time',
+ 'From',
+ 'Recipients',
+ 'Subject',
+ 'Message Status'
+ ]
+ md_table = tableToMarkdown(
+ 'FireEye ETP - Search Messages',
+ messages_readable_data,
+ headers=messages_md_headers
+ )
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': messages_raw,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md_table,
+ 'EntryContext': {
+ "FireEyeETP.Messages(obj.id==val.id)": messages_context
+ }
+ }
+ demisto.results(entry)
+
+
+def get_message_request(message_id):
+
+ url = '{}/messages/{}'.format(BASE_PATH, message_id)
+ response = http_request(
+ 'GET',
+ url
+ )
+ if response['meta']['total'] == 0:
+ return {}
+ return response['data'][0]
+
+
+def get_message_command():
+
+ # get raw data
+ raw_message = get_message_request(demisto.args()['message_id'])
+
+ if raw_message:
+ # create context data
+ context_data = message_context_data(raw_message)
+
+ # create readable data
+ message_readable_data = readable_message_data(context_data)
+ messages_md_headers = [
+ 'Message ID',
+ 'Accepted Time',
+ 'From',
+ 'Recipients',
+ 'Subject',
+ 'Message Status'
+ ]
+ md_table = tableToMarkdown(
+ 'FireEye ETP - Get Message',
+ message_readable_data,
+ headers=messages_md_headers
+ )
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': raw_message,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md_table,
+ 'EntryContext': {
+ "FireEyeETP.Messages(obj.id==val.id)": context_data
+ }
+ }
+ demisto.results(entry)
+ # no results
+ else:
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': {},
+ 'ContentsFormat': formats['text'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': '### FireEye ETP - Get Message \n no results'
+ }
+ demisto.results(entry)
+
+
+def alert_readable_data_summary(alert):
+
+ return {
+ 'Alert ID': alert['id'],
+ 'Alert Timestamp': alert['alert']['timestamp'],
+ 'From': alert['email']['headers']['from'],
+ 'Recipients': '{}|{}'.format(alert['email']['headers']['to'], alert['email']['headers']['cc']),
+ 'Subject': alert['email']['headers']['subject'],
+ 'MD5': alert['alert'].get('malware_md5'),
+ 'URL/Attachment': alert['email']['attachment'],
+ 'Email Status': alert['email']['status'],
+ 'Email Accepted': alert['email']['timestamp']['accepted'],
+ 'Threat Intel': alert['ati']
+ }
+
+
+def alert_readable_data(alert):
+
+ return {
+ 'Alert ID': alert['id'],
+ 'Alert Timestamp': alert['alert']['timestamp'],
+ 'From': alert['email']['headers']['from'],
+ 'Recipients': '{}|{}'.format(alert['email']['headers']['to'], alert['email']['headers']['cc']),
+ 'Subject': alert['email']['headers']['subject'],
+ 'MD5': alert['alert'].get('malware_md5'),
+ 'URL/Attachment': alert['email']['attachment'],
+ 'Email Status': alert['email']['status'],
+ 'Email Accepted': alert['email']['timestamp']['accepted'],
+        'Severity': alert['alert']['severity']
+ }
+
+
+def malware_readable_data(malware):
+
+ return {
+ 'Name': malware['name'],
+ 'Domain': malware.get('domain'),
+ 'Downloaded At': malware['downloaded_at'],
+ 'Executed At': malware['executed_at'],
+ 'Type': malware['stype'],
+ 'Submitted At': malware['submitted_at'],
+ 'SID': malware['sid']
+ }
+
+
+def alert_context_data(alert):
+
+ context_data = copy.deepcopy(alert)
+ # remove 'attributes' level
+ context_data.update(context_data.pop('attributes', {}))
+ return context_data
+
+
+def get_alerts_request(legacy_id=None, from_last_modified_on=None, etp_message_id=None, size=None, raw_response=False):
+
+ url = '{}/alerts'.format(BASE_PATH)
+
+    # construct the body for the request
+ body = {}
+ attributes = {}
+ if legacy_id:
+ attributes['legacy_id'] = legacy_id
+ if etp_message_id:
+ attributes['etp_message_id'] = etp_message_id
+ if attributes:
+ body['attribute'] = attributes
+ if size:
+ body['size'] = size
+ if from_last_modified_on:
+ body['fromLastModifiedOn'] = from_last_modified_on
+
+ response = http_request(
+ 'POST',
+ url,
+ body=body,
+ headers=HTTP_HEADERS
+ )
+ if raw_response:
+ return response
+ if response['meta']['total'] == 0:
+ return []
+ return response['data']
+
+
+def get_alerts_command():
+
+ args = demisto.args()
+
+    if 'size' in args:
+        args['size'] = int(args['size'])
+
+    if 'legacy_id' in args:
+        args['legacy_id'] = int(args['legacy_id'])
+
+ # get raw data
+ alerts_raw = get_alerts_request(
+ legacy_id=args.get('legacy_id'),
+ from_last_modified_on=args.get('from_last_modified_on'),
+ etp_message_id=args.get('etp_message_id'),
+ size=args.get('size')
+ )
+
+ # create context data
+ alerts_context = [alert_context_data(alert) for alert in alerts_raw]
+
+ # create readable data
+    alerts_readable_data = [alert_readable_data_summary(alert) for alert in alerts_context]
+    alerts_summary_headers = [
+ 'Alert ID',
+ 'Alert Timestamp',
+ 'Email Accepted',
+ 'From',
+ 'Recipients',
+ 'Subject',
+ 'MD5',
+ 'URL/Attachment',
+ 'Email Status',
+ 'Threat Intel'
+ ]
+ md_table = tableToMarkdown(
+ 'FireEye ETP - Get Alerts',
+ alerts_readable_data,
+        headers=alerts_summary_headers
+ )
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': alerts_raw,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md_table,
+ 'EntryContext': {
+ "FireEyeETP.Alerts(obj.id==val.id)": alerts_context
+ }
+ }
+ demisto.results(entry)
+
+
+def get_alert_request(alert_id):
+
+ url = '{}/alerts/{}'.format(BASE_PATH, alert_id)
+ response = http_request(
+ 'GET',
+ url
+ )
+ if response['meta']['total'] == 0:
+ return {}
+ return response['data'][0]
+
+
+def get_alert_command():
+
+ # get raw data
+ alert_raw = get_alert_request(demisto.args()['alert_id'])
+
+ if alert_raw:
+ # create context data
+ alert_context = alert_context_data(alert_raw)
+
+ # create readable data
+ readable_data = alert_readable_data(alert_context)
+ alert_md_table = tableToMarkdown(
+ 'Alert Details',
+ readable_data
+ )
+ data = alert_context['alert']['explanation']['malware_detected']['malware']
+ malware_data = [malware_readable_data(malware) for malware in data]
+ malware_md_table = tableToMarkdown(
+ 'Malware Details',
+ malware_data
+ )
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': alert_raw,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': '## FireEye ETP - Get Alert\n{}\n{}'.format(alert_md_table, malware_md_table),
+ 'EntryContext': {
+ "FireEyeETP.Alerts(obj.id==val.id)": alert_context
+ }
+ }
+ demisto.results(entry)
+ # no results
+ else:
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': {},
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+            'HumanReadable': '### FireEye ETP - Get Alert\nno results'
+        }
+ demisto.results(entry)
+
+
+def parse_string_in_iso_format_to_datetime(iso_format_string):
+
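+    # try progressively coarser ISO-8601 layouts: with milliseconds, with seconds, then minutes only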
+ alert_last_modified = None
+ try:
+ alert_last_modified = datetime.strptime(iso_format_string, "%Y-%m-%dT%H:%M:%S.%f")
+ except ValueError:
+ try:
+ alert_last_modified = datetime.strptime(iso_format_string, "%Y-%m-%dT%H:%M:%S")
+ except ValueError:
+ alert_last_modified = datetime.strptime(iso_format_string, "%Y-%m-%dT%H:%M")
+ return alert_last_modified
+
+
+def parse_alert_to_incident(alert):
+
+ context_data = alert_context_data(alert)
+ incident = {
+ 'name': context_data['email']['headers']['subject'],
+ 'rawJSON': json.dumps(context_data)
+ }
+ return incident
+
+
+def fetch_incidents():
+
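+    # two cursors persist between runs: 'last_modified' bounds the alerts query,
+    # while 'last_created' is used to skip alerts that were already ingested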
+ last_run = demisto.getLastRun()
+ week_ago = datetime.now() - timedelta(days=7)
+ iso_format = "%Y-%m-%dT%H:%M:%S.%f"
+
+    if 'last_modified' not in last_run:
+        # format the datetime as an ISO string yyyy-mm-ddThh:mm:ss.fff
+        last_run['last_modified'] = week_ago.strftime(iso_format)[:-3]
+    if 'last_created' not in last_run:
+        last_run['last_created'] = week_ago.strftime(iso_format)
+
+ alerts_raw_response = get_alerts_request(
+ from_last_modified_on=last_run['last_modified'],
+ size=100,
+ raw_response=True
+ )
+    # exit early if no results were returned
+    if not alerts_raw_response or 'data' not in alerts_raw_response:
+ return
+
+ alerts = alerts_raw_response['data']
+ last_alert_created = parse_string_in_iso_format_to_datetime(last_run['last_created'])
+ alert_creation_limit = parse_string_in_iso_format_to_datetime(last_run['last_created'])
+ incidents = []
+
+ for alert in alerts:
+ # filter by message status if specified
+ if MESSAGE_STATUS and alert['attributes']['email']['status'] != MESSAGE_STATUS:
+ continue
+ # filter alerts created before 'last_created'
+ current_alert_created = parse_string_in_iso_format_to_datetime(alert['attributes']['alert']['timestamp'])
+ if current_alert_created < alert_creation_limit:
+ continue
+ # append alert to incident
+ incidents.append(parse_alert_to_incident(alert))
+ # set last created
+ if current_alert_created > last_alert_created:
+ last_alert_created = current_alert_created
+
+ last_run['last_modified'] = alerts_raw_response['meta']['fromLastModifiedOn']['end']
+ last_run['last_created'] = last_alert_created.strftime(iso_format)
+
+ demisto.incidents(incidents)
+ demisto.setLastRun(last_run)
+
+
+'''
+EXECUTION
+'''
+
+set_proxies()
+
+try:
+    if demisto.command() == 'test-module':
+        # a successful request implies valid credentials and connectivity
+        get_alerts_request(size=1)
+        demisto.results('ok')
+    elif demisto.command() == 'fetch-incidents':
+        fetch_incidents()
+    elif demisto.command() == 'fireeye-etp-search-messages':
+        search_messages_command()
+    elif demisto.command() == 'fireeye-etp-get-message':
+        get_message_command()
+    elif demisto.command() == 'fireeye-etp-get-alerts':
+        get_alerts_command()
+    elif demisto.command() == 'fireeye-etp-get-alert':
+        get_alert_command()
+except ValueError as e:
+ LOG(e)
+ LOG.print_log()
+ return_error_entry(e)
diff --git a/Integrations/FireEyeETP/FireEyeETP.yml b/Integrations/FireEyeETP/FireEyeETP.yml
new file mode 100644
index 000000000000..d71b10d991e8
--- /dev/null
+++ b/Integrations/FireEyeETP/FireEyeETP.yml
@@ -0,0 +1,312 @@
+commonfields:
+ id: FireEye ETP
+ version: -1
+name: FireEye ETP
+display: FireEye ETP
+category: Email Gateway
+description: 'FireEye Email Threat Prevention (ETP Cloud) is a cloud-based platform that protects against advanced email attacks.'
+configuration:
+- display: 'Server URL. Valid values: https://etp.us.fireeye.com, https://etp.eu.fireeye.com,
+    https://etp.us.fireeyegov.com'
+ name: server
+ defaultvalue: https://etp.us.fireeye.com
+ type: 0
+ required: true
+ - display: API key
+ name: api_key
+ defaultvalue: ""
+ type: 4
+ required: true
+ - display: Trust any certificate (unsecure)
+ name: unsecure
+ defaultvalue: "true"
+ type: 8
+ required: false
+ - display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ type: 0
+ required: false
+ - display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+ - display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+- display: Message status. All messages with the specified status will be imported
+    as incidents.
+ name: message_status
+ defaultvalue: delivered (retroactive)
+ type: 0
+ required: false
+script:
+ script: '-'
+ type: python
+ commands:
+ - name: fireeye-etp-search-messages
+ arguments:
+    - name: from_email
+      description: List of 'From' email addresses. Maximum of 10 entries.
+    - name: from_email_not_in
+      description: List of 'From' email addresses to exclude. Maximum of 10 entries.
+    - name: recipients
+      description: List of 'To'/'Cc' email addresses. Maximum of 10 entries.
+    - name: recipients_not_in
+      description: List of 'To'/'Cc' email addresses to exclude. Maximum of 10 entries.
+    - name: subject
+      description: List of strings. Maximum of 10 entries.
+    - name: from_accepted_date_time
+      description: The time stamp of the email-accepted date marking the beginning
+        of the date range to search, e.g. 2017-10-24T10:48:51.000Z. Specify 'to_accepted_date_time'
+        as well to set the complete date range for the search.
+    - name: to_accepted_date_time
+      description: The time stamp of the email-accepted date marking the end of
+        the date range to search, e.g. 2017-10-24T10:48:51.000Z. Specify 'from_accepted_date_time'
+        as well to set the complete date range for the search.
+    - name: rejection_reason
+      description: List of ETP rejection reason codes ("ETP102", "ETP103", "ETP104",
+        "ETP200", "ETP201", "ETP203", "ETP204", "ETP205", "ETP300", "ETP301", "ETP302",
+        "ETP401", "ETP402", "ETP403", "ETP404", "ETP405").
+    - name: sender_ip
+      description: List of sender IP addresses. Maximum of 10 entries.
+    - name: status
+      description: List of email status values ("accepted", "deleted", "delivered",
+        "delivered (retroactive)", "dropped", "dropped oob", "dropped (oob retroactive)",
+        "permanent failure", "processing", "quarantined", "rejected", "temporary
+        failure").
+    - name: status_not_in
+      description: List of email status values to exclude ("accepted", "deleted",
+        "delivered", "delivered (retroactive)", "dropped", "dropped oob", "dropped
+        (oob retroactive)", "permanent failure", "processing", "quarantined", "rejected",
+        "temporary failure").
+    - name: last_modified_date_time
+      description: 'Date corresponding to the last modified date, along with one
+        of the following operators: ">", "<", ">=", "<=". E.g. use value "<2017-10-24T18:00:00.000Z"
+        to search for messages that were last modified before the specified time stamp.'
+ - name: domain
+ description: List of domain names.
+ - name: has_attachments
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Boolean value to indicate if the message has attachments.
+    - name: max_message_size
+      description: The default value is 20 KB and the maximum value is 100 KB.
+ outputs:
+ - contextPath: FireEyeETP.Message.acceptedDateTime
+ description: Message accepted date.
+ - contextPath: FireEyeETP.Message.countryCode
+ description: Sender country code.
+ - contextPath: FireEyeETP.Message.domain
+ description: Domain.
+ - contextPath: FireEyeETP.Message.emailSize
+ description: Email size in kb.
+ - contextPath: FireEyeETP.Message.lastModifiedDateTime
+ description: Message last modified date.
+ - contextPath: FireEyeETP.Message.recipientHeader
+ description: List of message recipients header (includes the display name of
+ the user).
+ - contextPath: FireEyeETP.Message.recipients
+ description: List of message recipients.
+ - contextPath: FireEyeETP.Message.senderHeader
+ description: Message sender header (includes the display name of the user).
+ - contextPath: FireEyeETP.Message.sender
+ description: Message sender address.
+ - contextPath: FireEyeETP.Message.senderSMTP
+ description: Message sender SMTP.
+ - contextPath: FireEyeETP.Message.senderIP
+ description: Message sender IP.
+ - contextPath: FireEyeETP.Message.status
+ description: Message status.
+ - contextPath: FireEyeETP.Message.subject
+ description: Message subject
+ - contextPath: FireEyeETP.Message.verdicts.AS
+ description: pass/fail verdict for AS.
+ - contextPath: FireEyeETP.Message.verdicts.AV
+ description: pass/fail verdict for AV
+ - contextPath: FireEyeETP.Message.verdicts.AT
+ description: pass/fail verdict for AT
+ - contextPath: FireEyeETP.Message.verdicts.PV
+ description: pass/fail verdict for PV
+ - contextPath: FireEyeETP.Message.id
+ description: Message ID.
+    description: Search for messages that include specified message attributes that
+      are accessible in the ETP portal.
+ - name: fireeye-etp-get-message
+ arguments:
+ - name: message_id
+ required: true
+ description: The message ID.
+ outputs:
+ - contextPath: FireEyeETP.Message.acceptedDateTime
+ description: Message accepted date.
+ - contextPath: FireEyeETP.Message.countryCode
+ description: Sender country code.
+ - contextPath: FireEyeETP.Message.domain
+ description: Domain.
+ - contextPath: FireEyeETP.Message.emailSize
+ description: Email size in kb.
+ - contextPath: FireEyeETP.Message.lastModifiedDateTime
+ description: Message last modified date.
+ - contextPath: FireEyeETP.Message.recipientHeader
+ description: List of message recipients header (includes the display name of
+ the user).
+ - contextPath: FireEyeETP.Message.recipients
+ description: List of message recipients.
+ - contextPath: FireEyeETP.Message.senderHeader
+ description: Message sender header (includes the display name of the user).
+ - contextPath: FireEyeETP.Message.sender
+ description: Message sender address.
+ - contextPath: FireEyeETP.Message.senderSMTP
+ description: Message sender SMTP.
+ - contextPath: FireEyeETP.Message.senderIP
+ description: Message sender IP.
+ - contextPath: FireEyeETP.Message.status
+ description: Message status.
+ - contextPath: FireEyeETP.Message.subject
+ description: Message subject
+ - contextPath: FireEyeETP.Message.verdicts.AS
+ description: pass/fail verdict for AS.
+ - contextPath: FireEyeETP.Message.verdicts.AV
+ description: pass/fail verdict for AV
+ - contextPath: FireEyeETP.Message.verdicts.AT
+ description: pass/fail verdict for AT
+ - contextPath: FireEyeETP.Message.verdicts.PV
+ description: pass/fail verdict for PV
+ - contextPath: FireEyeETP.Message.id
+ description: Message ID.
+ description: Get the data of a specific message.
+ - name: fireeye-etp-get-alerts
+ arguments:
+ - name: legacy_id
+ description: Alert ID as shown in ETP Web Portal.
+    - name: from_last_modified_on
+      description: Datetime in yyyy-mm-ddThh:mm:ss.fff format. Defaults to the last
+        90 days.
+ - name: etp_message_id
+ description: Email message id.
+    - name: size
+      description: Number of alerts to return. Default is 20. Valid range is 1-100.
+ outputs:
+ - contextPath: FireEyeETP.Alerts.meta.read
+ description: Email read flag.
+ - contextPath: FireEyeETP.Alerts.meta.last_modified_on
+ description: Last modified timestamp.
+    - contextPath: FireEyeETP.Alerts.meta.legacy_id
+      description: Alert ID as shown in the ETP Web Portal.
+    - contextPath: FireEyeETP.Alerts.alert.product
+      description: The product that generated the alert.
+    - contextPath: FireEyeETP.Alerts.alert.timestamp
+      description: Alert timestamp.
+    - contextPath: FireEyeETP.Alerts.alert.malware_md5
+      description: MD5 of the attached file.
+ - contextPath: FireEyeETP.Alerts.email.status
+ description: The email status.
+ - contextPath: FireEyeETP.Alerts.email.source_ip
+ description: Email source IP.
+ - contextPath: FireEyeETP.Alerts.email.smtp.rcpt_to
+ description: Recipient SMTP.
+ - contextPath: FireEyeETP.Alerts.email.smtp.mail_from
+ description: Sender SMTP.
+ - contextPath: FireEyeETP.Alerts.email.etp_message_id
+ description: The message ID.
+ - contextPath: FireEyeETP.Alerts.email.headers.cc
+ description: Email 'cc' recipients.
+ - contextPath: FireEyeETP.Alerts.email.headers.to
+ description: Email recipients.
+ - contextPath: FireEyeETP.Alerts.email.headers.from
+ description: Email sender.
+ - contextPath: FireEyeETP.Alerts.email.headers.subject
+ description: Email subject.
+ - contextPath: FireEyeETP.Alerts.email.attachment
+ description: File name or URL pointing to file.
+ - contextPath: FireEyeETP.Alerts.email.timestamp.accepted
+ description: Email accepted time.
+ - contextPath: FireEyeETP.Alerts.id
+ description: The alert ID.
+ description: Get summary format information about the alerts.
+ - name: fireeye-etp-get-alert
+ arguments:
+ - name: alert_id
+ required: true
+ description: The alert ID.
+ outputs:
+ - contextPath: FireEyeETP.Alerts.meta.read
+ description: Email read flag.
+ - contextPath: FireEyeETP.Alerts.meta.last_modified_on
+ description: Last modified timestamp.
+    - contextPath: FireEyeETP.Alerts.meta.legacy_id
+      description: Alert ID as shown in the ETP Web Portal.
+    - contextPath: FireEyeETP.Alerts.meta.acknowledged
+      description: Acknowledged flag.
+    - contextPath: FireEyeETP.Alerts.alert.product
+      description: The product that generated the alert.
+ - contextPath: FireEyeETP.Alerts.alert.alert_type
+ description: Alert type code.
+ - contextPath: FireEyeETP.Alerts.alert.severity
+ description: Severity code.
+ - contextPath: FireEyeETP.Alerts.alert.explanation.analysis
+ description: Analysis
+ - contextPath: FireEyeETP.Alerts.alert.explanation.anomaly
+ description: Anomaly
+ - contextPath: FireEyeETP.Alerts.alert.explanation.malware_detected.malware.domain
+ description: Malware domain
+ - contextPath: FireEyeETP.Alerts.alert.explanation.malware_detected.malware.downloaded_at
+ description: Malware downloaded at timestamp
+ - contextPath: FireEyeETP.Alerts.alert.explanation.malware_detected.malware.executed_at
+ description: Malware executed at timestamp
+ - contextPath: FireEyeETP.Alerts.alert.explanation.malware_detected.malware.name
+ description: Malware name
+ - contextPath: FireEyeETP.Alerts.alert.explanation.malware_detected.malware.sid
+ description: Malware sid
+ - contextPath: FireEyeETP.Alerts.alert.explanation.malware_detected.malware.stype
+ description: Malware type
+ - contextPath: FireEyeETP.Alerts.alert.explanation.malware_detected.malware.submitted_at
+ description: Malware submitted at
+ - contextPath: FireEyeETP.Alerts.alert.explanation.protocol
+ description: Protocol
+ - contextPath: FireEyeETP.Alerts.alert.explanation.timestamp
+ description: Explanation timestamp
+ - contextPath: FireEyeETP.Alerts.alert.timestamp
+ description: Alert timestamp.
+    - contextPath: FireEyeETP.Alerts.alert.action
+      description: Alert action.
+ - contextPath: FireEyeETP.Alerts.alert.name
+ description: Alert name.
+ - contextPath: FireEyeETP.Alerts.email.status
+ description: The email status.
+ - contextPath: FireEyeETP.Alerts.email.source_ip
+ description: Email source IP.
+ - contextPath: FireEyeETP.Alerts.email.smtp.rcpt_to
+ description: Recipient SMTP.
+ - contextPath: FireEyeETP.Alerts.email.smtp.mail_from
+ description: Sender SMTP.
+ - contextPath: FireEyeETP.Alerts.email.etp_message_id
+ description: FE ETP unique message ID.
+ - contextPath: FireEyeETP.Alerts.email.headers.cc
+ description: Email cc recipients.
+ - contextPath: FireEyeETP.Alerts.email.headers.to
+ description: Email recipients.
+    - contextPath: FireEyeETP.Alerts.email.headers.from
+      description: Email sender.
+    - contextPath: FireEyeETP.Alerts.email.headers.subject
+      description: Email subject.
+    - contextPath: FireEyeETP.Alerts.email.attachment
+      description: File name or URL pointing to the file.
+    - contextPath: FireEyeETP.Alerts.email.timestamp.accepted
+      description: Email accepted time.
+ - contextPath: FireEyeETP.Alerts.id
+ description: The alert unique ID
+    description: Get detailed information about a specific alert. Alerts older than
+      90 days are not available.
+ isfetch: true
+ runonce: false
+tests:
+ - No Test
+
diff --git a/Integrations/FireEyeETP/FireEyeETP_description.md b/Integrations/FireEyeETP/FireEyeETP_description.md
new file mode 100644
index 000000000000..1219a9553344
--- /dev/null
+++ b/Integrations/FireEyeETP/FireEyeETP_description.md
@@ -0,0 +1 @@
+FireEye Email Threat Prevention (ETP Cloud) is a cloud-based platform that protects against advanced email attacks.
diff --git a/Integrations/FireEyeETP/FireEyeETP_image.png b/Integrations/FireEyeETP/FireEyeETP_image.png
new file mode 100644
index 000000000000..6d9ffcfe9cd1
Binary files /dev/null and b/Integrations/FireEyeETP/FireEyeETP_image.png differ
diff --git a/Integrations/FireEyeHX/CHANGELOG.md b/Integrations/FireEyeHX/CHANGELOG.md
new file mode 100644
index 000000000000..85210da870bd
--- /dev/null
+++ b/Integrations/FireEyeHX/CHANGELOG.md
@@ -0,0 +1 @@
+Fixed issue when fetching incidents with an unknown indicator.
\ No newline at end of file
diff --git a/Integrations/FireEyeHX/FireEyeHX.py b/Integrations/FireEyeHX/FireEyeHX.py
new file mode 100644
index 000000000000..7f708e722505
--- /dev/null
+++ b/Integrations/FireEyeHX/FireEyeHX.py
@@ -0,0 +1,2630 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+"""
+
+IMPORTS
+
+"""
+import requests
+import base64
+import time
+import json
+import os
+import re
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+"""
+
+HANDLE PROXY
+
+"""
+
+
+def set_proxies():
+
+    if not demisto.params().get('proxy', False):
+        # pop() avoids a KeyError when a proxy variable is not set in the environment
+        for env_var in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
+            os.environ.pop(env_var, None)
+
+
+"""
+
+GLOBAL VARS
+
+"""
+SERVER_URL = demisto.params()['server']
+USERNAME = demisto.params()['credentials']['identifier']
+PASSWORD = demisto.params()['credentials']['password']
+USE_SSL = not demisto.params()['insecure']
+VERSION = demisto.params()['version']
+GET_HEADERS = {
+ 'Accept': 'application/json'
+}
+POST_HEADERS = {
+ 'Accept': 'application/json',
+ 'Content-type': 'application/json'
+}
+PATCH_HEADERS = {
+ 'Content-Type': 'text/plain'
+}
+BASE_PATH = '{}/hx/api/{}'.format(SERVER_URL, VERSION)
+INDICATOR_MAIN_ATTRIBUTES = [
+ 'OS',
+ 'Name',
+ 'Created By',
+ 'Active Since',
+ 'Category',
+ 'Signature',
+ 'Active Condition',
+ 'Hosts With Alerts',
+ 'Source Alerts'
+]
+ALERT_MAIN_ATTRIBUTES = [
+ 'Alert ID',
+ 'Reported',
+ 'Event Type',
+ 'Agent ID'
+]
+HOST_MAIN_ATTRIBUTES = [
+ 'Host Name',
+ 'Host IP',
+ 'Agent ID',
+ 'Agent Version',
+ 'OS',
+ 'Last Poll',
+ 'Containment State',
+ 'Domain',
+ 'Last Alert'
+]
+HOST_SET_MAIN_ATTRIBUTES = [
+ 'Name',
+ 'ID',
+ 'Type'
+]
+# scripts for data acquisitions
+STANDARD_INVESTIGATIVE_DETAILS_OSX = {
+ "commands": [
+ {
+ "name": "sysinfo"
+ },
+ {
+ "name": "disks"
+ },
+ {
+ "name": "volumes"
+ },
+ {
+ "name": "useraccounts"
+ },
+ {
+ "name": "groups"
+ },
+ {
+ "name": "files-api",
+ "parameters": [
+ {
+ "name": "Path",
+ "value": "/"
+ },
+ {
+ "name": "Regex",
+ "value": "^(?:Applications|Library|System|User|bin|cores|opt|private|sbin|usr)+"
+ },
+ {
+ "name": "Include Remote Locations",
+ "value": False
+ },
+ {
+ "name": "Depth",
+ "value": -1
+ },
+ {
+ "name": "MD5",
+ "value": True
+ },
+ {
+ "name": "SHA1",
+ "value": False
+ },
+ {
+ "name": "SHA256",
+ "value": False
+ },
+ {
+ "name": "Verify Digital Signatures",
+ "value": False
+ },
+ {
+ "name": "AND Operator",
+ "value": False
+ },
+ {
+ "name": "Include Files",
+ "value": True
+ },
+ {
+ "name": "Include Directories",
+ "value": True
+ },
+ {
+ "name": "Preserve Times",
+ "value": False
+ }
+ ]
+ },
+ {
+ "name": "persistence",
+ "parameters": [
+ {
+ "name": "MD5",
+ "value": True
+ },
+ {
+ "name": "SHA1",
+ "value": False
+ },
+ {
+ "name": "SHA256",
+ "value": False
+ },
+ {
+ "name": "Preserve Times",
+ "value": False
+ },
+ {
+ "name": "Verify Digital Signatures",
+ "value": False
+ }
+ ]
+ },
+ {
+ "name": "tasks",
+ "parameters": [
+ {
+ "name": "MD5",
+ "value": True
+ },
+ {
+ "name": "SHA1",
+ "value": False
+ },
+ {
+ "name": "SHA256",
+ "value": False
+ },
+ {
+ "name": "Verify Digital Signatures",
+ "value": True
+ },
+ {
+ "name": "Preserve Times",
+ "value": False
+ }
+ ]
+ },
+ {
+ "name": "processes-api"
+ },
+ {
+ "name": "urlhistory",
+ "parameters": [
+ {
+ "name": "TargetBrowser",
+ "value": "Chrome"
+ },
+ {
+ "name": "TargetBrowser",
+ "value": "Firefox"
+ },
+ {
+ "name": "TargetBrowser",
+ "value": "Safari"
+ }
+ ]
+ },
+ {
+ "name": "quarantine-events"
+ },
+ {
+ "name": "ports"
+ },
+ {
+ "name": "services",
+ "parameters": [
+ {
+ "name": "MD5",
+ "value": True
+ },
+ {
+ "name": "SHA1",
+ "value": False
+ },
+ {
+ "name": "SHA256",
+ "value": False
+ },
+ {
+ "name": "Verify Digital Signatures",
+ "value": True
+ },
+ {
+ "name": "Preserve Times",
+ "value": False
+ }
+ ]
+ },
+ {
+ "name": "stateagentinspector",
+ "parameters": [
+ {
+ "name": "eventTypes",
+ "value": []
+ }
+ ]
+ },
+ {
+ "name": "syslog"
+ }
+ ]
+}
+STANDARD_INVESTIGATIVE_DETAILS_LINUX = {
+ "commands": [
+ {
+ "name": "sysinfo"
+ },
+ {
+ "name": "files-api",
+ "parameters": [
+ {
+ "name": "Path",
+ "value": "/"
+ },
+ {
+ "name": "Regex",
+ "value": "^(?:usr|lib|lib64|opt|home|sbin|bin|etc|root)+"
+ },
+ {
+ "name": "Include Remote Locations",
+ "value": False
+ },
+ {
+ "name": "Depth",
+ "value": -1
+ },
+ {
+ "name": "MD5",
+ "value": True
+ },
+ {
+ "name": "SHA1",
+ "value": False
+ },
+ {
+ "name": "SHA256",
+ "value": False
+ },
+ {
+ "name": "AND Operator",
+ "value": False
+ },
+ {
+ "name": "Include Files",
+ "value": True
+ },
+ {
+ "name": "Include Directories",
+ "value": True
+ },
+ {
+ "name": "Preserve Times",
+ "value": False
+ }
+ ]
+ },
+ {
+ "name": "processes-api"
+ },
+ {
+ "name": "ports"
+ },
+ {
+ "name": "shell-history",
+ "parameters": [
+ {
+ "name": "ShellList",
+ "value": [
+ "bash",
+ "zsh",
+ "ksh93"
+ ]
+ }
+ ]
+ }
+ ]
+}
+STANDARD_INVESTIGATIVE_DETAILS_WIN = {
+ "commands": [
+ {
+ "name": "sysinfo"
+ },
+ {
+ "name": "disks",
+ "parameters": [
+ {
+ "name": "Prevent Hibernation",
+ "value": True
+ }
+ ]
+ },
+ {
+ "name": "volumes",
+ "parameters": [
+ {
+ "name": "Prevent Hibernation",
+ "value": True
+ }
+ ]
+ },
+ {
+ "name": "useraccounts",
+ "parameters": [
+ {
+ "name": "Prevent Hibernation",
+ "value": True
+ }
+ ]
+ },
+ {
+ "name": "prefetch",
+ "parameters": [
+ {
+ "name": "Prevent Hibernation",
+ "value": True
+ }
+ ]
+ },
+ {
+ "name": "files-raw",
+ "parameters": [
+ {
+ "name": "Prevent Hibernation",
+ "value": True
+ },
+ {
+ "name": "Active Files",
+ "value": True
+ },
+ {
+ "name": "Deleted Files",
+ "value": True
+ },
+ {
+ "name": "Parse NTFS INDX Buffers",
+ "value": True
+ },
+ {
+ "name": "Path",
+ "value": "%systemdrive%"
+ },
+ {
+ "name": "Depth",
+ "value": -1
+ },
+ {
+ "name": "MD5",
+ "value": True
+ },
+ {
+ "name": "SHA1",
+ "value": False
+ },
+ {
+ "name": "SHA256",
+ "value": False
+ },
+ {
+ "name": "Analyze Entropy",
+ "value": False
+ },
+ {
+ "name": "Enumerate Imports",
+ "value": False
+ },
+ {
+ "name": "Enumerate Exports",
+ "value": False
+ },
+ {
+ "name": "Analyze File Anomalies",
+ "value": False
+ },
+ {
+ "name": "Verify Digital Signatures",
+ "value": False
+ },
+ {
+ "name": "Strings",
+ "value": False
+ },
+ {
+ "name": "AND Operator",
+ "value": False
+ },
+ {
+ "name": "Include Files",
+ "value": True
+ },
+ {
+ "name": "Include Directories",
+ "value": True
+ },
+ {
+ "name": "Get Resources",
+ "value": False
+ },
+ {
+ "name": "Get Resource Data",
+ "value": False
+ },
+ {
+ "name": "Get Version Info",
+ "value": False
+ }
+ ]
+ },
+ {
+ "name": "persistence",
+ "parameters": [
+ {
+ "name": "MD5",
+ "value": True
+ },
+ {
+ "name": "SHA1",
+ "value": False
+ },
+ {
+ "name": "SHA256",
+ "value": False
+ },
+ {
+ "name": "Preserve Times",
+ "value": False
+ },
+ {
+ "name": "Enumerate Imports",
+ "value": False
+ },
+ {
+ "name": "Enumerate Exports",
+ "value": False
+ },
+ {
+ "name": "Verify Digital Signatures",
+ "value": True
+ },
+ {
+ "name": "Analyze Entropy",
+ "value": False
+ },
+ {
+ "name": "Analyze File Anomalies",
+ "value": False
+ },
+ {
+ "name": "Get Resources",
+ "value": False
+ },
+ {
+ "name": "Get Version Info",
+ "value": False
+ },
+ {
+ "name": "Prevent Hibernation",
+ "value": True
+ }
+ ]
+ },
+ {
+ "name": "registry-raw",
+ "parameters": [
+ {
+ "name": "Prevent Hibernation",
+ "value": True
+ },
+ {
+ "name": "Type",
+ "value": "All"
+ }
+ ]
+ },
+ {
+ "name": "tasks",
+ "parameters": [
+ {
+ "name": "Prevent Hibernation",
+ "value": True
+ },
+ {
+ "name": "MD5",
+ "value": True
+ },
+ {
+ "name": "SHA1",
+ "value": False
+ },
+ {
+ "name": "SHA256",
+ "value": False
+ },
+ {
+ "name": "Verify Digital Signatures",
+ "value": True
+ },
+ {
+ "name": "Preserve Times",
+ "value": False
+ },
+ {
+ "name": "raw mode",
+ "value": False
+ }
+ ]
+ },
+ {
+ "name": "eventlogs",
+ "parameters": [
+ {
+ "name": "Prevent Hibernation",
+ "value": True
+ }
+ ]
+ },
+ {
+ "name": "processes-memory",
+ "parameters": [
+ {
+ "name": "Preserve Times",
+ "value": False
+ },
+ {
+ "name": "Prevent Hibernation",
+ "value": True
+ },
+ {
+ "name": "MD5",
+ "value": True
+ },
+ {
+ "name": "SHA1",
+ "value": False
+ },
+ {
+ "name": "SHA256",
+ "value": False
+ },
+ {
+ "name": "MemD5",
+ "value": False
+ },
+ {
+ "name": "enumerate imports",
+ "value": True
+ },
+ {
+ "name": "enumerate exports",
+ "value": True
+ },
+ {
+ "name": "Verify Digital Signatures",
+ "value": True
+ },
+ {
+ "name": "sections",
+ "value": True
+ },
+ {
+ "name": "ports",
+ "value": True
+ },
+ {
+ "name": "handles",
+ "value": True
+ },
+ {
+ "name": "detect injected dlls",
+ "value": True
+ },
+ {
+ "name": "raw mode",
+ "value": False
+ },
+ {
+ "name": "strings",
+ "value": False
+ }
+ ]
+ },
+ {
+ "name": "urlhistory",
+ "parameters": [
+ {
+ "name": "Prevent Hibernation",
+ "value": True
+ },
+ {
+ "name": "GetThumbnails",
+ "value": False
+ },
+ {
+ "name": "GetIndexedPageContent",
+ "value": False
+ }
+ ]
+ },
+ {
+ "name": "ports",
+ "parameters": [
+ {
+ "name": "Prevent Hibernation",
+ "value": True
+ }
+ ]
+ },
+ {
+ "name": "services",
+ "parameters": [
+ {
+ "name": "Prevent Hibernation",
+ "value": True
+ },
+ {
+ "name": "MD5",
+ "value": True
+ },
+ {
+ "name": "SHA1",
+ "value": False
+ },
+ {
+ "name": "SHA256",
+ "value": False
+ },
+ {
+ "name": "Verify Digital Signatures",
+ "value": True
+ },
+ {
+ "name": "Preserve Times",
+ "value": False
+ },
+ {
+ "name": "raw mode",
+ "value": False
+ }
+ ]
+ },
+ {
+ "name": "stateagentinspector",
+ "parameters": [
+ {
+ "name": "eventTypes",
+ "value": []
+ }
+ ]
+ }
+ ]
+}
+
+SYS_SCRIPT_MAP = {
+    'osx': STANDARD_INVESTIGATIVE_DETAILS_OSX,
+    'win': STANDARD_INVESTIGATIVE_DETAILS_WIN,
+    'linux': STANDARD_INVESTIGATIVE_DETAILS_LINUX
+}
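+# maps the platform string reported by HX for a host to its default data-acquisition script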
+
+"""
+
+COMMAND HANDLERS
+
+"""
+
+
+def get_token_request():
+
+ """
+ returns a token on successful request
+ """
+
+ url = '{}/token'.format(BASE_PATH)
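+    # the token is returned in the 'X-FeApi-Token' response header of a basic-auth GET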
+
+ # basic authentication
+ try:
+ response = requests.request(
+ 'GET',
+ url,
+ headers=GET_HEADERS,
+ verify=USE_SSL,
+ auth=(USERNAME, PASSWORD)
+ )
+ except requests.exceptions.SSLError as e:
+ LOG(e)
+        raise ValueError('An SSL error occurred when trying to connect to the server. '
+                         'Consider configuring unsecure connection in the integration settings.')
+
+ # handle request failure
+ if response.status_code not in range(200, 205):
+ message = parse_error_response(response)
+ raise ValueError('Token request failed with status code {}\n{}'.format(response.status_code, message))
+ # successful request
+ response_headers = response.headers
+ token = response_headers.get('X-FeApi-Token')
+ return token
+
+
+def get_token():
+
+ token = get_token_request()
+ if token:
+ return token
+ raise Exception('Failed to get a token, unexpected response structure from the server')
+
+
+"""
+
+HOST INFORMATION
+
+"""
+
+
+def get_host_by_agent_request(agent_id):
+
+ """
+ returns the response body
+
+ raises an exception on:
+
+ - http request failure
+ - response status code different from 200
+ """
+ url = '{}/hosts/{}'.format(BASE_PATH, agent_id)
+
+ response = http_request(
+ 'GET',
+ url,
+ headers=GET_HEADERS
+ )
+
+ # successful request
+ try:
+ return response.json()['data']
+ except Exception as e:
+ LOG(e)
+ raise ValueError('Failed to get host information - unexpected response structure from the server.')
+
+
+def get_host_information():
+
+ """
+
+ return the host information to the war room, given an agentId or hostName from input.
+
+ """
+ args = demisto.args()
+
+ if not args.get('agentId') and not args.get('hostName'):
+ raise ValueError('Please provide either agentId or hostName')
+
+ host = {} # type: Dict[str, str]
+ if args.get('agentId'):
+ host = get_host_by_agent_request(args.get('agentId'))
+ else:
+ host = get_host_by_name_request(args.get('hostName'))
+
+ md_table = tableToMarkdown(
+ 'FireEye HX Get Host Information',
+ host_entry(host),
+ headers=HOST_MAIN_ATTRIBUTES
+ )
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': host,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md_table,
+ 'EntryContext': {
+ "FireEyeHX.Hosts(obj._id==val._id)": host,
+            "Endpoint(obj.ID==val.ID)": collect_endpoint_context(host)
+ }
+ }
+ demisto.results(entry)
+
+
+def get_hosts_information():
+
+    """
+
+    return the information of all hosts to the war room.
+
+    """
+
+ offset = 0
+ hosts = [] # type: List[Dict[str, str]]
+
+ # get all hosts
+ while True:
+ hosts_partial_results = get_hosts_request(offset=offset, limit=1000)
+ if not hosts_partial_results:
+ break
+ hosts.extend(hosts_partial_results)
+ offset = len(hosts)
+
+ hosts_entry = [host_entry(host) for host in hosts]
+ md_table = tableToMarkdown(
+ 'FireEye HX Get Hosts Information',
+ hosts_entry,
+ headers=HOST_MAIN_ATTRIBUTES
+ )
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': hosts,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md_table,
+ 'EntryContext': {
+ "FireEyeHX.Hosts(obj._id==val._id)": hosts_entry,
+            "Endpoint(obj.ID==val.ID)": [collect_endpoint_context(host) for host in hosts]
+ }
+ }
+ demisto.results(entry)
+
+
+def get_host_set_information():
+
+ """
+ return host set information to the war room according to given id or filters
+
+ """
+ args = demisto.args()
+
+ url = '{}/host_sets/{}'.format(BASE_PATH, args.get('hostSetID', ''))
+ url_params = {
+ 'limit': args.get('limit'),
+ 'offset': args.get('offset'),
+ 'search': args.get('search'),
+ 'sort': args.get('sort'),
+ 'name': args.get('name'),
+ 'type': args.get('type')
+ }
+ response = http_request(
+ 'GET',
+ url,
+ headers=GET_HEADERS,
+ url_params=url_params
+ )
+ host_set = [] # type: List[Dict[str, str]]
+ try:
+ if args.get('hostSetID'):
+ data = response.json()['data']
+ host_set = [data]
+ else:
+ data = response.json()['data']
+ host_set = data.get('entries', [])
+ except Exception as e:
+ LOG(e)
+ raise ValueError('Failed to get host set information - unexpected response from the server.\n' + response.text)
+
+ md_table = "No host sets found"
+ if len(host_set) > 0:
+ md_table = tableToMarkdown(
+ 'FireEye HX Get Host Sets Information',
+ host_set_entry(host_set),
+ headers=HOST_SET_MAIN_ATTRIBUTES
+ )
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': host_set,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md_table,
+ 'EntryContext': {
+ "FireEyeHX.HostSets(obj._id==val._id)": host_set
+ }
+ }
+ demisto.results(entry)
+
+
+def get_hosts_request(limit=None, offset=None, has_active_threats=None, has_alerts=None,
+ agent_version=None, containment_queued=None, containment_state=None,
+ host_name=None, os_platform=None, reported_clone=None, time_zone=None):
+
+ """
+ returns the response body
+
+ raises an exception on:
+
+ - http request failure
+ - response status code different from 200
+ """
+ url = '{}/hosts'.format(BASE_PATH)
+ url_params = {
+ 'limit': limit,
+ 'offset': offset,
+ 'has_active_threats': has_active_threats,
+ 'has_alerts': has_alerts,
+ 'agent_version': agent_version,
+ 'containment_queued': containment_queued,
+ 'containment_state': containment_state,
+ 'hostname': host_name,
+ 'os.platform': os_platform,
+ 'reported_clone': reported_clone,
+ 'time_zone': time_zone
+ }
+ # remove None values
+ url_params = {k: v for k, v in url_params.items() if v is not None}
+
+ response = http_request(
+ 'GET',
+ url,
+ url_params=url_params,
+ headers=GET_HEADERS
+ )
+ # successful request
+ try:
+ return response.json()['data']['entries']
+ except Exception as e:
+ LOG(e)
+ raise ValueError('Failed to parse response body - unexpected response structure from the server.')
+
+
+def get_host_by_name_request(host_name):
+
+ try:
+ return get_hosts_request(host_name=host_name, limit=1)[0]
+ except Exception as e:
+ LOG(e)
+ raise ValueError('Host {} not found.'.format(host_name))
+
+
+def get_all_agents_ids():
+
+ """
+ returns a list of all agents ids
+ """
+ offset = 0
+ hosts = [] # type: List[Dict[str, str]]
+
+ # get all hosts
+ while True:
+ hosts_partial_results = get_hosts_request(offset=offset, limit=1000)
+ if not hosts_partial_results:
+ break
+ hosts.extend(hosts_partial_results)
+ offset = len(hosts)
+ return [host.get('_id') for host in hosts]
+
+
+def get_agent_id(host_name):
+
+ """
+ returns the agent id given the host name
+
+ raises an exception on:
+ - unexpected response structure
+ - empty results
+
+ """
+ host = get_host_by_name_request(host_name)
+ try:
+ return host['_id']
+ except Exception as e:
+ LOG(e)
+ raise ValueError('Failed to get agent id for host {}'.format(host_name))
+
+
+def collect_endpoint_context(host):
+
+ return {
+ 'Hostname': host['hostname'],
+ 'ID': host['_id'],
+ 'IPAddress': host['primary_ip_address'],
+ 'Domain': host['domain'],
+ 'MACAddress': host['primary_mac'],
+ 'OS': host['os']['platform'],
+ 'OSVersion': host['os']['product_name']
+ }
+
+
+"""
+
+HOST CONTAINMENT
+
+"""
+
+
+def containment_request(agent_id):
+
+ """
+
+ no return value on successful request
+
+ """
+ url = '{}/hosts/{}/containment'.format(BASE_PATH, agent_id)
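+    # containment is requested by POSTing the desired state for the given agent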
+ body = {
+ 'state': 'contain'
+ }
+
+ http_request(
+ 'POST',
+ url,
+ body=body,
+ headers=POST_HEADERS
+ )
+ # no exception raised - successful request
+
+
+def containment():
+
+ """
+
+ returns a success message to the war room
+
+ """
+
+ args = demisto.args()
+
+ # validate one of the arguments was passed
+ if not args:
+ raise ValueError('Please provide either agentId or hostName')
+
+ # in case a hostName was given, set the agentId accordingly
+ if args.get('hostName'):
+ args['agentId'] = get_agent_id(args['hostName'])
+
+ containment_request(args['agentId'])
+ # no exceptions raised->successful request
+
+ host = get_host_by_agent_request(args['agentId'])
+ entry = {
+ 'Type': entryTypes['note'],
+        'Contents': 'Containment request for the host was sent and approved successfully',
+ 'ContentsFormat': formats['text'],
+ 'EntryContext': {
+ "FireEyeHX.Hosts(obj._id==val._id)": host,
+            "Endpoint(obj.ID==val.ID)": collect_endpoint_context(host)
+ }
+ }
+ demisto.results(entry)
+
+
+def containment_cancellation_request(agent_id):
+
+ """
+
+ no return value on successful request
+
+ """
+ url = '{}/hosts/{}/containment'.format(BASE_PATH, agent_id)
+
+ http_request(
+ 'DELETE',
+ url,
+ headers=GET_HEADERS
+ )
+ # no exceptions are raised - successful request
+
+
+def containment_cancellation():
+
+ """
+
+ returns a success message to the war room
+
+ """
+
+ args = demisto.args()
+
+ # validate one of the arguments was passed
+ if not args:
+ raise ValueError('Please provide either agentId or hostName')
+
+ # in case a hostName was given, set the agentId accordingly
+ if args.get('hostName'):
+ args['agentId'] = get_agent_id(args['hostName'])
+
+ containment_cancellation_request(args['agentId'])
+ # no exceptions raised->successful request
+
+ host = get_host_by_agent_request(args['agentId'])
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': 'The host is released from containment.',
+ 'ContentsFormat': formats['text'],
+ 'EntryContext': {
+ "FireEyeHX.Hosts(obj._id==val._id)": host,
+            "Endpoint(obj.ID==val.ID)": collect_endpoint_context(host)
+ }
+ }
+ demisto.results(entry)
+
+
+"""
+
+ALERTS
+
+"""
+
+
+def get_alert_request(alert_id):
+
+ url = '{}/alerts/{}'.format(BASE_PATH, alert_id)
+
+ response = http_request(
+ 'GET',
+ url,
+ headers=GET_HEADERS
+ )
+ return response.json().get('data')
+
+
+def get_alert():
+
+ alert_id = demisto.args().get('alertId')
+ alert = get_alert_request(alert_id)
+
+ alert_table = tableToMarkdown(
+ 'FireEye HX Get Alert # {}'.format(alert_id),
+ alert_entry(alert),
+ headers=ALERT_MAIN_ATTRIBUTES
+ )
+
+ event_type = alert.get('event_type')
+ event_type = 'NewEvent' if not event_type else event_type
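+    # split the camelCase event type into words, e.g. 'fileWriteEvent' -> 'File Write Event'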
+    event_type = re.sub("([a-z])([A-Z])", r"\g<1> \g<2>", event_type).title()
+ event_table = tableToMarkdown(
+ event_type,
+ alert.get('event_values')
+ )
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': alert,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': '{}\n{}'.format(alert_table, event_table),
+ 'EntryContext': {
+ "FireEyeHX.Alerts(obj._id==val._id)": alert
+ }
+ }
+ demisto.results(entry)
+
+
+def get_alerts_request(has_share_mode=None, resolution=None, agent_id=None, host_name=None,
+ condition_id=None, limit=None, offset=None, sort=None, min_id=None,
+ event_at=None, alert_id=None, matched_at=None, reported_at=None, source=None):
+
+ """
+
+ returns the response body on successful request
+
+ """
+ url = '{}/alerts'.format(BASE_PATH)
+
+ body = {
+ 'has_share_mode': has_share_mode,
+ 'resolution': resolution,
+ 'agent._id': agent_id,
+ 'condition._id': condition_id,
+ 'event_at': event_at,
+ 'min_id': min_id,
+ '_id': alert_id,
+ 'matched_at': matched_at,
+ 'reported_at': reported_at,
+ 'source': source,
+ 'limit': limit,
+ 'offset': offset,
+ 'sort': sort
+ }
+
+ # remove None values
+ body = {k: v for k, v in body.items() if v is not None}
+
+ response = http_request(
+ 'GET',
+ url,
+ url_params=body,
+ headers=GET_HEADERS
+ )
+ try:
+ return response.json()['data']['entries']
+ except Exception as e:
+ LOG(e)
+ raise ValueError('Failed to parse response body')
+
+
+def get_all_alerts(has_share_mode=None, resolution=None, agent_id=None, condition_id=None, limit=None,
+ sort=None, min_id=None, event_at=None, alert_id=None, matched_at=None, reported_at=None, source=None):
+
+ """
+
+ returns a list of alerts, all results up to limit
+
+ """
+ offset = 0
+ alerts = [] # type: List[Dict[str, str]]
+
+ max_records = limit or float('inf')
+
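+    # page through the API, advancing the offset, until max_records is reached or no entries remain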
+ while len(alerts) < max_records:
+ alerts_partial_results = get_alerts_request(
+ has_share_mode=has_share_mode,
+ resolution=resolution,
+ agent_id=agent_id,
+ condition_id=condition_id,
+ event_at=event_at,
+ alert_id=alert_id,
+ matched_at=matched_at,
+ reported_at=reported_at,
+ source=source,
+ min_id=min_id,
+ offset=offset,
+ limit=limit or 100,
+ sort=sort
+ )
+ # empty list
+ if not alerts_partial_results:
+ break
+ alerts.extend(alerts_partial_results)
+ offset = len(alerts)
+
+    # trim excess results
+    if len(alerts) > max_records:
+        del alerts[int(max_records):]
+
+ return alerts
+
+
+def general_context_from_event(alert):
+
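+    # each nested helper maps one HX event type to the matching Demisto context shape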
+ def file_context(values):
+
+ return {
+ 'Name': values.get('fileWriteEvent/fileName'),
+ 'MD5': values.get('fileWriteEvent/md5'),
+ 'Extension': values.get('fileWriteEvent/fileExtension'),
+ 'Path': values.get('fileWriteEvent/fullPath')
+ }
+
+ def ip_context(values):
+
+ return {
+ 'Address': values.get('ipv4NetworkEvent/remoteIP')
+ }
+
+ def registry_key_context(values):
+
+ return {
+ 'Path': values.get('regKeyEvent/path'),
+ 'Name': values.get('regKeyEvent/valueName'),
+ 'Value': values.get('regKeyEvent/value')
+ }
+ context_map = {
+ 'fileWriteEvent': file_context,
+ 'ipv4NetworkEvent': ip_context,
+ 'regKeyEvent': registry_key_context
+ }
+
+ if context_map.get(alert['event_type']) is not None:
+ f = context_map[alert['event_type']]
+ return f(alert['event_values'])
+ return None
+
+
+def collect_context(alerts):
+
+ # collect_context
+ files = []
+ ips = []
+ registry_keys = []
+
+ for alert in alerts:
+ event_type = alert.get('event_type')
+ context = general_context_from_event(alert)
+ if event_type == 'fileWriteEvent':
+ files.append(context)
+ elif event_type == 'ipv4NetworkEvent':
+ ips.append(context)
+ elif event_type == 'regKeyEvent':
+ registry_keys.append(context)
+ return (files, ips, registry_keys)
+
+
+def get_alerts():
+
+ """
+
+ returns a list of alerts to the war room
+
+ """
+
+ args = demisto.args()
+ source = []
+ # add source type
+ if args.get('MALsource'):
+ source.append('mal')
+ if args.get('EXDsource'):
+ source.append('exd')
+ if args.get('IOCsource'):
+ source.append('ioc')
+
+ sort_map = {
+ 'agentId': 'agent._id',
+ 'conditionId': 'condition._id',
+ 'eventAt': 'event_at',
+ 'alertId': '_id',
+ 'matchedAt': 'matched_at',
+ 'id': '_id',
+ 'reportedAt': 'reported_at'
+ }
+
+ if args.get('sort'):
+ args['sort'] = '{}+{}'.format(sort_map.get(args['sort']), args.get('sortOrder', 'ascending'))
+
+ if args.get('hostName'):
+ args['agentId'] = get_agent_id(args.get('hostName'))
+
+ if args.get('limit'):
+ args['limit'] = int(args['limit'])
+
+ alerts = get_all_alerts(
+ has_share_mode=args.get("hasShareMode"),
+ resolution=args.get('resolution'),
+ agent_id=args.get('agentId'),
+ condition_id=args.get('conditionId'),
+ event_at=args.get('eventAt'),
+ alert_id=args.get('alertId'),
+ matched_at=args.get('matchedAt'),
+ reported_at=args.get('reportedAt'),
+ source=source,
+ min_id=args.get('min_id'),
+ limit=args.get('limit'),
+ sort=args.get('sort')
+ )
+
+ # parse each alert to a record displayed in the human readable table
+ alerts_entries = [alert_entry(alert) for alert in alerts]
+
+ files, ips, registry_keys = collect_context(alerts)
+
+ md_table = tableToMarkdown(
+ 'FireEye HX Get Alerts',
+ alerts_entries,
+ headers=ALERT_MAIN_ATTRIBUTES
+ )
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': alerts,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md_table,
+ 'EntryContext': {
+ "FireEyeHX.Alerts(obj._id==val._id)": alerts,
+ 'File': files,
+ 'RegistryKey': registry_keys,
+ 'IP': ips
+ }
+ }
+ demisto.results(entry)
+
+
+def suppress_alert_request(alert_id):
+
+ """
+
+ no return value on successful request
+
+ """
+
+ url = '{}/alerts/{}'.format(BASE_PATH, alert_id)
+
+ http_request(
+ 'DELETE',
+ url
+ )
+
+
+def suppress_alert():
+
+ """
+
+ returns a success message to the war room
+
+ """
+
+ alert_id = demisto.args().get('alertId')
+
+ suppress_alert_request(alert_id)
+ # no exceptions raised->successful request
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': 'Alert {} suppressed successfully.'.format(alert_id),
+ 'ContentsFormat': formats['text']
+ }
+ demisto.results(entry)
+
+
+"""
+
+INDICATORS
+
+"""
+
+
+def new_indicator_request(category):
+
+ """
+ Create a new indicator
+ """
+ url = '{}/indicators/{}'.format(BASE_PATH, category)
+
+ response = http_request(
+ 'POST',
+ url,
+ headers=GET_HEADERS
+ )
+ try:
+ return response.json().get('data')
+ except Exception as e:
+ LOG(e)
+ raise ValueError('Failed to parse response body, unexpected response structure from the server.')
+
+
+def create_indicator():
+
+ """
+ Get new indicator details
+ returns a success message to the war room
+ """
+
+ category = demisto.args().get('category')
+
+ response = new_indicator_request(category)
+
+ md_table = {
+ 'ID': response.get('_id'),
+ }
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FireEye HX New Indicator created successfully', md_table),
+ 'EntryContext': {
+ "FireEyeHX.Indicators(obj._id===val._id)": response
+ }
+ }
+ demisto.results(entry)
+
+
+def append_conditions_request(name, category, body):
+
+ """
+ Append conditions to indicator request
+ """
+
+ url = '{}/indicators/{}/{}/conditions'.format(BASE_PATH, category, name)
+
+ response = http_request(
+ 'PATCH',
+ url,
+ conditions_params=body,
+ headers=PATCH_HEADERS
+ )
+
+ return response.json()
+
+
+def append_conditions():
+
+ """
+ Append conditions to indicator
+    no return value on successful request
+ """
+ name = demisto.args().get('name')
+ category = demisto.args().get('category')
+ body = demisto.args().get('condition')
+
+ body = body.replace(',', '\n')
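+    # the user supplies comma-separated conditions; the API body is sent as plain text, one per line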
+
+ response = append_conditions_request(name, category, body)
+
+ md_table = {
+ 'Name': name,
+ 'Category': category,
+ 'Conditions': body
+ }
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('The conditions were added successfully', md_table)
+ }
+ demisto.results(entry)
+
+
+def get_indicator_request(category, name):
+
+ """
+
+ returns a json object representing an indicator
+
+ """
+
+ url = '{}/indicators/{}/{}'.format(BASE_PATH, category, name)
+
+ response = http_request(
+ 'GET',
+ url,
+ headers=GET_HEADERS,
+ )
+ return response.json().get('data')
+
+
+def get_indicator_conditions_request(category, name, limit=None, offset=None, enabled=None, has_alerts=None):
+
+ """
+
+ returns a list of json objects, each representing an indicator condition
+ if no results are found- returns None
+
+ """
+ url = '{}/indicators/{}/{}/conditions'.format(BASE_PATH, category, name)
+ url_params = {
+ 'limit': limit,
+ 'offset': offset,
+ 'enabled': enabled,
+ 'has_alerts': has_alerts
+ }
+ # remove None values
+ url_params = {k: v for k, v in url_params.items() if v is not None}
+
+ response = http_request(
+ 'GET',
+ url,
+ headers=GET_HEADERS,
+ url_params=url_params
+ )
+ try:
+ return response.json()['data']['entries']
+ except Exception as e:
+ LOG(e)
+ raise ValueError('Failed to parse response body')
+
+
+def get_all_enabled_conditions(indicator_category, indicator_name):
+
+ offset = 0
+ conditions = [] # type: List[Dict[str, str]]
+
+ # get all results
+ while True:
+ conditions_partial_results = get_indicator_conditions_request(
+ indicator_category,
+ indicator_name,
+ enabled=True,
+ offset=offset
+ )
+ if not conditions_partial_results:
+ break
+ conditions.extend(conditions_partial_results)
+ offset = len(conditions)
+ return conditions
+
+
+def get_indicator_conditions():
+
+ """
+
+    returns a list of enabled conditions associated with a specific indicator to the war room
+
+ """
+
+ args = demisto.args()
+
+ conditions = get_all_enabled_conditions(
+ args.get('category'),
+ args.get('name')
+ )
+
+ conditions_entries = [condition_entry(condition) for condition in conditions]
+
+ md_table = tableToMarkdown(
+ 'Indicator "{}" Alerts on'.format(args.get('name')),
+ conditions_entries
+ )
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': conditions,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md_table,
+ 'EntryContext': {
+ "FireEyeHX.Conditions(obj._id==val._id)": conditions
+ }
+ }
+ demisto.results(entry)
+
+
+def get_indicator():
+
+ args = demisto.args()
+
+ indicator = get_indicator_request(
+ args.get('category'),
+ args.get('name')
+ )
+
+ md_table = tableToMarkdown(
+        'FireEye HX Get Indicator - {}'.format(args.get('name')),
+ indicator_entry(indicator),
+ headers=INDICATOR_MAIN_ATTRIBUTES
+ )
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': indicator,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md_table,
+ 'EntryContext': {
+ "FireEyeHX.Indicators(obj._id==val._id)": indicator
+ }
+ }
+ demisto.results(entry)
+
+
+def get_indicators_request(category=None, search=None, limit=None, offset=None,
+ share_mode=None, sort=None, created_by=None, alerted=None):
+
+ url = '{}/indicators'.format(BASE_PATH)
+ if category:
+ url = url + '/' + category
+
+ url_params = {
+ 'search': search,
+ 'limit': limit,
+ 'offset': offset,
+ 'category.share_mode': share_mode,
+ 'sort': sort,
+ 'created_by': created_by,
+ 'stats.alerted_agents': alerted
+ }
+
+ # remove None value
+ url_params = {k: v for k, v in url_params.items() if v}
+
+ response = http_request(
+ 'GET',
+ url,
+ url_params=url_params,
+ headers=GET_HEADERS,
+ )
+ try:
+ response_body = response.json()
+ data = response_body['data']
+ # no results found
+ if data['total'] == 0:
+ return None
+ return data['entries']
+ except Exception as e:
+ LOG(e)
+ raise ValueError('Failed to parse response body')
+
+
+def get_all_indicators(category=None, search=None, share_mode=None, sort=None, created_by=None, alerted=None, limit=None):
+
+ max_records = limit or float('inf')
+ offset = 0
+ indicators = [] # type: List[Dict[str, str]]
+
+ # get all results
+ while len(indicators) < max_records:
+ indicators_partial_results = get_indicators_request(
+ category=category,
+ search=search,
+ offset=offset,
+ share_mode=share_mode,
+ sort=sort,
+ created_by=created_by,
+ alerted=alerted,
+ limit=limit or 100
+ )
+ if not indicators_partial_results:
+ break
+ indicators.extend(indicators_partial_results)
+ offset = len(indicators)
+
+    # trim excess results
+    if len(indicators) > max_records:
+        del indicators[int(max_records):]
+
+ return indicators
+
+
+def get_indicators():
+
+ args = demisto.args()
+
+ sort_map = {
+ 'category': 'category',
+ 'activeSince': 'active_since',
+ 'createdBy': 'created_by',
+ 'alerted': 'stats.alerted_agents'
+ }
+
+ if args.get('limit'):
+ args['limit'] = int(args['limit'])
+ if args.get('alerted'):
+ args['alerted'] = args['alerted'] == 'yes'
+ if args.get('sort'):
+ args['sort'] = sort_map.get(args.get('sort'))
+
+ # get all results
+ indicators = get_all_indicators(
+ category=args.get('category'),
+ search=args.get('searchTerm'),
+ share_mode=args.get('shareMode'),
+ sort=args.get('sort'),
+ created_by=args.get('createdBy'),
+ alerted=args.get('alerted'),
+ limit=args.get('limit')
+ )
+
+ indicators_entries = [indicator_entry(indicator) for indicator in indicators]
+
+    md_table = tableToMarkdown(
+        'FireEye HX Get Indicators',
+ indicators_entries,
+ headers=INDICATOR_MAIN_ATTRIBUTES
+ )
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': indicators,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md_table,
+ 'EntryContext': {
+ "FireEyeHX.Indicators(obj._id==val._id)": indicators
+ }
+ }
+ demisto.results(entry)
+
+
+"""
+
+SEARCH
+
+"""
+
+
+def search_request(query, host_set=None, hosts=None, exhaustive=False):
+
+ url = '{}/searches'.format(BASE_PATH)
+
+ body = {'query': query}
+
+ if host_set:
+ body['host_set'] = {'_id': int(host_set)}
+ elif hosts:
+ body['hosts'] = [{'_id': host} for host in hosts]
+
+ if exhaustive:
+ body['exhaustive'] = True
+
+ try:
+ response = http_request(
+ 'POST',
+ url,
+ headers=POST_HEADERS,
+ body=body
+ )
+ except ValueError as e:
+ # http_request raises ValueError on non-2xx status codes -
+ # translate the 409 case into a clearer message
+ if '409' in str(e):
+ raise ValueError('Request unsuccessful because the search limits '
+ '(10 existing searches or 5 running searches) have been exceeded')
+ raise
+ return response.json().get('data')
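+
+# Example request body built above (illustrative values):
+# {'query': [{'field': 'IP Address', 'operator': 'equals', 'value': '8.8.8.8'}],
+#  'host_set': {'_id': 1001}, 'exhaustive': True}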
+
+
+def get_search_information_request(search_id):
+
+ """
+
+ returns the search information represented by a json object.
+
+ """
+
+ url = '{}/searches/{}'.format(BASE_PATH, search_id)
+
+ response = http_request(
+ 'GET',
+ url,
+ headers=GET_HEADERS
+ )
+ return response.json().get('data')
+
+
+def get_search_results_request(search_id):
+
+ """
+
+ returns the search results represented by a json object.
+
+ """
+
+ url = '{}/searches/{}/results'.format(BASE_PATH, search_id)
+
+ response = http_request(
+ 'GET',
+ url,
+ headers=GET_HEADERS
+ )
+ return response.json().get('data', {}).get('entries', [])
+
+
+def stop_search_request(search_id):
+
+ """
+
+ returns the result of the stop action represented by a json object.
+
+ """
+
+ url = '{}/searches/{}/actions/stop'.format(BASE_PATH, search_id)
+
+ response = http_request(
+ 'POST',
+ url,
+ headers=POST_HEADERS
+ )
+ return response.json()
+
+
+def delete_search_request(search_id):
+
+ """
+
+ no return value on successful request
+
+ """
+
+ url = '{}/searches/{}'.format(BASE_PATH, search_id)
+ http_request(
+ 'DELETE',
+ url
+ )
+
+
+def search_results_to_context(results, search_id):
+
+ for res in results:
+ res["SearchID"] = search_id
+ res["HostID"] = res.get("host", {}).get("_id")
+ res["HostName"] = res.get("host", {}).get("hostname")
+ res["HostUrl"] = res.get("host", {}).get("url")
+ del res['host']
+ res["Results"] = res.get("results")
+ del res["results"]
+ for resData in res.get("Results"):
+ resData.update(resData.get("data", {}))
+ del resData['data']
+ return results
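+
+# Illustrative transformation (field values assumed): an input entry
+# {'host': {'_id': 'uGvn', 'hostname': 'WIN-HOST', 'url': '/hx/api/v3/hosts/uGvn'},
+#  'results': [{'type': 'RegKey', 'data': {'Path': 'HKLM\\Software\\Run'}}]}
+# becomes
+# {'SearchID': <search_id>, 'HostID': 'uGvn', 'HostName': 'WIN-HOST',
+#  'HostUrl': '/hx/api/v3/hosts/uGvn',
+#  'Results': [{'type': 'RegKey', 'Path': 'HKLM\\Software\\Run'}]}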
+
+
+def start_search():
+
+ args = demisto.args()
+
+ '''
+ To search all hosts, pass none of the following arguments.
+
+ # validate at least one of the arguments 'agentsIds', 'hostsNames', 'hostSet' was passed
+ if not any([args.get('agentsIds'), args.get('hostsNames'), args.get('hostSet'), args.get('searchAllHosts')]):
+ raise ValueError('Please provide one of the following: agentsIds, hostsNames, hostSet')
+ '''
+
+ agents_ids = [] # type: List[Dict[str, str]]
+ if args.get('agentsIds'):
+ agents_ids = args['agentsIds'].split(',')
+ elif args.get('hostsNames'):
+ names = args.get('hostsNames').split(',')
+ for name in names:
+ try:
+ agent_id = get_agent_id(name)
+ agents_ids.append(agent_id)
+ except Exception as e:
+ LOG(e)
+ if not agents_ids:
+ raise ValueError('None of the host names were matched with an agent')
+
+ # limit is passed as a string and can't exceed 1000
+ limit = int(args['limit']) if args.get('limit') else 1000
+ if limit > 1000:
+ limit = 1000
+
+ arg_to_query_field_map = {
+ 'dnsHostname': 'DNS Hostname',
+ 'fileFullPath': 'File Full Path',
+ 'fileMD5Hash': 'File MD5 Hash',
+ 'ipAddress': 'IP Address'
+ }
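+ # e.g. (illustrative) args {'ipAddress': '8.8.8.8', 'ipAddressOperator': 'equals'}
+ # contribute the query entry {'field': 'IP Address', 'operator': 'equals', 'value': '8.8.8.8'}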
+
+ query = []
+ for arg in arg_to_query_field_map.keys():
+ if not args.get(arg):
+ continue
+ field_filter = {
+ 'field': arg_to_query_field_map[arg],
+ 'operator': args['{}Operator'.format(arg)],
+ 'value': args[arg]
+ }
+ query.append(field_filter)
+
+ search = search_request(
+ query,
+ hosts=agents_ids,
+ host_set=args.get('hostSet'),
+ exhaustive=args.get('exhaustive') == 'yes'
+ )
+
+ search_id = search.get('_id')
+
+ '''
+ loop to get search status once a minute. break on: search has stopped, matched
+ results exceeded limit, or no more pending hosts.
+ '''
+
+ while True:
+ search_info = get_search_information_request(search_id)
+ matched = search_info.get('stats', {}).get('search_state', {}).get('MATCHED', 0)
+ pending = search_info.get('stats', {}).get('search_state', {}).get('PENDING', 0)
+ if search_info.get('state') == 'STOPPED' or matched >= limit or pending == 0:
+ break
+ time.sleep(60)
+
+ results = get_search_results_request(search_id)
+ md_entries = [host_results_md_entry(host_results) for host_results in results]
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': results,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': '## Search Results\n' + '\n'.join(md_entries),
+ 'EntryContext': {
+ "FireEyeHX.Search": search_results_to_context(results, search_id)
+ }
+ }
+ demisto.results(entry)
+
+ # finally stop or delete the search
+ possible_error_message = None
+ try:
+ if args.get('stopSearch') == 'stop':
+ possible_error_message = 'Failed to stop search'
+ stop_search_request(search_id)
+ # no need to stop a search before deleting it.
+ if args.get('stopSearch') == 'stopAndDelete':
+ possible_error_message = 'Failed to delete search'
+ delete_search_request(search_id)
+ possible_error_message = None
+ except Exception as e:
+ LOG('{}\n{}'.format(possible_error_message, e))
+ # add warning entry if necessary
+ if possible_error_message:
+ warning_entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': possible_error_message,
+ 'ContentsFormat': formats['text'],
+ }
+ demisto.results(warning_entry)
+
+
+"""
+
+ACQUISITIONS
+
+"""
+
+
+def file_acquisition_request(agent_id, file_name, file_path, comment=None, external_id=None, req_use_api=None):
+
+ url = '{}/hosts/{}/files'.format(BASE_PATH, agent_id)
+
+ body = {
+ 'req_path': file_path,
+ 'req_filename': file_name,
+ 'comment': comment,
+ 'external_id': external_id,
+ 'req_use_api': req_use_api
+ }
+
+ # remove None values
+ body = {k: v for k, v in body.items() if v is not None}
+
+ response = http_request(
+ 'POST',
+ url,
+ body=body,
+ headers=POST_HEADERS
+ )
+
+ return response.json().get('data')
+
+
+def file_acquisition_package_request(acquisition_id):
+
+ url = '{}/acqs/files/{}.zip'.format(BASE_PATH, acquisition_id)
+
+ response = http_request(
+ 'GET',
+ url
+ )
+
+ return response.content
+
+
+def file_acquisition_information_request(acquisition_id):
+
+ url = '{}/acqs/files/{}'.format(BASE_PATH, acquisition_id)
+
+ response = http_request(
+ 'GET',
+ url,
+ headers=GET_HEADERS
+ )
+
+ return response.json().get('data')
+
+
+def delete_file_acquisition_request(acquisition_id):
+
+ """
+
+ no return value on successful request
+
+ """
+
+ url = '{}/acqs/files/{}'.format(BASE_PATH, acquisition_id)
+
+ http_request(
+ 'DELETE',
+ url
+ )
+
+
+def delete_file_acquisition():
+
+ """
+
+ returns a success message to the war room
+
+ """
+ acquisition_id = demisto.args().get('acquisitionId')
+ delete_file_acquisition_request(acquisition_id)
+ # successful request
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': 'file acquisition {} deleted successfully'.format(acquisition_id),
+ 'ContentsFormat': formats['text'],
+ }
+
+
+def file_acquisition():
+
+ args = demisto.args()
+
+ if not args.get('hostName') and not args.get('agentId'):
+ raise ValueError('Please provide either agentId or hostName')
+
+ if args.get('hostName'):
+ args['agentId'] = get_agent_id(args['hostName'])
+
+ use_api = args.get('acquireUsing') == 'API'
+
+ acquisition_info = file_acquisition_request(
+ args.get('agentId'),
+ args.get('fileName'),
+ args.get('filePath'),
+ req_use_api=use_api
+ )
+
+ acquisition_id = acquisition_info.get('_id')
+
+ LOG('Acquisition request was successful. Waiting for acquisition process to complete.')
+ while True:
+ acquisition_info = file_acquisition_information_request(acquisition_id)
+ if acquisition_info.get('state') == 'COMPLETE':
+ break
+ time.sleep(10)
+ LOG('Acquisition process is complete. Fetching zip file.')
+
+ acquired_file = file_acquisition_package_request(acquisition_id)
+
+ message = '{} acquired successfully'.format(args.get('fileName'))
+ if acquisition_info.get('error_message'):
+ message = acquisition_info.get('error_message')
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': '{}\nacquisition ID: {}'.format(message, acquisition_id),
+ 'ContentsFormat': formats['text'],
+ 'EntryContext': {
+ "FireEyeHX.Acquisitions.Files(obj._id==val._id)": acquisition_info
+ }
+ }
+
+ demisto.results(entry)
+ demisto.results(fileResult('{}.zip'.format(os.path.splitext(args.get('fileName'))[0]), acquired_file))
+
+
+def data_acquisition_request(agent_id, script_name, script):
+
+ url = '{}/hosts/{}/live'.format(BASE_PATH, agent_id)
+
+ body = {
+ 'name': script_name,
+ 'script': {'b64': script}
+ }
+
+ response = http_request(
+ 'POST',
+ url,
+ body=body
+ )
+
+ return response.json()['data']
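+
+# Example request body (illustrative; 'winDefaultScript' is the name derived for
+# the default Windows script): {'name': 'winDefaultScript',
+# 'script': {'b64': '<base64-encoded acquisition script>'}}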
+
+
+def data_acquisition_information_request(acquisition_id):
+
+ url = '{}/acqs/live/{}'.format(BASE_PATH, acquisition_id)
+
+ response = http_request(
+ 'GET',
+ url,
+ headers=GET_HEADERS
+ )
+
+ return response.json()['data']
+
+
+def data_collection_request(acquisition_id):
+
+ url = '{}/acqs/live/{}.mans'.format(BASE_PATH, acquisition_id)
+
+ response = http_request(
+ 'GET',
+ url
+ )
+
+ return response.content
+
+
+def data_acquisition():
+ """
+
+ returns the mans file to the war room
+
+ """
+
+ args = demisto.args()
+
+ # validate the host name or agent ID was passed
+ if not args.get('hostName') and not args.get('agentId'):
+ raise ValueError('Please provide either agentId or hostName')
+
+ if not args.get('defaultSystemScript') and not args.get('script'):
+ raise ValueError('If the script is not provided, defaultSystemScript must be specified.')
+
+ if args.get('script') and not args.get('scriptName'):
+ raise ValueError('If the script is provided, script name must be specified as well.')
+
+ if args.get('hostName'):
+ args['agentId'] = get_agent_id(args['hostName'])
+
+ # determine whether to use the default script
+ sys = args.get('defaultSystemScript')
+ if sys:
+ args['script'] = json.dumps(SYS_SCRIPT_MAP[sys])
+ args['scriptName'] = '{}DefaultScript'.format(sys)
+
+ acquisition_info = data_acquisition_request(
+ args['agentId'],
+ args['scriptName'],
+ base64.b64encode(args['script'])
+ )
+
+ acquisition_id = acquisition_info.get('_id')
+
+ LOG('Acquisition request was successful. Waiting for acquisition process to complete.')
+ # loop to inquire acquisition state every 30 seconds
+ # break when state is complete
+ while True:
+ acquisition_info = data_acquisition_information_request(acquisition_id)
+ if acquisition_info.get('state') == 'COMPLETE':
+ break
+ time.sleep(30)
+ LOG('Acquisition process is complete. Fetching mans file.')
+
+ message = '{} acquired successfully'.format(args.get('scriptName'))
+ if acquisition_info.get('error_message'):
+ message = acquisition_info.get('error_message')
+
+ # output file and acquisition information to the war room
+ data = data_collection_request(acquisition_id)
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': '{}\nacquisition ID: {}'.format(message, acquisition_id),
+ 'ContentsFormat': formats['text'],
+ 'EntryContext': {
+ "FireEyeHX.Acquisitions.Data(obj._id==val._id)": acquisition_info
+ }
+ }
+ demisto.results(entry)
+ demisto.results(fileResult('agent_{}_data.mans'.format(args['agentId']), data))
+
+
+def delete_data_acquisition_request(acquisition_id):
+
+ """
+
+ no return value on successful request
+
+ """
+
+ url = '{}/acqs/live/{}'.format(BASE_PATH, acquisition_id)
+
+ http_request(
+ 'DELETE',
+ url
+ )
+
+
+def delete_data_acquisition():
+
+ """
+
+ returns a success message to the war room
+
+ """
+ acquisition_id = demisto.args().get('acquisitionId')
+ delete_data_acquisition_request(acquisition_id)
+ # successful request
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': 'data acquisition {} deleted successfully'.format(acquisition_id),
+ 'ContentsFormat': formats['text'],
+ }
+
+
+"""
+
+FETCH INCIDENTS
+
+"""
+
+
+def fetch_incidents():
+
+ last_run = demisto.getLastRun()
+ alerts = [] # type: List[Dict[str, str]]
+ if last_run and last_run.get('min_id'):
+ # get all alerts with id greater than min_id
+ alerts = get_all_alerts(
+ min_id=last_run.get('min_id'),
+ sort='_id+ascending'
+ )
+ # results are sorted in ascending order - the last alert holds the greatest id
+ min_id = alerts[-1].get('_id') if alerts else None
+ else:
+ # get the last 100 alerts
+ alerts = get_all_alerts(
+ sort='_id+descending',
+ limit=100
+ )
+ # results are sorted in descending order - the first alert holds the greatest id
+ min_id = alerts[0].get('_id') if alerts else None
+
+ incidents = [parse_alert_to_incident(alert) for alert in alerts]
+ demisto.incidents(incidents)
+ if min_id is not None:
+ demisto.setLastRun({'min_id': min_id})
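+
+# Example run sequence (illustrative): the first run stores the newest alert id as
+# min_id; each later run fetches only alerts with id greater than min_id, in
+# ascending order, and advances min_id to the greatest id returned.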
+
+
+def parse_alert_to_incident(alert):
+
+ event_type = alert.get('event_type')
+ event_type = 'NewEvent' if not event_type else event_type
+ event_values = alert.get('event_values', {})
+ event_indicators_map = {
+ 'fileWriteEvent': 'fileWriteEvent/fileName',
+ 'ipv4NetworkEvent': 'ipv4NetworkEvent/remoteIP',
+ 'dnsLookupEvent': 'dnsLookupEvent/hostname',
+ 'regKeyEvent': 'regKeyEvent/valueName'
+ }
+ event_indicator = event_indicators_map.get(event_type)
+ event_indicator = 'No Indicator' if not event_indicator else event_indicator
+
+ incident_name = '{event_type_parsed}: {indicator}'.format(
+ event_type_parsed=re.sub("([a-z])([A-Z])", r"\g<1> \g<2>", event_type).title(),
+ indicator=event_values.get(event_indicator)
+ )
+
+ incident = {
+ 'name': incident_name,
+ 'rawJSON': json.dumps(alert)
+ }
+ return incident
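+
+# Illustrative example (alert values assumed): an alert with event_type
+# 'fileWriteEvent' and event_values {'fileWriteEvent/fileName': 'evil.exe'}
+# produces the incident name 'File Write Event: evil.exe'.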
+
+
+"""
+
+ENTRY ENTITIES
+
+"""
+
+
+def indicator_entry(indicator):
+
+ indicator_entry = {
+ 'OS': ', '.join(indicator.get('platforms', [])),
+ 'Name': indicator.get('name'),
+ 'Created By': indicator.get('created_by'),
+ 'Active Since': indicator.get('active_since'),
+ 'Category': indicator.get('category', {}).get('name'),
+ 'Signature': indicator.get('signature'),
+ 'Active Condition': indicator.get('stats', {}).get('active_conditions'),
+ 'Hosts With Alerts': indicator.get('stats', {}).get('alerted_agents'),
+ 'Source Alerts': indicator.get('stats', {}).get('source_alerts')
+ }
+ return indicator_entry
+
+
+def host_entry(host):
+
+ host_entry = {
+ 'Host Name': host.get('hostname'),
+ 'Last Poll': host.get('last_poll_timestamp'),
+ 'Agent ID': host.get('_id'),
+ 'Agent Version': host.get('agent_version'),
+ 'Host IP': host.get('primary_ip_address'),
+ 'OS': host.get('os', {}).get('platform'),
+ 'Containment State': host.get('containment_state'),
+ 'Domain': host.get('domain'),
+ 'Last Alert': host.get('last_alert')
+ }
+ return host_entry
+
+
+def host_set_entry(host_sets):
+ host_set_entries = [{
+ 'Name': host_set.get('name'),
+ 'ID': host_set.get('_id'),
+ 'Type': host_set.get('type')
+ } for host_set in host_sets]
+ return host_set_entries
+
+
+def alert_entry(alert):
+
+ alert_entry = {
+ 'Alert ID': alert.get('_id'),
+ 'Reported': alert.get('reported_at'),
+ 'Event Type': alert.get('event_type'),
+ 'Agent ID': alert.get('agent', {}).get('_id')
+ }
+ return alert_entry
+
+
+def condition_entry(condition):
+
+ indicator_entry = {
+ 'Event Type': condition.get('event_type'),
+ 'Operator': condition.get('tests', {})[0].get('operator'),
+ 'Value': condition.get('tests', {})[0].get('value'),
+
+ }
+ return indicator_entry
+
+
+def host_results_md_entry(host_entry):
+
+ results = host_entry.get('results', [])
+ host_info = host_entry.get('host', {})
+ entries = []
+ for result in results:
+ data = result.get('data', {})
+ entry = {
+ 'Item Type': result.get('type'),
+ 'Summary': ' '.join(['**{}** {}'.format(k, v) for k, v in data.items()])
+ }
+ entries.append(entry)
+
+ md_table = tableToMarkdown(
+ host_info.get('hostname'),
+ entries,
+ headers=['Item Type', 'Summary']
+ )
+ return md_table
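+
+# Illustrative output (values assumed): a result {'type': 'RegKey',
+# 'data': {'valueName': 'Run'}} renders as the row | RegKey | **valueName** Run |
+# in a table titled with the hostname.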
+
+
+"""
+
+ADDITIONAL FUNCTIONS
+
+"""
+
+
+def http_request(method, url, body=None, headers=None, url_params=None, conditions_params=None):
+ """
+
+ returns the http response
+
+ """
+
+ # copy headers to avoid mutating the caller's dict, then add the auth token
+ headers = dict(headers or {})
+ headers['X-FeApi-Token'] = TOKEN
+
+ request_kwargs = {
+ 'headers': headers,
+ 'verify': USE_SSL
+ }
+
+ # add optional arguments if specified
+ if body:
+ # request_kwargs['data'] = ' '.join(format(x, 'b') for x in bytearray(json.dumps(body)))
+ request_kwargs['data'] = json.dumps(body)
+ if url_params:
+ request_kwargs['params'] = url_params
+ if conditions_params:
+ request_kwargs['data'] = conditions_params
+
+ LOG('attempting {} request sent to {} with arguments:\n{}'.format(method, url, json.dumps(request_kwargs, indent=4)))
+ try:
+ response = requests.request(
+ method,
+ url,
+ **request_kwargs
+ )
+ except requests.exceptions.SSLError as e:
+ LOG(e)
+ raise ValueError('An SSL error occurred when trying to connect to the server. '
+ 'Consider configuring an unsecure connection in the integration settings.')
+
+ # handle request failure
+ if response.status_code not in range(200, 205):
+ message = parse_error_response(response)
+ raise ValueError('Request failed with status code {}\n{}'.format(response.status_code, message))
+
+ return response
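+
+# Note: any status code outside 200-204 raises ValueError above, so callers may
+# treat a returned response object as successful.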
+
+
+def logout():
+
+ url = '{}/token'.format(BASE_PATH)
+
+ try:
+ http_request(
+ 'DELETE',
+ url
+ )
+ except ValueError as e:
+ LOG('Failed to logout with token')
+ raise e
+ LOG('logged out successfully')
+
+
+def parse_error_response(response):
+
+ try:
+ res = response.json()
+ msg = res.get('message')
+ details = res.get('details')
+ if details and details[0].get('message') is not None:
+ msg = msg + "\n" + json.dumps(details[0])
+ except Exception as e:
+ LOG(e)
+ return response.text
+ return msg
+
+
+def return_error_entry(message):
+
+ error_entry = {
+ 'Type': entryTypes['error'],
+ 'Contents': message,
+ 'ContentsFormat': formats['text']
+ }
+
+ demisto.results(error_entry)
+
+
+"""
+
+EXECUTION
+
+"""
+
+
+set_proxies()
+
+command = demisto.command()
+LOG('Running command "{}"'.format(command))
+
+# ask for a token using user credentials
+TOKEN = get_token()
+
+try:
+ if command == 'test-module':
+ # token generated - credentials are valid
+ demisto.results('ok')
+ elif command == 'fetch-incidents':
+ fetch_incidents()
+ elif command == 'fireeye-hx-get-alerts':
+ get_alerts()
+ elif command == 'fireeye-hx-cancel-containment':
+ containment_cancellation()
+ elif command == 'fireeye-hx-host-containment':
+ containment()
+ elif command == 'fireeye-hx-create-indicator':
+ create_indicator()
+ elif command == 'fireeye-hx-get-indicator':
+ get_indicator()
+ get_indicator_conditions()
+ elif command == 'fireeye-hx-get-indicators':
+ get_indicators()
+ elif command == 'fireeye-hx-suppress-alert':
+ suppress_alert()
+ elif command == 'fireeye-hx-get-host-information':
+ get_host_information()
+ elif command == 'fireeye-hx-get-alert':
+ get_alert()
+ elif command == 'fireeye-hx-file-acquisition':
+ file_acquisition()
+ elif command == 'fireeye-hx-delete-file-acquisition':
+ demisto.results(delete_file_acquisition())
+ elif command == 'fireeye-hx-data-acquisition':
+ data_acquisition()
+ elif command == 'fireeye-hx-delete-data-acquisition':
+ demisto.results(delete_data_acquisition())
+ elif command == 'fireeye-hx-search':
+ start_search()
+ elif command == 'fireeye-hx-get-host-set-information':
+ get_host_set_information()
+ elif command == 'fireeye-hx-append-conditions':
+ append_conditions()
+ elif command == 'fireeye-hx-get-all-hosts-information':
+ get_hosts_information()
+except ValueError as e:
+ LOG(e)
+ LOG.print_log()
+ return_error(e)
+finally:
+ logout()
diff --git a/Integrations/FireEyeHX/FireEyeHX.yml b/Integrations/FireEyeHX/FireEyeHX.yml
new file mode 100644
index 000000000000..09891f5e3748
--- /dev/null
+++ b/Integrations/FireEyeHX/FireEyeHX.yml
@@ -0,0 +1,860 @@
+commonfields:
+ id: FireEye HX
+ version: -1
+name: FireEye HX
+display: FireEye HX
+category: Endpoint
+description: FireEye Endpoint Security is an integrated solution that detects what others miss and protects endpoints against known and unknown threats. The HX Demisto integration provides access to information about endpoints, acquisitions, alerts, indicators, and containment. Customers can extract critical data and effectively operate security operations from an automated playbook.
+configuration:
+ - display: Server URL (e.g. https://192.168.0.1:3000)
+ name: server
+ defaultvalue: ""
+ type: 0
+ required: true
+ - display: Credentials
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: true
+ - display: Version
+ name: version
+ defaultvalue: v3
+ type: 0
+ required: true
+ - display: Trust any certificate (unsecure)
+ name: insecure
+ defaultvalue: "true"
+ type: 8
+ required: false
+ - display: Use system proxy settings
+ name: proxy
+ defaultvalue: "false"
+ type: 8
+ required: false
+ - display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+ - display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+script:
+ script: '-'
+ type: python
+ commands:
+ - name: fireeye-hx-host-containment
+ arguments:
+ - name: hostName
+ description: The host name to be contained. If the hostName is not specified,
+ the agentId must be specified.
+ - name: agentId
+ description: The agent id running on the host to be contained. If the agentId
+ is not specified, the hostName must be specified.
+ outputs:
+ - contextPath: FireEyeHX.Hosts._id
+ description: FireEye HX Agent ID.
+ - contextPath: FireEyeHX.Hosts.agent_version
+ description: The agent version.
+ - contextPath: FireEyeHX.Hosts.excluded_from_containment
+ description: Determines whether the host is excluded from containment.
+ - contextPath: FireEyeHX.Hosts.containment_missing_software
+ description: Boolean value to indicate for containment missing software.
+ - contextPath: FireEyeHX.Hosts.containment_queued
+ description: Determines whether the host is queued for containment.
+ - contextPath: FireEyeHX.Hosts.containment_state
+ description: The containment state of the host. Possible values are normal|contain|contain_fail|containing|contained|uncontain|uncontaining|wtfc|wtfu.
+ - contextPath: FireEyeHX.Hosts.stats.alerting_conditions
+ description: The number of conditions that have alerted for the host.
+ - contextPath: FireEyeHX.Hosts.stats.alerts
+ description: Total number of alerts, including exploit-detection alerts.
+ - contextPath: FireEyeHX.Hosts.stats.exploit_blocks
+ description: The number of blocked exploits on the host.
+ - contextPath: FireEyeHX.Hosts.stats.malware_alerts
+ description: The number of malware alerts associated with the host.
+ - contextPath: FireEyeHX.Hosts.hostname
+ description: The host name.
+ - contextPath: FireEyeHX.Hosts.domain
+ description: Domain name.
+ - contextPath: FireEyeHX.Hosts.timezone
+ description: Host time zone.
+ - contextPath: FireEyeHX.Hosts.primary_ip_address
+ description: The host IP address.
+ - contextPath: FireEyeHX.Hosts.last_poll_timestamp
+ description: The timestamp of the last system poll performed on the host.
+ - contextPath: FireEyeHX.Hosts.initial_agent_checkin
+ description: Timestamp of the initial agent check-in.
+ - contextPath: FireEyeHX.Hosts.last_alert_timestamp
+ description: The time stamp of the last alert for the host.
+ - contextPath: FireEyeHX.Hosts.last_exploit_block_timestamp
+ description: Time when the last exploit was blocked on the host. The value is
+ null if no exploits have been blocked.
+ - contextPath: FireEyeHX.Hosts.os.product_name
+ description: Specific operating system
+ - contextPath: FireEyeHX.Hosts.os.bitness
+ description: OS Bitness.
+ - contextPath: FireEyeHX.Hosts.os.platform
+ description: Family of operating systems. Valid values are win, osx, and linux.
+ - contextPath: FireEyeHX.Hosts.primary_mac
+ description: The host MAC address.
+ description: Apply containment for a specific host, so that it no longer has access
+ to other systems.
+ - name: fireeye-hx-cancel-containment
+ arguments:
+ - name: hostName
+ description: The host name to be contained. If the hostName is not specified,
+ the agentId must be specified.
+ - name: agentId
+ description: The agent id running on the host to be contained. If the agentId
+ is not specified, the hostName must be specified.
+ outputs:
+ - contextPath: FireEyeHX.Hosts._id
+ description: FireEye HX Agent ID.
+ - contextPath: FireEyeHX.Hosts.agent_version
+ description: The agent version.
+ - contextPath: FireEyeHX.Hosts.excluded_from_containment
+ description: Determines whether the host is excluded from containment.
+ - contextPath: FireEyeHX.Hosts.containment_missing_software
+ description: Boolean value to indicate for containment missing software.
+ - contextPath: FireEyeHX.Hosts.containment_queued
+ description: Determines whether the host is queued for containment.
+ - contextPath: FireEyeHX.Hosts.containment_state
+ description: The containment state of the host. Possible values are normal|contain|contain_fail|containing|contained|uncontain|uncontaining|wtfc|wtfu.
+ - contextPath: FireEyeHX.Hosts.stats.alerting_conditions
+ description: The number of conditions that have alerted for the host.
+ - contextPath: FireEyeHX.Hosts.stats.alerts
+ description: Total number of alerts, including exploit-detection alerts.
+ - contextPath: FireEyeHX.Hosts.stats.exploit_blocks
+ description: The number of blocked exploits on the host.
+ - contextPath: FireEyeHX.Hosts.stats.malware_alerts
+ description: The number of malware alerts associated with the host.
+ - contextPath: FireEyeHX.Hosts.hostname
+ description: The host name.
+ - contextPath: FireEyeHX.Hosts.domain
+ description: Domain name.
+ - contextPath: FireEyeHX.Hosts.timezone
+ description: Host time zone.
+ - contextPath: FireEyeHX.Hosts.primary_ip_address
+ description: The host IP address.
+ - contextPath: FireEyeHX.Hosts.last_poll_timestamp
+ description: The timestamp of the last system poll performed on the host.
+ - contextPath: FireEyeHX.Hosts.initial_agent_checkin
+ description: Timestamp of the initial agent check-in.
+ - contextPath: FireEyeHX.Hosts.last_alert_timestamp
+ description: The time stamp of the last alert for the host.
+ - contextPath: FireEyeHX.Hosts.last_exploit_block_timestamp
+ description: Time when the last exploit was blocked on the host. The value is
+ null if no exploits have been blocked.
+ - contextPath: FireEyeHX.Hosts.os.product_name
+ description: Specific operating system
+ - contextPath: FireEyeHX.Hosts.os.bitness
+ description: OS Bitness.
+ - contextPath: FireEyeHX.Hosts.os.platform
+ description: Family of operating systems. Valid values are win, osx, and linux.
+ - contextPath: FireEyeHX.Hosts.primary_mac
+ description: The host MAC address.
+ description: Release a specific host from containment.
+ - name: fireeye-hx-get-alerts
+ arguments:
+ - name: hasShareMode
+ auto: PREDEFINED
+ predefined:
+ - any
+ - restricted
+ - unrestricted
+ description: Identifies which alerts result from indicators with the specified
+ share mode.
+ - name: resolution
+ auto: PREDEFINED
+ predefined:
+ - active_threat
+ - alert
+ - block
+ - partial_block
+ description: Filter alerts by the specified resolution.
+ - name: agentId
+ description: Filter by the agent ID.
+ - name: conditionId
+ description: Filter by condition ID.
+ - name: eventAt
+ description: Filter by event occurrence time. ISO-8601 timestamp.
+ - name: alertId
+ description: Filter by alert ID.
+ - name: matchedAt
+ description: Filter by match detection time. ISO-8601 timestamp.
+ - name: minId
+ description: Filter that returns only records with an AlertId field value greater
+ than the minId value.
+ - name: reportedAt
+ description: Filter by reported time. ISO-8601 timestamp.
+ - name: IOCsource
+ auto: PREDEFINED
+ predefined:
+ - "yes"
+ description: Source of alert - indicator of compromise.
+ - name: EXDsource
+ auto: PREDEFINED
+ predefined:
+ - "yes"
+ description: Source of alert - exploit detection
+ - name: MALsource
+ auto: PREDEFINED
+ predefined:
+ - "yes"
+ description: Source of alert - malware alert
+ - name: limit
+ description: Limit the results returned
+ - name: sort
+ auto: PREDEFINED
+ predefined:
+ - agentId
+ - conditionId
+ - eventAt
+ - alertId
+ - matchedAt
+ - id
+ - reportedAt
+ description: Sorts the results by the specified field in ascending order.
+ - name: sortOrder
+ auto: PREDEFINED
+ predefined:
+ - ascending
+ - descending
+ description: The sort order for the results.
+ outputs:
+ - contextPath: FireEyeHX.Alerts._id
+ description: FireEye alert ID.
+ - contextPath: FireEyeHX.Alerts.agent._id
+ description: FireEye agent ID.
+ - contextPath: FireEyeHX.Alerts.agent.containment_state
+ description: Host containment state.
+ - contextPath: FireEyeHX.Alerts.condition._id
+ description: The condition unique ID.
+ - contextPath: FireEyeHX.Alerts.event_at
+ description: Time when the event occurred.
+ - contextPath: FireEyeHX.Alerts.matched_at
+ description: Time when the event was matched.
+ - contextPath: FireEyeHX.Alerts.reported_at
+ description: Time when the event was reported.
+ - contextPath: FireEyeHX.Alerts.source
+ description: Source of alert.
+ - contextPath: FireEyeHX.Alerts.matched_source_alerts._id
+ description: Source alert ID.
+ - contextPath: FireEyeHX.Alerts.matched_source_alerts.appliance_id
+ description: Appliance ID
+ - contextPath: FireEyeHX.Alerts.matched_source_alerts.meta
+ description: Source alert meta.
+ - contextPath: FireEyeHX.Alerts.matched_source_alerts.indicator_id
+ description: Indicator ID.
+ - contextPath: FireEyeHX.Alerts.resolution
+ description: Alert resolution.
+ - contextPath: FireEyeHX.Alerts.event_type
+ description: Event type.
+ description: Get a list of alerts, use the different arguments to filter the results
+ returned.
+ - name: fireeye-hx-suppress-alert
+ arguments:
+ - name: alertId
+ description: The alert id. The alert id is listed in the output of 'get-alerts'
+ command.
+ description: Suppress alert by ID
+ - name: fireeye-hx-get-indicators
+ arguments:
+ - name: category
+ description: The indicator category
+ - name: searchTerm
+ description: The searchTerm can be any name, category, signature, source, or
+ condition value.
+ - name: shareMode
+ auto: PREDEFINED
+ predefined:
+ - any
+ - restricted
+ - unrestricted
+ - visible
+ description: Determines who can see the indicator. You must belong to the correct
+ authorization group.
+ - name: sort
+ auto: PREDEFINED
+ predefined:
+ - category
+ - activeSince
+ - createdBy
+ - alerted
+ description: Sorts the results by the specified field in ascending order
+ - name: createdBy
+ description: Person who created the indicator
+ - name: alerted
+ auto: PREDEFINED
+ predefined:
+ - "yes"
+ - "no"
+ description: Whether the indicator resulted in alerts
+ - name: limit
+ description: Limit the number of results
+ outputs:
+ - contextPath: FireEyeHX.Indicators._id
+ description: FireEye unique indicator ID.
+ - contextPath: FireEyeHX.Indicators.name
+ description: The indicator name as displayed in the UI.
+ - contextPath: FireEyeHX.Indicators.description
+ description: Indicator description.
+ - contextPath: FireEyeHX.Indicators.category.name
+ description: Category name.
+ - contextPath: FireEyeHX.Indicators.created_by
+ description: The "Created By" field as displayed in UI
+ - contextPath: FireEyeHX.Indicators.active_since
+ description: Date indicator became active.
+ - contextPath: FireEyeHX.Indicators.stats.source_alerts
+ description: Total number of source alerts associated with this indicator.
+ - contextPath: FireEyeHX.Indicators.stats.alerted_agents
+ description: Total number of agents with HX alerts associated with this indicator.
+ - contextPath: FireEyeHX.Indicators.platforms
+ description: List of families of operating systems.
+ description: Get a list of indicators
+ - name: fireeye-hx-get-indicator
+ arguments:
+ - name: category
+ required: true
+ description: Indicator category.
+ - name: name
+ required: true
+ description: Indicator name.
+ outputs:
+ - contextPath: FireEyeHX.Indicators._id
+ description: FireEye unique indicator ID.
+ - contextPath: FireEyeHX.Indicators.name
+ description: The indicator name as displayed in the UI.
+ - contextPath: FireEyeHX.Indicators.description
+ description: Indicator description.
+ - contextPath: FireEyeHX.Indicators.category.name
+ description: Category name.
+ - contextPath: FireEyeHX.Indicators.created_by
+ description: The "Created By" field as displayed in UI
+ - contextPath: FireEyeHX.Indicators.active_since
+ description: Date indicator became active.
+ - contextPath: FireEyeHX.Indicators.stats.source_alerts
+ description: Total number of source alerts associated with this indicator.
+ - contextPath: FireEyeHX.Indicators.stats.alerted_agents
+ description: Total number of agents with HX alerts associated with this indicator.
+ - contextPath: FireEyeHX.Indicators.platforms
+ description: List of families of operating systems.
+ - contextPath: FireEyeHX.Conditions._id
+ description: FireEye unique condition ID.
+ - contextPath: FireEyeHX.Conditions.event_type
+ description: Event type.
+ - contextPath: FireEyeHX.Conditions.enabled
+ description: Indicates whether the condition is enabled.
+ description: Get a specific indicator details
+ - name: fireeye-hx-get-host-information
+ arguments:
+ - name: agentId
+ description: The agent ID. If the agent ID is not specified, the host Name must
+ be specified.
+ - name: hostName
+ description: The host name. If the host name is not specified, the agent ID
+ must be specified.
+ outputs:
+ - contextPath: FireEyeHX.Hosts._id
+ description: FireEye HX Agent ID.
+ - contextPath: FireEyeHX.Hosts.agent_version
+ description: The agent version.
+ - contextPath: FireEyeHX.Hosts.excluded_from_containment
+ description: Determines whether the host is excluded from containment.
+ - contextPath: FireEyeHX.Hosts.containment_missing_software
+ description: Boolean value to indicate for containment missing software.
+ - contextPath: FireEyeHX.Hosts.containment_queued
+ description: Determines whether the host is queued for containment.
+ - contextPath: FireEyeHX.Hosts.containment_state
+ description: The containment state of the host. Possible values are normal|contain|contain_fail|containing|contained|uncontain|uncontaining|wtfc|wtfu.
+ - contextPath: FireEyeHX.Hosts.stats.alerting_conditions
+ description: The number of conditions that have alerted for the host.
+ - contextPath: FireEyeHX.Hosts.stats.alerts
+ description: Total number of alerts, including exploit-detection alerts.
+ - contextPath: FireEyeHX.Hosts.stats.exploit_blocks
+ description: The number of blocked exploits on the host.
+ - contextPath: FireEyeHX.Hosts.stats.malware_alerts
+ description: The number of malware alerts associated with the host.
+ - contextPath: FireEyeHX.Hosts.hostname
+ description: The host name.
+ - contextPath: FireEyeHX.Hosts.domain
+ description: Domain name.
+ - contextPath: FireEyeHX.Hosts.timezone
+ description: Host time zone.
+ - contextPath: FireEyeHX.Hosts.primary_ip_address
+ description: The host IP address.
+ - contextPath: FireEyeHX.Hosts.last_poll_timestamp
+ description: The timestamp of the last system poll performed on the host.
+ - contextPath: FireEyeHX.Hosts.initial_agent_checkin
+ description: Timestamp of the initial agent check-in.
+ - contextPath: FireEyeHX.Hosts.last_alert_timestamp
+ description: The time stamp of the last alert for the host.
+ - contextPath: FireEyeHX.Hosts.last_exploit_block_timestamp
+ description: Time when the last exploit was blocked on the host. The value is
+ null if no exploits have been blocked.
+ - contextPath: FireEyeHX.Hosts.os.product_name
+ description: Specific operating system
+ - contextPath: FireEyeHX.Hosts.os.bitness
+ description: OS Bitness.
+ - contextPath: FireEyeHX.Hosts.os.platform
+ description: Family of operating systems. Valid values are win, osx, and linux.
+ - contextPath: FireEyeHX.Hosts.primary_mac
+ description: The host MAC address.
+ description: Get information on a host associated with an agent.
+ - name: fireeye-hx-get-alert
+ arguments:
+ - name: alertId
+ required: true
+ description: The alert ID.
+ outputs:
+ - contextPath: FireEyeHX.Alerts._id
+ description: FireEye alert ID.
+ - contextPath: FireEyeHX.Alerts.agent._id
+ description: FireEye agent ID.
+ - contextPath: FireEyeHX.Alerts.agent.containment_state
+ description: Host containment state.
+ - contextPath: FireEyeHX.Alerts.condition._id
+ description: The condition unique ID.
+ - contextPath: FireEyeHX.Alerts.event_at
+ description: Time when the event occurred.
+ - contextPath: FireEyeHX.Alerts.matched_at
+ description: Time when the event was matched.
+ - contextPath: FireEyeHX.Alerts.reported_at
+ description: Time when the event was reported.
+ - contextPath: FireEyeHX.Alerts.source
+ description: Source of alert.
+ - contextPath: FireEyeHX.Alerts.matched_source_alerts._id
+ description: Source alert ID.
+ - contextPath: FireEyeHX.Alerts.matched_source_alerts.appliance_id
+ description: Appliance ID
+ - contextPath: FireEyeHX.Alerts.matched_source_alerts.meta
+ description: Source alert meta.
+ - contextPath: FireEyeHX.Alerts.matched_source_alerts.indicator_id
+ description: Indicator ID.
+ - contextPath: FireEyeHX.Alerts.resolution
+ description: Alert resolution.
+ - contextPath: FireEyeHX.Alerts.event_type
+ description: Event type.
+ description: Get details of a specific alert
+ - name: fireeye-hx-file-acquisition
+ arguments:
+ - name: fileName
+ required: true
+ description: The file name.
+ - name: filePath
+ required: true
+ description: The file path.
+ - name: acquireUsing
+ auto: PREDEFINED
+ predefined:
+ - API
+ - RAW
+ description: Whether to acquire the file using the API or RAW. By default, the
+ raw file is acquired. Use the API option when the file is encrypted.
+ - name: agentId
+ description: The agent ID associated with the host that holds the file. If the
+ hostName is not specified, the agentId must be specified.
+ - name: hostName
+ description: The host that holds the file. If the agentId is not specified,
+ hostName must be specified.
+ outputs:
+ - contextPath: FireEyeHX.Acquisitions.Files._id
+ description: The acquisition unique ID.
+ - contextPath: FireEyeHX.Acquisitions.Files.state
+ description: The acquisition state.
+ - contextPath: FireEyeHX.Acquisitions.Files.md5
+ description: File md5.
+ - contextPath: FireEyeHX.Acquisitions.Files.req_filename
+ description: The file name.
+ - contextPath: FireEyeHX.Acquisitions.Files.req_path
+ description: The file path.
+ - contextPath: FireEyeHX.Acquisitions.Files.host._id
+ description: FireEye HX agent ID.
+ description: Acquire a specific file as a password protected zip file. The password
+ for unlocking the zip file is 'unzip-me'.
+ - name: fireeye-hx-delete-file-acquisition
+ arguments:
+ - name: acquisitionId
+ required: true
+ description: The acquisition ID.
+ description: Delete the file acquisition, by ID.
+ - name: fireeye-hx-data-acquisition
+ arguments:
+ - name: script
+ description: Acquisition script in JSON format.
+ - name: scriptName
+ description: The script name. If the Acquisition script is specified, the script
+ name must be specified as well.
+ - name: defaultSystemScript
+ auto: PREDEFINED
+ predefined:
+ - osx
+ - win
+ - linux
+ description: Use default script. Select the host system.
+ - name: agentId
+ description: The agent ID. If the host name is not specified, the agent ID must
+ be specified.
+ - name: hostName
+ description: The host name. If the agent ID is not specified, the host name
+ must be specified.
+ outputs:
+ - contextPath: FireEyeHX.Acquisitions.Data._id
+ description: The acquisition unique ID.
+ - contextPath: FireEyeHX.Acquisitions.Data.state
+ description: The acquisition state.
+ - contextPath: FireEyeHX.Acquisitions.Data.md5
+ description: File md5.
+ - contextPath: FireEyeHX.Acquisitions.Data.host._id
+ description: FireEye HX agent ID.
+ description: Start a data acquisition process to gather artifacts from the system
+ disk and memory. The data is fetched as a .mans file.
+ - name: fireeye-hx-delete-data-acquisition
+ arguments:
+ - name: acquisitionId
+ required: true
+ description: The acquisition ID.
+ description: Delete data acquisition.
+ - name: fireeye-hx-search
+ arguments:
+ - name: agentsIds
+ description: IDs of agents to be searched
+ - name: hostsNames
+ description: Names of hosts to be searched
+ - name: hostSet
+ description: Id of host set to be searched
+ - name: limit
+ description: Limit results count (once limit is reached, the search is stopped)
+ - name: exhaustive
+ auto: PREDEFINED
+ predefined:
+ - "yes"
+ - "no"
+ description: Whether the search should be exhaustive or quick.
+ defaultValue: "yes"
+ - name: ipAddress
+ description: A valid IPv4 address to search for
+ - name: ipAddressOperator
+ auto: PREDEFINED
+ predefined:
+ - equals
+ - not equals
+ description: Which operator to apply to the given IP address
+ - name: fileMD5Hash
+ description: A 32-character MD5 hash value to search for
+ - name: fileMD5HashOperator
+ auto: PREDEFINED
+ predefined:
+ - equals
+ - not equals
+ description: Which operator to apply to the given MD5 hash
+ - name: fileFullPath
+ description: Full path of file to search
+ - name: fileFullPathOperator
+ auto: PREDEFINED
+ predefined:
+ - equals
+ - not equals
+ - contains
+ - not contains
+ description: Which operator to apply to the given file path
+ - name: dnsHostname
+ description: DNS value to search for
+ - name: dnsHostnameOperator
+ auto: PREDEFINED
+ predefined:
+ - equals
+ - not equals
+ - contains
+ - not contains
+ description: Which operator to apply to the given DNS
+ - name: stopSearch
+ auto: PREDEFINED
+ predefined:
+ - stopAndDelete
+ - stop
+ description: Method by which the search should be stopped after the results
+ limit is reached.
+ outputs:
+ - contextPath: FireEyeHX.Search.Results.Timestamp - Modified
+ description: Time when the entry was last modified
+ type: string
+ - contextPath: FireEyeHX.Search.Results.File Text Written
+ description: The file text content
+ type: string
+ - contextPath: FireEyeHX.Search.Results.File Name
+ description: Name of the file
+ type: string
+ - contextPath: FireEyeHX.Search.Results.File Full Path
+ description: The full path of the file
+ type: string
+ - contextPath: FireEyeHX.Search.Results.File Bytes Written
+ description: Number of bytes written to the file
+ type: string
+ - contextPath: FireEyeHX.Search.Results.Size in bytes
+ description: Size of the file in bytes
+ type: string
+ - contextPath: FireEyeHX.Search.Results.Browser Version
+ description: Version of the browser
+ type: string
+ - contextPath: FireEyeHX.Search.Results.Browser Name
+ description: Name of the browser
+ type: string
+ - contextPath: FireEyeHX.Search.Results.Cookie Name
+ description: Name of the cookie
+ type: string
+ - contextPath: FireEyeHX.Search.Results.DNS Hostname
+ description: Name of the DNS host
+ type: string
+ - contextPath: FireEyeHX.Search.Results.URL
+ description: The event URL
+ type: string
+ - contextPath: FireEyeHX.Search.Results.Username
+ description: The event username
+ type: string
+ - contextPath: FireEyeHX.Search.Results.File MD5 Hash
+ description: MD5 hash of the file
+ type: string
+ - contextPath: FireEyeHX.Search.HostID
+ description: ID of the host
+ type: string
+ - contextPath: FireEyeHX.Search.HostName
+ description: Name of host
+ type: string
+ - contextPath: FireEyeHX.Search.HostUrl
+ description: Inner FireEye host url
+ type: string
+ - contextPath: FireEyeHX.Search.SearchID
+ description: ID of performed search
+ type: string
+ - contextPath: FireEyeHX.Search.Results.Timestamp - Accessed
+ description: Last accessed time
+ type: string
+ - contextPath: FireEyeHX.Search.Results.Port
+ description: Port
+ type: number
+ - contextPath: FireEyeHX.Search.Results.Process ID
+ description: ID of the process
+ type: string
+ - contextPath: FireEyeHX.Search.Results.Local IP Address
+ description: Local IP Address
+ type: string
+ - contextPath: FireEyeHX.Search.Results.Local IP Address
+ description: Local IP Address
+ type: string
+ - contextPath: FireEyeHX.Search.Results.Local Port
+ description: Local Port
+ type: number
+ - contextPath: FireEyeHX.Search.Results.Username
+ description: Username
+ type: string
+ - contextPath: FireEyeHX.Search.Results.Remote Port
+ description: Remote Port
+ type: number
+ - contextPath: FireEyeHX.Search.Results.IP Address
+ description: IP Address
+ type: string
+ - contextPath: FireEyeHX.Search.Results.Process Name
+ description: Process Name
+ type: string
+ - contextPath: FireEyeHX.Search.Results.Timestamp - Event
+ description: Timestamp - Event
+ type: string
+ - contextPath: FireEyeHX.Search.Results.type
+ description: The type of the event
+ type: string
+ - contextPath: FireEyeHX.Search.Results.id
+ description: ID of the result
+ type: string
+ description: Search endpoints to check all hosts or a subset of hosts for a specific
+ file or indicator.
+ - name: fireeye-hx-get-host-set-information
+ arguments:
+ - name: hostSetID
+ description: ID of a specific host set to get
+ - name: offset
+ description: Specifies which record to start with in the response. The offset
+ value must be an unsigned 32-bit integer. The default is 0.
+ - name: limit
+ description: Specifies how many records are returned. The limit value must be
+ an unsigned 32-bit integer. The default is 50.
+ - name: search
+ description: Searches the names of all host sets connected to the specified
+ HX appliance.
+ - name: sort
+ description: Sorts the results by the specified field in ascending or descending
+ order. The default is sorting by name in ascending order. Sortable fields
+ are _id (host set ID) and name (host set name).
+ - name: name
+ description: Specifies the name of host set to look for
+ - name: type
+ description: Specifies the type of host sets to search for
+ outputs:
+ - contextPath: FireEyeHX.HostSets._id
+ description: host set id
+ type: number
+ - contextPath: FireEyeHX.HostSets._revision
+ description: Revision number
+ type: string
+ - contextPath: FireEyeHX.HostSets.name
+ description: Host set name
+ type: string
+ - contextPath: FireEyeHX.HostSets.type
+ description: Host set type (static/dynamic/hidden)
+ type: string
+ - contextPath: FireEyeHX.HostSets.url
+ description: Host set FireEye url
+ type: string
+ description: Get a list of all host sets known to your HX Series appliance
+ - name: fireeye-hx-create-indicator
+ arguments:
+ - name: category
+ required: true
+ description: The indicator category
+ outputs:
+ - contextPath: FireEyeHX.Indicators.active_since
+ description: Date indicator became active.
+ type: date
+ - contextPath: FireEyeHX.Indicators.meta
+ description: Meta data for new indicator
+ type: string
+ - contextPath: FireEyeHX.Indicators.display_name
+ description: The indicator display name
+ type: string
+ - contextPath: FireEyeHX.Indicators.name
+ description: The indicator name as displayed in the UI.
+ type: string
+ - contextPath: FireEyeHX.Indicators.created_by
+ description: The "Created By" field as displayed in UI
+ type: string
+ - contextPath: FireEyeHX.Indicators.url
+ description: The data URL
+ type: string
+ - contextPath: FireEyeHX.Indicators.create_text
+ description: The indicator create text
+ - contextPath: FireEyeHX.Indicators.platforms
+ description: List of families of operating systems.
+ type: string
+ - contextPath: FireEyeHX.Indicators.create_actor._id
+ description: The ID of the actor
+ type: number
+ - contextPath: FireEyeHX.Indicators.create_actor.username
+ description: Actor user name
+ type: string
+ - contextPath: FireEyeHX.Indicators.signature
+ description: Signature of the indicator.
+ type: string
+ - contextPath: FireEyeHX.Indicators._revision
+ description: Indicator revision
+ type: string
+ - contextPath: FireEyeHX.Indicators._id
+ description: FireEye unique indicator ID.
+ type: string
+ - contextPath: FireEyeHX.Indicators.description
+ description: Indicator description
+ type: string
+ - contextPath: FireEyeHX.Indicators.category._id
+ description: Category ID
+ type: number
+ - contextPath: FireEyeHX.Indicators.category.name
+ description: Category name
+ type: string
+ - contextPath: FireEyeHX.Indicators.category.share_mode
+ description: Category share mode
+ type: string
+ - contextPath: FireEyeHX.Indicators.category.uri_name
+ description: Category uri name
+ type: string
+ - contextPath: FireEyeHX.Indicators.category.url
+ description: Category URL
+ type: string
+ - contextPath: FireEyeHX.Indicators.uri_name
+ description: The indicator uri name
+ type: string
+ - contextPath: FireEyeHX.Indicators.stats.active_conditions
+ description: Indicator active conditions
+ type: number
+ - contextPath: FireEyeHX.Indicators.stats.alerted_agents
+ description: Total number of agents with HX alerts associated with this indicator.
+ type: number
+ - contextPath: FireEyeHX.Indicators.stats.source_alerts
+ description: Total number of source alerts associated with this indicator.
+ type: number
+ - contextPath: FireEyeHX.Indicators.update_actor._id
+ description: Update actor ID
+ type: number
+ - contextPath: FireEyeHX.Indicators.update_actor.username
+ description: Update actor name
+ type: string
+ description: Create new indicator
+ - name: fireeye-hx-append-conditions
+ arguments:
+ - name: category
+ required: true
+ description: The indicator category
+ - name: name
+ required: true
+ description: The name of the indicator
+ - name: condition
+ required: true
+ description: 'A list of conditions to add. The list can include IPv4 addresses,
+ MD5 hashes, and domain names. For example: example.net, example.org, example.lol'
+ description: 'Add conditions to an indicator. Conditions can be MD5 hash values,
+ domain names, and IP addresses.'
+ - name: fireeye-hx-get-all-hosts-information
+ arguments: []
+ outputs:
+ - contextPath: FireEyeHX.Hosts._id
+ description: FireEye HX Agent ID.
+ - contextPath: FireEyeHX.Hosts.agent_version
+ description: The agent version.
+ - contextPath: FireEyeHX.Hosts.excluded_from_containment
+ description: Determines whether the host is excluded from containment.
+ - contextPath: FireEyeHX.Hosts.containment_missing_software
+ description: Boolean value to indicate for containment missing software.
+ - contextPath: FireEyeHX.Hosts.containment_queued
+ description: Determines whether the host is queued for containment.
+ - contextPath: FireEyeHX.Hosts.containment_state
+ description: The containment state of the host. Possible values normal|contain|contain_fail|containing|contained|uncontain|uncontaining|wtfc|wtfu
+ - contextPath: FireEyeHX.Hosts.stats.alerting_conditions
+ description: The number of conditions that have alerted for the host.
+ - contextPath: FireEyeHX.Hosts.stats.alerts
+ description: Total number of alerts, including exploit-detection alerts.
+ - contextPath: FireEyeHX.Hosts.stats.exploit_blocks
+ description: The number of blocked exploits on the host.
+ - contextPath: FireEyeHX.Hosts.stats.malware_alerts
+ description: The number of malware alerts associated with the host.
+ - contextPath: FireEyeHX.Hosts.hostname
+ description: The host name.
+ - contextPath: FireEyeHX.Hosts.domain
+ description: Domain name.
+ - contextPath: FireEyeHX.Hosts.timezone
+ description: Host time zone.
+ - contextPath: FireEyeHX.Hosts.primary_ip_address
+ description: The host IP address.
+ - contextPath: FireEyeHX.Hosts.last_poll_timestamp
+ description: The timestamp of the last system poll preformed on the host.
+ - contextPath: FireEyeHX.Hosts.initial_agent_checkin
+ description: Timestamp of the initial agent check-in.
+ - contextPath: FireEyeHX.Hosts.last_alert_timestamp
+ description: The time stamp of the last alert for the host.
+ - contextPath: FireEyeHX.Hosts.last_exploit_block_timestamp
+ description: Time when the last exploit was blocked on the host. The value is
+ null if no exploits have been blocked.
+ - contextPath: FireEyeHX.Hosts.os.product_name
+ description: Specific operating system
+ - contextPath: FireEyeHX.Hosts.os.bitness
+ description: OS Bitness.
+ - contextPath: FireEyeHX.Hosts.os.platform
+ description: Family of operating systems. Valid values are win, osx, and linux.
+ - contextPath: FireEyeHX.Hosts.primary_mac
+ description: The host MAC address.
+ description: Get information on all hosts
+ isfetch: true
+ runonce: false
+tests:
+ - FireEye HX Test
diff --git a/Integrations/FireEyeHX/FireEyeHX_description.md b/Integrations/FireEyeHX/FireEyeHX_description.md
new file mode 100644
index 000000000000..92021452a0c7
--- /dev/null
+++ b/Integrations/FireEyeHX/FireEyeHX_description.md
@@ -0,0 +1 @@
+FireEye Endpoint Security is an integrated solution that detects what others miss and protects endpoints against known and unknown threats. The HX Demisto integration provides access to information about endpoints, acquisitions, alerts, indicators, and containment. Customers can extract critical data and effectively operate security operations from an automated playbook.
diff --git a/Integrations/FireEyeHX/FireEyeHX_image.png b/Integrations/FireEyeHX/FireEyeHX_image.png
new file mode 100644
index 000000000000..6d9ffcfe9cd1
Binary files /dev/null and b/Integrations/FireEyeHX/FireEyeHX_image.png differ
diff --git a/Integrations/Forescout/Forescout.py b/Integrations/Forescout/Forescout.py
new file mode 100644
index 000000000000..d879e3e3b2ca
--- /dev/null
+++ b/Integrations/Forescout/Forescout.py
@@ -0,0 +1,851 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+
+import json
+import requests
+from typing import Dict, List, Tuple, Any, Union, cast
+import xml.etree.ElementTree as ET_PHONE_HOME
+from copy import deepcopy
+from datetime import datetime, timedelta, timezone
+from dateutil.parser import parse as parsedate
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+PARAMS = demisto.params()
+WEB_API_CREDENTIALS = PARAMS.get('web_api_credentials') or {}
+WEB_API_USERNAME = WEB_API_CREDENTIALS.get('identifier', '')
+WEB_API_PASSWORD = WEB_API_CREDENTIALS.get('password', '')
+
+DEX_CREDENTIALS = PARAMS.get('dex_credentials') or {}
+DEX_USERNAME = DEX_CREDENTIALS.get('identifier', '')
+DEX_PASSWORD = DEX_CREDENTIALS.get('password', '')
+DEX_ACCOUNT = PARAMS.get('dex_account') or ''
+
+# Remove trailing slash to prevent wrong URL path to service
+BASE_URL = PARAMS.get('url', '').strip().rstrip('/')
+# Should we use SSL
+USE_SSL = not PARAMS.get('insecure', False)
+
+WEB_AUTH = ''
+LAST_JWT_FETCH = None
+# Default JWT validity time set in Forescout Web API
+JWT_VALIDITY_TIME = timedelta(minutes=5)
+
+DEX_AUTH = (DEX_USERNAME + '@' + DEX_ACCOUNT, DEX_PASSWORD)
+DEX_HEADERS = {
+ 'Content-Type': 'application/xml',
+ 'Accept': 'application/xml'
+}
+
+# Host fields to be included in output of get_host_command
+HOSTFIELDS_TO_INCLUDE = {
+ 'os_classification': 'OSClassification',
+ 'classification_source_os': 'ClassificationSourceOS',
+ 'onsite': 'Onsite',
+ 'access_ip': 'AccessIP',
+ 'macs': 'MAC',
+ 'openports': 'OpenPort',
+ 'mac_vendor_string': 'MacVendorString',
+ 'cl_type': 'ClType',
+ 'cl_rule': 'ClRule',
+ 'vendor': 'Vendor',
+ 'fingerprint': 'Fingerprint',
+ 'gst_signed_in_stat': 'GstSignedInStat',
+ 'misc': 'Misc',
+ 'prim_classification': 'PrimClassification',
+ 'agent_install_mode': 'AgentInstallMode',
+ 'vendor_classification': 'VendorClassification',
+ 'user_def_fp': 'UserDefFp',
+ 'agent_visible_mode': 'AgentVisibleMode',
+ 'classification_source_func': 'ClassificationSourceFunc',
+ 'dhcp_class': 'DhcpClass',
+ 'samba_open_ports': 'SambaOpenPort',
+ 'mac_prefix32': 'MacPrefix32',
+ 'adm': 'ADM',
+ 'last_nbt_report_time': 'LastNbtReportTime',
+ 'agent_version': 'AgentVersion',
+ 'matched_fingerprints': 'MatchedFingerprint',
+ 'manage_agent': 'ManageAgent',
+ 'dhcp_req_fingerprint': 'DhcpReqFingerprint',
+ 'dhcp_opt_fingerprint': 'DhcpOptFingerprint',
+ 'online': 'Online',
+ 'nmap_def_fp7': 'NmapDefFp7',
+ 'ipv4_report_time': 'Ipv4ReportTime',
+ 'nmap_def_fp5': 'NmapDefFp5',
+ 'va_netfunc': 'VaNetfunc',
+ 'dhcp_os': 'DhcpOS',
+ 'engine_seen_packet': 'EngineSeenPacket',
+ 'nmap_netfunc7': 'NmapNetfunc7',
+ 'nmap_fp7': 'NmapFp7',
+ 'dhcp_hostname': 'DhcpHostname'
+}
+
+
+''' HELPER FUNCTIONS '''
+
+
+def check_web_api_credentials():
+ """
+ Verify that credentials were entered for the Web API
+ """
+ if not (WEB_API_USERNAME and WEB_API_PASSWORD):
+ err_msg = 'Error in Forescout Integration - Web API credentials must' \
+ ' be entered in the Forescout integration configuration in order to execute this command.'
+ return_error(err_msg)
+
+
+def check_dex_credentials():
+ """
+ Verify that credentials were entered for Data Exchange (DEX)
+ """
+ if not (DEX_USERNAME and DEX_PASSWORD and DEX_ACCOUNT):
+ err_msg = 'Error in Forescout Integration - Data Exchange (DEX) credentials must' \
+ ' be entered in the Forescout integration configuration in order to execute this command.'
+ return_error(err_msg)
+
+
+def create_update_lists_request_body(update_type: str, lists: str) -> ET_PHONE_HOME.Element:
+ """
+ Create XML request body formatted to DEX expectations
+
+ Parameters
+ ----------
+ update_type : str
+ The type of update to execute.
+ lists : str
+ The list names and associated values to update the list with.
+
+ Returns
+ -------
+ XML Request Body Element
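+
+    Examples
+    --------
+    A sketch with a hypothetical list name and values - calling
+    ``create_update_lists_request_body('add_list_values', 'blocklist=1.1.1.1:2.2.2.2')``
+    returns an element tree equivalent to::
+
+        <FSAPI TYPE="request" API_VERSION="2.0">
+          <TRANSACTION TYPE="add_list_values">
+            <LISTS>
+              <LIST NAME="blocklist">
+                <VALUE>1.1.1.1</VALUE>
+                <VALUE>2.2.2.2</VALUE>
+              </LIST>
+            </LISTS>
+          </TRANSACTION>
+        </FSAPI>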
+ """
+ root = ET_PHONE_HOME.Element('FSAPI', attrib={'TYPE': 'request', 'API_VERSION': '2.0'})
+ transaction = ET_PHONE_HOME.SubElement(root, 'TRANSACTION', attrib={'TYPE': update_type})
+ lists_xml = ET_PHONE_HOME.SubElement(transaction, 'LISTS')
+ if lists:
+ list_val_pairs = lists.split('&')
+ for list_val_pair in list_val_pairs:
+ list_name, *values = list_val_pair.split('=')
+ list_xml = ET_PHONE_HOME.SubElement(lists_xml, 'LIST', attrib={'NAME': list_name})
+ if update_type != 'delete_all_list_values' and values:
+ list_of_vals = '='.join(values).split(':')
+ for val in list_of_vals:
+ val_xml = ET_PHONE_HOME.SubElement(list_xml, 'VALUE')
+ val_xml.text = val
+
+ return root
+
+
+def create_update_hostfields_request_body(host_ip: str, update_type: str,
+ field: str, value: str, fields_json: str) -> ET_PHONE_HOME.Element:
+ """
+ Create XML request body formatted to DEX expectations
+
+ Parameters
+ ----------
+ host_ip : str
+ IP address of the target host.
+ update_type : str
+ The type of update to execute.
+ field : str
+ The host field to update.
+ value : str
+ The value to assign to the specified host field.
+ fields_json: str
+ Field-value pairs in valid JSON format. Useful for Forescout composite fields.
+
+ Returns
+ -------
+ XML Request Body Element
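+
+    Examples
+    --------
+    A sketch with hypothetical values - calling
+    ``create_update_hostfields_request_body('10.0.0.1', 'update', 'hostname', 'example-host', '')``
+    returns an element tree equivalent to::
+
+        <FSAPI TYPE="request" API_VERSION="2.0">
+          <TRANSACTION TYPE="update">
+            <OPTIONS CREATE_NEW_HOST="false"/>
+            <HOST_KEY NAME="ip" VALUE="10.0.0.1"/>
+            <PROPERTIES>
+              <PROPERTY NAME="hostname">
+                <VALUE>example-host</VALUE>
+              </PROPERTY>
+            </PROPERTIES>
+          </TRANSACTION>
+        </FSAPI>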
+ """
+ root = ET_PHONE_HOME.Element('FSAPI', attrib={'TYPE': 'request', 'API_VERSION': '2.0'})
+ transaction = ET_PHONE_HOME.SubElement(root, 'TRANSACTION', attrib={'TYPE': update_type})
+ if update_type == 'update':
+ ET_PHONE_HOME.SubElement(transaction, 'OPTIONS', attrib={'CREATE_NEW_HOST': 'false'})
+
+ ET_PHONE_HOME.SubElement(transaction, 'HOST_KEY', attrib={'NAME': 'ip', 'VALUE': host_ip})
+ props_xml = ET_PHONE_HOME.SubElement(transaction, 'PROPERTIES')
+
+ # parse fields_json
+ non_composite_fields = {}
+ composite_fields: Dict[Any, Any] = {}
+ if fields_json:
+ fields_json_dict = json.loads(fields_json)
+ for key, val in fields_json_dict.items():
+ if isinstance(val, dict):
+ composite_fields[key] = val
+ elif isinstance(val, list):
+ if len(val) >= 1 and isinstance(val[0], dict):
+ composite_fields[key] = val
+ else:
+ non_composite_fields[key] = val
+ else:
+ non_composite_fields[key] = val
+
+ # put non-composite fields all together
+ if field:
+ non_composite_fields[field] = argToList(value)
+
+ for key, val in non_composite_fields.items():
+ prop_xml = ET_PHONE_HOME.SubElement(props_xml, 'PROPERTY', attrib={'NAME': key})
+ if update_type != 'delete':
+ if isinstance(val, list):
+ for sub_val in val:
+ val_xml = ET_PHONE_HOME.SubElement(prop_xml, 'VALUE')
+ val_xml.text = sub_val
+ else:
+ val_xml = ET_PHONE_HOME.SubElement(prop_xml, 'VALUE')
+ val_xml.text = val
+
+ if composite_fields:
+ for table_prop_name, values in composite_fields.items():
+ table_property_xml = ET_PHONE_HOME.SubElement(props_xml, 'TABLE_PROPERTY',
+ attrib={'NAME': table_prop_name})
+ if update_type == 'update':
+ if isinstance(values, list):
+
+ for row in values:
+ row_xml = ET_PHONE_HOME.SubElement(table_property_xml, 'ROW')
+
+ for key, val in row.items():
+ key_xml = ET_PHONE_HOME.SubElement(row_xml, 'CPROPERTY', attrib={'NAME': key})
+
+ if isinstance(val, list):
+ for sub_val in val:
+ value_xml = ET_PHONE_HOME.SubElement(key_xml, 'CVALUE')
+ value_xml.text = sub_val
+
+ else:
+ value_xml = ET_PHONE_HOME.SubElement(key_xml, 'CVALUE')
+ value_xml.text = val
+ else:
+ row_xml = ET_PHONE_HOME.SubElement(table_property_xml, 'ROW')
+ for key, val in values.items():
+ key_xml = ET_PHONE_HOME.SubElement(row_xml, 'CPROPERTY', attrib={'NAME': key})
+ if isinstance(val, list):
+ for sub_val in val:
+ value_xml = ET_PHONE_HOME.SubElement(key_xml, 'CVALUE')
+ value_xml.text = sub_val
+ else:
+ value_xml = ET_PHONE_HOME.SubElement(key_xml, 'CVALUE')
+ value_xml.text = val
+
+ return root
+
+
+def filter_hostfields_data(args: Dict, data: Dict) -> List:
+ """
+ Filter host fields data by get_host_fields_command arguments.
+
+ Parameters
+ ----------
+ args : dict
+ The get_host_fields_command arguments.
+ data : dict
+ The data to filter.
+
+ Returns
+ -------
+ list
+ Filtered list of hostfields
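+
+    Examples
+    --------
+    A sketch with hypothetical host-field data:
+
+    >>> data = {'hostFields': [{'name': 'hostname', 'type': 'string'},
+    ...                        {'name': 'online', 'type': 'boolean'}]}
+    >>> filter_hostfields_data({'search_term': 'host'}, data)
+    [{'name': 'hostname', 'type': 'string'}]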
+ """
+ search_term = args.get('search_term')
+ host_fields = data.get('hostFields', [])
+ host_field_type = args.get('host_field_type', 'all_types')
+ if not search_term:
+ # Still check to see if should filter host fields by their type
+ if host_field_type == 'all_types':
+ return host_fields
+ else:
+ host_field_types = argToList(host_field_type)
+ filtered_hostfields = []
+ for host_field in host_fields:
+ if host_field.get('type') in host_field_types:
+ filtered_hostfields.append(host_field)
+ return filtered_hostfields
+    case_sensitive = args.get('case_sensitive', 'false').casefold() != 'false'
+ if not case_sensitive:
+ search_term = search_term.casefold()
+    match_exactly = args.get('match_exactly', 'False').casefold() != 'false'
+ if host_field_type != 'all_types':
+ host_field_type = argToList(host_field_type)
+ search_in = args.get('search_in', 'name')
+ search_in = argToList(search_in)
+
+ filtered_hostfields = []
+ for host_field in host_fields:
+ if isinstance(host_field_type, list):
+ if host_field.get('type') not in host_field_type:
+ continue
+ vals_to_search = [host_field.get(part) for part in search_in]
+ vals_to_search = ['' if val is None else val for val in vals_to_search]
+ for val in vals_to_search:
+ val_to_search = val
+ if not case_sensitive:
+ val_to_search = val.casefold()
+ if match_exactly:
+ if search_term == val_to_search:
+ filtered_hostfields.append(host_field)
+ break
+ else:
+ continue
+ else:
+ if search_term in val_to_search:
+ filtered_hostfields.append(host_field)
+ break
+
+ return filtered_hostfields
+
+
+def dict_to_formatted_string(dictionary: Union[Dict, List]) -> str:
+ """
+ Return dictionary as clean string for war room output.
+
+ Parameters
+ ----------
+ dictionary : dict | list
+ The dictionary or list to format as a string.
+
+ Returns
+ -------
+ str
+ Clean string version of a dictionary
+
+ Examples
+ --------
+ >>> example_dict = {'again': 'FsoD',
+ ... 'church': {'go': 'pArcB', 'month': '2009-08-11 16:42:51'},
+ ... 'production': 5507,
+ ... 'so': [9350, 'awzn', 7105, 'mMRxc']}
+ >>> dict_to_formatted_string(example_dict)
+ 'again: FsoD, church: {go: pArcB, month: 2009-08-11 16:42:51}, production: 5507, so: [9350, awzn, 7105, mMRxc]'
+ """
+ return json.dumps(dictionary).lstrip('{').rstrip('}').replace('\'', '').replace('\"', '')
+
+
+def format_policies_data(data: Dict) -> List:
+ """
+ Return policies formatted to Demisto standards.
+
+ Parameters
+ ----------
+ data : dict
+ The data returned from making API call to Forescout Web API policies endpoint.
+
+ Returns
+ -------
+ list
+ Formatted Policies
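+
+    Examples
+    --------
+    A sketch with a hypothetical API payload:
+
+    >>> format_policies_data({'policies': [{'policyId': 7, 'name': 'Classify', 'description': 'demo',
+    ...                                     'rules': [{'ruleId': 9, 'name': 'Printers', 'description': 'demo'}]}]})
+    [{'ID': '7', 'Name': 'Classify', 'Description': 'demo', 'Rule': [{'ID': '9', 'Name': 'Printers', 'Description': 'demo'}]}]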
+ """
+ formatted_policies = []
+ policies = data.get('policies', [])
+ for policy in policies:
+ formatted_policy = {
+ 'ID': str(policy.get('policyId')),
+ 'Name': policy.get('name'),
+ 'Description': policy.get('description')
+ }
+ formatted_rules = []
+ rules = policy.get('rules', [])
+ for rule in rules:
+ formatted_rule = {
+ 'ID': str(rule.get('ruleId')),
+ 'Name': rule.get('name'),
+ 'Description': rule.get('description')
+ }
+ formatted_rules.append(formatted_rule)
+ formatted_policy['Rule'] = formatted_rules
+ formatted_policies.append(formatted_policy)
+ return formatted_policies
+
+
+def create_web_api_headers() -> Dict:
+ """
+ Update JWT if it has expired and return headers object that formats to Forescout Web API expectations
+
+ Returns
+ -------
+ dict
+ Headers object for the Forescout Web API calls
+ """
+ web_api_login()
+ headers = {
+ 'Authorization': WEB_AUTH,
+ 'Accept': 'application/hal+json'
+ }
+ return headers
+
+
+def web_api_login():
+ """
+    Get a JWT (JSON Web Token) for authorization in calls to the Web API
+ """
+ global LAST_JWT_FETCH
+ global WEB_AUTH
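+    # Fetch a new token only if none is cached or the cached one has outlived JWT_VALIDITY_TIME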
+ if not LAST_JWT_FETCH or datetime.now(timezone.utc) >= LAST_JWT_FETCH + JWT_VALIDITY_TIME:
+ url_suffix = '/api/login'
+ headers = {'Content-Type': 'application/x-www-form-urlencoded'}
+ params = {'username': WEB_API_USERNAME, 'password': WEB_API_PASSWORD}
+ response = http_request('POST', url_suffix, headers=headers, params=params, resp_type='response')
+ fetch_time = parsedate(response.headers.get('Date', ''))
+ WEB_AUTH = response.text
+ LAST_JWT_FETCH = fetch_time
+
+
+def http_request(method: str, url_suffix: str, full_url: str = None, headers: Dict = None,
+ auth: Tuple = None, params: Dict = None, data: Dict = None, files: Dict = None,
+ timeout: float = 10, resp_type: str = 'json') -> Any:
+ """
+ A wrapper for requests lib to send our requests and handle requests
+ and responses better
+
+ Parameters
+ ----------
+ method : str
+ HTTP method, e.g. 'GET', 'POST' ... etc.
+ url_suffix : str
+ API endpoint.
+ full_url : str
+ Bypasses the use of BASE_URL + url_suffix. Useful if there is a need to
+ make a request to an address outside of the scope of the integration
+ API.
+ headers : dict
+ Headers to send in the request.
+ auth : tuple
+ Auth tuple to enable Basic/Digest/Custom HTTP Auth.
+ params : dict
+ URL parameters.
+ data : dict
+ Data to be sent in a 'POST' request.
+ files : dict
+ File data to be sent in a 'POST' request.
+    timeout : float
+        The number of seconds the request will wait to establish a connection
+        to the remote machine before giving up.
+ resp_type : str
+ Determines what to return from having made the HTTP request. The default
+ is 'json'. Other options are 'text', 'content' or 'response' if the user
+ would like the full response object returned.
+
+ Returns
+ -------
+ dict | str | bytes | obj
+ Response JSON from having made the request.
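+
+    Examples
+    --------
+    A typical call (sketch only - requires a live Forescout appliance)::
+
+        hosts = http_request('GET', '/api/hosts', headers=create_web_api_headers())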
+ """
+ try:
+ address = full_url if full_url else BASE_URL + url_suffix
+ res = requests.request(
+ method,
+ address,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ files=files,
+ headers=headers,
+ auth=auth,
+ timeout=timeout
+ )
+
+ # Handle error responses gracefully
+ if res.status_code not in {200, 304}:
+ err_msg = 'Error in Forescout Integration API call [{}] - {}'.format(res.status_code, res.reason)
+ try:
+ # Try to parse json error response
+ res_json = res.json()
+                message = res_json.get('message', '')
+ if message.endswith(' See log for more details.'):
+ message = message.replace(' See log for more details.', '')
+ err_msg += '\n{}'.format(message)
+ return_error(err_msg)
+ except json.decoder.JSONDecodeError:
+ if res.status_code in {400, 401, 501}:
+ # Try to parse xml error response
+ resp_xml = ET_PHONE_HOME.fromstring(res.content)
+ codes = [child.text for child in resp_xml.iter() if child.tag == 'CODE']
+ messages = [child.text for child in resp_xml.iter() if child.tag == 'MESSAGE']
+ err_msg += ''.join([f'\n{code}: {msg}' for code, msg in zip(codes, messages)])
+ return_error(err_msg)
+
+ resp_type = resp_type.casefold()
+ try:
+ if resp_type == 'json':
+ return res.json()
+ elif resp_type == 'text':
+ return res.text
+ elif resp_type == 'content':
+ return res.content
+ else:
+ return res
+ except json.decoder.JSONDecodeError:
+ return_error(f'Failed to parse json object from response: {res.content}')
+
+ except requests.exceptions.ConnectTimeout:
+ err_msg = 'Connection Timeout Error - potential reasons may be that the Server URL parameter' \
+ ' is incorrect or that the Server is not accessible from your host.'
+ return_error(err_msg)
+ except requests.exceptions.SSLError:
+ err_msg = 'SSL Certificate Verification Failed - try selecting \'Trust any certificate\' in' \
+ ' the integration configuration.'
+ return_error(err_msg)
+ except requests.exceptions.ProxyError:
+ err_msg = 'Proxy Error - if \'Use system proxy\' in the integration configuration has been' \
+ ' selected, try deselecting it.'
+ return_error(err_msg)
+ except requests.exceptions.ConnectionError as e:
+ # Get originating Exception in Exception chain
+ while '__context__' in dir(e) and e.__context__:
+ e = cast(Any, e.__context__)
+
+ error_class = str(e.__class__)
+ err_type = '<' + error_class[error_class.find('\'') + 1: error_class.rfind('\'')] + '>'
+ err_msg = f'\nERRTYPE: {err_type}\nERRNO: [{e.errno}]\nMESSAGE: {e.strerror}\n' \
+ f'ADVICE: Check that the Server URL parameter is correct and that you' \
+ f' have access to the Server from your host.'
+ return_error(err_msg)
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+ Performs API calls to Forescout Web API and DEX that require proper authentication
+ """
+ if WEB_API_USERNAME and WEB_API_PASSWORD:
+ get_hosts({})
+ if DEX_USERNAME and DEX_PASSWORD and DEX_ACCOUNT:
+ update_lists({'update_type': 'add_list_values'})
+ demisto.results('ok')
+
+
+def get_host(args):
+ fields = args.get('fields', '')
+ ip = args.get('ip', '')
+ mac = args.get('mac', '')
+ id = args.get('id', '')
+ url_suffix = '/api/hosts/'
+ if not (ip or mac or id):
+ err_msg = 'One of the command arguments, \'ip\', \'mac\' or \'id\' must be entered in order to identify the ' \
+ 'endpoint to retrieve. '
+ return_error(err_msg)
+
+ if ip:
+ # API endpoint format - https://{EM.IP}/api/hosts/ip/{ipv4}?fields={prop},..,{prop_n}
+ url_suffix += 'ip/' + ip
+ elif mac:
+ # API endpoint format - https://{EM.IP}/api/hosts/mac/{mac}?fields={prop},..,{prop_n}
+ url_suffix += 'mac/' + mac
+ elif id:
+ # API endpoint format - https://{EM.IP}/api/hosts/{obj_ID}?fields={prop},..,{prop_n}
+ url_suffix += id
+
+ params = {'fields': fields} if fields != '' else None
+ headers = create_web_api_headers()
+ response_data = http_request('GET', url_suffix, headers=headers, params=params, resp_type='json')
+ return response_data
+
+
+def get_host_command():
+ check_web_api_credentials()
+ args = demisto.args()
+ ip = args.get('ip', '')
+ mac = args.get('mac', '')
+ id = args.get('id', '')
+ identifier = 'IP=' + ip if ip else ('MAC=' + mac if mac else 'ID=' + id)
+ requested_fields = argToList(args.get('fields', ''))
+ data = get_host(args)
+ host = data.get('host', {})
+ fields = host.get('fields', {})
+
+ included_fields = {HOSTFIELDS_TO_INCLUDE.get(key, key): val for key, val in fields.items()}
+ for key, val in included_fields.items():
+ if isinstance(val, list):
+ new_val = [item.get('value') for item in val]
+ included_fields[key] = new_val
+ else:
+ included_fields[key] = val.get('value')
+
+ if not requested_fields:
+ for key in list(included_fields.keys()):
+ if key not in HOSTFIELDS_TO_INCLUDE.values():
+ del included_fields[key]
+
+ included_fields_readable = {}
+ for key, val in included_fields.items():
+ included_fields_readable[key] = dict_to_formatted_string(val) if isinstance(val, (dict, list)) else val
+
+ content = {
+ 'ID': str(host.get('id')),
+ 'IPAddress': host.get('ip', ''),
+ 'MACAddress': host.get('mac', ''),
+ **included_fields
+ }
+
+ # Construct endpoint object from API data according to Demisto conventions
+ endpoint = {
+ 'IPAddress': host.get('ip', ''),
+ 'MACAddress': host.get('mac', '')
+ }
+ dhcp_server = fields.get('dhcp_server', {}).get('value')
+ if dhcp_server:
+ endpoint['DHCPServer'] = dhcp_server
+ hostname = fields.get('hostname', {}).get('value')
+ nbt_host = fields.get('nbthost', {}).get('value')
+ hostname = hostname if hostname else nbt_host
+ if hostname:
+ endpoint['Hostname'] = hostname
+ os = fields.get('os_classification', {}).get('value')
+ if os:
+ endpoint['OS'] = os
+ vendor_and_model = fields.get('vendor_classification', {}).get('value')
+ if vendor_and_model:
+ endpoint['Model'] = vendor_and_model
+ domain = fields.get('nbtdomain', {}).get('value')
+ if domain:
+ endpoint['Domain'] = domain
+
+ human_readable_content = deepcopy(content)
+ human_readable_content.update(included_fields_readable)
+
+ context = {
+ 'Forescout.Host(val.ID && val.ID === obj.ID)': content,
+ 'Endpoint(val.ID && val.ID === obj.ID)': endpoint
+ }
+
+ title = 'Endpoint Details for {}'.format(identifier) if identifier else 'Endpoint Details'
+ human_readable = tableToMarkdown(title, human_readable_content, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=context, raw_response=data)
+
+
+def get_hosts(args={}):
+ url_suffix = '/api/hosts'
+ headers = create_web_api_headers()
+ rule_ids = args.get('rule_ids')
+ fields = args.get('fields')
+ if rule_ids and fields:
+ url_suffix += '?matchRuleId=' + rule_ids + '&' + fields
+ elif rule_ids:
+ url_suffix += '?matchRuleId=' + rule_ids
+ elif fields:
+ url_suffix += '?' + fields
+ response_data = http_request('GET', url_suffix, headers=headers, resp_type='json')
+ return response_data
+
+
+def get_hosts_command():
+ check_web_api_credentials()
+ args = demisto.args()
+ response_data = get_hosts(args)
+ content = [
+ {
+ 'ID': str(x.get('hostId')),
+ 'IPAddress': x.get('ip', ''),
+ 'MACAddress': x.get('mac', '')
+ } for x in response_data.get('hosts', [])
+ ]
+ endpoints = [
+ {
+ 'IPAddress': x.get('ip', ''),
+ 'MACAddress': x.get('mac', '')
+ } for x in response_data.get('hosts', [])
+ ]
+ context = {
+ 'Forescout.Host(val.ID && val.ID === obj.ID)': content,
+ 'Endpoint(val.ID && val.ID === obj.ID)': endpoints
+ }
+ title = 'Active Endpoints'
+ human_readable = tableToMarkdown(title, content, removeNull=True)
+ if not content:
+ demisto.results('No hosts found for the specified filters.')
+ else:
+ return_outputs(readable_output=human_readable, outputs=context, raw_response=response_data)
+
+
+def get_host_fields():
+ url_suffix = '/api/hostfields'
+ headers = create_web_api_headers()
+ response_data = http_request('GET', url_suffix, headers=headers, resp_type='json')
+ return response_data
+
+
+def get_host_fields_command():
+ check_web_api_credentials()
+ args = demisto.args()
+ data = get_host_fields()
+ filtered_data = filter_hostfields_data(args, data)
+ if not filtered_data:
+ demisto.results('No host fields matched the specified filters.')
+ else:
+ content = [{key.title(): val for key, val in x.items()} for x in filtered_data]
+ context = {'Forescout.HostField': content}
+ title = 'Index of Host Fields'
+ table_headers = ['Label', 'Name', 'Description', 'Type']
+ human_readable = tableToMarkdown(title, content, headers=table_headers, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=context, raw_response=data)
+
+
+def get_policies():
+ url_suffix = '/api/policies'
+ headers = create_web_api_headers()
+ response_data = http_request('GET', url_suffix, headers=headers, resp_type='json')
+ return response_data
+
+
+def get_policies_command():
+ check_web_api_credentials()
+ data = get_policies()
+ content = format_policies_data(data)
+ readable_content = deepcopy(content)
+ for policy in readable_content:
+ readable_rules = []
+ for rule in policy.get('Rule', []):
+ readable_rules.append(dict_to_formatted_string(rule))
+ policy['Rule'] = readable_rules
+ context = {'Forescout.Policy(val.ID && val.ID === obj.ID)': content}
+ title = 'Forescout Policies'
+ human_readable = tableToMarkdown(title, readable_content, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=context, raw_response=data)
+
+
+def update_lists(args={}):
+ update_type = args.get('update_type', '')
+ list_names = argToList(args.get('list_names', ''))
+ values = ':'.join(argToList(args.get('values', '')))
+ if values:
+ lists = '&'.join([list_name + '=' + values for list_name in list_names])
+ else:
+ lists = '&'.join(list_names)
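+    # e.g. list_names ['list_a', 'list_b'] with values '1:2' yields 'list_a=1:2&list_b=1:2',
+    # the '{list}={val}:{val}&...' format that create_update_lists_request_body expects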
+ req_body = create_update_lists_request_body(update_type, lists)
+ data = ET_PHONE_HOME.tostring(req_body, encoding='UTF-8', method='xml')
+ url_suffix = '/fsapi/niCore/Lists'
+ resp_content = http_request('POST', url_suffix, headers=DEX_HEADERS, auth=DEX_AUTH, data=data, resp_type='content')
+ return resp_content
+
+
+def update_lists_command():
+ check_dex_credentials()
+ args = demisto.args()
+ response_content = update_lists(args)
+ resp_xml = ET_PHONE_HOME.fromstring(response_content)
+ msg_list = [child.text for child in resp_xml.iter() if child.tag == 'MESSAGE']
+ if len(msg_list) >= 1 and msg_list[0] is not None:
+ msg = msg_list[0]
+ msg = msg.replace('[', '').replace(']', '')
+ else:
+ err_msg = 'The response from Forescout could not be parsed correctly. It is uncertain if the list updates ' \
+ 'were successfully executed.'
+ return_error(err_msg)
+ demisto.results(msg)
+
+
+def update_host_fields(args={}):
+ host_ip = args.get('host_ip', '')
+ update_type = args.get('update_type', '')
+ field = args.get('field', '')
+ value = args.get('value', '')
+ fields_json = args.get('fields_json', '')
+ req_body = create_update_hostfields_request_body(host_ip, update_type, field, value, fields_json)
+ data = ET_PHONE_HOME.tostring(req_body, encoding='UTF-8', method='xml')
+ url_suffix = '/fsapi/niCore/Hosts'
+ resp_content = http_request('POST', url_suffix, headers=DEX_HEADERS, auth=DEX_AUTH, data=data, resp_type='content')
+ return resp_content
+
+
+def update_host_fields_command():
+ check_dex_credentials()
+ args = demisto.args()
+ update_type = args.get('update_type', '')
+ field = args.get('field', '')
+ host_ip = args.get('host_ip', '')
+ fields_json = args.get('fields_json', '{}')
+ try:
+ fields_json_dict = json.loads(fields_json)
+ except json.decoder.JSONDecodeError:
+        return_error('Failed to parse \'fields_json\' command argument - invalid JSON format.')
+
+    # The API reports that it deleted multiple fields even when it only deletes one,
+    # so work around it here by issuing a separate delete request per field
+ if update_type == 'delete':
+ temp_args = {'update_type': update_type, 'host_ip': host_ip}
+ for key, val in fields_json_dict.items():
+ temp_args['fields_json'] = json.dumps({key: val})
+ update_host_fields(temp_args)
+ if field:
+ temp_args['fields_json'] = json.dumps({field: ''})
+ update_host_fields(temp_args)
+ temp_args['field'] = ''
+ update_host_fields(args) # Takes care of composite_field
+
+ response_content = update_host_fields(args)
+
+ resp_xml = ET_PHONE_HOME.fromstring(response_content)
+ msg_list = [child.text for child in resp_xml.iter() if child.tag == 'MESSAGE']
+ if len(msg_list) >= 1 and msg_list[0] is not None:
+ msg = msg_list[0]
+ msg = msg.replace('[', '').replace(']', '')
+ else:
+ err_msg = 'The response from Forescout could not be parsed correctly. It is uncertain if the host fields ' \
+ 'were successfully updated.'
+ return_error(err_msg)
+ demisto.results(msg)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+COMMANDS = {
+ 'test-module': test_module,
+ 'forescout-get-host': get_host_command,
+ 'forescout-get-hosts': get_hosts_command,
+ 'forescout-get-host-fields': get_host_fields_command,
+ 'forescout-get-policies': get_policies_command,
+ 'forescout-update-lists': update_lists_command,
+ 'forescout-update-host-fields': update_host_fields_command
+}
+
+''' EXECUTION '''
+
+
+def main():
+ """Main execution block"""
+
+ try:
+ ''' SETUP '''
+
+ if not ((WEB_API_USERNAME and WEB_API_PASSWORD) or (DEX_USERNAME and DEX_PASSWORD)):
+ err_msg = 'The username and password for at least one of the \'Data Exchange (DEX)\' or the \'Web API\' ' \
+ 'credentials are required though it is advisable to enter both in order for the integration to' \
+ ' be fully functional.'
+ return_error(err_msg)
+
+ if (DEX_USERNAME and DEX_PASSWORD) and not DEX_ACCOUNT:
+ err_msg = 'When entering your \'Data Exchange (DEX)\' credentials, the \'Data Exchange (DEX) Account\' ' \
+ 'configuration parameter is also required. For information on the correct value to enter here' \
+ ' - see Detailed Instructions (?).'
+ return_error(err_msg)
+
+ # Remove proxy if not set to true in params
+ handle_proxy()
+
+ cmd_name = demisto.command()
+ LOG('Command being called is {}'.format(cmd_name))
+
+ if cmd_name in COMMANDS.keys():
+ COMMANDS[cmd_name]()
+
+ except Exception as e:
+ return_error(str(e))
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == '__builtin__' or __name__ == 'builtins':
+ main()
diff --git a/Integrations/Forescout/Forescout.yml b/Integrations/Forescout/Forescout.yml
new file mode 100644
index 000000000000..b4658405bfe7
--- /dev/null
+++ b/Integrations/Forescout/Forescout.yml
@@ -0,0 +1,486 @@
+category: Network Security
+commonfields:
+ id: Forescout
+ version: -1
+configuration:
+- display: |-
+ The network address of the Forescout Enterprise Manager or
+ standalone Appliance, e.g., 'https://10.0.0.8'. #disable-secrets-detection
+ name: url
+ required: true
+ type: 0
+- display: Web API Username (see Detailed Instructions (?))
+ name: web_api_credentials
+ required: false
+ type: 9
+- display: Data Exchange (DEX) Username (see Detailed Instructions (?))
+ name: dex_credentials
+ required: false
+ type: 9
+- display: Data Exchange (DEX) Account (see Detailed Instructions (?))
+ name: dex_account
+ required: false
+ type: 0
+- defaultvalue: 'true'
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Unified device visibility and control platform for IT and OT Security.
+display: Forescout
+name: Forescout
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Filter hosts by those selected by policies or policy sub-rules.
+ Policies and/or rules should be specified by their IDs. To find policy and
+ rule IDs by which you can filter, run the 'forescout-get-policies'
+ command. If multiple policy and/or rule IDs are entered, only hosts that are
+      selected by all of the policies and/or rules specified will be returned. Multiple
+ policy or rule IDs should be separated by a comma.
+ isArray: true
+ name: rule_ids
+ required: false
+ secret: false
+ - default: false
+ description: Filter hosts based on host field values. Enter fields with their
+ associated values in the following format, '{field_1}={val_1}&{field_2}={val_2}
+ … &{field_n}={val_n}' where '{field_1}' through '{field_n}' are replaced by
+ actual field names and '{val_1}' through '{val_n}' are replaced by the desired
+ matching values. Note that a list field may be specified with the values separated
+ by commas. Only hosts whose properties match all the specified values will
+ be returned. For a list of potential host fields that may be specified, try
+      executing the 'forescout-get-host-fields' command. A composite property may
+ also be specified. If entered in the format where all the field-value pairs
+ are in a single set of square brackets, for example, '{composite_prop}=[{field_1},{val_1},..,{field_n},{val_n}]'
+ then only hosts for which the specified composite property's fields all match
+ the values entered will be returned. If entered in the format, '{composite_prop}=[{field_1},{val}_1],..,[{field_n},{val_n}]'
+ where each field-value pair is enclosed in its own set of brackets, then hosts
+ for which the composite property contains any of the field-values specified
+ will be returned. Note that for composite properties, sub-fields should be
+ entered as their internal representation in Forescout. To find internal representation
+ for a composite property's sub-fields try executing 'forescout-get-host' command
+ with the host specified in the 'identifier' argument and the name of the composite
+ property entered in the 'fields' argument of the command.
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of active endpoints.
+ execution: false
+ name: forescout-get-hosts
+ outputs:
+ - contextPath: Forescout.Host.ID
+ description: Forescout ID for the host.
+ type: Number
+ - contextPath: Forescout.Host.IPAddress
+ description: IP address of the host.
+ type: String
+ - contextPath: Forescout.Host.MACAddress
+ description: MAC address of the host.
+ type: String
+ - contextPath: Endpoint.IPAddress
+ description: IP address of the host.
+ type: String
+ - contextPath: Endpoint.MACAddress
+ description: MAC address of the host.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: true
+ defaultValue: name
+ description: Each host field has three searchable parts, the 'name', 'label',
+ and 'description'. By default only the 'name' will be searched. If you want
+ to expand the search to include the description, you would enter 'name,description' for this argument.
+ isArray: true
+ name: search_in
+ predefined:
+ - name
+ - description
+ - label
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: true
+ defaultValue: 'false'
+ description: Determines whether to match the case of the entered search term.
+ isArray: false
+ name: case_sensitive
+ predefined:
+ - 'false'
+ - 'true'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: true
+ defaultValue: 'False'
+ description: Determines whether the search term is matched against the entirety
+ of the potential host field instead of just seeing whether the host field
+ contains the search term.
+ isArray: false
+ name: match_exactly
+ predefined:
+ - 'False'
+ - 'True'
+ required: false
+ secret: false
+ - default: false
+ description: The term by which to filter host fields. By default, the search will
+ be case insensitive and checked to see if a host field contains the search
+ term unless otherwise specified in the 'case_sensitive' and 'match_exactly'
+ arguments respectively.
+ isArray: false
+ name: search_term
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: true
+ defaultValue: all_types
+ description: Limit the search to host fields whose values are of a certain type.
+ For example, to limit the search to host properties whose values are either
+ boolean, ip, or a date, enter 'boolean,ip,date'.
+ isArray: true
+ name: host_field_type
+ predefined:
+ - string
+ - boolean
+ - appliance
+ - port
+ - service
+ - list_change
+ - change
+ - ip
+ - composite
+ - ipv6
+ - session
+ - date
+ - integer
+ - tree_path
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves an index of Forescout host fields that match the specified
+ criteria.
+ execution: false
+ name: forescout-get-host-fields
+ outputs:
+ - contextPath: Forescout.HostField
+ description: List index of host properties.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: List of host properties to include in the output for the targeted
+ endpoint. If a specified host property is not found, the property is omitted
+ from the outputs. For a list of potential host properties that can be specified,
+ run the 'forescout-get-host-fields' command. Requested fields should
+ be comma separated.
+ isArray: true
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: IP (ipv4) of the endpoint. To get the Endpoint identifiers - IPs,
+ MAC addresses, and object IDs, run the `forescout-get-hosts` command.
+ isArray: false
+ name: ip
+ required: false
+ secret: false
+ - default: false
+ description: MAC address of the endpoint. To get the Endpoint identifiers - IPs,
+ MAC addresses, and object IDs, run the `forescout-get-hosts` command.
+ isArray: false
+ name: mac
+ required: false
+ secret: false
+ - default: false
+ description: Forescout ID of the endpoint. To get the Endpoint identifiers - IPs,
+ MAC addresses, and object IDs, run the `forescout-get-hosts` command.
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves details of specified host.
+ execution: false
+ name: forescout-get-host
+ outputs:
+ - contextPath: Forescout.Host.MatchedFingerprint
+ description: An endpoint might match multiple profiles. This property indicates
+ all the classification profiles that this endpoint matches.
+ type: Unknown
+ - contextPath: Forescout.Host.EngineSeenPacket
+ description: Indicates the host was seen by CounterACT.
+ type: String
+ - contextPath: Forescout.Host.Online
+ description: Host is online.
+ type: String
+ - contextPath: Forescout.Host.PrimClassification
+ description: Indicates the most specific endpoint function detected. If CounterACT
+ detects multiple endpoint functions, the property is resolved as the most
+ specific value that is common to all the detected functions. If there is no
+ common value, the property is resolved as 'Multiple Suggestions'.
+ type: String
+ - contextPath: Forescout.Host.MacVendorString
+ description: Indicates a value associated with the NIC Vendor.
+ type: String
+ - contextPath: Forescout.Host.SambaOpenPort
+ description: NetBIOS ports that are open.
+ type: String
+ - contextPath: Forescout.Host.UserDefFp
+ description: Indicates the operating system of the endpoint, as determined by
+ classification tools.
+ type: String
+ - contextPath: Forescout.Host.Vendor
+ description: Network Device Vendor, Type, and Model.
+ type: String
+ - contextPath: Forescout.Host.AgentVersion
+ description: Indicates the SecureConnector version installed on a Windows host.
+ type: String
+ - contextPath: Forescout.Host.Fingerprint
+ description: Passive OS detection based on Syn packets.
+ type: String
+ - contextPath: Forescout.Host.AccessIP
+ description: Indicates the last IP that was investigated for this host.
+ type: String
+ - contextPath: Forescout.Host.VendorClassification
+ description: Indicates the most specific vendor and model detected.
+ type: String
+ - contextPath: Forescout.Host.ManageAgent
+ description: Indicates if the host is running SecureConnector.
+ type: String
+ - contextPath: Forescout.Host.Onsite
+ description: Indicates that a host is connected to the organizational network.
+ type: String
+ - contextPath: Forescout.Host.MacPrefix32
+ description: MAC prefix.
+ type: String
+ - contextPath: Forescout.Host.VaNetfunc
+ description: Reported CDP VoIP device description for VA netfunc.
+ type: String
+ - contextPath: Forescout.Host.NmapDefFp7
+ description: Nmap-OS Fingerprint (Ver. 7.01).
+ type: String
+ - contextPath: Forescout.Host.NmapDefFp5
+ description: Nmap-OS Fingerprint (Ver. 5.3).
+ type: String
+ - contextPath: Forescout.Host.AgentInstallMode
+ description: Indicates the SecureConnector deployment mode installed on the
+ host.
+ type: String
+ - contextPath: Forescout.Host.NmapFp7
+ description: Nmap-OS Class(Ver. 7.01) (Obsolete).
+ type: String
+ - contextPath: Forescout.Host.ClType
+ description: Indicates how CounterACT determines the Network Function property
+ of the endpoint.
+ type: String
+ - contextPath: Forescout.Host.ClRule
+ description: Indicates the rule responsible for classifying the host.
+ type: String
+ - contextPath: Forescout.Host.AgentVisibleMode
+ description: Indicates the SecureConnector visible mode installed on the host.
+ type: String
+ - contextPath: Forescout.Host.OSClassification
+ description: Operating system.
+ type: String
+ - contextPath: Forescout.Host.ClassificationSourceOS
+ description: Indicates how the OS classification property was
+ determined for this endpoint.
+ type: String
+ - contextPath: Forescout.Host.LastNbtReportTime
+ description: Last time the NBT name was reported.
+ type: String
+ - contextPath: Forescout.Host.Misc
+ description: Miscellaneous.
+ type: String
+ - contextPath: Forescout.Host.ClassificationSourceFunc
+ description: Indicates how the Function classification property was determined
+ for this endpoint.
+ type: String
+ - contextPath: Forescout.Host.NmapNetfunc7
+ description: Nmap-Network Function (Ver. 7.01).
+ type: String
+ - contextPath: Forescout.Host.MAC
+ description: ARP spoofing (Obsolete).
+ type: Unknown
+ - contextPath: Forescout.Host.OpenPort
+ description: Open ports.
+ type: Unknown
+ - contextPath: Forescout.Host.GstSignedInStat
+ description: Logged in status.
+ type: String
+ - contextPath: Forescout.Host.DhcpClass
+ description: The device class, according to the DHCP fingerprint.
+ type: String
+ - contextPath: Forescout.Host.ADM
+ description: Admission events.
+ type: String
+ - contextPath: Forescout.Host.DhcpReqFingerprint
+ description: The host DHCP request fingerprint.
+ type: String
+ - contextPath: Forescout.Host.DhcpOptFingerprint
+ description: The host DHCP options fingerprint.
+ type: String
+ - contextPath: Forescout.Host.Ipv4ReportTime
+ description: Indicates the last time that IPv4 reported to the infrastructure.
+ type: String
+ - contextPath: Forescout.Host.DhcpOS
+ description: The device OS, according to the DHCP fingerprint.
+ type: String
+ - contextPath: Forescout.Host.DhcpHostname
+ description: The device host name, as advertised by DHCP.
+ type: String
+ - contextPath: Forescout.Host.IPAddress
+ description: Host IP address.
+ type: String
+ - contextPath: Forescout.Host.MACAddress
+ description: Host MAC address.
+ type: String
+ - contextPath: Forescout.Host.ID
+ description: Forescout ID number for the host.
+ type: Number
+ - contextPath: Endpoint.IPAddress
+ description: IP Address of the host.
+ type: String
+ - contextPath: Endpoint.MACAddress
+ description: MAC address of the host.
+ type: String
+ - contextPath: Endpoint.DHCPServer
+ description: Endpoint DHCP server.
+ type: String
+ - contextPath: Endpoint.Hostname
+ description: Hostname of the endpoint.
+ type: String
+ - contextPath: Endpoint.OS
+ description: Endpoint OS.
+ type: String
+ - contextPath: Endpoint.Model
+ description: Vendor and model of the endpoint.
+ type: String
+ - contextPath: Endpoint.Domain
+ description: Domain of the endpoint.
+ type: String
+ - deprecated: false
+ description: |-
+ Retrieves a list of all policies defined in the Forescout platform and
+ their sub-rules.
+ execution: false
+ name: forescout-get-policies
+ outputs:
+ - contextPath: Forescout.Policy.ID
+ description: Forescout ID for the policy.
+ type: String
+ - contextPath: Forescout.Policy.Name
+ description: Forescout name of the policy.
+ type: String
+ - contextPath: Forescout.Policy.Description
+ description: Description of the policy.
+ type: String
+ - contextPath: Forescout.Policy.Rule
+ description: List of rules that make up the policy.
+ type: Unknown
+ - arguments:
+ - auto: PREDEFINED
+ default: true
+ defaultValue: update
+ description: The type of update to perform on a host field.
+ isArray: false
+ name: update_type
+ predefined:
+ - update
+ - delete
+ required: false
+ secret: false
+ - default: false
+    description: The IP address of the target host. Required if 'update_type' is
+ "update" or "delete".
+ isArray: false
+ name: host_ip
+ required: true
+ secret: false
+ - default: false
+    description: Enter the name of the field to update. Composite fields should
+ be updated using the 'fields_json' command argument.
+ isArray: false
+ name: field
+ required: false
+ secret: false
+ - default: false
+ description: Value to be assigned to the field specified in the 'field' argument.
+ If the value is a list of items, then items should be separated using a comma.
+ isArray: true
+ name: value
+ required: false
+ secret: false
+ - default: false
+ description: 'One may perform multiple field-value assignments using this command
+ argument. The argument should be entered in valid JSON format. This argument
+ is useful for setting composite fields although other fields may be entered
+ as well. For example, ''{"Example_Composite": [{"Shape": "Triangle", "Color":
+ "Beige"}, {"Shape": "Square", "Color": "Violet"}], "String_Field": "Example"}''
+ where ''Example_Composite'' is the name of the Composite field in Forescout
+ and ''Shape'' and ''Color'' are sub fields. In the example, ''String_Field''
+ is a regular host field of type string whose value will be assigned ''Example''.
+ If the composite field was defined in Forescout as an aggregate property then
+ additional records will be appended, otherwise they will be overwritten. '
+ isArray: false
+ name: fields_json
+ required: false
+ secret: false
+ deprecated: false
+ description: Update a host's field. If a List field or Composite field
+ has not been defined in Forescout to 'Aggregate new values from each update'
+ then performing an update operation on a field will overwrite previous data
+ written to that field.
+ execution: false
+ name: forescout-update-host-fields
+ - arguments:
+ - auto: PREDEFINED
+ default: true
+ defaultValue: add_list_values
+ description: The type of update to perform on a Forescout list.
+ isArray: false
+ name: update_type
+ predefined:
+ - add_list_values
+ - delete_list_values
+ - delete_all_list_values
+ required: false
+ secret: false
+ - default: false
+ description: Names of lists defined in the Forescout platform that you wish
+ to update. If the 'update_type' is set to 'delete_all_list_values' then it
+ is unnecessary to fill in the 'values' command argument. Multiple list names
+ should be separated by a comma. To find names of lists that may be updated,
+ navigate to *Tools* > *Options* > *Lists* in the Forescout platform.
+ isArray: true
+ name: list_names
+ required: true
+ secret: false
+ - default: false
+ description: The values to add or delete from the lists entered in the 'list_names'
+      command argument. Multiple values should be separated by a comma. Note that the
+ values entered here will be updated for all of the lists entered in the 'list_names'
+ command argument.
+ isArray: true
+ name: values
+ required: false
+ secret: false
+ deprecated: false
+ description: Update Forescout lists.
+ execution: false
+ name: forescout-update-lists
+ dockerimage: demisto/python3:3.7.3.221
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- Forescout-Test
diff --git a/Integrations/Forescout/Forescout_description.md b/Integrations/Forescout/Forescout_description.md
new file mode 100644
index 000000000000..71f9a3d09dba
--- /dev/null
+++ b/Integrations/Forescout/Forescout_description.md
@@ -0,0 +1,40 @@
+# Instructions
+
+## Forescout Module Requirements
+Before you can use this integration in Demisto, you need to enable certain modules in your Forescout environment.
+1. In the Forescout console, from the navigation bar select *Tools > Options*.
+2. In the dialog that appears, from the categories section on the left, click *Modules*.
+3. In the main area of the dialog, from the drop-down menu, select *Open Integration Module*.
+Make sure that the integration module and the following submodules are installed and enabled: *Data Exchange (DEX)* and *Web API*. If they aren't, install and enable them.
+
+## Configuration Parameters
+
+**url**
+This is the network address of the Forescout Enterprise Manager or standalone Appliance. (The host on which the Forescout Appliance is hosted.) For example, if the Forescout Appliance is hosted at the IP address *192.168.10.23*, then you enter *https://192.168.10.23*.
+
+**Web API Username** and **Password**
+The credentials entered here should be those created in the Forescout console for the *Web API*.
+1. In the Forescout console, from the top navigation bar, click *Tools > Options*.
+2. From the dialog that appears, in the categories section on the left, click *Web API*, and select *User Settings*.
+3. Create a username and password by clicking the *Add* button, and completing the fields. These are the credentials that you will enter when configuring the Demisto-Forescout integration: *Web API Username* and *Password*.
+4. Select *Client IPs* towards the top of the main area of the dialog, next to *User Settings*.
+5. Add the IP address where your Demisto instance is hosted or allow requests from all IP addresses to make sure that requests made by the Demisto-Forescout integration will be permitted.
+6. Click the *Apply* button to save the changes you made.
+
+**Data Exchange (DEX) Username** and **Password**
+The credentials entered here should be those created in the Forescout console for *Data Exchange (DEX)*.
+1. In the Forescout console, from the top navigation bar, click *Tools > Options*.
+2. From the dialog that appears, in the categories section on the left, click *Data Exchange (DEX)*.
+3. Select *CounterACT Web Service > Accounts*.
+4. Create a username and password by clicking the *Add* button, and completing the fields. **Note**: The value you entered for the *Name* field in the account-creation pop-up window is the value that you should enter for the *Data Exchange (DEX) Account* configuration parameter.
+5. Click the *Apply* button to save the changes you made.
+
+The username and password entered in the account-creation dialog are the credentials that you will enter when configuring the Demisto-Forescout integration: *Data Exchange (DEX) Username* and *Password*.
+
+**Data Exchange (DEX) Account**
+The *Data Exchange (DEX)* credentials *Name* field. This can be found by navigating to *Tools > Options > Data Exchange (DEX) > CounterACT Web Service > Accounts*.
+
+## Important Usage Notes
+This integration allows the user to update host properties and Forescout Lists.
+To create Forescout properties, which can then be updated using the Demisto-Forescout integration, from the Forescout console, navigate to *Tools > Options > Data Exchange (DEX) > CounterACT Web Service > Properties*. This is where you create new properties. Make sure to associate the properties with the account you created, and which you used in the configuration parameters of the Forescout integration in Demisto.
+Lists must also be defined and created in the Forescout console before you can update them using the Demisto-Forescout integration. For more information, reference the *Defining and Managing Lists* section in the [Forescout Administration Guide](https://www.forescout.com/wp-content/uploads/2018/04/CounterACT_Administration_Guide_8.0.pdf).
diff --git a/Integrations/Forescout/Forescout_image.png b/Integrations/Forescout/Forescout_image.png
new file mode 100644
index 000000000000..885d68cc0619
Binary files /dev/null and b/Integrations/Forescout/Forescout_image.png differ
diff --git a/Integrations/Forescout/Forescout_test.py b/Integrations/Forescout/Forescout_test.py
new file mode 100644
index 000000000000..c45532e75b6b
--- /dev/null
+++ b/Integrations/Forescout/Forescout_test.py
@@ -0,0 +1,49 @@
+from Forescout import dict_to_formatted_string
+
+# disable-secrets-detection-start
+example_dict = {
+ 'again': 'FsoDxgFGKJYhqNQPmWRY',
+ 'church': {
+ 'buy': 'omarjoseph@gmail.com',
+ 'cut': 14,
+ 'full': 6526,
+ 'go': 'pArcBeCGHYaqtfhFqVzU',
+ 'grow': 'gtaylor@hotmail.com',
+ 'month': '2009-08-11 16:42:51',
+ 'phone': 2311,
+ 'recent': 7775,
+ 'second': -66328998740807.2,
+ 'see': 'woodchristine@delgado-tucker.com',
+ 'some': 'TYuihvEVpjSzyzMdVlbc',
+ 'thus': 9646,
+ 'win': 6003
+ },
+ 'investment': 'HEIWSzGzpPSVsBdIePAh',
+ 'line': 'lambertkevin@vincent-thomas.com',
+ 'maintain': 'KucNqjHoKxPVoKGhocyk',
+ 'production': 5507,
+ 'so': [
+ 9350,
+ 9854,
+ 'awznwdFCSyFiGcCEZRLS',
+ 7105,
+ 'mMRxcMllqqxMcrBaIaYX',
+ 'NrGaqvEJQSEVjkgGiglk',
+ 'UbuLUckTjNVemGIfGaDs',
+ 'ZOhHcMjlXpWgbNkdSrDP',
+ 'XWOejRXLOvujrZyPvTKp',
+ 4.7568,
+ 'http://gonzales.org/',
+ 2643
+ ]
+}
+# disable-secrets-detection-end
+
+
+class TestDictToFormattedString(object):
+ def test_dict_to_formatted_string_1(self):
+ example_dict_string = dict_to_formatted_string(example_dict)
+ assert not example_dict_string.startswith('{')
+ assert not example_dict_string.endswith('}')
+ assert '\'' not in example_dict_string
+ assert '"' not in example_dict_string
diff --git a/Integrations/FortiGate/CHANGELOG.md b/Integrations/FortiGate/CHANGELOG.md
new file mode 100644
index 000000000000..fb6818f089c4
--- /dev/null
+++ b/Integrations/FortiGate/CHANGELOG.md
@@ -0,0 +1,12 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+Added 3 commands.
+- ***fortigate-ban-ip***
+- ***fortigate-unban-ip***
+- ***fortigate-get-banned-ips***
+
+
+## [19.8.0] - 2019-08-06
+-
diff --git a/Integrations/FortiGate/FortiGate.py b/Integrations/FortiGate/FortiGate.py
new file mode 100644
index 000000000000..a5d22058f031
--- /dev/null
+++ b/Integrations/FortiGate/FortiGate.py
@@ -0,0 +1,1154 @@
+import demistomock as demisto
+from CommonServerPython import *
+''' IMPORTS '''
+
+import json
+import requests
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+USER_NAME = demisto.params().get('credentials').get('identifier')
+PASSWORD = demisto.params().get('credentials').get('password')
+SERVER = demisto.params()['server'][:-1] if (demisto.params()['server'] and demisto.params()
+ ['server'].endswith('/')) else demisto.params()['server']
+USE_SSL = not demisto.params().get('unsecure', False)
+BASE_URL = SERVER + '/api/v2/'
+
+
+# remove proxy if not set to true in params
+if not demisto.params().get('proxy'):
+    # use pop() with a default so a missing variable doesn't raise KeyError
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+
+''' HELPER FUNCTIONS '''
+
+
+def login():
+ """
+    Because the API token does not provide the right level of access, we create a session
+    and inject the CSRF token provided by the service into its headers.
+    Plain one-off requests won't work here, as the session must be kept alive.
+ """
+ # create session.
+ session = requests.session()
+ url_suffix = '/logincheck'
+ params = {
+ 'username': USER_NAME,
+ 'secretkey': PASSWORD,
+ 'ajax': 1
+ }
+ session.post(SERVER + url_suffix, data=params, verify=USE_SSL) # type: ignore
+ # check for the csrf token in cookies we got, add it to headers of session,
+ # or else we can't perform HTTP request that is not get.
+ for cookie in session.cookies:
+ if cookie.name == 'ccsrftoken':
+ csrftoken = cookie.value[1:-1] # strip quotes
+ session.headers.update({'X-CSRFTOKEN': csrftoken})
+ return session
+
+
+SESSION = login()
+
+
+def http_request(method, url_suffix, params={}, data=None):
+
+ res = SESSION.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ params=params,
+ data=data
+ )
+    if res.status_code != 200:
+ return_error('Error in API call to FortiGate [%d] - %s' % (res.status_code, res.reason))
+ if method.upper() != 'GET':
+ return res.status_code
+ return res.json()
+
+
+def does_path_exist(target_url):
+ """
+ Check if the path itself already exists in the instance, if it does we will not want to resume with certain requests.
+ """
+ res = SESSION.get(BASE_URL + target_url, verify=USE_SSL)
+ if res.status_code == 200:
+ return True
+ return False
+
+
+def create_addr_string(list_of_addr_data_dicts):
+    # Newline-separated string of the address names
+    return "\n".join("{}".format(addr_data.get("name")) for addr_data in list_of_addr_data_dicts)
+
+
+def convert_arg_to_int(arg_str, arg_name_str):
+ try:
+ arg_int = int(arg_str)
+ except ValueError:
+ return_error("Error: {0} must have an integer value.".format(arg_name_str))
+ return arg_int
+
+
+def prettify_date(date_string):
+ """
+ This function receives a string representing a date, for example 2018-07-28T10:47:55.000Z.
+ It returns the same date in a readable format - for example, 2018-07-28 10:47:55.
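+
+    >>> prettify_date("2018-07-28T10:47:55.000Z")
+    '2018-07-28 10:47:55'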
+ """
+    date_string = date_string[:-5]  # remove the '.000Z' at the end
+ date_prettified = date_string.replace("T", " ")
+ return date_prettified
+
+
+def create_banned_ips_entry_context(ips_data_array):
+ ips_contexts_array = []
+ for ip_data in ips_data_array:
+ current_ip_context = {
+ "IP": ip_data.get("ip_address"),
+ "Source": ip_data.get("source")
+ }
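+        # 'expires' and 'created' arrive as epoch seconds, while timestamp_to_datestring
+        # expects milliseconds - hence the 1000x conversion below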
+ if ip_data.get("expires"):
+ expiration_in_ms = 1000 * int(ip_data.get("expires", 0))
+ current_ip_context["Expires"] = prettify_date(timestamp_to_datestring(expiration_in_ms))
+ if ip_data.get("created"):
+ creation_in_ms = 1000 * int(ip_data.get("created", 0))
+ current_ip_context["Created"] = prettify_date(timestamp_to_datestring(creation_in_ms))
+ ips_contexts_array.append(current_ip_context)
+ return ips_contexts_array
+
+
+def create_banned_ips_human_readable(entry_context):
+ banned_ip_headers = ["IP", "Created", "Expires", "Source"]
+ human_readable = tableToMarkdown("Banned IP Addresses", entry_context, banned_ip_headers)
+ return human_readable
+
+
+def str_to_bool(str_representing_bool):
+ return str_representing_bool and str_representing_bool.lower() == 'true'
+
+
+def generate_src_or_dst_request_data(policy_id, policy_field, policy_field_value, keep_original_data, add_or_remove):
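+    """
+    Build the list of {'name': <address>} dicts that the policy endpoint expects for an
+    address field (e.g. 'srcaddr' or 'dstaddr'), optionally merging with ('add') or
+    removing from ('remove') the addresses already configured on the policy.
+    """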
+ address_list_for_request = policy_field_value.split(",")
+ if str_to_bool(keep_original_data):
+ policy_data = get_policy_request(policy_id)[0] # the return value is an array with one element
+        existing_addresses_list = policy_data.get(policy_field)
+        existing_addresses_list = [address_data["name"] for address_data in existing_addresses_list]
+        if add_or_remove.lower() == "add":
+            for address in existing_addresses_list:
+                if address not in address_list_for_request:
+                    address_list_for_request.append(address)
+        else:
+            address_list_for_request = [address for address in existing_addresses_list if address not in address_list_for_request]
+
+ address_data_dicts_for_request = policy_addr_array_from_arg(address_list_for_request, False)
+ return address_data_dicts_for_request
+
+
+def logout(session):
+ """
+    Because the number of simultaneous connections is limited, we log out after each API request.
+    This is a simple POST request to the /logout endpoint without params.
+ """
+ url_suffix = '/logout'
+ params = {} # type: dict
+ session.post(SERVER + url_suffix, data=params, verify=USE_SSL)
+
+
+def policy_addr_array_from_arg(policy_addr_data, is_data_string=True):
+ # if the data isn't in string format, it's already an array and requires no formatting
+ policy_adr_str_array = policy_addr_data.split(",") if is_data_string else policy_addr_data
+ policy_addr_dict_array = []
+ for src_addr_name in policy_adr_str_array:
+ cur_addr_dict = {
+ "name": src_addr_name
+ }
+ policy_addr_dict_array.append(cur_addr_dict)
+ return policy_addr_dict_array
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+ Perform basic login and logout operation, validate connection.
+ """
+ http_request('GET', 'cmdb/system/vdom')
+ return True
+
+
+def get_addresses_command():
+ contents = []
+ context = {}
+ addresses_context = []
+ address = demisto.args().get('address')
+ name = demisto.args().get('name', '')
+
+ addresses = get_addresses_request(address, name)
+ for address in addresses:
+ subnet = address.get('subnet')
+ if subnet:
+ subnet = subnet.replace(" ", "-")
+ contents.append({
+ 'Name': address.get('name'),
+ 'Subnet': subnet,
+ 'StartIP': address.get('start-ip'),
+ 'EndIP': address.get('end-ip')
+ })
+ addresses_context.append({
+ 'Name': address.get('name'),
+ 'Subnet': subnet,
+ 'StartIP': address.get('start-ip'),
+ 'EndIP': address.get('end-ip')
+ })
+
+ context['Fortigate.Address(val.Name && val.Name === obj.Name)'] = addresses_context
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate addresses', contents),
+ 'EntryContext': context
+ })
+
+
+def get_addresses_request(address, name):
+ uri_suffix = 'cmdb/firewall/address/' + name
+ params = {
+ 'vdom': address
+ }
+ response = http_request('GET', uri_suffix, params)
+ # Different structure if we choose all domains
+ if address == '*':
+ return response[0].get('results')
+ return response.get('results')
+
+
+def get_service_groups_command():
+ contents = []
+ context = {}
+ service_groups_context = []
+ name = demisto.args().get('name', '')
+
+ service_groups = get_service_groups_request(name)
+ for service_group in service_groups:
+ service_group_members = []
+ members = service_group.get('member')
+ for member in members:
+ service_group_members.append(member.get('name'))
+ contents.append({
+ 'Name': service_group.get('name'),
+ 'Members': service_group_members
+ })
+ service_groups_context.append({
+ 'Name': service_group.get('name'),
+ 'Member': {'Name': service_group_members}
+ })
+
+ context['Fortigate.ServiceGroup(val.Name && val.Name === obj.Name)'] = service_groups_context
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate service groups', contents),
+ 'EntryContext': context
+ })
+
+
+def get_service_groups_request(name):
+ uri_suffix = 'cmdb/firewall.service/group/' + name
+ response = http_request('GET', uri_suffix)
+ return response.get('results')
+
+
+def update_service_group_command():
+ context = {}
+
+ group_name = demisto.args().get('groupName')
+ service_name = demisto.args().get('serviceName')
+ action = demisto.args().get('action')
+ if action not in ['add', 'remove']:
+ return_error('Action must be add or remove')
+
+ old_service_groups = get_service_groups_request(group_name)
+ service_group_members = [] # type: list
+ new_service_group_members = [] # type: list
+
+ if isinstance(old_service_groups, list):
+ old_service_group = old_service_groups[0]
+ service_group_members = old_service_group.get('member')
+ if action == 'add':
+ service_group_members.append({'name': service_name})
+ new_service_group_members = service_group_members
+ if action == 'remove':
+ for service_group_member in service_group_members:
+ if service_group_member.get('name') != service_name:
+ new_service_group_members.append(service_group_member)
+
+ update_service_group_request(group_name, new_service_group_members)
+ service_group = get_service_groups_request(group_name)[0]
+
+ service_group_members = []
+ members = service_group.get('member')
+ for member in members:
+ service_group_members.append(member.get('name'))
+
+ contents = {
+ 'Name': service_group.get('name'),
+ 'Services': service_group_members
+ }
+
+ service_group_context = {
+ 'Name': service_group.get('name'),
+ 'Service': {
+ 'Name': service_group_members
+ }
+ }
+
+ context['Fortigate.ServiceGroup(val.Name && val.Name === obj.Name)'] = service_group_context
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate service group: ' + group_name + ' was successfully updated', contents),
+ 'EntryContext': context
+ })
+
+
+def update_service_group_request(group_name, members_list):
+ uri_suffix = 'cmdb/firewall.service/group/' + group_name
+ if not does_path_exist(uri_suffix):
+ return_error('Requested service group ' + group_name + ' does not exist in Firewall config.')
+
+ payload = {
+ 'member': members_list
+ }
+
+ response = http_request('PUT', uri_suffix, {}, json.dumps(payload))
+ return response
+
+
+def delete_service_group_command():
+ context = {}
+ group_name = demisto.args().get('groupName').encode('utf-8')
+
+ delete_service_group_request(group_name)
+
+ service_group_context = {
+ 'Name': group_name,
+ 'Deleted': True
+ }
+
+ contents = service_group_context
+ context['Fortigate.ServiceGroup(val.Name && val.Name === obj.Name)'] = service_group_context
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate service group: ' + group_name + ' was deleted successfully', contents),
+ 'EntryContext': context
+ })
+
+
+def delete_service_group_request(group_name):
+ uri_suffix = 'cmdb/firewall.service/group/' + group_name
+ response = http_request('DELETE', uri_suffix)
+ return response
+
+
+def get_firewall_service_command():
+ contents = []
+ context = {}
+ service_context = []
+ service_name = demisto.args().get('serviceName', '')
+ service_title = service_name
+ if not service_name:
+ service_title = 'all services'
+
+ services = get_firewall_service_request(service_name)
+ for service in services:
+ contents.append({
+ 'Name': service.get('name'),
+ 'Ports': {
+ 'TCP': service.get('tcp-portrange'),
+ 'UDP': service.get('udp-portrange')
+ }
+ })
+ service_context.append({
+ 'Name': service.get('name'),
+ 'Ports': {
+ 'TCP': service.get('tcp-portrange'),
+ 'UDP': service.get('udp-portrange')
+ }
+ })
+
+ context['Fortigate.Service(val.Name && val.Name === obj.Name)'] = service_context
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate firewall services ' + service_title, contents),
+ 'EntryContext': context
+ })
+
+
+def get_firewall_service_request(service_name):
+ uri_suffix = 'cmdb/firewall.service/custom/' + service_name
+ response = http_request('GET', uri_suffix)
+ return response.get('results')
+
+
+def create_firewall_service_command():
+ contents = []
+ context = {}
+ service_context = []
+ service_name = demisto.args().get('serviceName')
+ tcp_range = demisto.args().get('tcpRange', '')
+ udp_range = demisto.args().get('udpRange', '')
+
+ create_firewall_service_request(service_name, tcp_range, udp_range)
+
+ contents.append({
+ 'Name': service_name,
+ 'Ports': {
+ 'TCP': tcp_range,
+ 'UDP': udp_range
+ }
+ })
+ service_context.append({
+ 'Name': service_name,
+ 'Ports': {
+ 'TCP': tcp_range,
+ 'UDP': udp_range
+ }
+ })
+
+ context['Fortigate.Service(val.Name && val.Name === obj.Name)'] = service_context
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate firewall service ' + service_name + ' created successfully', contents),
+ 'EntryContext': context
+ })
+
+
+def create_firewall_service_request(service_name, tcp_range, udp_range):
+ uri_suffix = 'cmdb/firewall.service/custom/'
+ if does_path_exist(uri_suffix + service_name):
+ return_error('Firewall service already exists.')
+
+ payload = {
+ 'name': service_name,
+ 'tcp-portrange': tcp_range,
+ 'udp-portrange': udp_range
+ }
+
+ response = http_request('POST', uri_suffix, {}, json.dumps(payload))
+ return response
+
+
+def ban_ip(ip_addresses_array, time_to_expire=0):
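+    """
+    Ban the given IP addresses via the monitor API.
+    time_to_expire is the ban duration in seconds; 0 means the ban never expires.
+    """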
+ uri_suffix = 'monitor/user/banned/add_users/'
+
+ payload = {
+ 'ip_addresses': ip_addresses_array,
+ 'expiry': time_to_expire
+ }
+
+ response = http_request('POST', uri_suffix, data=json.dumps(payload))
+ return response
+
+
+def ban_ip_command():
+ ip_addresses_string = demisto.args()['ip_address']
+ ip_addresses_array = argToList(ip_addresses_string)
+ for ip_address in ip_addresses_array:
+ if not is_ip_valid(ip_address, accept_v6_ips=True):
+ return_error('Error: invalid IP address sent as argument.')
+
+ time_to_expire = demisto.args().get('expiry')
+ if time_to_expire:
+ time_to_expire = convert_arg_to_int(time_to_expire, 'expiry')
+ else:
+ # The default time to expiration is 0, which means infinite time (It will remain banned).
+ time_to_expire = 0
+
+ response = ban_ip(ip_addresses_array, time_to_expire)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': response,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'IPs {0} banned successfully'.format(ip_addresses_string)
+ })
+
+
+def unban_ip(ip_addresses_array):
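+    """Clear the ban on the given IP addresses via the monitor API."""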
+ uri_suffix = 'monitor/user/banned/clear_users/'
+
+ payload = {
+ 'ip_addresses': ip_addresses_array
+ }
+ response = http_request('POST', uri_suffix, data=json.dumps(payload))
+ return response
+
+
+def unban_ip_command():
+ ip_addresses_string = demisto.args()['ip_address']
+ ip_addresses_array = argToList(ip_addresses_string)
+ for ip_address in ip_addresses_array:
+ if not is_ip_valid(ip_address, accept_v6_ips=True):
+ return_error('Error: invalid IP address sent as argument.')
+
+ response = unban_ip(ip_addresses_array)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': response,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'IPs {0} un-banned successfully'.format(ip_addresses_string)
+ })
+
+
+def get_banned_ips():
+ uri_suffix = 'monitor/user/banned/select/'
+ response = http_request('GET', uri_suffix)
+ return response
+
+
+def get_banned_ips_command():
+ response = get_banned_ips()
+ ips_data_array = response.get('results')
+ entry_context = create_banned_ips_entry_context(ips_data_array)
+ human_readable = create_banned_ips_human_readable(entry_context)
+ return_outputs(
+ raw_response=response,
+ readable_output=human_readable,
+ outputs={
+ 'Fortigate.BannedIP(val.IP===obj.IP)': entry_context
+ }
+ )
+
+
+def get_policy_command():
+ contents = []
+ context = {}
+ policy_context = []
+ policy_name = demisto.args().get('policyName')
+ policy_id = demisto.args().get('policyID')
+ policy_title = 'all policies'
+
+ policies = get_policy_request(policy_id)
+
+ for policy in policies:
+ if policy_name == policy.get('name') or not policy_name:
+ if policy_name or policy_id:
+ policy_title = policy.get('name')
+ security_profiles = []
+ all_security_profiles = [policy.get('webfilter-profile'), policy.get('ssl-ssh-profile'),
+ policy.get('dnsfilter-profile'), policy.get('profile-protocol-options'),
+ policy.get('profile-type'), policy.get('av-profile')]
+ for security_profile in all_security_profiles:
+ if security_profile:
+ security_profiles.append(security_profile)
+
+ src_address = policy.get('srcaddr')
+ if src_address and isinstance(src_address, list) and isinstance(src_address[0], dict):
+ src_address = create_addr_string(src_address)
+ dest_address = policy.get('dstaddr')
+ if dest_address and isinstance(dest_address, list) and isinstance(dest_address[0], dict):
+ dest_address = create_addr_string(dest_address)
+ service = policy.get('service')
+ if service and isinstance(service, list) and isinstance(service[0], dict):
+ service = service[0].get('name')
+
+ contents.append({
+ 'Name': policy.get('name'),
+ 'ID': int(policy.get('policyid')),
+ 'Description': policy.get('comments'),
+ 'Status': policy.get('status'),
+ 'Source': src_address,
+ 'Destination': dest_address,
+ 'Service': service,
+ 'Action': policy.get('action'),
+ 'Log': policy.get('logtraffic'),
+ 'Security': security_profiles,
+ 'NAT': policy.get('nat')
+ })
+ policy_context.append({
+ 'Name': policy.get('name'),
+ 'ID': int(policy.get('policyid')),
+ 'Description': policy.get('comments'),
+ 'Status': policy.get('status'),
+ 'Source': src_address,
+ 'Destination': dest_address,
+ 'Service': service,
+ 'Action': policy.get('action'),
+ 'Log': policy.get('logtraffic'),
+ 'Security': security_profiles,
+ 'NAT': policy.get('nat')
+ })
+
+ context['Fortigate.Policy(val.ID && val.ID === obj.ID)'] = policy_context
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate policy details for ' + policy_title, contents),
+ 'EntryContext': context
+ })
+
+
+def get_policy_request(policy_id):
+ uri_suffix = 'cmdb/firewall/policy/'
+ if policy_id:
+ uri_suffix = uri_suffix + policy_id + '/'
+    # We have the option to request only the fields we need from each policy,
+    # reducing the amount of data we need to read by over 80%.
+ params = {
+ 'format': 'policyid|action|name|comments|status|service|logtraffic|srcaddr|'
+ 'dstaddr|webfilter-profile|ssl-ssh-profile|dnsfilter-profile|'
+ 'profile-protocol-options|profile-type|av-profile|nat'
+ }
+ response = http_request('GET', uri_suffix, params)
+ return response.get('results')
+
+
+def update_policy_command():
+ contents = []
+ context = {}
+ policy_context = []
+ security_profiles = []
+
+ policy_id = demisto.args().get('policyID')
+ policy_field = demisto.args().get('field')
+ policy_field_value = demisto.args().get('value')
+ keep_original_data = demisto.args().get('keep_original_data')
+ add_or_remove = demisto.args().get('add_or_remove')
+
+ if keep_original_data and keep_original_data.lower() == 'true' and not add_or_remove:
+ return_error('Error: add_or_remove must be specified if keep_original_data is true.')
+
+ update_policy_request(policy_id, policy_field, policy_field_value, keep_original_data, add_or_remove)
+ policy = get_policy_request(policy_id)[0]
+ all_security_profiles = [policy.get('webfilter-profile'), policy.get('ssl-ssh-profile'), policy.get(
+ 'dnsfilter-profile'), policy.get('profile-protocol-options'), policy.get('profile-type'), policy.get('av-profile')]
+
+ for security_profile in all_security_profiles:
+ if security_profile:
+ security_profiles.append(security_profile)
+
+ src_address = policy.get('srcaddr')
+ if src_address and isinstance(src_address, list) and isinstance(src_address[0], dict):
+ src_address = src_address[0].get('name')
+ dest_address = policy.get('dstaddr')
+ if dest_address and isinstance(dest_address, list) and isinstance(dest_address[0], dict):
+ dest_address = dest_address[0].get('name')
+ service = policy.get('service')
+ if service and isinstance(service, list) and isinstance(service[0], dict):
+ service = service[0].get('name')
+
+ contents.append({
+ 'Name': policy.get('name'),
+ 'ID': policy.get('policyid'),
+ 'Description': policy.get('comments'),
+ 'Status': policy.get('status'),
+ 'Source': src_address,
+ 'Destination': dest_address,
+ 'Service': service,
+ 'Action': policy.get('action'),
+ 'Log': policy.get('logtraffic'),
+ 'Security': security_profiles,
+ 'NAT': policy.get('nat')
+ })
+ policy_context.append({
+ 'Name': policy.get('name'),
+ 'ID': policy.get('policyid'),
+ 'Description': policy.get('comments'),
+ 'Status': policy.get('status'),
+ 'Source': src_address,
+ 'Destination': dest_address,
+ 'Service': service,
+ 'Action': policy.get('action'),
+ 'Log': policy.get('logtraffic'),
+ 'Security': security_profiles,
+ 'NAT': policy.get('nat')
+ })
+
+ context['Fortigate.Policy(val.ID && val.ID === obj.ID)'] = policy_context
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate policy ID ' + policy_id + ' has been updated successfully.', contents),
+ 'EntryContext': context
+ })
+
+
+def update_policy_request(policy_id, policy_field, policy_field_value, keep_original_data, add_or_remove):
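+    """
+    Update a single field of an existing policy. User-facing field names are
+    mapped to their API keys (e.g. 'description' -> 'comments'); for the source
+    and destination fields the address payload is rebuilt so that existing
+    addresses can be replaced, added to, or removed from.
+    """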
+ uri_suffix = 'cmdb/firewall/policy/' + policy_id
+ if not does_path_exist(uri_suffix):
+ return_error('Requested policy ID ' + policy_id + ' does not exist in Firewall config.')
+
+ field_to_api_key = {
+ 'description': 'comments',
+ 'source': 'srcaddr',
+ 'destination': 'dstaddr',
+ 'log': 'logtraffic'
+ }
+
+ if policy_field in field_to_api_key:
+ policy_field = field_to_api_key[policy_field]
+
+ if policy_field in {'srcaddr', 'dstaddr'}:
+ policy_field_value = generate_src_or_dst_request_data(
+ policy_id, policy_field, policy_field_value, keep_original_data, add_or_remove)
+
+ payload = {
+ 'policyid': int(policy_id),
+ 'q_origin_key': int(policy_id),
+ policy_field: policy_field_value
+ }
+
+ response = http_request('PUT', uri_suffix, {}, json.dumps(payload))
+ return response
+
+
+def create_policy_command():
+ contents = []
+ context = {}
+ policy_context = []
+
+ policy_name = demisto.args().get('policyName')
+ policy_description = demisto.args().get('description', '')
+ policy_srcintf = demisto.args().get('sourceIntf')
+ policy_dstintf = demisto.args().get('dstIntf')
+ policy_source_address = policy_addr_array_from_arg(demisto.args().get('source'))
+ policy_destination_address = policy_addr_array_from_arg(demisto.args().get('destination'))
+ policy_service = demisto.args().get('service')
+ policy_action = demisto.args().get('action')
+ policy_status = demisto.args().get('status')
+ policy_log = demisto.args().get('log')
+ policy_nat = demisto.args().get('nat')
+
+ create_policy_request(policy_name, policy_description, policy_srcintf, policy_dstintf,
+ policy_source_address, policy_destination_address, policy_service,
+ policy_action, policy_status, policy_log, policy_nat)
+ contents.append({
+ 'Name': policy_name,
+ 'Description': policy_description,
+ 'Status': policy_status,
+ 'Service': policy_service,
+ 'Action': policy_action,
+ 'Log': policy_log,
+ 'Source': {
+ 'Interface': policy_srcintf,
+ 'Address': policy_source_address
+ },
+ 'Destination': {
+ 'Interface': policy_dstintf,
+ 'Address': policy_destination_address
+ },
+ 'NAT': policy_nat
+ })
+
+ policy_context.append({
+ 'Name': policy_name,
+ 'Description': policy_description,
+ 'Status': policy_status,
+ 'Service': policy_service,
+ 'Action': policy_action,
+ 'Log': policy_log,
+ 'Source': {
+ 'Interface': policy_srcintf,
+ 'Address': policy_source_address
+ },
+ 'Destination': {
+ 'Interface': policy_dstintf,
+ 'Address': policy_destination_address
+ },
+ 'NAT': policy_nat
+ })
+
+ context['Fortigate.Policy(val.Name && val.Name === obj.Name)'] = policy_context
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate policy ' + policy_name + ' created successfully', contents),
+ 'EntryContext': context
+ })
+
+
+def create_policy_request(policy_name, policy_description, policy_srcintf, policy_dstintf,
+ policy_source_address, policy_destination_address, policy_service,
+ policy_action, policy_status, policy_log, policy_nat):
+
+ uri_suffix = 'cmdb/firewall/policy/'
+
+ payload = {
+ 'json': {
+ 'name': policy_name,
+ 'srcintf': [{'name': policy_srcintf}],
+ 'dstintf': [{'name': policy_dstintf}],
+ 'srcaddr': policy_source_address,
+ 'dstaddr': policy_destination_address,
+ 'action': policy_action,
+ 'status': policy_status,
+ 'schedule': 'always',
+ 'service': [{'name': policy_service}],
+ 'comments': policy_description,
+ 'logtraffic': policy_log,
+ 'nat': policy_nat
+ }
+ }
+
+ response = http_request('POST', uri_suffix, {}, json.dumps(payload))
+ return response
+
+
+def move_policy_command():
+ contents = []
+ context = {}
+ policy_id = demisto.args().get('policyID')
+ position = demisto.args().get('position')
+    neighbor = demisto.args().get('neighbor')
+
+    move_policy_request(policy_id, position, neighbor)
+
+ policy_context = {
+ 'ID': int(policy_id),
+ 'Moved': True
+ }
+ contents.append({
+ 'ID': policy_id,
+ 'Moved': True
+ })
+
+ context['Fortigate.Policy(val.ID && val.ID === obj.ID)'] = policy_context
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate policy with ID ' + policy_id + ' moved successfully', contents),
+ 'EntryContext': context
+ })
+
+
+def move_policy_request(policy_id, position, neighbor):
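+    """Move a policy before or after the given neighbor policy ID using the API's 'move' action."""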
+ uri_suffix = 'cmdb/firewall/policy/' + policy_id
+ params = {
+ 'action': 'move',
+        position: neighbor
+ }
+
+ response = http_request('PUT', uri_suffix, params)
+ return response
+
+
+def delete_policy_command():
+ contents = []
+ context = {}
+ policy_id = demisto.args().get('policyID')
+
+ delete_policy_request(policy_id)
+
+ policy_context = {
+ 'ID': policy_id,
+ 'Deleted': True
+ }
+ contents.append({
+ 'ID': policy_id,
+ 'Deleted': True
+ })
+
+ context['Fortigate.Policy(val.ID && val.ID === obj.ID)'] = policy_context
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate policy with ID ' + policy_id + ' deleted successfully', contents),
+ 'EntryContext': context
+ })
+
+
+def delete_policy_request(policy_id):
+ uri_suffix = 'cmdb/firewall/policy/' + policy_id
+ response = http_request('DELETE', uri_suffix)
+ return response
+
+
+def get_address_groups_command():
+ contents = []
+ context = {}
+ address_groups_context = []
+ address_group_name = demisto.args().get('groupName', '')
+ title = address_group_name if address_group_name else 'all'
+
+ address_groups = get_address_groups_request(address_group_name)
+ for address_group in address_groups:
+ members = address_group.get('member')
+ members_list = []
+ for member in members:
+ members_list.append(member.get('name'))
+ contents.append({
+ 'Name': address_group.get('name'),
+ 'Members': members_list,
+ 'UUID': address_group.get('uuid')
+ })
+ address_groups_context.append({
+ 'Name': address_group.get('name'),
+ 'Member': {
+ 'Name': members_list
+ },
+ 'UUID': address_group.get('uuid')
+ })
+
+ context['Fortigate.AddressGroup(val.Name && val.Name === obj.Name)'] = address_groups_context
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate address groups ' + title, contents),
+ 'EntryContext': context
+ })
+
+
+def get_address_groups_request(address_group_name):
+ uri_suffix = 'cmdb/firewall/addrgrp/' + address_group_name
+ response = http_request('GET', uri_suffix)
+ return response.get('results')
+
+
+def update_address_group_command():
+ contents = []
+ context = {}
+ address_group_context = []
+ group_name = demisto.args().get('groupName', '')
+ address = demisto.args().get('address', '')
+ action = demisto.args().get('action')
+ if action not in ['add', 'remove']:
+ return_error('Action must be add or remove')
+
+ old_address_groups = get_address_groups_request(group_name)
+ address_group_members = [] # type: list
+ new_address_group_members = [] # type: list
+
+ if isinstance(old_address_groups, list):
+ old_address_group = old_address_groups[0]
+ address_group_members = old_address_group.get('member')
+ if action == 'add':
+ address_group_members.append({'name': address})
+ new_address_group_members = address_group_members
+ if action == 'remove':
+ for address_group_member in address_group_members:
+ if address_group_member.get('name') != address:
+ new_address_group_members.append(address_group_member)
+
+ update_address_group_request(group_name, new_address_group_members)
+ address_group = get_address_groups_request(group_name)[0]
+ members = address_group.get('member')
+ members_list = []
+ for member in members:
+ members_list.append(member.get('name'))
+ contents.append({
+ 'Name': address_group.get('name'),
+ 'Members': members_list,
+ 'UUID': address_group.get('uuid')
+ })
+ address_group_context.append({
+ 'Name': address_group.get('name'),
+ 'Address': {
+ 'Name': members_list
+ },
+ 'UUID': address_group.get('uuid')
+ })
+
+ context['Fortigate.AddressGroup(val.Name && val.Name === obj.Name)'] = address_group_context
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate address group ' + group_name + ' updated successfully', contents),
+ 'EntryContext': context
+ })
+
+
+def update_address_group_request(group_name, new_address_group_members):
+ uri_suffix = 'cmdb/firewall/addrgrp/' + group_name
+ # Check whether target object already exists
+ if not does_path_exist(uri_suffix):
+        return_error('Requested address group ' + group_name + ' does not exist in Firewall config.')
+ payload = {
+ 'member': new_address_group_members
+ }
+ result = http_request('PUT', uri_suffix, {}, json.dumps(payload))
+ return result
+
+
+def create_address_group_command():
+ contents = []
+ context = {}
+ address_group_context = []
+ group_name = demisto.args().get('groupName', '')
+ address = demisto.args().get('address', '')
+
+ create_address_group_request(group_name, address)
+
+ contents.append({
+ 'Name': group_name,
+ 'Address': address,
+ })
+ address_group_context.append({
+ 'Name': group_name,
+ 'Address': address
+ })
+
+ context['Fortigate.AddressGroup(val.Name && val.Name === obj.Name)'] = address_group_context
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate address group ' + group_name + ' created successfully', contents),
+ 'EntryContext': context
+ })
+
+
+def create_address_group_request(group_name, address):
+ uri_suffix = 'cmdb/firewall/addrgrp/'
+ if does_path_exist(uri_suffix + group_name):
+ return_error('Address group already exists.')
+ payload = {
+ 'name': group_name, 'member': [{'name': address}]
+ }
+ result = http_request('POST', uri_suffix, {}, json.dumps(payload))
+ return result
+
+
+def delete_address_group_command():
+ contents = []
+ context = {}
+ address_group_context = []
+ name = demisto.args().get('name', '')
+
+ delete_address_group_request(name)
+
+ contents.append({
+ 'Name': name,
+ 'Deleted': True
+ })
+ address_group_context.append({
+ 'Name': name,
+ 'Deleted': True
+ })
+
+ context['Fortigate.AddressGroup(val.Name && val.Name === obj.Name)'] = address_group_context
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('FortiGate address group ' + name + ' deleted successfully', contents),
+ 'EntryContext': context
+ })
+
+
+def delete_address_group_request(name):
+ uri_suffix = 'cmdb/firewall/addrgrp/' + name
+ response = http_request('DELETE', uri_suffix)
+ return response
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('command is %s' % (demisto.command(), ))
+
+try:
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+ demisto.results('ok')
+ elif demisto.command() == 'fortigate-get-addresses':
+ get_addresses_command()
+ elif demisto.command() == 'fortigate-get-service-groups':
+ get_service_groups_command()
+ elif demisto.command() == 'fortigate-update-service-group':
+ update_service_group_command()
+ elif demisto.command() == 'fortigate-delete-service-group':
+ delete_service_group_command()
+ elif demisto.command() == 'fortigate-get-firewall-service':
+ get_firewall_service_command()
+ elif demisto.command() == 'fortigate-create-firewall-service':
+ create_firewall_service_command()
+ elif demisto.command() == 'fortigate-get-policy':
+ get_policy_command()
+ elif demisto.command() == 'fortigate-update-policy':
+ update_policy_command()
+ elif demisto.command() == 'fortigate-create-policy':
+ create_policy_command()
+ elif demisto.command() == 'fortigate-move-policy':
+ move_policy_command()
+ elif demisto.command() == 'fortigate-delete-policy':
+ delete_policy_command()
+ elif demisto.command() == 'fortigate-get-address-groups':
+ get_address_groups_command()
+ elif demisto.command() == 'fortigate-update-address-group':
+ update_address_group_command()
+ elif demisto.command() == 'fortigate-create-address-group':
+ create_address_group_command()
+ elif demisto.command() == 'fortigate-delete-address-group':
+ delete_address_group_command()
+ elif demisto.command() == 'fortigate-ban-ip':
+ ban_ip_command()
+ elif demisto.command() == 'fortigate-unban-ip':
+ unban_ip_command()
+ elif demisto.command() == 'fortigate-get-banned-ips':
+ get_banned_ips_command()
+
+except Exception, e:
+    LOG(str(e))
+ LOG.print_log()
+ raise
+
+finally:
+ logout(SESSION)
diff --git a/Integrations/FortiGate/FortiGate.yml b/Integrations/FortiGate/FortiGate.yml
new file mode 100644
index 000000000000..1a726a1992d9
--- /dev/null
+++ b/Integrations/FortiGate/FortiGate.yml
@@ -0,0 +1,476 @@
+commonfields:
+ id: FortiGate
+ version: -1
+name: FortiGate
+display: FortiGate
+category: Network Security
+description: Manage FortiGate Firewall
+configuration:
+- display: Server URL (e.g. https://192.168.0.1)
+ name: server
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Account username
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: true
+- display: Trust any certificate (insecure)
+ name: unsecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: "false"
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ commands:
+ - name: fortigate-get-addresses
+ arguments:
+ - name: address
+    description: VDOM to query for address objects (use * for all VDOMs)
+ defaultValue: root
+ - name: name
+ description: Filter by address name
+ outputs:
+ - contextPath: Fortigate.Address.Name
+ description: Address name
+ type: string
+ - contextPath: Fortigate.Address.Subnet
+ description: Address subnet
+ type: string
+ - contextPath: Fortigate.Address.StartIP
+ description: Address object start IP address
+ type: string
+ - contextPath: Fortigate.Address.EndIP
+ description: Address object end IP address
+ type: string
+ description: Get all address objects from your firewall
+ - name: fortigate-get-service-groups
+ arguments:
+ - name: name
+ description: Filter by group name
+ outputs:
+ - contextPath: Fortigate.ServiceGroup.Name
+ description: Service Group name
+ type: string
+ - contextPath: Fortigate.ServiceGroup.Member.Name
+ description: Service Group member name
+ type: string
+ description: Get information about FortiGate service groups
+ - name: fortigate-update-service-group
+ arguments:
+ - name: groupName
+ required: true
+ description: Group name to update
+ - name: serviceName
+ required: true
+    description: Service name to add to or remove from the group
+ - name: action
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - add
+ - remove
+ description: Action to take on the service
+ outputs:
+ - contextPath: Fortigate.ServiceGroup.Name
+ description: Service group name
+ type: string
+ - contextPath: Fortigate.ServiceGroup.Service.Name
+ description: Service name
+ type: string
+ description: Update a FortiGate service group
+ - name: fortigate-delete-service-group
+ arguments:
+ - name: groupName
+ required: true
+    description: Name of the service group to delete
+ outputs:
+ - contextPath: Fortigate.ServiceGroup.Name
+ description: Service group name
+ type: string
+ - contextPath: Fortigate.ServiceGroup.Deleted
+    description: Whether the service group was deleted
+ type: boolean
+ description: Delete a service group from FortiGate
+ - name: fortigate-get-firewall-service
+ arguments:
+ - name: serviceName
+ description: Service name
+ outputs:
+ - contextPath: Fortigate.Service.Name
+ description: Service name
+ type: string
+ - contextPath: Fortigate.Service.Ports.TCP
+ description: TCP Port range included for the service
+ type: string
+ - contextPath: Fortigate.Service.Ports.UDP
+ description: UDP Port range included for the service
+ type: string
+ description: Get information about a service from FortiGate Firewall
+ - name: fortigate-create-firewall-service
+ arguments:
+ - name: serviceName
+ required: true
+ description: Service name
+ - name: tcpRange
+ description: 'TCP Port range for the service. Example: 100-120 or simply a single
+ port.'
+ - name: udpRange
+ description: 'UDP Port range for the service. Example: 100-120, or simply a
+ single port.'
+ outputs:
+ - contextPath: Fortigate.Service.Name
+ description: Service name
+ type: string
+ - contextPath: Fortigate.Service.Ports.TCP
+ description: TCP Port range included for the service
+ type: string
+ - contextPath: Fortigate.Service.Ports.UDP
+ description: UDP Port range included for the service
+ type: string
+ description: Create a service in FortiGate firewall
+ - name: fortigate-get-policy
+ arguments:
+ - name: policyName
+ description: Policy name
+ - name: policyID
+ description: Policy ID
+ outputs:
+ - contextPath: Fortigate.Policy.Name
+ description: Policy name
+ type: string
+ - contextPath: Fortigate.Policy.ID
+ description: Policy ID
+ type: number
+ - contextPath: Fortigate.Policy.Description
+ description: Policy description
+ type: string
+ - contextPath: Fortigate.Policy.Status
+ description: The status of the policy (Enabled or Disabled)
+ type: string
+ - contextPath: Fortigate.Policy.Source
+ description: Source address
+ type: string
+ - contextPath: Fortigate.Policy.Destination
+ description: Destination address
+ type: string
+ - contextPath: Fortigate.Policy.Service
+ description: Service for the policy (for example HTTP)
+ type: string
+ - contextPath: Fortigate.Policy.Action
+ description: Policy action (Allow, Block)
+ type: string
+ - contextPath: Fortigate.Policy.Log
+    description: Whether the policy logs the traffic
+ type: string
+ - contextPath: Fortigate.Policy.Security
+ description: Policy attached security profile
+ type: string
+ description: Get information about a firewall policy on FortiGate
+ - name: fortigate-update-policy
+ arguments:
+ - name: policyID
+ required: true
+ description: Policy ID
+ - name: field
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - description
+ - status
+ - source
+ - destination
+ - service
+ - action
+ - log
+ - nat
+ description: Field parameter to update
+ - name: value
+ required: true
+ description: Value of field parameter to update
+ - name: keep_original_data
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+    description: Whether to keep the original data. Relevant only when the updated
+      field is "source" or "destination". If 'true', the current data is not
+      replaced; instead, the supplied addresses are added to or removed from the
+      existing data.
+ - name: add_or_remove
+ auto: PREDEFINED
+ predefined:
+ - add
+ - remove
+    description: Whether to add or remove the supplied addresses from the existing
+      data. Relevant only when the field to update is "source" or "destination"
+      and keep_original_data is 'true'.
+ outputs:
+ - contextPath: Fortigate.Policy.Name
+ description: Policy name
+ type: string
+ - contextPath: Fortigate.Policy.ID
+ description: Policy ID
+ type: number
+ - contextPath: Fortigate.Policy.Description
+ description: Policy description
+ type: string
+ - contextPath: Fortigate.Policy.Status
+ description: The status of the policy (Enabled or Disabled)
+ type: string
+ - contextPath: Fortigate.Policy.Source
+ description: Source address
+ type: string
+ - contextPath: Fortigate.Policy.Destination
+ description: Destination address
+ type: string
+ - contextPath: Fortigate.Policy.Service
+ description: Service for the policy (for example HTTP)
+ type: string
+ - contextPath: Fortigate.Policy.Action
+ description: Policy action (Allow, Block)
+ type: string
+ - contextPath: Fortigate.Policy.Log
+    description: Whether the policy logs the traffic
+ type: boolean
+ description: Update a firewall policy on FortiGate
+ execution: true
+ - name: fortigate-create-policy
+ arguments:
+ - name: policyName
+ required: true
+ description: Policy name
+ - name: description
+ description: Description for the policy
+  - name: sourceIntf
+    required: true
+    description: 'Source interface (examples: port1, port2, port3)'
+  - name: dstIntf
+    required: true
+    description: 'Destination interface (examples: port1, port2, port3)'
+  - name: source
+    required: true
+    description: 'Source IP address, range, or domain (examples: all, update.microsoft.com)'
+  - name: destination
+    required: true
+    description: 'Destination IP address, range, or domain (examples: all, update.microsoft.com)'
+ - name: service
+ required: true
+ description: Service for the policy (for example HTTP)
+ - name: action
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - accept
+ - block
+ description: Action to take
+ - name: status
+ auto: PREDEFINED
+ predefined:
+ - enable
+ - disable
+    description: Policy status
+ defaultValue: enable
+ - name: log
+ auto: PREDEFINED
+ predefined:
+ - all
+ - utm
+ - disable
+    description: Whether the policy logs the traffic
+    defaultValue: all
+ - name: nat
+ auto: PREDEFINED
+ predefined:
+ - enable
+ - disable
+    description: Enable or disable NAT
+ defaultValue: enable
+ outputs:
+ - contextPath: Fortigate.Policy.Name
+ description: Policy name
+ type: string
+ - contextPath: Fortigate.Policy.Description
+ description: Policy description
+ type: string
+ - contextPath: Fortigate.Policy.Status
+ description: The status of the policy (Enabled or Disabled)
+ type: string
+ - contextPath: Fortigate.Policy.Source.Address
+ description: Source address
+ type: string
+ - contextPath: Fortigate.Policy.Destination.Address
+ description: Destination address
+ type: string
+ - contextPath: Fortigate.Policy.Service
+ description: Service for the policy (for example HTTP)
+ type: string
+ - contextPath: Fortigate.Policy.Action
+ description: Policy action (Allow, Block)
+ type: string
+ - contextPath: Fortigate.Policy.Log
+    description: Whether the policy logs the traffic
+ type: boolean
+ - contextPath: Fortigate.Policy.Source.Intf
+ description: Source interface
+ type: string
+ - contextPath: Fortigate.Policy.Destination.Intf
+ description: Destination interface
+ type: string
+ - contextPath: Fortigate.Policy.NAT
+ description: Policy NAT
+ type: string
+ description: Create a firewall policy (rule) on FortiGate
+ execution: true
+ - name: fortigate-move-policy
+ arguments:
+ - name: policyID
+ required: true
+ description: Policy ID
+ - name: position
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - before
+ - after
+ description: Position for the policy (before or after)
+ - name: neighbor
+ required: true
+ description: The ID of the policy being used as a positional anchor
+ outputs:
+ - contextPath: Fortigate.Policy.ID
+ description: Policy ID
+ type: number
+ - contextPath: Fortigate.Policy.Moved
+    description: Whether the policy was moved successfully
+ type: boolean
+ description: Move a firewall policy rule to a different position
+ - name: fortigate-delete-policy
+ arguments:
+ - name: policyID
+ required: true
+ description: Policy ID
+ outputs:
+ - contextPath: Fortigate.Policy.ID
+ description: Policy ID
+ type: number
+ - contextPath: Fortigate.Policy.Deleted
+    description: Whether the policy was deleted successfully
+ type: boolean
+ description: Delete a policy from FortiGate firewall
+ execution: true
+ - name: fortigate-get-address-groups
+ arguments:
+ - name: groupName
+ description: Filter by group name
+ outputs:
+ - contextPath: Fortigate.AddressGroup.Name
+ description: Address Group name
+ type: string
+ - contextPath: Fortigate.AddressGroup.Member.Name
+ description: Address Group member name
+ type: string
+ - contextPath: Fortigate.AddressGroup.UUID
+ description: Address Group UUID
+ type: string
+ description: Get information about address groups from FortiGate
+ - name: fortigate-update-address-group
+ arguments:
+ - name: groupName
+ required: true
+ description: Group name
+ - name: address
+ required: true
+    description: An address to add to or remove from the group
+ - name: action
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - add
+ - remove
+    description: Action to take on the address
+ outputs:
+ - contextPath: Fortigate.AddressGroup.Name
+ description: Address group name
+ type: string
+ - contextPath: Fortigate.AddressGroup.Address.Name
+ description: Address name
+ type: string
+ - contextPath: Fortigate.AddressGroup.UUID
+ description: Address group UUID
+ description: Update an address group on FortiGate firewall
+ - name: fortigate-create-address-group
+ arguments:
+ - name: groupName
+ required: true
+ description: Group name
+ - name: address
+ required: true
+ description: Address member to add to the group
+ outputs:
+ - contextPath: Fortigate.AddressGroup.Name
+ description: Address group name
+ type: string
+ - contextPath: Fortigate.AddressGroup.Address
+ description: Address group member address
+ type: string
+ description: Create an address group in FortiGate firewall
+ - name: fortigate-delete-address-group
+ arguments:
+ - name: name
+ required: true
+ description: Address group name
+ outputs:
+ - contextPath: Fortigate.AddressGroup.Name
+ description: Address group name
+ type: string
+ - contextPath: Fortigate.AddressGroup.Deleted
+    description: Whether the address group was deleted
+ type: boolean
+ description: Delete an address group from FortiGate firewall
+ - name: fortigate-ban-ip
+ arguments:
+ - name: ip_address
+ required: true
+ description: CSV list of IP addresses to ban. IPv4 and IPv6 addresses
+ are supported. For example, "1.1.1.1,6.7.8.9".
+ isArray: true
+ - name: expiry
+ description: Time until ban expires in seconds. 0 for indefinite ban.
+ description: Adds IP addresses to the banned list.
+ - name: fortigate-unban-ip
+ arguments:
+ - name: ip_address
+ required: true
+ description: CSV list of banned user IP addresses to clear. IPv4 and IPv6
+ addresses are supported. For example, "1.1.1.1,6.7.8.9".
+ isArray: true
+ description: Clears a list of specific banned IP addresses.
+ - name: fortigate-get-banned-ips
+ arguments: []
+ outputs:
+ - contextPath: Fortigate.BannedIP.IP
+ description: The IP address.
+ type: string
+ - contextPath: Fortigate.BannedIP.Created
+ description: Date/time the IP address was added to the banned list.
+ type: string
+ - contextPath: Fortigate.BannedIP.Expires
+ description: Date/time the IP address expires from the banned list.
+ type: string
+ - contextPath: Fortigate.BannedIP.Source
+ description: Source of the ban.
+ type: string
+ description: Returns a list of banned IP addresses.
+ runonce: false
+tests:
+- Fortigate Test
diff --git a/Integrations/FortiGate/FortiGate_image.png b/Integrations/FortiGate/FortiGate_image.png
new file mode 100644
index 000000000000..5b8b6a33e22f
Binary files /dev/null and b/Integrations/FortiGate/FortiGate_image.png differ
diff --git a/Integrations/FortiGate/Pipfile b/Integrations/FortiGate/Pipfile
new file mode 100644
index 000000000000..66ad1243db8b
--- /dev/null
+++ b/Integrations/FortiGate/Pipfile
@@ -0,0 +1,22 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+
+[packages]
+certifi = "==2017.11.5"
+chardet = "==3.0.4"
+idna = "==2.6"
+olefile = "==0.44"
+requests = "==2.18.4"
+urllib3 = "==1.22"
+PyYAML = "==3.12"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/FortiGate/Pipfile.lock b/Integrations/FortiGate/Pipfile.lock
new file mode 100644
index 000000000000..93b3016a010a
--- /dev/null
+++ b/Integrations/FortiGate/Pipfile.lock
@@ -0,0 +1,376 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "dbc7e9dc0a5be3767de3b107d1afe7a3e3b6c57f7cb8a820195e76b8ee681d40"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694",
+ "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
+ ],
+ "index": "pypi",
+ "version": "==2017.11.5"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "olefile": {
+ "hashes": [
+ "sha256:61f2ca0cd0aa77279eb943c07f607438edf374096b66332fae1ee64a6f0f73ad"
+ ],
+ "index": "pypi",
+ "version": "==0.44"
+ },
+ "pyyaml": {
+ "hashes": [
+ "sha256:16b20e970597e051997d90dc2cddc713a2876c47e3d92d59ee198700c5427736",
+ "sha256:3262c96a1ca437e7e4763e2843746588a965426550f3797a79fca9c6199c431f",
+ "sha256:592766c6303207a20efc445587778322d7f73b161bd994f227adaa341ba212ab",
+ "sha256:5ac82e411044fb129bae5cfbeb3ba626acb2af31a8d17d175004b70862a741a7",
+ "sha256:827dc04b8fa7d07c44de11fabbc888e627fa8293b695e0f99cb544fdfa1bf0d1",
+ "sha256:bc6bced57f826ca7cb5125a10b23fd0f2fff3b7c4701d64c439a300ce665fff8",
+ "sha256:c01b880ec30b5a6e6aa67b09a2fe3fb30473008c85cd6a67359a1b15ed6d83a4",
+ "sha256:e863072cdf4c72eebf179342c94e6989c67185842d9997960b3e69290b2fa269"
+ ],
+ "index": "pypi",
+ "version": "==3.12"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694",
+ "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
+ ],
+ "index": "pypi",
+ "version": "==2017.11.5"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==3.7.4"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.3'",
+ "version": "==1.0.2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16",
+ "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.3.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7",
+ "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db"
+ ],
+ "version": "==0.18"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
+ "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
+ ],
+ "version": "==19.0"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version == '3.4.*' or python_version < '3'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42",
+ "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300"
+ ],
+ "index": "pypi",
+ "version": "==1.9.5"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:43c5486cefefa536c9aab528881c992328f020eefe4f6d06332449c365218580",
+ "sha256:d6c5ffe9d0305b9b977f7a642d36b9370954d1da7ada4c62393382cbadad4265"
+ ],
+ "version": "==2.4.1.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae",
+ "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6"
+ ],
+ "index": "pypi",
+ "version": "==4.6.4"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:0a04d9fa9a03b8d3371d6b6d6341f116e5b7b7127b4db1a71619b13b37529e23",
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/FortiSIEM/FortiSIEM.py b/Integrations/FortiSIEM/FortiSIEM.py
new file mode 100644
index 000000000000..b4099ecdadbc
--- /dev/null
+++ b/Integrations/FortiSIEM/FortiSIEM.py
@@ -0,0 +1,711 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import requests
+import json
+import time
+import re
+from xml.dom.minidom import Node, Document, parseString
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+USERNAME = demisto.params()['credentials']['identifier']
+PASSWORD = demisto.params()['credentials']['password']
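+# FortiSIEM REST authentication expects '<organization>/<username>'; 'super' is
+# the built-in super-global organization.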
+AUTH = ('super/' + USERNAME, PASSWORD)
+VERIFY_SSL = not demisto.params().get('unsecure', False)
+HOST = demisto.params()['host']
+QUERY_URL = HOST + "/phoenix/rest/query/"
+REST_ADDRESS = HOST + "/phoenix/rest/h5"
+
+EXTENDED_KEYS = {} # type: dict
+
+
+def load_extended_keys():
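+    """
+    Populate the EXTENDED_KEYS attributeId -> displayName mapping, preferring the
+    cached copy (the last run for fetch-incidents, the integration context
+    otherwise) and falling back to the /eventAttributeType/all endpoint.
+    """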
+ global EXTENDED_KEYS
+ if demisto.command() == 'fetch-incidents':
+ last_run = demisto.getLastRun()
+ EXTENDED_KEYS = last_run.get('extended_keys', {})
+ else:
+ integration_context = demisto.getIntegrationContext()
+ EXTENDED_KEYS = integration_context.get('extended_keys', {})
+
+ if not EXTENDED_KEYS:
+ session = login()
+ url = REST_ADDRESS + '/eventAttributeType/all'
+ response = session.get(url, verify=VERIFY_SSL, auth=AUTH)
+ EXTENDED_KEYS = dict((attr['attributeId'], attr['displayName']) for attr in response.json())
+
+ if demisto.command() != 'fetch-incidents':
+ demisto.setIntegrationContext({'extended_keys': EXTENDED_KEYS})
+
+
+def parse_resource_type(resource_type):
+ type_to_url_path = {
+ 'Reports': 'report',
+ 'Rules': 'rule',
+ 'Networks': 'resource/network',
+ 'Watch Lists': 'rule/wl',
+ 'Protocols': 'resource/port',
+ 'Event Type': 'eventType',
+ 'Malware IP': 'mal/ip',
+ 'Malware Domains': 'mal/site',
+ 'Malware Urls': 'mal/url',
+ 'Malware Hash': 'mal/hash',
+ 'Malware Processes': 'mal/proc',
+ 'Country Groups': 'resource/geo',
+ 'Default Password': 'mal/pwd',
+ 'Anonymity Network': 'mal/proxy',
+ 'User Agents': 'mal/agent',
+ 'Remediations': 'remediation',
+ }
+ return type_to_url_path.get(resource_type, resource_type)
+
+
+@logger
+def validateSuccessfulResponse(resp, error_text):
+ if resp.status_code != 200:
+ return_error('Got response status {} when {}'.format(resp.status_code, error_text))
+
+
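+# The h5 endpoints require a browser-style session: fetch the JSF login page,
+# scrape its javax.faces.ViewState token, and replay it in the login form POST.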
+@logger
+def login():
+ session = requests.session()
+ login_url = HOST + '/phoenix/login-html.jsf'
+
+ response = session.get(login_url, verify=VERIFY_SSL)
+
+    # Extract the javax.faces.ViewState token from the HTML of the login page.
+ p = re.compile('(value=".{1046}==")')
+ viewState = p.findall(response.text.encode('utf-8'))
+ VIEW_STATE = viewState[0][len('value="'):][:-1]
+
+ headers = {
+ 'Upgrade-Insecure-Requests': '1',
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
+ 'Accept-Encoding': 'gzip, deflate, br',
+ 'Accept-Language': 'en-US,en;q=0.9,he;q=0.8'
+ }
+ data = {
+ 'loginHtml': 'loginHtml',
+ 'loginHtml:username': USERNAME,
+ 'loginHtml:password': PASSWORD,
+ 'loginHtml:userDomain': 'Empty',
+ 'loginHtml:loginBtn': 'Log In',
+ 'javax.faces.ViewState': VIEW_STATE
+ }
+
+ response = session.post(login_url, headers=headers, data=data, verify=VERIFY_SSL) # type: ignore
+ return session
+
+
+def clear_incident_command():
+ args = demisto.args()
+ incident_id = args['incident_id']
+ reason = args.get('close_reason', '')
+
+ raw_response = clear_incident(incident_id, reason)
+ return_outputs("Incident cleared successfully.", {}, raw_response)
+
+
+@logger
+def clear_incident(incident_id, reason):
+ session = login()
+ headers = {
+ 'Accept': 'application/json, text/plain, */*',
+ 'Content-Type': 'application/json'
+ }
+ response = session.put(
+ HOST + '/phoenix/rest/h5/incident/clear',
+ params={'ids': [incident_id], 'user': USERNAME},
+ headers=headers,
+ data=reason,
+ verify=VERIFY_SSL)
+    validateSuccessfulResponse(response, "clearing incident")
+
+ return response.text
+
+
+@logger
+def getEventsByIncident(incident_id, max_results, extended_data, max_wait_time):
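+    # /report/triggerEvent returns the query definition behind the incident's
+    # triggering events; that query is then re-run through getEventsByQuery.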
+ session = login()
+    response = session.get(HOST + '/phoenix/rest/h5/report/triggerEvent?rawMsg=' + incident_id, verify=VERIFY_SSL)
+ validateSuccessfulResponse(response, "triggering events report")
+
+ try:
+ jsonRes = response.json()
+ queryData = jsonRes[0]['right']
+ except (ValueError, KeyError):
+ return_error("Got wrong response format when triggering events report. "
+ "Expected a json array but got:\n" + response.text)
+
+ return getEventsByQuery(session, queryData, max_results, extended_data, max_wait_time,
+ "FortiSIEM events for Incident " + incident_id, incident_id=incident_id)
+
+
+@logger
+def getEventsByQuery(session, queryData, max_results, extended_data, max_wait_time, tableTitle, incident_id=None):
+ headers = {
+ 'Accept': 'application/json, text/plain, */*',
+ 'Content-Type': 'application/json'
+ }
+
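+    # Three-step report API: submit the query, poll reportProgress until it
+    # returns "100" (percent complete), then page results via resultByReport.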
+ response = session.post(REST_ADDRESS + '/report/run', headers=headers, data=json.dumps(queryData),
+ verify=VERIFY_SSL)
+ validateSuccessfulResponse(response, "running report")
+
+ data = response.json()
+ data["report"] = queryData
+ data = json.dumps(data)
+
+ # poll until report progress reaches 100
+ response = session.post(REST_ADDRESS + '/report/reportProgress', headers=headers, data=data, verify=VERIFY_SSL)
+
+    # the response body contains the report's completion percentage
+ while response.text != "100" and max_wait_time > 0:
+ response = session.post(REST_ADDRESS + '/report/reportProgress', headers=headers, data=data, verify=VERIFY_SSL)
+ max_wait_time = int(max_wait_time) - 1
+ time.sleep(1)
+
+ params = {
+ 'start': 0,
+ 'perPage': max_results,
+ 'allData': extended_data,
+ }
+
+ response = session.post(REST_ADDRESS + '/report/resultByReport', params=params, headers=headers, data=data,
+ verify=VERIFY_SSL)
+
+ try:
+ res = response.json()
+ eventKeys = res["headerData"]["columnNames"]
+ except (ValueError, KeyError):
+ return_error("Got wrong response format when getting report results. "
+ "Expected a json object but got:\n" + response.text)
+
+ # reformat results
+ eventData = []
+ md = ""
+ for key in res["lightValueObjects"]:
+ cur = {
+ 'Event ID': key.get("naturalId", ""),
+ 'Incident ID': incident_id,
+ }
+ for i in range(0, len(eventKeys)):
+ if len(key["data"]) == 0 or key["data"][0] == "No report results found.":
+ md = "No report results found."
+ break
+ else:
+ cur[eventKeys[i]] = key["data"][i]
+ if md != "":
+            # no results were found, no need to keep looping
+ break
+ cur["ExtendedData"] = {}
+ for extItem in key["extData"]:
+ if EXTENDED_KEYS.get(extItem["left"]) is not None:
+ cur[EXTENDED_KEYS.get(extItem["left"]).replace(' ', '')] = extItem["right"] # type: ignore
+ else:
+ cur["ExtendedData"][extItem["left"]] = extItem["right"]
+ eventData.append(cur)
+ md = tableToMarkdown(tableTitle, eventData, eventKeys) if md == "" else md
+
+ demisto.results({
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': {'FortiSIEM.Events(val["Event ID"] && val["Event ID"] == obj["Event ID"])': eventData}
+ })
+
+
+@logger
+def GetEventQuery():
+ in_xml = create_query_xml("all", interval='1')
+ url = QUERY_URL + "eventQuery"
+ headers = {'Content-Type': 'text/xml'}
+ resp = requests.request('POST', url, headers=headers, data=in_xml, verify=VERIFY_SSL, auth=AUTH)
+ validateSuccessfulResponse(resp, "fetching event query")
+ queryId = resp.text
+ if 'error code="255"' in queryId:
+ return_error("Got error code 255 while getting event query. Make sure the query has valid syntax")
+
+ return queryId
+
+
+@logger
+def GetIncidentsByOrg(queryId):
+ # The request will poll until the server completes the query.
+ url = QUERY_URL + "progress/" + queryId
+ resp = requests.request('GET', url, verify=VERIFY_SSL, auth=AUTH)
+
+    while resp.text != '100':
+        # brief pause between polls to avoid hammering the API
+        time.sleep(1)
+        resp = requests.request('GET', url, verify=VERIFY_SSL, auth=AUTH)
+
+ outXML = []
+ if resp.text == '100':
+ url = QUERY_URL + 'events/' + queryId + '/0/1000'
+ resp = requests.request('GET', url, verify=VERIFY_SSL, auth=AUTH)
+ content = resp.text
+ if content != '':
+ outXML.append(content)
+
+            # This paging logic is taken directly from the FortiSIEM documentation:
+            # fetch all results (the last "page" has fewer than 1000 records).
+ p = re.compile(r'totalCount="\d+"')
+ mlist = p.findall(content)
+ if mlist and mlist[0] != '':
+ mm = mlist[0].replace('"', '')
+ m = mm.split("=")[-1]
+ num = 0
+ if int(m) > 1000:
+                    num = int(m) // 1000
+ if int(m) % 1000 > 0:
+ num += 1
+ if num > 0:
+ for i in range(num):
+ url = QUERY_URL + 'events/' + queryId + '/' + str(i * 1000 + 1) + '/1000'
+ resp = requests.request('GET', url, verify=VERIFY_SSL, auth=AUTH)
+ content = resp.text
+ if content != '':
+ outXML.append(content)
+ else:
+ sys.exit(0)
+ phCustId = "all"
+ param = dumpXML(outXML, phCustId)
+ return param
+
+
+@logger
+def create_query_xml(include_value, interval="", single_evt_value="phEventCategory=1", interval_type="Minute",
+ attr_list=None, limit="All"):
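+    # Builds the <Reports><Report> XML payload that the
+    # /phoenix/rest/query/eventQuery endpoint expects.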
+ doc = Document()
+ reports = doc.createElement("Reports")
+ doc.appendChild(reports)
+ report = doc.createElement("Report")
+ report.setAttribute("id", "")
+ report.setAttribute("group", "report")
+ reports.appendChild(report)
+ name = doc.createElement("Name")
+ report.appendChild(name)
+    name.appendChild(doc.createTextNode("All Incidents"))
+ custScope = doc.createElement("CustomerScope")
+ custScope.setAttribute("groupByEachCustomer", "true")
+ report.appendChild(custScope)
+ include = doc.createElement("Include")
+ if include_value == "all":
+ include.setAttribute("all", "true")
+ custScope.appendChild(include)
+ else:
+ custScope.appendChild(include)
+ include_text = doc.createTextNode(include_value)
+ include.appendChild(include_text)
+ exclude = doc.createElement("Exclude")
+ custScope.appendChild(exclude)
+ description = doc.createElement("description")
+ report.appendChild(description)
+ select = doc.createElement("SelectClause")
+ select.setAttribute("numEntries", limit)
+ report.appendChild(select)
+ attrList = doc.createElement("AttrList")
+ if attr_list:
+ attr_text = doc.createTextNode(str(attr_list))
+ attrList.appendChild(attr_text)
+ select.appendChild(attrList)
+ reportInterval = doc.createElement("ReportInterval")
+ report.appendChild(reportInterval)
+ window = doc.createElement("Window")
+ window.setAttribute("unit", interval_type)
+ window.setAttribute("val", interval)
+ reportInterval.appendChild(window)
+ pattern = doc.createElement("PatternClause")
+ pattern.setAttribute("window", "3600")
+ report.appendChild(pattern)
+ subPattern = doc.createElement("SubPattern")
+ subPattern.setAttribute("displayName", "Events")
+ subPattern.setAttribute("name", "Events")
+ pattern.appendChild(subPattern)
+ single = doc.createElement("SingleEvtConstr")
+ subPattern.appendChild(single)
+ single_text = doc.createTextNode(single_evt_value)
+ single.appendChild(single_text)
+ _filter = doc.createElement("RelevantFilterAttr")
+ report.appendChild(_filter)
+ return doc.toxml()
+
+
+@logger
+def dumpXML(xmlList, phCustId):
+ param = []
+ for xml in xmlList:
+ doc = parseString(xml.encode('utf-8'))
+ for node in doc.getElementsByTagName("events"):
+ for node1 in node.getElementsByTagName("event"):
+ mapping = {}
+ for node2 in node1.getElementsByTagName("attributes"):
+ for node3 in node2.getElementsByTagName("attribute"):
+ item_name = node3.getAttribute("name")
+ for node4 in node3.childNodes:
+ if node4.nodeType == Node.TEXT_NODE:
+ mapping[item_name] = node4.data
+                if phCustId == "all" or mapping.get('phCustId') == phCustId:
+ param.append(mapping)
+ return param
+
+
+@logger
+def buildQueryString(args):
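+    # Quote string-valued attributes; IP-address attributes (keys containing
+    # 'IpAddr') are left unquoted, per the query syntax the API expects.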
+ res_list = []
+ for key in args:
+ if 'IpAddr' not in key:
+ res_list.append('{} = "{}"'.format(key, args[key]))
+ else:
+ res_list.append("{} = {}".format(key, args[key]))
+ return " AND ".join(res_list)
+
+
+@logger
+def getEventsByFilter(maxResults, extendedData, maxWaitTime, reportWindow, reportWindowUnit):
+ session = login()
+
+ args = demisto.args()
+ del args["maxResults"]
+ del args["extendedData"]
+ del args["maxWaitTime"]
+ del args["reportWindow"]
+ del args["reportWindowUnit"]
+
+ query_string = buildQueryString(args)
+ query_data = {
+ "isReportService": True,
+ "selectClause": "phRecvTime,reptDevIpAddr,eventType,eventName,rawEventMsg,destIpAddr",
+ "reportWindow": int(reportWindow),
+ "reportWindowUnit": reportWindowUnit,
+ "timeRangeRelative": True,
+ "eventFilters": [{
+ "groupBy": "",
+ "singleConstraint": query_string
+ }],
+ "custId": 1
+ }
+ return getEventsByQuery(
+ session,
+ query_data,
+ maxResults,
+ extendedData,
+ maxWaitTime,
+ "FortiSIEM Event Results")
+
+
+def parse_cmdb_list(cmdb_device):
+ device_dict = {
+ 'DiscoverMethod': cmdb_device.get('discoverMethod', 'N/A'),
+ 'Approved': cmdb_device.get('approved', 'false'),
+ 'CreationMethod': cmdb_device.get('creationMethod', 'N/A'),
+ 'AccessIp': cmdb_device.get('accessIp', 'N/A'),
+ 'Name': cmdb_device.get('name', 'N/A'),
+ 'WinMachineGuid': cmdb_device.get('winMachineGuid', 'N/A'),
+ 'Unmanaged': cmdb_device.get('unmanaged', 'false'),
+ 'Version': cmdb_device.get('version', 'N/A'),
+ 'UpdateMethod': cmdb_device.get('updateMethod', 'N/A'),
+ }
+ timestamp = cmdb_device.get('discoverTime', None)
+
+ if timestamp and timestamp.isdigit():
+ device_dict['DiscoverTime'] = timestamp_to_datestring(timestamp)
+ elif timestamp:
+ device_dict['DiscoverTime'] = timestamp
+ else:
+ device_dict['DiscoverTime'] = 'N/A'
+
+ device_type = cmdb_device.get('deviceType')
+ if device_type:
+ device_dict['DeviceType'] = "{} {}".format(device_type['model'], device_type['vendor'])
+ else:
+ device_dict['DeviceType'] = 'N/A'
+
+ return device_dict
+
+
+def get_cmdb_devices_command():
+ args = demisto.args()
+ device_ip = args.get('device_ip')
+ limit = int(args.get('limit'))
+
+ raw_response = get_cmdb_devices(device_ip, limit)
+ list_of_devices = list(map(parse_cmdb_list, raw_response))
+
+ return_outputs(
+ tableToMarkdown("Devices", list_of_devices),
+ {'FortiSIEM.CmdbDevices': list_of_devices},
+ raw_response
+ )
+
+
+@logger
+def get_cmdb_devices(device_ip=None, limit=100):
+ cmdb_url = HOST + "/phoenix/rest/cmdbDeviceInfo/devices"
+
+ if device_ip:
+ cmdb_url += "?includeIps=" + device_ip
+
+ response = requests.get(cmdb_url, verify=VERIFY_SSL, auth=AUTH)
+ list_of_devices = json.loads(xml2json(response.text))
+
+ if 'response' in list_of_devices:
+ return_error(list_of_devices["response"]["error"]["description"])
+ elif 'devices' in list_of_devices:
+ list_of_devices = list_of_devices['devices']['device']
+ elif 'device' in list_of_devices:
+ list_of_devices = [list_of_devices['device']]
+
+ return list_of_devices[:limit]
+
+
+@logger
+def get_events_by_query(query, report_window="60", interval_type="Minute", limit="20", extended_data='false',
+ max_wait_time=60):
+ session = login()
+
+ query_data = {
+ "isReportService": True,
+ "selectClause": "phRecvTime,reptDevIpAddr,eventType,eventName,rawEventMsg,destIpAddr",
+ "reportWindow": int(report_window),
+ "reportWindowUnit": interval_type,
+ "timeRangeRelative": True,
+ "eventFilters": [{
+ "groupBy": "",
+ "singleConstraint": query
+ }],
+ "custId": 1
+ }
+ return getEventsByQuery(
+ session,
+ query_data,
+ limit,
+ extended_data,
+ max_wait_time,
+ "FortiSIEM Event Results")
+
+
+def get_lists_command():
+ raw_resources = get_lists()
+
+ resources = []
+ for r in flatten_resources(raw_resources):
+ resources.append({
+ 'DisplayName': r['displayName'],
+            'NaturalID': r['naturalId'],
+ 'ID': r['id'],
+ 'ResourceType': r['groupType']['displayName'],
+ 'Children': [c['displayName'] for c in r['children']],
+ })
+
+ return_outputs(
+ tableToMarkdown('Lists:', resources, removeNull=True),
+ {'FortiSIEM.ResourceList(val.ID && val.ID == obj.ID)': resources},
+ raw_response=raw_resources)
+
+
+@logger
+def get_lists():
+ session = login()
+ url = REST_ADDRESS + '/group/resource'
+ response = session.get(url, verify=VERIFY_SSL, auth=AUTH)
+
+ return response.json()
+
+
+def flatten_resources(raw_resources):
+ for r in raw_resources:
+ yield r
+        # recursion here can overflow the stack on deeply nested resource trees
+ for sub_resource in flatten_resources(r['children']):
+ yield sub_resource
+
+
+def add_item_to_resource_list_command():
+ args = demisto.args()
+ resource_type = parse_resource_type(args['resource_type'])
+ group_id = args['group_id']
+ object_info = args.get('object-info', [])
+ object_info = dict(object_property.strip().split('=', 1) for object_property in object_info.split(','))
+
+ raw_response = add_item_to_resource_list(resource_type, group_id, object_info)
+ outputs = {'FortiSIEM.Resource(val.id && val.id == obj.id)': createContext(raw_response, removeNull=True)}
+
+ return_outputs(tableToMarkdown('Resource was added:', raw_response, removeNull=True), outputs, raw_response)
+
+
+@logger
+def add_item_to_resource_list(resource_type, group_id, object_info):
+ session = login()
+ url = '{}/{}/save'.format(REST_ADDRESS, resource_type)
+ object_info['groupId'] = group_id
+ object_info['active'] = True
+ object_info['sysDefined'] = False
+
+ response = session.post(url, data=json.dumps(object_info), verify=VERIFY_SSL, auth=AUTH)
+ response = response.json()
+
+ if response.get('code', 0) == -1:
+ return_error(response['msg'])
+
+ return response
+
+
+def remove_item_from_resource_list_command():
+ args = demisto.args()
+ resource_type = parse_resource_type(args['resource_type'])
+ deleted_ids = args.get('ids', '').split(',')
+
+ raw_response = remove_item_from_resource_list(resource_type, deleted_ids)
+
+ return_outputs(raw_response, {}, raw_response=raw_response)
+
+
+@logger
+def remove_item_from_resource_list(resource_type, deleted_ids):
+ session = login()
+ url = '{}/{}/del'.format(REST_ADDRESS, resource_type)
+
+ response = session.delete(url, params={'ids': json.dumps(deleted_ids)}, verify=VERIFY_SSL, auth=AUTH)
+
+ if response.text != '"OK"':
+ return_error(response.text)
+
+    return 'Items with IDs {} were removed.'.format(deleted_ids)
+
+
+def get_resource_list_command():
+ args = demisto.args()
+ resource_type = parse_resource_type(args['resource_type'])
+ group_id = args['group_id']
+
+ raw_response = get_resource_list(resource_type, group_id)
+ headers = raw_response.get('headerData', {}).get('keys', [])
+ ec = []
+ for element in raw_response.get('lightValueObjects', []):
+ e = dict(zip(headers, element.get('data', [])))
+ e['id'] = element.get('objectId')
+ ec.append(e)
+ outputs = {'FortiSIEM.Resource(val.id && val.id == obj.id)': createContext(ec, removeNull=True)}
+
+ return_outputs(tableToMarkdown('Resource list:', ec, headerTransform=pascalToSpace, removeNull=True),
+ outputs,
+ raw_response)
+
+
+@logger
+def get_resource_list(resource_type, group_id):
+ session = login()
+ url = '{}/{}/list'.format(REST_ADDRESS, resource_type)
+
+ params = {
+ 'groupId': group_id,
+ 'start': 0,
+ 'size': 50,
+ }
+
+ response = session.get(url, params=params, verify=VERIFY_SSL, auth=AUTH)
+ response = response.json()
+
+ if response.get('code', 0) == -1:
+ return_error(response['msg'])
+
+ return response
+
+
+def convert_keys_to_snake_case(d):
+ d = dict((k.replace("-", "_"), v) for k, v in d.items())
+ return d
+
+
+def test():
+    try:
+        login()
+    except Exception as e:
+        if isinstance(e, requests.exceptions.SSLError):
+            return_error('Could not verify the server certificate. Consider enabling "Trust any certificate (unsecure)".')
+        else:
+            return_error(str(e))
+    demisto.results('ok')
+
+
+def fetch_incidents():
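+    # Query incidents, then deduplicate against the IDs recorded in previous
+    # runs; the dedup window is capped at the 1000 most recent incident IDs.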
+ query_id = GetEventQuery()
+ res = GetIncidentsByOrg(query_id)
+    known_ids = demisto.getLastRun().get('ids') or []
+
+ incidents = []
+ for inc in res:
+ if inc.get('incidentId') not in known_ids:
+ incidents.append({"name": inc.get('eventName', 'New FortiSIEM Event'), "rawJSON": json.dumps(inc)})
+ if len(known_ids) >= 1000:
+ known_ids.pop(0)
+ known_ids.append(inc.get('incidentId'))
+
+ demisto.setLastRun({
+ 'ids': known_ids,
+ 'extended_keys': EXTENDED_KEYS
+ })
+ demisto.incidents(incidents)
+ sys.exit(0)
+
+
+def main():
+ try:
+ handle_proxy()
+ load_extended_keys()
+ if demisto.command() == 'test-module':
+ test()
+
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+
+ elif demisto.command() == 'fortisiem-get-events-by-incident':
+ args = demisto.args()
+ getEventsByIncident(args['incID'], args['maxResults'], args['extendedData'], args['maxWaitTime'])
+
+ elif demisto.command() == 'fortisiem-clear-incident':
+ clear_incident_command()
+
+ elif demisto.command() == 'fortisiem-get-events-by-filter':
+ args = demisto.args()
+ getEventsByFilter(args['maxResults'], args['extendedData'], args['maxWaitTime'], args['reportWindow'],
+ args['reportWindowUnit'])
+
+ elif demisto.command() == 'fortisiem-get-events-by-query':
+ args = convert_keys_to_snake_case(demisto.args())
+ get_events_by_query(**args)
+
+ elif demisto.command() == 'fortisiem-get-cmdb-devices':
+ get_cmdb_devices_command()
+
+ elif demisto.command() == 'fortisiem-get-lists':
+ get_lists_command()
+
+ elif demisto.command() == 'fortisiem-add-item-to-resource-list':
+ add_item_to_resource_list_command()
+
+ elif demisto.command() == 'fortisiem-remove-item-from-resource-list':
+ remove_item_from_resource_list_command()
+
+ elif demisto.command() == 'fortisiem-get-resource-list':
+ get_resource_list_command()
+
+ except Exception as e:
+ if demisto.command() == 'fetch-incidents':
+ LOG(str(e))
+ LOG.print_log()
+ raise
+ else:
+ return_error(str(e))
+
+
+# Python 2 uses __builtin__, Python 3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/FortiSIEM/FortiSIEM.yml b/Integrations/FortiSIEM/FortiSIEM.yml
new file mode 100644
index 000000000000..02a3b2afbc49
--- /dev/null
+++ b/Integrations/FortiSIEM/FortiSIEM.yml
@@ -0,0 +1,384 @@
+commonfields:
+ id: FortiSIEM
+ version: -1
+name: FortiSIEM
+display: FortiSIEM
+category: Analytics & SIEM
+description: Search and update events of FortiSIEM and manage resource lists.
+configuration:
+- display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+- display: Server URL (e.g. https://192.168.0.1)
+ name: host
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Credentials
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: true
+- display: Trust any certificate (unsecure)
+ name: unsecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+script:
+ script: '-'
+ type: python
+ commands:
+ - name: fortisiem-get-events-by-incident
+ arguments:
+ - name: incID
+ required: true
+ description: ID of the incident by which to filter.
+ - name: maxResults
+ description: Maximum number of results to return.
+ defaultValue: "10"
+ - name: extendedData
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+      description: Whether to include extended event data.
+ defaultValue: "true"
+ - name: maxWaitTime
+ description: Maximum time for the event report to finish (in seconds).
+ defaultValue: "60"
+ outputs:
+ - contextPath: FortiSIEM.Events.EventType
+ description: Event type.
+ type: string
+ - contextPath: FortiSIEM.Events.EventID
+ description: FortiSIEM Event ID.
+ type: string
+ - contextPath: FortiSIEM.Events.RawEventLog
+ description: Raw Event Log.
+ type: string
+ - contextPath: FortiSIEM.Events.ReportingDevice
+ description: Reporting Device.
+ type: string
+ - contextPath: FortiSIEM.Events.IncidentID
+ description: Incident ID.
+ type: number
+ - contextPath: FortiSIEM.Events.User
+ description: Event User.
+ type: string
+ - contextPath: FortiSIEM.Events.EventReceiveTime
+ description: Event received timestamp.
+ type: number
+ - contextPath: FortiSIEM.Events.EventName
+ description: Event Name.
+ type: string
+ - contextPath: FortiSIEM.Events.ReportingIP
+ description: Reporting IP address.
+ type: string
+ - contextPath: FortiSIEM.Events.SystemEventCategory
+ description: System Event Category.
+ type: string
+ - contextPath: FortiSIEM.Events.EventAction
+ description: EventAction.
+ type: string
+ - contextPath: FortiSIEM.Events.RelayingIP
+ description: Relaying IP address.
+ type: string
+ - contextPath: FortiSIEM.Events.EventSeverityCategory
+ description: Severity Category.
+ type: string
+ - contextPath: FortiSIEM.Events.OrganizationName
+ description: Organization Name.
+ type: string
+ - contextPath: FortiSIEM.Events.ReportingVendor
+ description: Reporting Vendor.
+ type: string
+ - contextPath: FortiSIEM.Events.ReportingModel
+ description: Reporting Model.
+ type: string
+ - contextPath: FortiSIEM.Events.OrganizationName
+ description: Organization name.
+ type: string
+ - contextPath: FortiSIEM.Events.CollectorID
+ description: Collector ID.
+ type: number
+ - contextPath: FortiSIEM.Events.EventParserName
+ description: Name of raw event parser.
+ type: string
+ - contextPath: FortiSIEM.Events.HostIP
+ description: Host IP address.
+ type: string
+ - contextPath: FortiSIEM.Events.HostName
+ description: Host name.
+ type: string
+ - contextPath: FortiSIEM.Events.FileName
+ description: Name of the file associated with the event.
+ type: string
+ - contextPath: FortiSIEM.Events.ProcessName
+ description: Name of the process associated with the event.
+ type: string
+ - contextPath: FortiSIEM.Events.JobName
+ description: Name of the job associated with the event.
+ type: string
+ - contextPath: FortiSIEM.Events.Status
+ description: Event status.
+ type: string
+ - contextPath: FortiSIEM.Events.DestinationPort
+ description: Port of the traffic's destination.
+ type: string
+ - contextPath: FortiSIEM.Events.SourcePort
+ description: Port of the traffic's origin.
+ type: string
+ - contextPath: FortiSIEM.Events.DestinationIP
+ description: Destination IP address for the web.
+ type: string
+ - contextPath: FortiSIEM.Events.SourceIP
+      description: "IP address of the traffic's origin. The source varies by direction:
+        in HTTP requests, this is the web browser or other client; in HTTP responses,
+        this is the physical server."
+ type: string
+ - contextPath: FortiSIEM.Events.ExtendedData
+ description: All additional data returned by FortiSIEM.
+ type: string
+ - contextPath: FortiSIEM.Events.DestinationInterface
+ description: Interface of the traffic's destination.
+ type: string
+ - contextPath: FortiSIEM.Events.NATTranslation
+ description: NAT source port.
+ type: string
+ - contextPath: FortiSIEM.Events.Protocol
+      description: The protocol used by the web traffic (tcp by default).
+ type: string
+ - contextPath: FortiSIEM.Events.SourceMAC
+ description: MAC address associated with the source IP address.
+ type: string
+ - contextPath: FortiSIEM.Events.NATIP
+ description: NAT source IP.
+ type: string
+    description: Returns events for the specified incident.
+ - name: fortisiem-clear-incident
+ arguments:
+ - name: incident_id
+ required: true
+ description: ID of the incident to close.
+ - name: close_reason
+ description: Reason for closing.
+ description: Clear (close) a FortiSIEM incident.
+ - name: fortisiem-get-events-by-filter
+ arguments:
+ - name: maxResults
+ description: Maximum number of results to return.
+ defaultValue: "10"
+ - name: extendedData
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+      description: Whether to include extended event data.
+ defaultValue: "true"
+ - name: maxWaitTime
+ description: Maximum time for the event report to finish (in seconds).
+ defaultValue: "60"
+ - name: reptDevIpAddr
+ description: Reporting IP address.
+ - name: destIpAddr
+ description: Destination IP address.
+ - name: srcIpAddr
+ description: Source IP address.
+ - name: destMACAddr
+ description: Destination MAC address.
+ - name: srcMACAddr
+ description: Source MAC address.
+ - name: destDomain
+ description: Destination domain.
+ - name: srcDomain
+ description: Source domain.
+ - name: destName
+ description: Destination name.
+ - name: srcName
+ description: Source name.
+ - name: destAction
+ description: Destination action.
+ - name: destUser
+ description: Destination user.
+ - name: reportWindow
+ description: Relative report time value.
+ defaultValue: "1"
+ - name: reportWindowUnit
+ auto: PREDEFINED
+ predefined:
+ - Minute
+ - Day
+ - Hour
+ - Daily
+ description: Relative report time unit.
+ defaultValue: Minute
+ - name: eventType
+      description: Event type.
+ - name: srcGeoCountry
+ description: Source geo country.
+ - name: User
+ description: User.
+ outputs:
+ - contextPath: FortiSIEM.Events.EventType
+ description: FortiSIEM event type.
+ type: Unknown
+ - contextPath: FortiSIEM.Events.SourceCountry
+ description: Event source country.
+ type: Unknown
+ description: Returns an event list according to the specified filters.
+ - name: fortisiem-get-cmdb-devices
+ arguments:
+ - name: device_ip
+ description: CSV list of device IPs.
+ - name: limit
+ description: Maximum number of results to return.
+ defaultValue: "50"
+ outputs:
+ - contextPath: FortiSIEM.CmdbDevice
+ description: CMDB devices.
+ type: Unknown
+    description: Returns the details of each CMDB device.
+ - name: fortisiem-get-events-by-query
+ arguments:
+ - name: query
+ required: true
+ description: The query to get events.
+ - name: report-window
+ description: Interval time of the search.
+ defaultValue: "60"
+ - name: interval-type
+ auto: PREDEFINED
+ predefined:
+ - Minute
+ - Hourly
+ - Daily
+ description: Interval unit.
+ defaultValue: Minute
+ - name: limit
+ description: Maximum number of results to return.
+ - name: extended-data
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+      description: Whether to include extended event data.
+ defaultValue: "true"
+ - name: max-wait-time
+      description: Maximum time for the event report to finish (in seconds).
+ outputs:
+ - contextPath: FortiSIEM.Events.EventType
+ description: FortiSIEM event type.
+ type: Unknown
+ - contextPath: FortiSIEM.Events.SourceCountry
+ description: Event source country.
+ type: Unknown
+    description: Returns an event list filtered by the specified query.
+ - name: fortisiem-get-lists
+ arguments: []
+ description: Get all FortiSIEM resource lists hierarchy.
+ - name: fortisiem-add-item-to-resource-list
+ arguments:
+ - name: group_id
+ required: true
+      description: ID of the resource group. Run the "fortisiem-get-lists" command
+        to get the ID.
+ - name: object-info
+ required: true
+ description: 'CSV list of key-value pairs of attributes, for example: name=SomeName,lowIp=192.168.1.1,highIp=192.168.1.2'
+ - name: resource_type
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - Reports
+ - Rules
+ - Networks
+ - Watch Lists
+ - Protocols
+ - Malware IP
+ - Event Type
+ - Malware Domains
+ - Malware Urls
+ - Malware Hash
+ - Malware Processes
+ - Country Groups
+ - Default Password
+ - Anonymity Network
+ - User Agents
+ - Remediations
+      description: Resource type.
+ outputs:
+ - contextPath: FortiSIEM.Resource
+      description: Resource object in FortiSIEM lists.
+ type: Unknown
+    description: Adds an element to a resource list.
+ - name: fortisiem-remove-item-from-resource-list
+ arguments:
+ - name: ids
+ required: true
+ description: CSV list of resource IDs.
+ - name: resource_type
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - Reports
+ - Rules
+ - Networks
+ - Watch Lists
+ - Protocols
+ - Malware IP
+ - Event Type
+ - Malware Domains
+ - Malware Urls
+ - Malware Hash
+ - Malware Processes
+ - Country Groups
+ - Default Password
+ - Anonymity Network
+ - User Agents
+ - Remediations
+      description: Resource type.
+ description: Removes elements from a resource list.
+ - name: fortisiem-get-resource-list
+ arguments:
+ - name: group_id
+ required: true
+ description: ID of the resource group. Run the "fortisiem-get-lists" command
+ to get the ID.
+ - name: resource_type
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - Reports
+ - Rules
+ - Networks
+ - Watch Lists
+ - Protocols
+ - Malware IP
+ - Event Type
+ - Malware Domains
+ - Malware Urls
+ - Malware Hash
+ - Malware Processes
+ - Country Groups
+ - Default Password
+ - Anonymity Network
+ - User Agents
+ - Remediations
+ description: Resource type.
+ description: Lists all elements in a resource list.
+ isfetch: true
+ runonce: false
+tests:
+ - No Test
\ No newline at end of file
diff --git a/Integrations/FortiSIEM/FortiSIEM_description.md b/Integrations/FortiSIEM/FortiSIEM_description.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Integrations/FortiSIEM/FortiSIEM_image.png b/Integrations/FortiSIEM/FortiSIEM_image.png
new file mode 100644
index 000000000000..44911461a572
Binary files /dev/null and b/Integrations/FortiSIEM/FortiSIEM_image.png differ
diff --git a/Integrations/FreshDesk/CHANGELOG.md b/Integrations/FreshDesk/CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/Integrations/FreshDesk/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/Integrations/FreshDesk/FreshDesk.py b/Integrations/FreshDesk/FreshDesk.py
new file mode 100644
index 000000000000..c9c3de525e05
--- /dev/null
+++ b/Integrations/FreshDesk/FreshDesk.py
@@ -0,0 +1,1845 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+
+
+import os
+import requests
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+# Remove proxy if not set to true in params
+if not demisto.params().get('proxy'):
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+''' GLOBALS/PARAMS '''
+
+PARAMS = demisto.params()
+CREDS = PARAMS.get('credentials')
+USERNAME = CREDS.get('identifier') if CREDS else None
+PASSWORD = CREDS.get('password') if CREDS else None
+TOKEN = PARAMS.get('token')
+
+if not (USERNAME and PASSWORD) and not TOKEN:
+ err_msg = 'You must provide either your Freshdesk account API key or the ' \
+ 'username and password you use to sign into your Freshdesk account ' \
+ 'when instantiating an instance of the Freshdesk integration.'
+ return_error(err_msg)
+
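+# Freshdesk uses HTTP basic auth. With an API key, the key is sent as the
+# username and a placeholder ('X') as the password.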
+AUTH = (TOKEN, 'X') if TOKEN else (USERNAME, PASSWORD)
+
+# How far back in time to fetch incidents on the first run
+FETCH_TIME = PARAMS.get('fetch_time', '24 hours')
+# Remove trailing slash to prevent wrong URL path to service
+SERVER = PARAMS['url'][:-1] if (PARAMS.get('url') and PARAMS['url'].endswith('/')) else PARAMS['url']
+# Should we use SSL
+USE_SSL = not demisto.params().get('unsecure', False)
+# Service base URL
+BASE_URL = SERVER + '/api/v2/'
+
+# Headers to be sent in requests
+HEADERS = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
+# Headers to be used when making a request to POST a multi-part encoded file
+MULTIPART_HEADERS = {'Accept': 'application/json'}
+
+# Default amount of results returned per-page/per-api-call when the
+# fd-search-tickets command's results that match the command's specified
+# filter criteria exceeds 30
+PER_PAGE = 30
+
+# The API response ticket attributes that will be included
+# in most command's context outputs
+DEFAULT_TICKET_CONTEXT_FIELDS = [
+ 'priority', 'due_by', 'subject', 'status',
+ 'requester_id', 'tags', 'group_id', 'source', 'created_at',
+ 'responder_id', 'fr_due_by', 'id'
+]
+
+
+''' HELPER FUNCTIONS '''
+
+
+def reformat_canned_response_context(context):
+ """
+ Reformat context for canned-response related commands (from having used string_to_context_key)
+ to desired output format.
+
+ parameter: (dict) context
+ The context to reformat
+
+ returns:
+ The reformatted context
+ """
+    # Iterate over a snapshot of the items; keys are added and removed inside
+    # the loop, which would break iteritems().
+    for key, val in context.items():
+ if 'Id' in key:
+ new_key = key.replace('Id', 'ID')
+ context[new_key] = val
+ del context[key]
+ elif 'Html' in key:
+ new_key = key.replace('Html', 'HTML')
+ context[new_key] = val
+ del context[key]
+ return context
+
+
+def reformat_conversation_context(context):
+ """
+ Reformat context for conversation related commands (from having used string_to_context_key)
+ to desired output format.
+
+ parameter: (dict) context
+ The context to reformat
+
+ returns:
+ The reformatted context
+ """
+ to_emails = context.get('ToEmails')
+ body = context.get('Body')
+ attachments = context.get('Attachments')
+ if to_emails:
+ context['ToEmail'] = to_emails
+ del context['ToEmails']
+ if body:
+ context['BodyHTML'] = body
+ del context['Body']
+ if attachments:
+ del context['Attachments']
+ return context
+
+
+def format_contact_context(contact):
+ """
+ Format context for contact related commands.
+
+ parameter: (dict) contact
+ The API response from executing a contact related command whose attributes need
+ to be parsed into context
+
+ returns:
+ The formatted context
+ """
+ dont_include = ['other_companies', 'other_emails', 'custom_fields', 'avatar']
+ # Parse response into context
+ context = {}
+ for key, val in contact.iteritems():
+ if key not in dont_include and val:
+ new_key = string_to_context_key(key)
+ if 'Id' in new_key:
+ new_key = new_key.replace('Id', 'ID')
+ elif new_key == 'Tags':
+ new_key = 'Tag'
+ context[new_key] = val
+ return context
+
+
+def reformat_ticket_context(context):
+ """
+ Reformat context outputs (from having used string_to_context_key) to desired output format.
+
+ parameter: (dict) context
+ The context to reformat
+
+ returns:
+ The reformatted context
+ """
+ standard_context_outputs = [
+ 'Priority', 'DueBy', 'Subject', 'Status', 'RequesterID',
+ 'Tag', 'GroupID', 'Source', 'CreatedAt', 'ResponderID',
+ 'FrDueBy', 'ID', 'UpdatedAt', 'Attachment', 'AdditionalFields',
+ 'UserID', 'BodyText', 'Category', 'Private', 'Incoming'
+ ]
+
+    # items() (a snapshot in Python 2) is required here because keys are added
+    # and removed during iteration.
+    for key, val in context.items():
+ if key == 'Tags':
+ new_key = key[:-1]
+ context[new_key] = val
+ del context[key]
+ elif key == 'CustomFields':
+ new_key = key[:-1]
+ context[new_key] = val
+ del context[key]
+ elif key == 'FwdEmails':
+ new_key = key[:-1]
+ context[new_key] = val
+ del context[key]
+ elif key == 'Description':
+ new_key = 'DescriptionHTML'
+ context[new_key] = val
+ del context[key]
+ elif 'Id' in key:
+ new_key = key.replace('Id', 'ID')
+ context[new_key] = val
+ del context[key]
+ elif 'Cc' in key:
+ if key.endswith('s'):
+ new_key = key[:-1].replace('Cc', 'CC')
+ else:
+ new_key = key.replace('Cc', 'CC')
+ context[new_key] = val
+ del context[key]
+
+ # If 'Attachments' are still in context get rid of them - should be 'Attachment'
+ attachments = context.get('Attachments')
+ if attachments:
+ del context['Attachments']
+
+ new_context = {}
+ new_context['AdditionalFields'] = context.get('AdditionalFields') if context.get('AdditionalFields') else {}
+ additional_fields = {}
+ for key, val in context.iteritems():
+ if key not in standard_context_outputs:
+ if not ((isinstance(val, list) or isinstance(val, dict)) and len(val) == 0):
+ additional_fields[key] = val
+ else:
+ new_context[key] = val
+ new_context['AdditionalFields'] = dict(new_context.get('AdditionalFields', {}), **additional_fields)
+ return new_context
+
+
+def handle_search_tickets_pagination(args, response):
+ """
+ Retrieve all resulting tickets even over the default 30 returned by a single API call.
+
+ When the search_tickets_command results in more tickets than the default per page count (30) returned from
+ making an API call, then this function retrieves the remaining tickets by iterating and making API calls
+ per additional page of results.
+
+ parameter: (dict) args
+ search_tickets_command arguments
+
+ parameter: (dict) response
+ The initial json response from making an API call in the search_tickets function
+
+ returns:
+ All Ticket Objects
+ """
+ # If user entered custom_query arg, the resulting tickets are in the 'results' attribute of the response
+ if args.get('custom_query'):
+ # Max page count allowed by API when using custom query
+ max_pages = 10
+ # Deal with pagination if necessary
+ tickets = response.get('results')
+ total_tickets = response.get('total')
+ total_tickets -= PER_PAGE
+ page = 1
+ while total_tickets > 0 and page <= max_pages:
+ page += 1
+ args['page'] = page
+ tickets_page = search_tickets(args)
+ tickets.extend(tickets_page.get('results'))
+ total_tickets -= PER_PAGE
+ else:
+ # Max page count allowed by API when using normal filters
+ max_pages = 300
+ tickets = response
+ page = 1
+ next_page = tickets
+ while next_page and page <= max_pages:
+ page += 1
+ args['page'] = page
+ next_page = search_tickets(args)
+ if next_page:
+ tickets.extend(next_page)
+ return tickets
+
+
+def attachments_into_context(api_response, context):
+ """
+ Get the attachments field from the api_response argument if present and parse it into the context.
+
+ parameter: (dict) api_response
+ The json response returned by the calling function's associated 'requests' function in which calls
+ to the API are made.
+
+ parameter: (dict) context
+ The context that will be modified and returned to the war room
+
+ returns:
+ The modified context, and the modified context with the attachments in readable format for the
+ human readable output
+ """
+ attachment_keys_to_include = [
+ 'attachment_url', 'content_type', 'id', 'name', 'size'
+ ]
+
+ context_readable = dict(**context)
+ # Parse attachments into context
+ attachments = api_response.get('attachments')
+ if attachments:
+ attachments_context = []
+ attachments_context_readable = []
+ for attachment in attachments:
+ attachment_context = {}
+ for key, val in attachment.iteritems():
+ if key in attachment_keys_to_include:
+ if key == 'attachment_url':
+ key = 'AttachmentURL'
+ elif key == 'id':
+ key = 'ID'
+ else:
+ key = string_to_context_key(key)
+ attachment_context[key] = val
+ attachment_formatted = formatCell(attachment_context).split('\n')
+ attachment_formatted = ', '.join(attachment_formatted)
+ attachments_context_readable.append(attachment_formatted)
+
+ attachments_context.append(attachment_context)
+ context['Attachment'] = attachments_context
+ context_readable['Attachment'] = attachments_context_readable
+ return context, context_readable
+
+
+def additional_fields_to_context(context, already_in_context, additional_fields, additional_values):
+ """
+ Parses fields not presented as part of the command's standard arguments into the context.
+
+ For commands where the user can enter additional fields and their associated values beyond what
+ is offered by the standard arguments (but are still supported by the API endpoint). If the additional
+ fields are not part of the standard context output for that command, then those fields and values
+ from the API call are parsed and subcategorized under the 'AdditionalFields' context output.
+
+ parameter: (dict) context
+ The context that will be modified and returned to the war room
+
+ parameter: (list) already_in_context
+ List of fields which are ordinarily/already parsed into the context
+
+ parameter: (list) additional_fields
+ List of the fields beyond the command's standard arguments that the user entered as part
+ of the call to the API endpoint
+
+ parameter: (list) additional_values
+ List of values corresponding to the additional_fields argument
+
+ returns:
+ The modified context
+ """
+ # Parse additional fields into context
+ if additional_fields and additional_values:
+ added_context = {}
+ for field, value in zip(additional_fields, additional_values):
+ if field not in already_in_context and field != 'attachments':
+ key = string_to_context_key(field)
+ added_context[key] = value
+ context['AdditionalFields'] = added_context
+ return context
+
+
+def additional_fields_to_args(args, additional_fields_arg_name):
+ """
+ Parses the additional_fields command argument for the individual fields and values and
+ reassigns them to the args dictionary.
+
+ parameter: (dict) args
+ The command's arguments
+
+ parameter: (string) additional_fields_arg_name
+ The name of the command argument that contains the additional fields and values
+
+ returns:
+ The args dictionary that has been updated with the additional fields and values,
+ the list of additional fields, and the list of additional values.
+ """
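+    # Expected input format: 'field1=value1;field2=value2'. A value containing
+    # commas (e.g. 'tags=red,blue') is converted to a list.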
+ additional_fields = args.get(additional_fields_arg_name)
+ if additional_fields:
+ fields, values = [], []
+ fields_and_vals = additional_fields.split(';')
+ # For the case there is only one additional field + value
+ if len(fields_and_vals) == 1:
+ fields_and_vals = list(fields_and_vals)
+ for field_and_val in fields_and_vals:
+ field_and_val = field_and_val.split('=')
+ # If the length doesn't equal 2, means there were either no equal signs or more than one
+ if not len(field_and_val) == 2:
+ err_msg = 'It appears you entered either too many or too few' \
+ ' equal signs in the \'additional_fields\' argument.'
+ return_error(err_msg)
+ field = field_and_val[0].strip()
+ val = field_and_val[1]
+
+ # If the value contains commas, then it is a list
+ if ',' in val:
+ val = argToList(val)
+ args[field] = val
+ fields.append(field)
+ values.append(val)
+ del args[additional_fields_arg_name]
+ return args, fields, values
+ return args, None, None
+
+
+def ticket_to_incident(ticket):
+ """
+ Create incident from ticket object.
+
+ parameter: (object) ticket
+ Ticket object
+
+ returns:
+ Incident Object
+ """
+ incident = {}
+ # Incident Title
+ incident['name'] = 'Freshdesk Ticket: "{}"'.format(ticket.get('subject'))
+ # Incident occurrence time - the ticket's creation time
+ incident['occurred'] = ticket.get('created_at')
+ # The raw response from the service, providing full info regarding the item
+ incident['rawJSON'] = json.dumps(ticket)
+ return incident
+
+
+def get_additional_fields(args):
+ """
+ Determine which fields need to be added to context based off arguments given in the search_tickets_command.
+
+ parameter: (dict) args
+ The search_tickets_command arguments
+
+ returns:
+ List of fields to be added to the context outputs
+ """
+ additional_fields = [] # fields that should be added to output context
+ filter = args.get('filter')
+ if filter:
+ if filter == 'deleted':
+ additional_fields.append('deleted')
+ elif filter == 'spam':
+ additional_fields.append('spam')
+ requester = args.get('requester')
+ if requester:
+ if '@' in requester:
+ additional_fields.append('email')
+ company_id = args.get('company_id')
+ if company_id:
+ additional_fields.append('company_id')
+ if args.get('include_description') and args.get('include_description').lower() == 'yes':
+ additional_fields.extend(['description', 'description_text'])
+ return additional_fields
+
+
+def entries_to_files(entry_ids):
+ """
+ Format file details (retrieved using the files' entry IDs) to API expectations to include files in API call.
+
+ parameter: (list) entry_ids
+ List of entry ID strings for files uploaded to the warroom
+
+ returns:
+ List of attachment field, value tuples formatted according to API expectations
+ """
+ attachments = []
+ for entry_id in entry_ids:
+ execute_results = demisto.getFilePath(entry_id)
+ file_path = execute_results['path']
+ file_name = execute_results['name']
+ attachments.append(('attachments[]', (file_name, open(file_path, 'rb'))))
+
+ return attachments
+
+
+def handle_array_input(args):
+ """
+ Format any command argument that is supposed to be an array from a string to a list.
+
+ parameter: (dict) args
+ The command arguments dictionary
+
+ returns:
+ The arguments dict with field values transformed from strings to lists where necessary
+ """
+ array_inputs = [
+ 'tags', 'attachments', 'cc_emails', 'bcc_emails', 'to_emails',
+ 'update_fields', 'update_values', 'notify_emails'
+ ]
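+    # When attachments are present the request is sent as multipart form data,
+    # so list-valued fields (other than the attachments themselves) must use
+    # the repeated 'field[]' form-key convention.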
+ attchs_present = args.get('attachments')
+ if attchs_present:
+ for arr_input in array_inputs:
+ if arr_input in args.keys():
+ if arr_input != 'attachments':
+ args[arr_input + '[]'] = argToList(args.get(arr_input))
+ del args[arr_input]
+ else:
+ args[arr_input] = argToList(args.get(arr_input))
+ else:
+ for arr_input in array_inputs:
+ if arr_input in args.keys():
+ args[arr_input] = argToList(args.get(arr_input))
+ return args
+
+
+def validate_priority_input(args):
+ """
+ Check entered value for command argument 'priority' and format to API expectations.
+
+ parameter: (dict) args
+ The command arguments dictionary
+
+ returns:
+ The arguments dict with the value for 'priority' field reformatted if necessary
+ """
+ # Parse and ensure valid command argument
+ priority = args.get('priority', None)
+
+ # If priority wasn't given by the user as a cmd arg
+ # then no need to alter it to API expectations
+ if not priority:
+ return args
+
+ priorities = ['low', 'medium', 'high', 'urgent']
+
+ # Check if the user entered status as words - aka the
+ # options listed above in 'statuses'
+ err_msg = 'priority should be one of these values: 1, 2, 3, 4, {}'.format(', '.join(priorities))
+ if len(priority) > 1:
+ if priority.lower() in priorities:
+ # Add 1 since API status numbers for tickets start at 1
+ # Cast to string so clean_arguments helper function doesn't throw any errors
+ args['priority'] = str(priorities.index(priority.lower()) + 1)
+ else:
+ return_error(err_msg)
+ # Otherwise make sure the user entered valid status number
+ elif not (0 < int(priority) < 5):
+ return_error(err_msg)
+ return args
+
+
+def validate_status_input(args):
+ """
+ Check entered value for command argument 'status' and format to API expectations.
+
+ parameter: (dict) args
+ The command arguments dictionary
+
+ returns:
+ The arguments dict with the value for 'status' field reformatted if necessary
+ """
+ # Parse and ensure valid command argument
+ status = args.get('status', None)
+
+ # If status wasn't given by the user as a cmd arg
+ # then no need to alter it to API expectations
+ if not status:
+ return args
+
+ statuses = [
+ 'open', 'pending', 'resolved', 'closed',
+ 'waiting on customer', 'waiting on third party'
+ ]
+
+ # Check if the user entered status as words - aka the
+ # options listed above in 'statuses'
+ err_msg = 'status should be one of these values: 2, 3, 4, 5, 6, 7, {}'.format(', '.join(statuses))
+ if len(status) > 1:
+ if status.lower() in statuses:
+ # Add 2 since API status numbers for tickets start at 2
+ # Cast to string so clean_arguments helper function doesn't throw any errors
+ args['status'] = str(statuses.index(status.lower()) + 2)
+ else:
+ return_error(err_msg)
+ # Otherwise make sure the user entered valid status number
+ elif not (1 < int(status) < 8):
+ return_error(err_msg)
+ return args
+
+
+def handle_number_input(args):
+ """
+ Format any command argument that is supposed to be a number from a string to an int.
+
+ parameter: (dict) args
+ The command arguments dictionary
+
+ returns:
+ The arguments dict with field values transformed from strings to numbers where necessary
+ """
+ # Command args that should be numbers
+ number_args = [
+ 'requester_id', 'status', 'priority', 'responder_id',
+ 'email_config_id', 'group_id', 'product_id', 'source', 'company_id'
+ ]
+ # Convert cmd args that are expected to be numbers from strings to numbers
+ for num_arg in number_args:
+ if num_arg in args.keys():
+ args[num_arg] = int(args.get(num_arg))
+ return args
+
+
+def clean_arguments(args):
+ """
+ Perform all validation and reformatting of command arguments.
+
+ parameter: (dict) args
+ The command arguments dictionary
+
+ returns:
+ The arguments dict with all field values reformatted where necessary
+ """
+ args = validate_status_input(args)
+ args = validate_priority_input(args)
+ args = handle_array_input(args)
+ args = handle_number_input(args)
+ return args
+
+
+def determine_identifier(args):
+ """
+ Determine whether the input for the 'identifier' argument is an
+ email or twitter handle and adjust 'args' accordingly.
+
+ parameter: (dict) args
+ The command arguments dictionary
+
+ returns:
+ The arguments dict with the email or twitter_id field (depending on what the value was)
+ assigned the value entered for the 'identifier' argument
+ """
+ identifier = args.get('identifier')
+ if identifier.startswith('@'):
+ # Then it's a twitter handle
+ args['twitter_id'] = identifier
+ elif '@' in identifier:
+ # Otherwise assume it's an email address
+ args['email'] = identifier
+ else:
+ err_msg = 'The entered value for the \'identifier\' argument must ' \
+ 'be either a Twitter handle or an Email Address.'
+ return_error(err_msg)
+ # Delete identifier field from args since it doesn't match API expected inputs
+ del args['identifier']
+ return args
+
+
+def determine_responder(args):
+ """
+ Determine whether the input for the 'responder' argument is a group or an agent and adjust 'args' accordingly.
+
+ parameter: (dict) args
+ The command arguments dictionary
+
+ returns:
+ The arguments dict with the group_id or responder_id field (depending on what the value was)
+ assigned the value entered for the 'responder' argument
+ """
+ responder = args.get('responder', None)
+ if responder:
+ args = determine_group(args, 'responder')
+ args = determine_agent(args, 'responder')
+ return args
+
+
+def determine_agent(args, key_name):
+ """
+ Determine if the value points to an agent by checking against all agent names, emails, and IDs, and adjust
+ 'args' accordingly.
+
+ parameter: (string) key_name
+ The name of the command argument
+
+ parameter: (dict) args
+ The command arguments dictionary
+
+ returns:
+ The arguments dict with responder_id field assigned the appropriate value if the value passed
+ for the command argument represented by 'key_name' is associated with an Agent's details
+ """
+ assigned_agent = args.get(key_name, None)
+ if assigned_agent:
+ agent_emails, agent_names, agent_ids = [], [], []
+ # Get names, emails and ids of agents
+ agents = list_agents()
+ for agent in agents:
+ agent_ids.append(agent.get('id'))
+ contact_info = agent.get('contact')
+ if contact_info:
+ agent_names.append(contact_info.get('name', '').lower())
+ agent_emails.append(contact_info.get('email', '').lower())
+ # Check if responder value is a contact ID
+ if assigned_agent in agent_ids:
+ args['responder_id'] = assigned_agent
+ # Delete assigned_agent field from args since it doesn't match API expected inputs
+ del args[key_name]
+ elif assigned_agent.lower() in agent_names:
+ agent_name_idx = agent_names.index(assigned_agent.lower())
+ args['responder_id'] = agent_ids[agent_name_idx]
+ del args[key_name]
+ elif assigned_agent.lower() in agent_emails:
+ agent_email_idx = agent_emails.index(assigned_agent.lower())
+ args['responder_id'] = agent_ids[agent_email_idx]
+ del args[key_name]
+
+ return args
+
+
+def determine_group(args, key_name):
+ """
+ Determine if the value points to a group by checking against all group names and IDs, and adjust
+ 'args' accordingly.
+
+ parameter: (string) key_name
+ The name of the command argument
+
+ parameter: (dict) args
+ The command arguments dictionary
+
+ returns:
+ The arguments dict with group_id field assigned the appropriate value if the value passed
+ for the command argument represented by 'key_name' is associated with a Group's details
+ """
+ assigned_group = args.get(key_name, None)
+ if assigned_group:
+ group_names, group_ids = [], []
+ # Get names and ids of groups
+ groups = list_groups()
+ for group in groups:
+ group_ids.append(group.get('id'))
+ group_names.append(group.get('name', '').lower())
+ # Check if responder value is a group ID
+ if assigned_group in group_ids:
+ args['group_id'] = assigned_group
+ del args[key_name]
+ # Or the name of a group
+ elif assigned_group.lower() in group_names:
+ group_name_idx = group_names.index(assigned_group.lower())
+ args['group_id'] = group_ids[group_name_idx]
+ del args[key_name]
+ return args
+
+
+def http_request(method, url_suffix, params=None, data=None, files=None, headers=HEADERS):
+ """
+ A wrapper for requests lib to send our requests and handle requests and responses better.
+
+ parameter: (string) method
+ A string denoting the http request method to use.
+ Can be 'GET', 'POST, 'PUT', 'DELETE', etc.
+
+ parameter: (string) url_suffix
+ The API endpoint that determines which data we are trying to access/change in our
+ call to the API
+
+ parameter: (dict) params
+ The key/value pairs to be encoded as part of the URL's query string
+
+ parameter: (dict) data
+ The key/value pairs to be form-encoded
+
+ parameter: (list) files
+ The multipart-encoded files to upload
+
+ parameter: (dict) headers
+ The headers to use with the request
+
+ returns:
+ JSON Response Object
+ """
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ headers=headers,
+ auth=AUTH,
+ files=files
+ )
+ # Handle error responses gracefully
+ if res.status_code not in {200, 201, 202, 204}:
+ LOG(res.json())
+ LOG(res.text)
+ LOG.print_log()
+ err_msg = 'Error in API call to Freshdesk Integration [{}] - {}'.format(res.status_code, res.reason)
+ err = json.loads(res.content)
+ if err.get('errors'):
+ for error in err.get('errors'):
+ err_msg += '\n' + json.dumps(error, indent=2)
+ else:
+ for key, value in res.json().iteritems():
+ err_msg += '\n{}: {}'.format(key, value)
+ return_error(err_msg)
+ # Handle response with no content
+ elif res.status_code == 204:
+ return res
+
+ return res.json()
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+ Will try to make a request to the API endpoint for listing all tickets.
+ """
+ http_request('GET', 'tickets')
+
+
+def fetch_incidents():
+    # demisto.getLastRun() returns an object holding the state saved by the previous run.
+ last_run = demisto.getLastRun()
+ # Get the last fetch time, if exists
+ last_fetch = last_run.get('last_created_incident_timestamp')
+
+ # Handle first time fetch, fetch incidents retroactively
+ if not last_fetch:
+ last_fetch, _ = parse_date_range(FETCH_TIME, to_timestamp=True)
+ updated_since = timestamp_to_datestring(last_fetch, date_format='%Y-%m-%dT%H:%M:%SZ')
+ args = {'updated_since': updated_since, 'order_type': 'asc'}
+
+ tickets = search_tickets(args)
+ # convert the ticket/events to demisto incidents
+ incidents = []
+ for ticket in tickets:
+ incident = ticket_to_incident(ticket)
+ incident_date = date_to_timestamp(incident.get('occurred'), '%Y-%m-%dT%H:%M:%SZ')
+ # Update last run and add incident if the incident is newer than last fetch
+ if incident_date > last_fetch:
+ last_fetch = incident_date
+ incidents.append(incident)
+
+ demisto.setLastRun({'last_created_incident_timestamp': last_fetch})
+ demisto.incidents(incidents)
+
+
+'''<------ TICKETS ------>'''
+
+
+def create_ticket(args):
+ args = determine_identifier(args)
+ args = determine_responder(args)
+ args = clean_arguments(args)
+ endpoint_url = 'tickets'
+
+ response = None
+ if not args.get('attachments'):
+ # The service endpoint to request from
+ # Send a request using our http_request wrapper
+ response = http_request('POST', endpoint_url, data=json.dumps(args))
+ else:
+ # Get the files from their entry IDs
+ attachments = entries_to_files(args.get('attachments'))
+ # Format to API expectations
+ del args['attachments']
+ # Send a request and get raw response
+ response = http_request('POST', endpoint_url, data=args, files=attachments, headers=MULTIPART_HEADERS)
+ return response
+
+
+def create_ticket_command():
+ """
+ Create a new Freshdesk ticket.
+
+ demisto parameter: (string) subject
+    Subject of the ticket. The default value is null.
+
+ demisto parameter: (string) description
+ Details of the issue for which you are creating a ticket.
+
+ demisto parameter: (number) priority
+ Priority of the ticket. Each number has a corresponding value.
+ 1 is Low, 2 is Medium, 3 is High, 4 is Urgent.
+
+ demisto parameter: (number) status
+ Status of the ticket. Each number has a corresponding value.
+ 2 is Open, 3 is Pending, 4 is Resolved, 5 is Closed, 6 is Waiting
+ on Customer, 7 is Waiting on Third Party.
+
+ demisto parameter: (string) identifier
+    This can be an email address or a Twitter handle.
+
+ demisto parameter: (list) responder
+ ID or name of the group or agent to whom you wish to assign this ticket.
+ To find potential assignees, try executing the fd-list-groups command.
+
+ demisto parameter: (list) attachments
+ Entry IDs of files to attach to the ticket.
+ The total size of these attachments cannot exceed 15MB.
+
+ demisto parameter: (list) additional_fields
+ Additional ticket fields you wish to set the value of
+
+ returns:
+ Ticket Object
+ """
+ # Get command arguments from user
+ args = demisto.args()
+
+ # Handle additional_fields command arguments
+ args, additional_fields, additional_values = additional_fields_to_args(args, 'additional_fields')
+
+ # Make request and get raw response
+ ticket = create_ticket(args)
+ # Parse response into context
+ include_in_context = DEFAULT_TICKET_CONTEXT_FIELDS[:]
+
+ context = {string_to_context_key(key): val for key, val in ticket.iteritems() if val}
+ context = additional_fields_to_context(context, include_in_context, additional_fields, additional_values)
+ context, context_readable = attachments_into_context(ticket, context)
+ context = reformat_ticket_context(context)
+ context_readable = reformat_ticket_context(context_readable)
+ title = 'Newly Created Ticket #{}'.format(context.get('ID'))
+ human_readable = tableToMarkdown(title, context_readable, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': ticket,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Freshdesk.Ticket(val.ID && val.ID === obj.ID)': context
+ }
+ })
+
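+# Example war-room invocation of the command above (all values are illustrative):
+#
+#     !fd-create-ticket subject="Printer down" description="3rd floor printer is jammed"
+#         identifier="jane@example.com" priority=2 status=Open
+#         additional_fields="tags=hardware,facilities"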
+
+def update_ticket(args):
+ # Get ticket number
+ ticket_number = args.get('id')
+ # Remove ticket number from args
+ del args['id']
+
+ args = determine_agent(args, 'assigned_agent')
+ args = determine_group(args, 'assigned_group')
+
+ args = clean_arguments(args)
+
+ # The service endpoint to request from
+ endpoint_url = 'tickets/{}'.format(ticket_number)
+
+ response = None
+ if not args.get('attachments'):
+ # Send a request using our http_request wrapper
+ response = http_request('PUT', endpoint_url, data=json.dumps(args))
+ else:
+ # Get the files from their entry IDs
+ attachments = entries_to_files(args.get('attachments'))
+ # Format to API expectations
+ del args['attachments']
+ # Send a request and get raw response
+ response = http_request('PUT', endpoint_url, data=args, files=attachments, headers=MULTIPART_HEADERS)
+ return response
+
+
+def update_ticket_command():
+ """
+ Update the ticket specified by a ticket ID number.
+
+ demisto parameter: (string) subject
+ Update the ticket's subject field
+
+ demisto parameter: (number,string) status
+    Update the ticket's status. Possible values are 2, 3, 4, 5, 6, 7 or
+    'Open', 'Pending', 'Resolved', 'Closed', 'Waiting on Customer',
+    'Waiting on Third Party'.
+
+ demisto parameter: (number,string) priority
+    Update the ticket's priority. Possible values are 1, 2, 3, 4 or
+    'Low', 'Medium', 'High', 'Urgent'.
+
+ demisto parameter: (string) description
+ The HTML content of the ticket
+
+ demisto parameter: (number) id
+ ID number of the ticket to update
+
+ demisto parameter: assigned_agent
+ Update which agent is assigned to respond to this ticket.
+    The value can be the agent's ID number, name, or email address.
+
+ demisto parameter: assigned_group
+ Update which group is assigned to respond to this ticket.
+ Values can be either the group's ID number or name.
+
+ demisto parameter: (list) additional_fields
+    Fields not included in the default command arguments that
+    you wish to set values for
+
+ returns:
+ Ticket specified by the ticket ID number with its updated values
+ """
+ args = demisto.args()
+ args, additional_fields, additional_fields_values = additional_fields_to_args(args, 'additional_fields')
+
+ # Make request and get raw response
+ ticket = update_ticket(args)
+
+ # Parse response into context
+ include_in_context = DEFAULT_TICKET_CONTEXT_FIELDS[:]
+ include_in_context.append('updated_at')
+ # Parse default context fields
+ context = {string_to_context_key(key): val for key, val in ticket.iteritems() if val}
+ # Parse additional fields into context
+ context = additional_fields_to_context(context, include_in_context, additional_fields, additional_fields_values)
+ # Parse attachments into context
+ context, context_readable = attachments_into_context(ticket, context)
+ context = reformat_ticket_context(context)
+ context_readable = reformat_ticket_context(context_readable)
+ title = 'Ticket #{} Updated'.format(context.get('ID'))
+ human_readable = tableToMarkdown(title, context_readable, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': ticket,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Freshdesk.Ticket(val.ID && val.ID === obj.ID)': context
+ }
+ })
+
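+# Example war-room invocation of the command above (all values are illustrative):
+#
+#     !fd-update-ticket id=42 status=Resolved assigned_agent="Jane Example"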
+
+def get_ticket(args):
+ ticket_number = args.get('id')
+ endpoint_url = 'tickets/{}'.format(ticket_number)
+ url_params = {}
+
+ # Check if embedding additional info in API response was specified in cmd args
+ include = ''
+ if args.get('include_stats') and args.get('include_stats').lower() == 'true':
+ include += 'stats'
+ if args.get('include_requester') and args.get('include_requester').lower() == 'true':
+ include += 'requester' if include == '' else ', requester'
+ if include != '':
+ url_params['include'] = include
+
+ response = http_request('GET', endpoint_url, params=url_params)
+ return response
+
+
+def get_ticket_command():
+ """
+ View a Ticket.
+
+ demisto parameter: (number) id
+ ID number of the ticket to fetch
+
+ demisto parameter: (string) include_requester
+    If set to 'true' then the ticket requester's id, email, mobile, name, and phone
+ will be included in the ticket's output.
+
+ demisto parameter: (string) include_stats
+    If set to 'true' then the ticket's closed_at, resolved_at and first_responded_at times will be included.
+
+ returns:
+ Ticket Object
+ """
+ # Get command arguments from user
+ args = demisto.args()
+ # Make request and get raw response
+ ticket = get_ticket(args)
+
+ nonstd_context_fields = ['requester', 'stats']
+
+ # Parse response into context
+ context = {
+ string_to_context_key(key): val
+ for key, val in ticket.iteritems()
+ if key not in nonstd_context_fields and val is not None
+ }
+
+ # Parse attachments into context
+ context, context_readable = attachments_into_context(ticket, context)
+
+ context['AdditionalFields'] = {}
+ requester = ticket.get('requester')
+ if requester:
+ requester_context = {string_to_context_key(key): val for key, val in requester.iteritems() if val}
+ context['AdditionalFields']['Requestor'] = requester_context
+ stats = ticket.get('stats')
+ if stats:
+ stats_context = {string_to_context_key(key): val for key, val in stats.iteritems() if val}
+ context['AdditionalFields']['Stats'] = stats_context
+
+    # Record the ticket's deletion state alongside the other additional fields
+    context['AdditionalFields']['Deleted'] = bool(ticket.get('deleted'))
+
+ context = reformat_ticket_context(context)
+ context_readable = reformat_ticket_context(context_readable)
+ title = 'Viewing Ticket #{}'.format(ticket.get('id'))
+ human_readable = tableToMarkdown(title, context_readable, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': ticket,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Freshdesk.Ticket(val.ID && val.ID === obj.ID)': context
+ }
+ })
+
+
+def delete_ticket(ticket_id):
+ endpoint_url = 'tickets/{}'.format(ticket_id)
+ response = http_request('DELETE', endpoint_url)
+ return response
+
+
+def delete_ticket_command():
+ """
+ Soft-Delete the ticket specified by the 'id' command argument.
+
+ demisto parameter: (number) id
+ ID of the ticket to delete
+
+ returns:
+ Success Message
+ """
+ ticket_id = demisto.args().get('id')
+ # Make request
+ delete_ticket(ticket_id)
+ ticket_context = {
+ 'ID': int(ticket_id),
+ 'AdditionalFields': {'Deleted': True}
+ }
+ message = 'Soft-Deleted Ticket #{}'.format(ticket_id)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': ticket_context,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': message,
+ 'EntryContext': {
+ 'Freshdesk.Ticket(val.ID && val.ID === obj.ID)': ticket_context
+ }
+ })
+
+
+def search_tickets(args):
+ endpoint_url = 'tickets'
+ url_params = {}
+
+ # Filter By
+    ticket_filter = args.get('filter')  # avoid shadowing the built-in filter()
+    if ticket_filter:
+        url_params['filter'] = ticket_filter
+ requester = args.get('requester')
+ if requester:
+ if '@' in requester:
+ url_params['email'] = requester
+ else:
+ url_params['requester_id'] = requester
+ updated_since = args.get('updated_since')
+ if updated_since:
+ url_params['updated_since'] = updated_since
+
+ # Sort By
+ order_by = args.get('order_by')
+ if order_by:
+ url_params['order_by'] = order_by
+ order_type = args.get('order_type')
+ if order_type:
+ url_params['order_type'] = order_type
+
+ # Embeddings (include additional information)
+ include = ''
+ if args.get('include_stats') and args.get('include_stats').lower() == 'true':
+ include += 'stats'
+ if args.get('include_requester') and args.get('include_requester').lower() == 'true':
+ include += 'requester' if include == '' else ', requester'
+ if args.get('include_description') and args.get('include_description').lower() == 'true':
+ include += 'description' if include == '' else ', description'
+ if include != '':
+ url_params['include'] = include
+
+ # Custom Query
+ custom_query = args.get('custom_query')
+ if custom_query and url_params:
+        err_msg = 'You cannot use the custom_query argument in conjunction with the other command arguments. ' \
+                  'Either use the other arguments to choose filtering, sorting, and embedding options, ' \
+                  'or use custom_query alone to create a custom filter that determines which tickets are listed.'
+ return_error(err_msg)
+ elif custom_query:
+ endpoint_url = 'search/tickets'
+ url_params['query'] = '"' + custom_query + '"'
+
+ page = args.get('page')
+ if page:
+ url_params['page'] = page
+
+ # Make request and get raw response
+ response = http_request('GET', endpoint_url, params=url_params)
+ return response
+
+
+def search_tickets_command():
+ """
+ List all tickets that match the filter criteria you specify.
+
+ demisto parameter: (string) filter
+ Predefined filters
+
+ demisto parameter: requester
+ Filter by either the ticket requester's email or ID
+
+ demisto parameter: (datetime) updated_since
+ By default, only tickets that have been created within the past 30 days will be returned.
+ For older tickets, use this filter. Example value for this field would be '2015-01-19T02:00:00Z'
+
+ demisto parameter: (string) order_by
+ Reference field for ordering the list of tickets. The default sort order is created_at.
+
+ demisto parameter: (string) order_type
+ Whether to order the resulting tickets in ascending or descending order.
+ The default is descending. Value can be either 'asc' or 'desc'.
+
+ demisto parameter: (string) include_stats
+    If set to 'true' then the ticket's closed_at, resolved_at and first_responded_at times will be included.
+
+ demisto parameter: (string) include_requester
+    If set to 'true' then the ticket requester's id, email, mobile, name, and phone
+ will be included in the ticket's output for each ticket.
+
+ demisto parameter: (string) include_description
+    If set to 'true' then the ticket's description and description_text will be included in the tickets' outputs.
+
+ demisto parameter: (string) custom_query
+ Filter tickets using a custom query.
+ Format - "(ticket_field:integer OR ticket_field:'string') AND ticket_field:boolean"
+ Example - "(type:'Question' OR type:'Problem') AND (due_by:>'2017-10-01' AND due_by:<'2017-10-07')"
+ Note that the custom_query argument cannot be used in conjunction with this command's other arguments.
+
+ returns:
+ Ticket Objects
+ """
+ args = demisto.args()
+ additional_fields = get_additional_fields(args)
+ response = search_tickets(args) # page 1
+
+ tickets = handle_search_tickets_pagination(args, response)
+
+ context_outputs = DEFAULT_TICKET_CONTEXT_FIELDS[:]
+ context_outputs.append('updated_at')
+
+ # Parse response into context
+ contexts = []
+ readable_contexts = []
+ for ticket in tickets:
+ # Parse ticket into the standard outputs
+ context = {string_to_context_key(key): val for key, val in ticket.iteritems() if key in context_outputs}
+
+ # Parse ticket attachments into context
+ context, context_readable = attachments_into_context(ticket, context)
+
+ # Parse ticket for the additionally requested fields
+ context['AdditionalFields'] = {
+ string_to_context_key(key): val for key, val in ticket.iteritems() if key in additional_fields
+ }
+ requester = ticket.get('requester')
+ if requester:
+ requester_context = {string_to_context_key(key): val for key, val in requester.iteritems() if val}
+ context['AdditionalFields']['Requestor'] = requester_context
+ stats = ticket.get('stats')
+ if stats:
+ stats_context = {string_to_context_key(key): val for key, val in stats.iteritems() if val}
+ context['AdditionalFields']['Stats'] = stats_context
+
+ context_readable = reformat_ticket_context(context_readable)
+ readable_contexts.append(context_readable)
+ context = reformat_ticket_context(context)
+ contexts.append(context)
+
+ table_headers = [
+ 'ID', 'Priority', 'Status', 'Subject', 'DueBy', 'FrDueBy', 'RequesterID', 'GroupID',
+ 'Source', 'CreatedAt', 'UpdatedAt', 'Tag', 'AdditionalFields', 'Attachment'
+ ]
+ title = 'Viewing All Requested Tickets'
+ human_readable = tableToMarkdown(title, readable_contexts, headers=table_headers, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': tickets,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Freshdesk.Ticket(val.ID && val.ID === obj.ID)': contexts
+ }
+ })
+
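+# Example war-room invocations of the command above (all values are illustrative).
+# Remember that custom_query must be used on its own, without the other filters:
+#
+#     !fd-search-tickets requester="jane@example.com" order_type=asc
+#     !fd-search-tickets custom_query="(type:'Question' OR type:'Problem') AND priority:3"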
+
+'''<------ CONVERSATIONS ------>'''
+
+
+def ticket_reply(args):
+ ticket_id = args.get('ticket_id')
+ del args['ticket_id']
+ args = handle_array_input(args)
+ endpoint_url = 'tickets/{}/reply'.format(ticket_id)
+
+ response = None
+ if not args.get('attachments'):
+        # Send a request using our http_request wrapper
+ response = http_request('POST', endpoint_url, data=json.dumps(args))
+ else:
+ # Get the files from their entry IDs
+ attachments = entries_to_files(args.get('attachments'))
+ # Format to API expectations
+ del args['attachments']
+ # Send a request and get raw response
+ response = http_request('POST', endpoint_url, data=args, files=attachments, headers=MULTIPART_HEADERS)
+ return response
+
+
+def ticket_reply_command():
+ """
+ Reply to a specified ticket.
+
+ demisto parameter: (number) ticket_id
+ ID of the ticket you wish to respond to
+
+ demisto parameter: (string) body
+ Content of the reply in HTML format
+
+ demisto parameter: (string) from_email
+ The email address from which the reply is sent. By default the global support email will be used.
+
+ demisto parameter: (number) user_id
+ ID of the agent who is adding the note
+
+ demisto parameter: (list) cc_emails
+ Array of email address strings added in the 'cc' field of the outgoing ticket email.
+
+ demisto parameter: (list) bcc_emails
+ Array of email address strings added in the 'bcc' field of the outgoing ticket email.
+
+ demisto parameter: (list) attachments
+ Entry IDs of files to attach to the reply. The total size of these attachments cannot exceed 15MB.
+
+ returns:
+ Ticket Reply Object
+ """
+ args = demisto.args()
+ # Make request and get raw response
+ reply = ticket_reply(args)
+ # Parse response into context
+ context = {string_to_context_key(key): val for key, val in reply.iteritems() if val}
+ context = reformat_conversation_context(context)
+ # Parse attachments into context
+ context, context_readable = attachments_into_context(reply, context)
+ context = reformat_ticket_context(context)
+ context_readable = reformat_ticket_context(context_readable)
+ complete_context = {
+ 'ID': int(reply.get('ticket_id')),
+ 'Conversation': context
+ }
+ title = 'Reply to Ticket #{}'.format(reply.get('ticket_id'))
+ human_readable = tableToMarkdown(title, context_readable, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': reply,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Freshdesk.Ticket(val.ID && val.ID === obj.ID)': complete_context
+ }
+ })
+
+
+def create_ticket_note(args):
+ ticket_id = args.get('ticket_id')
+ del args['ticket_id']
+ args = handle_array_input(args)
+ # Set defaults for 'private' and 'incoming' fields if not set by user
+ args['private'] = args.get('private', 'true')
+ args['incoming'] = args.get('incoming', 'false')
+ endpoint_url = 'tickets/{}/notes'.format(ticket_id)
+
+ response = None
+ if not args.get('attachments'):
+        # Convert the 'private'/'incoming' string flags to real booleans so the
+        # API receives JSON true/false rather than the strings "true"/"false"
+        for flag in ('private', 'incoming'):
+            args[flag] = str(args.get(flag)).lower() == 'true'
+        # Send a request using our http_request wrapper
+        response = http_request('POST', endpoint_url, data=json.dumps(args))
+ else:
+ # Get the files from their entry IDs
+ attachments = entries_to_files(args.get('attachments'))
+ # Format to API expectations
+ del args['attachments']
+ # Send a request and get raw response
+ response = http_request('POST', endpoint_url, data=args, files=attachments, headers=MULTIPART_HEADERS)
+ return response
+
+
+def create_ticket_note_command():
+ """
+ Create a note for a specified ticket.
+
+    Notes are private by default (i.e., not visible to non-agents) unless you
+    set the 'private' command argument to false.
+
+ demisto parameter: (number) ticket_id
+ ID of the ticket you wish to make a note for
+
+ demisto parameter: (string) body
+ Content of the note in HTML format
+
+ demisto parameter: (boolean) private
+ Set to false if the note is not private
+
+ demisto parameter: (number) user_id
+ ID of the agent who is adding the note
+
+ demisto parameter: (list) notify_emails
+ Array of email addresses of agents/users who need to be notified about this note
+
+ demisto parameter: (boolean) incoming
+    Set to true if a particular note should appear as being created from outside (i.e., not through the web portal).
+
+ demisto parameter: (list) attachments
+ Entry IDs of files to attach to the note. The total size of these attachments cannot exceed 15MB.
+
+ returns:
+ Note Object
+ """
+ # Get command arguments
+ args = demisto.args()
+ # Make request and get raw response
+ note = create_ticket_note(args)
+ # Parse response into context
+ context = {string_to_context_key(key): val for key, val in note.iteritems() if val}
+ context = reformat_conversation_context(context)
+ # Parse attachments into context
+ context, context_readable = attachments_into_context(note, context)
+ context = reformat_ticket_context(context)
+ context_readable = reformat_ticket_context(context_readable)
+ complete_context = {
+ 'ID': int(note.get('ticket_id')),
+ 'Conversation': context
+ }
+ title = 'Note for Ticket #{}'.format(note.get('ticket_id'))
+ human_readable = tableToMarkdown(title, context_readable, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': note,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Freshdesk.Ticket(val.ID && val.ID === obj.ID)': complete_context
+ }
+ })
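+# Example war-room invocation of the command above (all values are illustrative).
+# The note stays private unless 'private' is explicitly set to false:
+#
+#     !fd-create-ticket-note ticket_id=42 body="<p>Escalated to networking</p>" private=true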
+
+
+def get_ticket_conversations(ticket_id):
+ endpoint_url = 'tickets/{}/conversations'.format(ticket_id)
+ response = http_request('GET', endpoint_url)
+ return response
+
+
+def get_ticket_conversations_command():
+ """
+ Lists all replies and notes for a specified ticket.
+
+ demisto parameter: (number) ticket_id
+ ID of the ticket for which you would like to list all of its conversations
+
+ returns:
+ Conversation Objects
+ """
+    # Get the ID of the ticket whose conversations will be listed
+ ticket_id = demisto.args().get('ticket_id')
+ # Make request and get raw response
+ conversations = get_ticket_conversations(ticket_id)
+ # Parse response into context
+ contexts = []
+ readable_contexts = []
+ for conversation in conversations:
+ context = {string_to_context_key(key): val for key, val in conversation.iteritems() if val}
+ context = reformat_conversation_context(context)
+ # Parse attachments into context
+ context, context_readable = attachments_into_context(conversation, context)
+ context = reformat_ticket_context(context)
+ context_readable = reformat_ticket_context(context_readable)
+ contexts.append(context)
+ readable_contexts.append(context_readable)
+ complete_context = {
+ 'ID': int(ticket_id),
+ 'Conversation': contexts
+ }
+ title = 'Conversations of Ticket #{}'.format(ticket_id)
+ human_readable = tableToMarkdown(title, readable_contexts, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': conversations,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Freshdesk.Ticket(val.ID && val.ID === obj.ID)': complete_context
+ }
+ })
+
+
+'''<------ CONTACTS ------>'''
+
+
+def list_contacts(filters):
+ # Alter to match API expected inputs
+ updated_since = filters.get('updated_since', None)
+ if updated_since:
+ del filters['updated_since']
+ filters['_updated_since'] = updated_since
+
+ endpoint_url = 'contacts'
+ response = http_request('GET', endpoint_url, params=filters)
+ return response
+
+
+def list_contacts_command():
+ """
+ List all contacts.
+
+ Lists all contacts matching the specified filters. If no filters are provided
+ then all unblocked and undeleted contacts will be returned.
+
+ demisto parameter: (number) mobile
+    Mobile number to filter the contacts by
+
+ demisto parameter: (number) phone
+    Phone number to filter the contacts by
+
+ demisto parameter: (string) state
+ The state of contacts by which you want to filter the contacts. Values
+ are 'verified', 'unverified', 'blocked', or 'deleted'.
+
+ demisto parameter: (datetime) updated_since
+    Return contacts that have been updated after the timestamp given as this argument value
+
+ returns:
+ Contact Objects
+ """
+ # Get command arguments from user
+ filters = demisto.args()
+ # Make request and get raw response
+ contacts = list_contacts(filters)
+ # Parse response into context
+ contexts = []
+ for contact in contacts:
+ # Parse individual contact response in context
+ context = format_contact_context(contact)
+ contexts.append(context)
+ filters_as_strings = ', '.join(['{}: {}'.format(key, val) for key, val in filters.iteritems()])
+ title = 'Contacts Filtered by {}'.format(filters_as_strings) if filters else 'All Contacts'
+ human_readable = tableToMarkdown(title, contexts, removeNull=False)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contacts,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Freshdesk.Contact(val.ID && val.ID === obj.ID)': contexts
+ }
+ })
+
+
+def get_contact(args):
+ contact_id = 0
+ if not args:
+        err_msg = 'You must provide a value for the mobile, email, or id command argument.'
+ return_error(err_msg)
+ elif args.get('id') is not None:
+ contact_id = args.get('id')
+ elif args.get('email') is not None:
+ try:
+ filters = {'email': args.get('email')}
+ # Get id field of first result of contacts with that email (There should only be one)
+ contact_id = list_contacts(filters)[0].get('id')
+ # If there is an IndexError, it means no results were returned for the given filter
+        except IndexError:
+            err_msg = 'Couldn\'t find a contact with that email address.'\
+                      ' Double-check that the email address is spelled correctly'\
+                      ' and that a Freshdesk contact exists with that exact'\
+                      ' email address.'
+ return_error(err_msg)
+ except Exception as e:
+ return_error(e.message)
+ else:
+ try:
+ filters = {'mobile': args.get('mobile')}
+ # Get id field of first result of contacts with that mobile number
+ contact_id = list_contacts(filters)[0].get('id')
+ # If there is an IndexError, it means no results were returned for the given filter
+        except IndexError:
+            err_msg = 'Couldn\'t find a contact with that mobile number.'\
+                      ' Double-check that it is written correctly and that '\
+                      'a Freshdesk contact exists with that exact mobile number.'
+ return_error(err_msg)
+ except Exception as e:
+ return_error(e.message)
+
+ endpoint_url = 'contacts/{}'.format(contact_id)
+ response = http_request('GET', endpoint_url)
+ return response
+
+
+def get_contact_command():
+ """
+ View the details of the contact specified by the ID number.
+
+ demisto parameter: (number) id
+ ID of the contact you wish to view the details of
+
+ demisto parameter: (number) mobile
+ Mobile number of the contact you wish to view the details of
+
+ demisto parameter: (string) email
+ Email address of the contact you wish to view the details of
+
+ returns:
+ Contact Object
+ """
+ # Get command arguments from user
+ args = demisto.args()
+ # Make request and get raw response
+ contact = get_contact(args)
+
+ context = format_contact_context(contact)
+ title = 'Viewing Contact #{}'.format(contact.get('id'))
+ human_readable = tableToMarkdown(title, context, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contact,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Freshdesk.Contact(val.ID && val.ID === obj.ID)': context
+ }
+ })
+
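+# Lookup precedence in get_contact (illustrative): an explicit 'id' wins, then
+# 'email', then 'mobile'; the latter two are resolved to a contact ID through the
+# same filtering used by fd-list-contacts. For example:
+#
+#     !fd-get-contact email="jane@example.com"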
+
+'''<------ CANNED RESPONSES ------>'''
+
+
+def list_canned_response_folders():
+ endpoint_url = 'canned_response_folders'
+ response = http_request('GET', endpoint_url)
+ return response
+
+
+def list_canned_response_folders_command():
+ """
+ List all Canned Response Folders (Only users with Admin Privileges).
+
+ returns:
+ Folder Objects
+ """
+ # Make request and get raw response
+ cr_folders = list_canned_response_folders()
+ # Parse response into context
+ contexts = []
+ for folder in cr_folders:
+        # Parse individual folder response into context
+ context = {string_to_context_key(key): val for key, val in folder.iteritems() if val}
+ context = reformat_canned_response_context(context)
+ contexts.append(context)
+ title = 'All Canned Response Folders'
+ human_readable = tableToMarkdown(title, contexts, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': cr_folders,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Freshdesk.CRFolder(val.ID && val.ID === obj.ID)': contexts
+ }
+ })
+
+
+def get_canned_response_folder(folder_id):
+    endpoint_url = 'canned_response_folders/{}/responses'.format(folder_id)
+ response = http_request('GET', endpoint_url)
+ return response
+
+
+def get_canned_response_folder_command():
+ """
+ View the details of all the Canned Responses in a Folder.
+
+ demisto parameter: (number) id
+ ID of the Folder containing the Canned Responses you wish to view the details of
+
+ returns:
+ Canned Response Objects with more details, aka all of a Canned Response Object's attributes
+ """
+ # Get id of the containing canned response folder as cmd argument
+ cr_folder_id = demisto.args().get('id')
+ # Make request and get raw response
+ canned_responses = get_canned_response_folder(cr_folder_id)
+ # Parse the responses into context
+ contexts = []
+ readable_contexts = []
+ for cr in canned_responses:
+ context = {string_to_context_key(key): val for key, val in cr.iteritems() if val}
+ context = reformat_canned_response_context(context)
+ context, context_readable = attachments_into_context(cr, context)
+ contexts.append(context)
+ readable_contexts.append(context_readable)
+ title = 'Details of Canned Responses in CR Folder #{}'.format(cr_folder_id)
+ human_readable = tableToMarkdown(title, readable_contexts, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': canned_responses,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Freshdesk.CRFolder(val.ID && val.ID === obj.ID).CR(val.ID && val.ID === obj.ID)': contexts
+ }
+ })
+
+
+'''<------ GROUPS ------>'''
+
+
+def list_groups():
+ endpoint_url = 'groups'
+ response = http_request('GET', endpoint_url)
+ return response
+
+
+def list_groups_command():
+ """
+ List all groups.
+
+ returns:
+ Group Objects
+ """
+ # Make request and get raw response
+ groups = list_groups()
+ # Parse response into context
+ contexts = []
+ for group in groups:
+ # Parse individual group response in context
+ context = {}
+ for key, val in group.iteritems():
+ if val:
+ if key == 'agent_ids':
+ key = 'agent_id'
+ new_key = string_to_context_key(key)
+ if 'Id' in new_key:
+ new_key = new_key.replace('Id', 'ID')
+ context[new_key] = val
+ contexts.append(context)
+ title = 'All Groups'
+ human_readable = tableToMarkdown(title, contexts, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': groups,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Freshdesk.Group(val.ID && val.ID === obj.ID)': contexts
+ }
+ })
+
+
+'''<------ AGENTS ------>'''
+
+
+def list_agents(filters=None):
+    endpoint_url = 'agents'
+    response = http_request('GET', endpoint_url, params=filters or {})
+ return response
+
+
+def list_agents_command():
+ """
+ List agents that match the criteria of the filters entered as command arguments.
+
+ demisto parameter: (number) mobile
+ Mobile phone number to filter agents by
+
+ demisto parameter: (number) phone
+ Telephone number to filter agents by
+
+ demisto parameter: (string) state
+ Filter agents by whether they are 'fulltime' or 'occasional'
+
+ returns:
+ Agent Objects
+ """
+ # Get filter as cmd arg
+ args = demisto.args()
+ # Make request and get raw response
+ agents = list_agents(args)
+ # Parse response into context
+ contexts = []
+ for agent in agents:
+ # Parse the individual agent into context
+ context = {}
+ for key, val in agent.iteritems():
+ if val:
+ if key == 'group_ids':
+ key = 'group_id'
+ elif key == 'role_ids':
+ key = 'role_id'
+ new_key = string_to_context_key(key)
+ if 'Id' in new_key:
+ new_key = new_key.replace('Id', 'ID')
+ context[new_key] = val
+ context['Contact'] = {string_to_context_key(key): val for key, val in agent.get('contact').iteritems() if val}
+ contexts.append(context)
+ title = 'All Agents'
+ human_readable = tableToMarkdown(title, contexts, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': agents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Freshdesk.Agent(val.ID && val.ID === obj.ID)': contexts
+ }
+ })
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+
+# Commands Switch Panel
+commands = {
+ 'fd-create-ticket': create_ticket_command,
+ 'fd-update-ticket': update_ticket_command,
+ 'fd-get-ticket': get_ticket_command,
+ 'fd-get-contact': get_contact_command,
+ 'fd-list-contacts': list_contacts_command,
+ 'fd-list-canned-response-folders': list_canned_response_folders_command,
+ 'fd-get-canned-response-folder': get_canned_response_folder_command,
+ 'fd-list-groups': list_groups_command,
+ 'fd-ticket-reply': ticket_reply_command,
+ 'fd-create-ticket-note': create_ticket_note_command,
+ 'fd-get-ticket-conversations': get_ticket_conversations_command,
+ 'fd-list-agents': list_agents_command,
+ 'fd-delete-ticket': delete_ticket_command,
+ 'fd-search-tickets': search_tickets_command,
+}
+
+LOG('Command being called is %s' % (demisto.command()))
+
+try:
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+ demisto.results('ok')
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+    elif demisto.command() in commands:
+ # Execute that command
+ commands[demisto.command()]()
+
+# Log exceptions
+except Exception as e:
+ LOG(e)
+ return_error(e.message)
diff --git a/Integrations/FreshDesk/FreshDesk.yml b/Integrations/FreshDesk/FreshDesk.yml
new file mode 100644
index 000000000000..417a15765ae1
--- /dev/null
+++ b/Integrations/FreshDesk/FreshDesk.yml
@@ -0,0 +1,1325 @@
+category: Case Management
+commonfields:
+ id: Freshdesk
+ version: -1
+configuration:
+- defaultvalue: https://domain.freshdesk.com
+ display: Server URL (e.g., https://demistohelp.freshdesk.com)
+ name: url
+ required: true
+ type: 0
+- display: API Token. (You must enter either the API token or your Freshdesk credentials)
+ name: token
+ required: false
+ type: 4
+- defaultvalue: 'true'
+  display: Trust any certificate (not secure)
+  name: insecure
+  required: false
+  type: 8
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ required: false
+ type: 8
+- defaultvalue: 24 hours
+  display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days)
+ name: fetch_time
+ required: false
+ type: 0
+- display: Username
+ name: credentials
+ required: false
+ type: 9
+- defaultvalue: ""
+ display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- defaultvalue: ""
+ display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+description: The Freshdesk integration allows you to create, update, and delete tickets;
+  reply to and create notes for tickets; and view groups, agents, and contacts.
+display: Freshdesk
+name: Freshdesk
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Subject of the ticket
+ isArray: false
+ name: subject
+ required: true
+ secret: false
+ - default: false
+ description: Details of the issue that you are creating a ticket for
+ isArray: false
+ name: description
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '1'
+ description: Priority of the ticket. Each number has a corresponding value.
+ 1 - Low, 2 - Medium, 3 - High, 4 - Urgent. This argument accepts the priority
+ number or string.
+ isArray: false
+ name: priority
+ predefined:
+ - Low
+ - Medium
+ - High
+ - Urgent
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '2'
+ description: Status of the ticket. Each number has a corresponding value. 2
+ - Open, 3 - Pending, 4 - Resolved, 5 - Closed, 6 - Waiting on Customer, 7
+ - Waiting on Third Party. This argument accepts the ticket status number or
+ string.
+ isArray: false
+ name: status
+ predefined:
+ - Open
+ - Pending
+ - Resolved
+ - Closed
+ - Waiting on Customer
+ - Waiting on Third Party
+ required: false
+ secret: false
+ - default: false
+ description: Email address or a Twitter handle of the requesting user. If an
+ email address is entered and no contact exists with this email address in
+ Freshdesk, it will be added as a new contact. If a Twitter handle is entered
+ and no contact exists with this handle in Freshdesk, it will be added as a
+ new contact.
+ isArray: false
+ name: identifier
+ required: true
+ secret: false
+ - default: false
+ description: ID or name of the group or agent to assign the ticket to. Use the
+ `fd-list-groups` command to find potential assignees.
+ isArray: false
+ name: responder
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of entry IDs of files to attach to the ticket, e.g., "468@73f988d1-bda2-4adc-8e02-926f02190070,560@73f988d1-bda2-4adc-8e02-926f02190070".
+ The total size of these attachments cannot exceed 15MB.
+ isArray: true
+ name: attachments
+ required: false
+ secret: false
+ - default: false
+ description: Additional, optional ticket fields. Format - "field=value" where
+ field value pairs are delimited from subsequent pairs by a semicolon symbol
+ ';' and where values that are lists are delimited by commas ',', e.g., "name=Example
+ Example;email=example@example.com;tags=new,attention needed,billing related"
+ isArray: true
+ name: additional_fields
+ predefined:
+ - name
+ - requester_id
+ - facebook_id
+ - phone
+ - unique_external_id
+ - type
+ - cc_emails
+ - due_by
+ - email_config_id
+ - fr_due_by
+ - product_id
+ - source
+ - tags
+ - company_id
+ - email
+ - twitter_id
+ - responder_id
+ - group_id
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new Freshdesk ticket.
+ execution: false
+ name: fd-create-ticket
+ outputs:
+ - contextPath: Freshdesk.Ticket.ID
+ description: ID number of the ticket
+ type: Number
+ - contextPath: Freshdesk.Ticket.Priority
+ description: Ticket priority
+ type: Number
+ - contextPath: Freshdesk.Ticket.DueBy
+ description: Timestamp that denotes when the ticket is due to be resolved
+ type: Date
+ - contextPath: Freshdesk.Ticket.Subject
+ description: Ticket subject
+ type: String
+ - contextPath: Freshdesk.Ticket.Status
+ description: Status of the ticket
+ type: Number
+ - contextPath: Freshdesk.Ticket.RequesterID
+ description: User ID of the requester
+ type: Number
+ - contextPath: Freshdesk.Ticket.Tag
+ description: Tags associated with the ticket
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.GroupID
+ description: ID of the group the ticket was assigned to
+ type: Number
+ - contextPath: Freshdesk.Ticket.Source
+ description: The channel through which the ticket was created
+ type: Number
+ - contextPath: Freshdesk.Ticket.CreatedAt
+ description: Ticket creation timestamp
+ type: Date
+ - contextPath: Freshdesk.Ticket.ResponderID
+ description: ID of the agent the ticket was assigned to
+ type: Number
+ - contextPath: Freshdesk.Ticket.FrDueBy
+ description: Timestamp that denotes when the first response is due
+ type: Date
+ - contextPath: Freshdesk.Ticket.AdditionalFields
+ description: Additional fields and values that were entered using the 'additional_fields'
+ arguments
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.AttachmentURL
+ description: URL to download the file attached to the ticket to your local machine
+ type: String
+ - contextPath: Freshdesk.Ticket.Attachment.Name
+ description: The name of the file attached to the ticket
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.ContentType
+ description: Content type of the file attached to the ticket
+ type: String
+ - contextPath: Freshdesk.Ticket.Attachment.ID
+ description: ID number for the file attached to the ticket
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.Size
+ description: Size of the file attached to the ticket
+ type: String
+ - arguments:
+ - default: false
+ description: ID number of the ticket to fetch
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ description: If set to `true`, the ticket requester's ID, email address, mobile
+ number, name, and phone number will be included in the ticket's output. Note
+ that this is not set by default because setting this to 'true' will consume
+      an additional API credit per API call. For more details, see
+      the Freshdesk API documentation - https://developers.freshdesk.com/api/#embedding
+ isArray: false
+ name: include_requester
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+    description: If set to `true` then the ticket's closed_at, resolved_at and first_responded_at
+      times will be included in the response. Note that this is not set by default
+      because setting this to 'true' will consume an additional API credit per API
+      call. For more details, see the Freshdesk API documentation
+      - https://developers.freshdesk.com/api/#embedding
+ isArray: false
+ name: include_stats
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Gets details of a ticket, specified by the ticket ID number.
+ execution: false
+ name: fd-get-ticket
+ outputs:
+ - contextPath: Freshdesk.Ticket.ID
+ description: ID number of the fetched ticket
+ type: Number
+ - contextPath: Freshdesk.Ticket.Priority
+ description: Ticket priority
+ type: Number
+ - contextPath: Freshdesk.Ticket.DueBy
+ description: Timestamp that denotes when the ticket is due to be resolved
+ type: Date
+ - contextPath: Freshdesk.Ticket.Subject
+ description: Ticket subject
+ type: String
+ - contextPath: Freshdesk.Ticket.Status
+ description: Ticket status
+ type: Number
+ - contextPath: Freshdesk.Ticket.RequesterID
+ description: User ID of the requester
+ type: Number
+ - contextPath: Freshdesk.Ticket.Tag
+ description: Tags associated with the ticket
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.GroupID
+ description: ID of the group the ticket was assigned to
+ type: Number
+ - contextPath: Freshdesk.Ticket.Source
+ description: The channel through which the ticket was created
+ type: Number
+ - contextPath: Freshdesk.Ticket.CreatedAt
+ description: Ticket creation timestamp
+ type: Date
+ - contextPath: Freshdesk.Ticket.ResponderID
+ description: ID of the agent the ticket was assigned to
+ type: Number
+ - contextPath: Freshdesk.Ticket.FrDueBy
+ description: Timestamp that denotes when the first response is due
+ type: Date
+ - contextPath: Freshdesk.Ticket.Conversation
+ description: Conversations associated with this ticket
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.AttachmentURL
+ description: URL to download the file attached to the ticket to your local machine
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.Name
+ description: The name of the file attached to the ticket
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.ContentType
+ description: Content type of the file attached to the ticket
+ type: String
+ - contextPath: Freshdesk.Ticket.Attachment.ID
+ description: ID number of the file attached to the ticket
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.Size
+ description: Size of the file attached to the ticket
+ type: String
+ - contextPath: Freshdesk.Ticket.UpdatedAt
+ description: Ticket update timestamp
+ type: Date
+ - arguments:
+ - default: false
+ description: Subject of the ticket
+ isArray: false
+ name: subject
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '2'
+ description: Status of the ticket. Each number has a corresponding value. 2
+ is Open, 3 is Pending, 4 is Resolved, 5 is Closed, 6 is Waiting on Customer,
+ 7 is Waiting on Third Party. Acceptable values for this command argument are
+ the digits 2,3,4,5,6,7, or their corresponding strings 'Open','Pending','Resolved','Closed','Waiting
+ on Customer','Waiting on Third Party'.
+ isArray: false
+ name: status
+ predefined:
+ - Open
+ - Pending
+ - Resolved
+ - Closed
+ - Waiting on Customer
+ - Waiting on Third Party
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '1'
+ description: Priority of the ticket. Each number has a corresponding value.
+ 1 is Low, 2 is Medium, 3 is High, 4 is Urgent. Acceptable values for this
+ command argument are the digits 1,2,3,4, or their corresponding strings 'Low','Medium','High','Urgent'.
+ isArray: false
+ name: priority
+ predefined:
+ - Low
+ - Medium
+ - High
+ - Urgent
+ required: false
+ secret: false
+ - default: false
+ description: HTML content of the ticket.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: ID number of the ticket to update
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ description: Update which agent is assigned to respond to this ticket by entering
+      the agent's unique ID, name, or email address.
+ isArray: false
+ name: assigned_agent
+ required: false
+ secret: false
+ - default: false
+ description: Update the group assigned to respond to this ticket by entering
+ the group's unique ID or the name of the group.
+ isArray: false
+ name: assigned_group
+ required: false
+ secret: false
+ - default: false
+ description: Fields not included in the default command arguments that you wish
+ to enter the value for. Format - "field=value" where field value pairs are
+ delimited from subsequent pairs by a semicolon symbol ';' and where values
+ that are lists are delimited by commas ','. E.g. - "name=Example Example;email=example@example.com;tags=new,attention
+ needed,billing related"
+ isArray: true
+ name: additional_fields
+ predefined:
+ - name
+ - requester_id
+ - email
+ - facebook_id
+ - phone
+ - twitter_id
+ - unique_external_id
+ - type
+ - attachments
+ - email_config_id
+ - product_id
+ - source
+ - tags
+ - company_id
+ - due_by
+ - fr_due_by
+ required: false
+ secret: false
+ deprecated: false
+ description: Update the ticket specified by the ID
+ execution: false
+ name: fd-update-ticket
+ outputs:
+ - contextPath: Freshdesk.Ticket.ID
+ description: ID of the updated ticket
+ type: Number
+ - contextPath: Freshdesk.Ticket.Priority
+ description: Ticket priority
+ type: Number
+ - contextPath: Freshdesk.Ticket.DueBy
+ description: Timestamp that denotes when the ticket is due to be resolved
+ type: Date
+ - contextPath: Freshdesk.Ticket.Subject
+ description: Ticket subject
+ type: String
+ - contextPath: Freshdesk.Ticket.Status
+ description: Ticket status
+ type: Number
+ - contextPath: Freshdesk.Ticket.RequesterID
+ description: User ID of the requester
+ type: Number
+ - contextPath: Freshdesk.Ticket.Tag
+ description: Tags associated with the ticket
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.GroupID
+ description: ID of the group assigned to the ticket
+ type: Number
+ - contextPath: Freshdesk.Ticket.Source
+ description: The channel through which the ticket was created
+ type: Number
+ - contextPath: Freshdesk.Ticket.CreatedAt
+ description: Ticket creation timestamp
+ type: Date
+  - contextPath: Freshdesk.Ticket.ResponderID
+ description: ID of the agent the ticket was assigned to
+ type: Number
+ - contextPath: Freshdesk.Ticket.FrDueBy
+ description: Timestamp that denotes when the first response is due
+ type: Date
+ - contextPath: Freshdesk.Ticket.UpdatedAt
+ description: Ticket update timestamp
+ type: Date
+ - contextPath: Freshdesk.Ticket.AdditionalFields
+ description: Additional fields and values that were updated using the 'additional_fields'
+ argument
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.AttachmentURL
+ description: URL to download the file attached to the ticket to your local machine
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.Name
+ description: The name of the file attached to the ticket
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.ContentType
+ description: Content type of the attached file
+ type: String
+ - contextPath: Freshdesk.Ticket.Attachment.ID
+ description: ID number for the attached file
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.Size
+ description: Size of the attached file in bytes
+ type: String
+ - arguments:
+ - default: false
+    description: Mobile number to filter the contacts by
+ isArray: false
+ name: mobile
+ required: false
+ secret: false
+ - default: false
+    description: Phone number to filter the contacts by
+ isArray: false
+ name: phone
+ required: false
+ secret: false
+ - default: false
+ description: The state of contacts by which you want to filter the contacts
+ isArray: false
+ name: state
+ predefined:
+ - blocked
+ - deleted
+ - unverified
+ - verified
+ required: false
+ secret: false
+ - default: false
+ description: Return contacts that have been updated after the timestamp given
+      as this argument value. Acceptable format is 'YYYY-MM-DDTHH:MM:SSZ', e.g., '2018-01-19T02:00:00Z'.
+ isArray: false
+ name: updated_since
+ required: false
+ secret: false
+ deprecated: false
+ description: List all contacts matching the specified filters. If no filters are
+ provided then all unblocked and undeleted contacts will be returned.
+ execution: false
+ name: fd-list-contacts
+ outputs:
+ - contextPath: Freshdesk.Contact.Active
+ description: Set to true if the contact has been verified
+ type: Boolean
+ - contextPath: Freshdesk.Contact.Address
+ description: Address of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.CompanyID
+ description: ID of the primary company to which this contact belongs
+ type: Number
+ - contextPath: Freshdesk.Contact.ViewAllTickets
+ description: Set to true if the contact can see all tickets that are associated
+      with the company to which they belong
+ type: Boolean
+ - contextPath: Freshdesk.Contact.Deleted
+ description: Set to true if the contact has been deleted
+ type: Boolean
+ - contextPath: Freshdesk.Contact.Description
+ description: A short description of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.Email
+ description: Primary email address of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.ID
+ description: ID of the contact
+ type: Number
+ - contextPath: Freshdesk.Contact.JobTitle
+ description: Job Title of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.Language
+ description: Language of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.Mobile
+ description: Mobile number of the contact
+ type: Number
+ - contextPath: Freshdesk.Contact.Name
+ description: Name of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.Phone
+ description: Telephone number of the contact
+ type: Number
+ - contextPath: Freshdesk.Contact.Tag
+ description: Tags associated with this contact
+ type: Unknown
+ - contextPath: Freshdesk.Contact.TimeZone
+ description: Time zone in which the contact resides
+ type: String
+ - contextPath: Freshdesk.Contact.TwitterID
+ description: Twitter handle of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.UniqueExternalID
+ description: External ID of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.CreatedAt
+    description: Contact creation timestamp
+ type: Date
+ - contextPath: Freshdesk.Contact.UpdatedAt
+ description: Contact updated timestamp
+ type: Date
+ - arguments:
+ - default: false
+ description: ID of the contact you wish to view the details of. To find ID numbers
+ for your contacts try executing the 'fd-list-contacts' command.
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ - default: false
+ description: Mobile number of the contact you wish to view the details of
+ isArray: false
+ name: mobile
+ required: false
+ secret: false
+ - default: false
+ description: Email address of the contact you wish to view the details of
+ isArray: false
+ name: email
+ required: false
+ secret: false
+ deprecated: false
+ description: View the details of the contact specified by the ID number
+ execution: false
+ name: fd-get-contact
+ outputs:
+ - contextPath: Freshdesk.Contact.Active
+ description: Set to true if the contact has been verified
+ type: Boolean
+ - contextPath: Freshdesk.Contact.Address
+ description: Address of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.CompanyID
+ description: ID of the primary company to which this contact belongs
+ type: Number
+ - contextPath: Freshdesk.Contact.ViewAllTickets
+ description: Set to true if the contact can see all tickets that are associated
+      with the company to which they belong
+ type: Boolean
+ - contextPath: Freshdesk.Contact.Deleted
+ description: Set to true if the contact has been deleted
+ type: Boolean
+ - contextPath: Freshdesk.Contact.Description
+ description: A short description of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.Email
+ description: Primary email address of the contact
+ type: String
+  - contextPath: Freshdesk.Contact.ID
+ description: ID of the contact
+ type: Number
+ - contextPath: Freshdesk.Contact.JobTitle
+ description: Job Title of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.Language
+ description: Language of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.Mobile
+ description: Mobile number of the contact
+ type: Number
+ - contextPath: Freshdesk.Contact.Name
+ description: Name of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.Phone
+ description: Telephone number of the contact
+ type: Number
+ - contextPath: Freshdesk.Contact.Tag
+ description: Tags associated with this contact
+ type: Unknown
+ - contextPath: Freshdesk.Contact.TimeZone
+ description: Time zone in which the contact resides
+ type: String
+ - contextPath: Freshdesk.Contact.TwitterID
+ description: Twitter handle of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.UniqueExternalID
+ description: External ID of the contact
+ type: String
+ - contextPath: Freshdesk.Contact.CreatedAt
+ description: Contact creation timestamp
+ type: Date
+ - contextPath: Freshdesk.Contact.UpdatedAt
+ description: Contact updated timestamp
+ type: Date
+ - deprecated: false
+ description: List all Canned Response Folders (Only users with Admin Privileges)
+ execution: false
+ name: fd-list-canned-response-folders
+ outputs:
+ - contextPath: Freshdesk.CRFolder.ID
+ description: Unique ID of the canned response folder
+ type: Number
+ - contextPath: Freshdesk.CRFolder.Name
+ description: Name of the canned response folder
+ type: String
+ - contextPath: Freshdesk.CRFolder.Personal
+    description: Set to true if the folder can only be accessed by you
+ type: Boolean
+ - contextPath: Freshdesk.CRFolder.ResponsesCount
+ description: Number of canned responses in the folder
+ type: Number
+ - contextPath: Freshdesk.CRFolder.CreatedAt
+ description: Canned Response Folder's creation timestamp
+ type: Date
+ - contextPath: Freshdesk.CRFolder.UpdatedAt
+ description: Canned Response Folder's updated timestamp
+ type: Date
+ - arguments:
+ - default: false
+ description: ID of the Folder containing the Canned Responses you wish to view
+ the details of. To find ID numbers for your Canned Response folders try executing
+ the 'fd-list-canned-response-folders' command.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: View the details of all the Canned Responses in a Folder.
+ execution: false
+ name: fd-get-canned-response-folder
+ outputs:
+ - contextPath: Freshdesk.CRFolder.CR.ID
+ description: Unique ID of the canned response
+ type: Number
+ - contextPath: Freshdesk.CRFolder.CR.Title
+ description: Title of the canned response
+ type: String
+ - contextPath: Freshdesk.CRFolder.CR.FolderID
+ description: ID of the containing folder
+ type: Number
+ - contextPath: Freshdesk.CRFolder.CR.Content
+ description: Plaintext version of the canned response content
+ type: String
+ - contextPath: Freshdesk.CRFolder.CR.ContentHTML
+ description: HTML version of the canned response content
+ type: String
+  - contextPath: Freshdesk.CRFolder.CR.Attachment.AttachmentURL
+    description: URL to download the file attached to the canned response to your
+      local machine
+    type: String
+  - contextPath: Freshdesk.CRFolder.CR.Attachment.Name
+    description: The name of the file attached to the canned response
+    type: String
+  - contextPath: Freshdesk.CRFolder.CR.Attachment.ContentType
+    description: Content type of the file attached to the canned response
+    type: String
+  - contextPath: Freshdesk.CRFolder.CR.Attachment.ID
+    description: ID number of the file attached to the canned response
+    type: Number
+  - contextPath: Freshdesk.CRFolder.CR.Attachment.Size
+    description: Size of the file attached to the canned response
+    type: Number
+ - deprecated: false
+ description: Lists all groups.
+ execution: false
+ name: fd-list-groups
+ outputs:
+ - contextPath: Freshdesk.Group.AgentID
+ description: Array of agent user IDs separated by commas
+ type: Unknown
+ - contextPath: Freshdesk.Group.AutoTicketAssign
+ description: Set to true if automatic ticket assignment is enabled. Automatic
+ ticket assignment is only available on certain plans
+ type: Boolean
+ - contextPath: Freshdesk.Group.BusinessHourID
+ description: Unique ID of the business hour associated with the group
+ type: Number
+ - contextPath: Freshdesk.Group.Description
+ description: Description of the group
+ type: String
+ - contextPath: Freshdesk.Group.EscalateTo
+ description: The ID of the user that an escalation email is sent to if a ticket
+ is unassigned
+ type: Number
+ - contextPath: Freshdesk.Group.ID
+ description: Unique ID of the group
+ type: Number
+ - contextPath: Freshdesk.Group.Name
+ description: Name of the group
+ type: String
+ - contextPath: Freshdesk.Group.UnassignedFor
+ description: The time after which an escalation email is sent if a ticket remains
+ unassigned
+ type: String
+ - contextPath: Freshdesk.Group.CreatedAt
+ description: Group creation timestamp
+ type: Date
+ - contextPath: Freshdesk.Group.UpdatedAt
+ description: Group updated timestamp
+ type: Date
+ - contextPath: Freshdesk.Group.GroupType
+ description: Type of the group
+ type: String
+ - arguments:
+ - default: false
+ description: ID of the ticket to add a reply to
+ isArray: false
+ name: ticket_id
+ required: true
+ secret: false
+ - default: false
+ description: Content of the reply (in HTML format)
+ isArray: false
+ name: body
+ required: true
+ secret: false
+ - default: false
+ description: The email address from which the reply is sent. By default, the
+ global support email is used.
+ isArray: false
+ name: from_email
+ required: false
+ secret: false
+ - default: false
+ description: ID of the agent who is adding the reply to the ticket
+ isArray: false
+ name: user_id
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of email addresses to add to the 'cc' field of the outgoing
+ ticket email, e.g., "example1@example.com,example2@example.com,example3@example.com"
+ isArray: true
+ name: cc_emails
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of email addresses to add to the 'bcc' field of the outgoing
+ ticket email, e.g., "example1@example.com,example2@example.com,example3@example.com"
+ isArray: true
+ name: bcc_emails
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of Entry IDs of files to attach to the reply, e.g., "468@73f988d1-bda2-4adc-8e02-926f02190070,560@73f988d1-bda2-4adc-8e02-926f02190070".
+ The total size of these attachments cannot exceed 15MB.
+ isArray: true
+ name: attachments
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds a reply to a specified ticket.
+ execution: false
+ name: fd-ticket-reply
+ outputs:
+ - contextPath: Freshdesk.Ticket.Conversation.BodyHTML
+ description: Content of the conversation (in HTML format)
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.BodyText
+ description: Content of the conversation (in plain text format)
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.ID
+ description: ID of the conversation
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.Incoming
+ description: Set to true when a particular conversation should appear as being
+ created outside of the web portal
+ type: Boolean
+ - contextPath: Freshdesk.Ticket.Conversation.ToEmail
+ description: Array of email addresses of agents/users who need to be notified
+ about this conversation
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Conversation.Private
+ description: Set to true if the note is private
+ type: Boolean
+ - contextPath: Freshdesk.Ticket.Conversation.Source
+ description: Denotes the type of conversation
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.SupportEmail
+ description: Email address from which the reply is sent. For notes, this value is null
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.TicketID
+ description: ID of the ticket that the conversation was added to
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.UserID
+ description: ID of the agent/user who added the conversation
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.CreatedAt
+ description: Conversation creation timestamp
+ type: Date
+ - contextPath: Freshdesk.Ticket.Conversation.UpdatedAt
+ description: Conversation updated timestamp
+ type: Date
+ - contextPath: Freshdesk.Ticket.Conversation.FromEmail
+ description: The email address that the reply was sent from. By default, the global support email is used
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.AttachmentURL
+ description: URL of the file attached to the ticket
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.Name
+ description: The name of the file attached to the ticket
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.ContentType
+ description: Content type of the file attached to the ticket
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.ID
+ description: ID number of the file attached to the ticket
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.Size
+ description: Size of the file attached to the ticket
+ type: Number
+ - arguments:
+ - default: false
+ description: ID of the ticket whose conversations you want to list
+ isArray: false
+ name: ticket_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Lists all replies and notes for a specified ticket.
+ execution: false
+ name: fd-get-ticket-conversations
+ outputs:
+ - contextPath: Freshdesk.Ticket.Conversation.BodyHTML
+ description: Content of the conversation (in HTML format)
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.BodyText
+ description: Content of the conversation (in plain text format)
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.ID
+ description: ID of the conversation
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.Incoming
+ description: Set to true when a particular conversation should appear as being
+ created outside of the web portal
+ type: Boolean
+ - contextPath: Freshdesk.Ticket.Conversation.ToEmail
+ description: Array of email addresses of agents/users who need to be notified
+ about this conversation
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Conversation.Private
+ description: Set to true if the note is private
+ type: Boolean
+ - contextPath: Freshdesk.Ticket.Conversation.Source
+ description: Denotes the type of conversation
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.SupportEmail
+ description: Email address from which the reply is sent. For notes, this value is null
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.TicketID
+ description: ID of the ticket that the conversation was added to
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.UserID
+ description: ID of the agent/user who added the conversation
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.CreatedAt
+ description: Conversation creation timestamp
+ type: Date
+ - contextPath: Freshdesk.Ticket.Conversation.UpdatedAt
+ description: Conversation updated timestamp
+ type: Date
+ - contextPath: Freshdesk.Ticket.Conversation.FromEmail
+ description: The email address that the reply was sent from. By default
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.AttachmentURL
+ description: URL of the file attached to the ticket
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.Name
+ description: The name of the file attached to the ticket
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.ContentType
+ description: Content type of the file attached to the ticket
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.ID
+ description: ID number of the file attached to the ticket
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.Size
+ description: Size of the file attached to the ticket
+ type: Number
+ - arguments:
+ - default: false
+ description: Mobile phone number to filter agents by. Enter the number without
+ dashes or spaces between the numbers. Numbers should be entered as they appear
+ in your Freshdesk web portal. If the number appears in your web portal with
+ a plus sign and country code, then that is how you should enter it here, e.g.,
+ '+972501231231'.
+ isArray: false
+ name: mobile
+ required: false
+ secret: false
+ - default: false
+ description: Telephone number to filter agents by. Enter the number without
+ dashes or spaces between the numbers. Numbers should be entered as they appear
+ in your Freshdesk web portal. If the number appears in your web portal with
+ a plus sign and country code, then that is how you should enter it here, e.g.,
+ '+972501231231'.
+ isArray: false
+ name: phone
+ required: false
+ secret: false
+ - default: false
+ description: Filter agents by state, either 'fulltime' or 'occasional'
+ isArray: false
+ name: state
+ predefined:
+ - fulltime
+ - occasional
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of agents that match the filter criteria.
+ execution: false
+ name: fd-list-agents
+ outputs:
+ - contextPath: Freshdesk.Agent.Available
+ description: Set to `true` when the agent is in a group that has enabled "Automatic
+ Ticket Assignment" and is accepting new tickets
+ type: Boolean
+ - contextPath: Freshdesk.Agent.AvailableSince
+ description: Timestamp that denotes when the agent became available/unavailable
+ (depending on the value of the 'available' attribute)
+ type: Date
+ - contextPath: Freshdesk.Agent.ID
+ description: User ID of the agent
+ type: Number
+ - contextPath: Freshdesk.Agent.Occasional
+ description: Set to true when the agent is an occasional agent (true => occasional, false => full-time)
+ type: Boolean
+ - contextPath: Freshdesk.Agent.Signature
+ description: Signature of the agent (in HTML format)
+ type: String
+ - contextPath: Freshdesk.Agent.TicketScope
+ description: |-
+ Ticket permission of the agent
+ (1 - Global Access, 2 - Group Access, 3 - Restricted Access)
+ type: Number
+ - contextPath: Freshdesk.Agent.GroupID
+ description: Group IDs associated with the agent
+ type: Unknown
+ - contextPath: Freshdesk.Agent.RoleID
+ description: Role IDs associated with the agent
+ type: Unknown
+ - contextPath: Freshdesk.Agent.CreatedAt
+ description: Agent creation timestamp
+ type: Date
+ - contextPath: Freshdesk.Agent.UpdatedAt
+ description: Agent updated timestamp
+ type: Date
+ - contextPath: Freshdesk.Agent.Contact.Active
+ description: Set to true when the agent is verified
+ type: Boolean
+ - contextPath: Freshdesk.Agent.Contact.Email
+ description: Email Address of the agent
+ type: String
+ - contextPath: Freshdesk.Agent.Contact.JobTitle
+ description: Job title of the agent
+ type: String
+ - contextPath: Freshdesk.Agent.Contact.Language
+ description: Language of the agent. Default language is "en"
+ type: String
+ - contextPath: Freshdesk.Agent.Contact.LastLoginAt
+ description: Timestamp of the agent's last successful login
+ type: Date
+ - contextPath: Freshdesk.Agent.Contact.Mobile
+ description: Mobile number of the agent
+ type: Number
+ - contextPath: Freshdesk.Agent.Contact.Name
+ description: Name of the agent
+ type: String
+ - contextPath: Freshdesk.Agent.Contact.Phone
+ description: Telephone number of the agent
+ type: Number
+ - contextPath: Freshdesk.Agent.Contact.TimeZone
+ description: Time zone of the agent
+ type: String
+ - contextPath: Freshdesk.Agent.Contact.CreatedAt
+ description: Contact creation timestamp
+ type: Date
+ - contextPath: Freshdesk.Agent.Contact.UpdatedAt
+ description: Contact updated timestamp
+ type: Date
+ - contextPath: Freshdesk.Agent.Type
+ description: Type of agent
+ type: String
+ - arguments:
+ - default: false
+ description: ID of the ticket to make a note for
+ isArray: false
+ name: ticket_id
+ required: true
+ secret: false
+ - default: false
+ description: Content of the note (in HTML format)
+ isArray: false
+ name: body
+ required: true
+ secret: false
+ - default: false
+ description: Set to false to make the note public. By default, notes are private
+ isArray: false
+ name: private
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: ID of the agent who is adding the note. To find agent ID numbers,
+ run the 'fd-list-agents' command.
+ isArray: false
+ name: user_id
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of agent email addresses to notify about this note,
+ e.g., "example1@example.com,example2@example.com,example3@example.com"
+ isArray: true
+ name: notify_emails
+ required: false
+ secret: false
+ - default: false
+ description: Set to true if a particular note should appear as being created
+ outside of the web portal
+ isArray: false
+ name: incoming
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of entry IDs of files to attach to the note, e.g., "468@73f988d1-bda2-4adc-8e02-926f02190070,560@73f988d1-bda2-4adc-8e02-926f02190070".
+ The total size of these attachments cannot exceed 15MB.
+ isArray: true
+ name: attachments
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a note for a specified ticket. By default, notes are private.
+ To make a note public, set the 'private' argument to false.
+ execution: false
+ name: fd-create-ticket-note
+ outputs:
+ - contextPath: Freshdesk.Ticket.Conversation.BodyHTML
+ description: Content of the conversation (in HTML)
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.BodyText
+ description: Content of the conversation (in plain text)
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.ID
+ description: ID of the conversation
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.Incoming
+ description: Set to true when a particular conversation should appear as being
+ created outside of the web portal
+ type: Boolean
+ - contextPath: Freshdesk.Ticket.Conversation.ToEmail
+ description: List of email addresses of agents/users who need to be notified
+ about this conversation
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Conversation.Private
+ description: Set to true if the conversation is private
+ type: Boolean
+ - contextPath: Freshdesk.Ticket.Conversation.Source
+ description: Conversation type
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.SupportEmail
+ description: Email address the reply is sent from. For notes, this value is null
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.TicketID
+ description: ID of the ticket the conversation was added to
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.UserID
+ description: ID of the agent/user who added the conversation
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.CreatedAt
+ description: Conversation creation timestamp
+ type: Date
+ - contextPath: Freshdesk.Ticket.Conversation.UpdatedAt
+ description: Conversation updated timestamp
+ type: Date
+ - contextPath: Freshdesk.Ticket.Conversation.FromEmail
+ description: The email address that the reply/note was sent from. By default
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.AttachmentURL
+ description: URL of the file attached to the ticket
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.Name
+ description: The name of the file attached to the ticket
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.ContentType
+ description: Content type of the file attached to the ticket
+ type: String
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.ID
+ description: ID number of the file attached to the ticket
+ type: Number
+ - contextPath: Freshdesk.Ticket.Conversation.Attachment.Size
+ description: Size of the file attached to the ticket
+ type: Number
+ - arguments:
+ - default: false
+ description: ID of the ticket to delete
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes a ticket.
+ execution: false
+ name: fd-delete-ticket
+ - arguments:
+ - default: false
+ description: |-
+ Predefined filters for filtering tickets. The options are:
+ 'new_and_my_open' - New and my open tickets.
+ 'watching' - Tickets I'm watching.
+ 'spam' - Tickets that have been marked as spam.
+ 'deleted' - Tickets that have been soft-deleted (moved to Trash).
+ isArray: false
+ name: filter
+ predefined:
+ - new_and_my_open
+ - watching
+ - spam
+ - deleted
+ required: false
+ secret: false
+ - default: false
+ description: Filter results by the ticket requester's email address or ID. To
+ find your contacts' ID numbers or email addresses, run the 'fd-list-contacts'
+ command.
+ isArray: false
+ name: requester
+ required: false
+ secret: false
+ - default: false
+ description: By default, tickets created within the previous 30 days are returned.
+ For older tickets, use this filter, e.g., "2015-01-19T02:00:00Z"
+ isArray: false
+ name: updated_since
+ required: false
+ secret: false
+ - default: false
+ description: Field for ordering the list of tickets. The default sort order
+ uses the 'created_at' field.
+ isArray: false
+ name: order_by
+ predefined:
+ - created_at
+ - due_by
+ - updated_at
+ - status
+ required: false
+ secret: false
+ - default: false
+ description: Return list results in ascending or descending order according
+ to the order_by value. The default is descending
+ isArray: false
+ name: order_type
+ predefined:
+ - asc
+ - desc
+ required: false
+ secret: false
+ - default: false
+ description: If set to 'true', the ticket's closed_at, resolved_at, and first_responded_at
+ times are included. This is not set by default because setting it to 'true'
+ consumes an additional 2 API credits per API call. For more details, see the
+ Freshdesk API documentation: https://developers.freshdesk.com/api/#embedding
+ isArray: false
+ name: include_stats
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: If set to 'true', the ticket requester's ID, email address,
+ mobile number, name, and phone number are included in each ticket's output.
+ This is not set by default because setting it to 'true' consumes an additional
+ 2 API credits per API call. For more details, see the Freshdesk API documentation:
+ https://developers.freshdesk.com/api/#embedding
+ isArray: false
+ name: include_requester
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: If set to 'true', the ticket's description and description_text
+ are included in the ticket outputs. This is not set by default because setting
+ it to 'true' consumes an additional 2 API credits per API call. For more details,
+ see the Freshdesk API documentation: https://developers.freshdesk.com/api/#embedding
+ isArray: false
+ name: include_description
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: |-
+ Filter tickets using a custom query.
+
+ Format - "(ticket_field:integer OR ticket_field:'string') AND ticket_field:boolean"
+ Example - "(type:'Question' OR type:'Problem') AND (due_by:>'2017-10-01' AND due_by:<'2017-10-07')"
+ For more examples, see the Freshdesk API documentation https://developers.freshdesk.com/api/#filter_tickets
+
+ Note that the custom_query argument cannot be used in conjunction with this command's other arguments.
+ isArray: false
+ name: custom_query
+ required: false
+ secret: false
+ deprecated: false
+ description: |-
+ Returns a list of all tickets that match the filter criteria. If
+ no filters are specified, all tickets from the last 30 days are returned.
+ Note that this command can consume multiple API credits: if your query
+ matches more than 30 tickets, the command makes additional API calls to
+ retrieve the remaining matching tickets.
+ execution: false
+ name: fd-search-tickets
+ outputs:
+ - contextPath: Freshdesk.Ticket.ID
+ description: ID number of the fetched ticket
+ type: Number
+ - contextPath: Freshdesk.Ticket.Priority
+ description: Ticket priority
+ type: Number
+ - contextPath: Freshdesk.Ticket.DueBy
+ description: Timestamp that denotes when the ticket is due to be resolved
+ type: Date
+ - contextPath: Freshdesk.Ticket.Subject
+ description: Ticket subject
+ type: String
+ - contextPath: Freshdesk.Ticket.Status
+ description: Ticket status
+ type: Number
+ - contextPath: Freshdesk.Ticket.RequesterID
+ description: User ID of the requester
+ type: Number
+ - contextPath: Freshdesk.Ticket.Tag
+ description: Tags associated with the ticket
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.GroupID
+ description: ID of the group the ticket was assigned to
+ type: Number
+ - contextPath: Freshdesk.Ticket.Source
+ description: The channel through which the ticket was created
+ type: Number
+ - contextPath: Freshdesk.Ticket.CreatedAt
+ description: Ticket creation timestamp
+ type: Date
+ - contextPath: Freshdesk.Ticket.ResponderID
+ description: ID of the agent the ticket was assigned to
+ type: Number
+ - contextPath: Freshdesk.Ticket.FrDueBy
+ description: Timestamp that denotes when the first response is due
+ type: Date
+ - contextPath: Freshdesk.Ticket.Conversation
+ description: Conversations associated with this ticket
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.AttachmentURL
+ description: URL to download the file attached to the ticket to your local machine
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.Name
+ description: The name of the file attached to the ticket
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.ContentType
+ description: Content type of the file attached to the ticket
+ type: String
+ - contextPath: Freshdesk.Ticket.Attachment.ID
+ description: ID number of the file attached to the ticket
+ type: Unknown
+ - contextPath: Freshdesk.Ticket.Attachment.Size
+ description: Size of the file attached to the ticket
+ type: Number
+ subtype: python2
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- Freshdesk-Test
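
The `custom_query` grammar accepted by ***fd-search-tickets*** maps onto Freshdesk's documented ticket-filter endpoint. As a minimal sketch of the underlying call, assuming the documented `GET /api/v2/search/tickets` endpoint and API-key basic auth (the `search_tickets` helper and its arguments are illustrative, not part of this diff):

```python
import requests

def search_tickets(domain: str, api_key: str, query: str) -> dict:
    """Sketch: filter tickets using Freshdesk's custom query grammar.

    `query` uses the documented filter format, e.g.
    "(type:'Question' OR type:'Problem') AND due_by:>'2017-10-01'".
    """
    # Freshdesk expects the query wrapped in double quotes inside the URL
    # parameter; the API key goes in the basic-auth username slot with a
    # dummy password ('X').
    resp = requests.get(
        f'https://{domain}.freshdesk.com/api/v2/search/tickets',
        params={'query': f'"{query}"'},
        auth=(api_key, 'X'),
    )
    resp.raise_for_status()
    return resp.json()  # {'total': <int>, 'results': [<ticket>, ...]}
```

The filter endpoint returns results in pages of 30, which is why the command description above warns that queries matching more than 30 tickets consume additional API credits.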
diff --git a/Integrations/FreshDesk/FreshDesk_description.md b/Integrations/FreshDesk/FreshDesk_description.md
new file mode 100644
index 000000000000..124dc7f59cd6
--- /dev/null
+++ b/Integrations/FreshDesk/FreshDesk_description.md
@@ -0,0 +1,14 @@
+Follow these instructions to configure an instance of the Freshdesk integration.
+Enter either an API token or your Freshdesk user credentials.
+
+To use an API token, retrieve it as follows:
+1. Log in to your Freshdesk support portal.
+2. Click your profile picture in the top-right corner.
+3. Go to the Profile settings page.
+4. Your API key appears below the Change Password section.
+
+To use user credentials:
+Enter the same username/password credentials you use to log in to your Freshdesk support portal.
+
+Enter your Server URL:
+If your company's Freshdesk helpdesk name is, for example, 'dodecahedron', then the Server URL to enter is 'https://dodecahedron.freshdesk.com'.
\ No newline at end of file
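
The API-token scheme described above is plain HTTP basic auth with the key in the username slot. A minimal connectivity check might look like this (the domain and key are placeholders):

```python
import requests

# Placeholders: substitute your own helpdesk name and the API key from your
# profile settings page.
DOMAIN = 'dodecahedron'
API_KEY = '<your-api-key>'

# With token auth, the API key is the basic-auth username and the password
# can be any dummy string ('X' by convention).
resp = requests.get(
    f'https://{DOMAIN}.freshdesk.com/api/v2/tickets',
    auth=(API_KEY, 'X'),
)
print(resp.status_code)  # 200 means the credentials and server URL are valid
```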
diff --git a/Integrations/FreshDesk/FreshDesk_image.png b/Integrations/FreshDesk/FreshDesk_image.png
new file mode 100644
index 000000000000..0803e66af47f
Binary files /dev/null and b/Integrations/FreshDesk/FreshDesk_image.png differ
diff --git a/Integrations/GitHub/CHANGELOG.md b/Integrations/GitHub/CHANGELOG.md
new file mode 100644
index 000000000000..a331cce01e8b
--- /dev/null
+++ b/Integrations/GitHub/CHANGELOG.md
@@ -0,0 +1,27 @@
+## [Unreleased]
+ - Improved implementation of the default value for the fetch time parameter.
+ - Added 4 commands:
+ - ***GitHub-list-pr-review-comments***
+ - ***GitHub-update-pull-request***
+ - ***GitHub-is-pr-merged***
+ - ***GitHub-create-pull-request***
+
+## [19.10.0] - 2019-10-03
+-
+
+## [19.9.1] - 2019-09-18
+Added 14 commands:
+ - ***GitHub-get-stale-prs***
+ - ***GitHub-get-branch***
+ - ***GitHub-create-branch***
+ - ***GitHub-delete-branch***
+ - ***GitHub-list-teams***
+ - ***GitHub-get-team-membership***
+ - ***GitHub-request-review***
+ - ***GitHub-create-comment***
+ - ***GitHub-list-issue-comments***
+ - ***GitHub-list-pr-files***
+ - ***GitHub-list-pr-reviews***
+ - ***GitHub-get-commit***
+ - ***GitHub-add-label***
+ - ***GitHub-get-pull-request***
diff --git a/Integrations/GitHub/GitHub.py b/Integrations/GitHub/GitHub.py
new file mode 100644
index 000000000000..28d7053f1a4a
--- /dev/null
+++ b/Integrations/GitHub/GitHub.py
@@ -0,0 +1,1171 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import json
+import re
+import requests
+from typing import Union, Any
+from datetime import datetime, timedelta
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+USER = demisto.params().get('user')
+TOKEN = demisto.params().get('token', '')
+BASE_URL = 'https://api.github.com'
+REPOSITORY = demisto.params().get('repository')
+USE_SSL = not demisto.params().get('insecure', False)
+FETCH_TIME = demisto.params().get('fetch_time', '3')
+
+USER_SUFFIX = '/repos/{}/{}'.format(USER, REPOSITORY)
+ISSUE_SUFFIX = USER_SUFFIX + '/issues'
+RELEASE_SUFFIX = USER_SUFFIX + '/releases'
+PULLS_SUFFIX = USER_SUFFIX + '/pulls'
+
+RELEASE_HEADERS = ['ID', 'Name', 'Download_count', 'Body', 'Created_at', 'Published_at']
+ISSUE_HEADERS = ['ID', 'Repository', 'Title', 'State', 'Body', 'Created_at', 'Updated_at', 'Closed_at', 'Closed_by',
+ 'Assignees', 'Labels']
+
+# Headers to be sent in requests
+HEADERS = {
+ 'Authorization': "Bearer " + TOKEN
+}
+
+
+''' HELPER FUNCTIONS '''
+
+
+def safe_get(obj_to_fetch_from: dict, what_to_fetch: str, default_val: Union[dict, list, str]) -> Any:
+ """Guarantees the default value in place of a Nonetype object when the value for a given key is explicitly None
+
+ Args:
+ obj_to_fetch_from (dict): The dictionary to fetch from
+ what_to_fetch (str): The key for the desired value
+ default_val: The default value to set instead of None
+
+ Returns:
+ The fetched value unless it is None in which case the default is returned instead
+ """
+ val = obj_to_fetch_from.get(what_to_fetch, default_val)
+ if val is None:
+ val = default_val
+ return val
+
+
+def http_request(method, url_suffix, params=None, data=None):
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ params=params,
+ data=json.dumps(data),
+ headers=HEADERS
+ )
+ if res.status_code >= 400:
+ try:
+ json_res = res.json()
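+            # Typical GitHub error payload parsed here (illustrative):
+            #   {"message": "Validation Failed",
+            #    "errors": [{"resource": "Issue", "field": "title", "code": "missing_field"}]}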
+
+ if json_res.get('errors') is None:
+ return_error('Error in API call to the GitHub Integration [%d] - %s' % (res.status_code, res.reason))
+
+ else:
+ error_code = json_res.get('errors')[0].get('code')
+ if error_code == 'missing_field':
+ return_error(
+ 'Error: the field: "{}" requires a value'.format(json_res.get('errors')[0].get('field')))
+
+ elif error_code == 'invalid':
+ field = json_res.get('errors')[0].get('field')
+ if field == 'q':
+ return_error('Error: invalid query - {}'.format(json_res.get('errors')[0].get('message')))
+
+ else:
+ return_error('Error: the field: "{}" has an invalid value'.format(field))
+
+ elif error_code == 'missing':
+ return_error('Error: {} does not exist'.format(json_res.get('errors')[0].get('resource')))
+
+ elif error_code == 'already_exists':
+ return_error('Error: the field {} must be unique'.format(json_res.get('errors')[0].get('field')))
+
+ else:
+ return_error(
+ 'Error in API call to the GitHub Integration [%d] - %s' % (res.status_code, res.reason))
+
+ except ValueError:
+ return_error('Error in API call to GitHub Integration [%d] - %s' % (res.status_code, res.reason))
+
+ try:
+ if res.status_code == 204:
+ return res
+ else:
+ return res.json()
+
+ except Exception as excep:
+ return_error('Error in HTTP request - {}'.format(str(excep)))
+
+
+def data_formatting(title, body, labels, assignees, state):
+ """This method creates a dictionary to be used as "data" field in an http request."""
+ data = {}
+ if title is not None:
+ data['title'] = title
+
+ if body is not None:
+ data['body'] = body
+
+ if state is not None:
+ data['state'] = state
+
+ if labels is not None:
+ data['labels'] = labels.split(',')
+
+ if assignees is not None:
+ data['assignees'] = assignees.split(',')
+
+ return data
+
+
+def context_create_issue(response, issue):
+ """ Create GitHub.Issue EntryContext and results to be printed in Demisto.
+
+ Args:
+ response (dict): The raw HTTP response to be inserted to the 'Contents' field.
+ issue (dict or list): A dictionary or a list of dictionaries formatted for Demisto results.
+ """
+ ec = {
+ 'GitHub.Issue(val.Repository == obj.Repository && val.ID == obj.ID)': issue
+ }
+ return_outputs(tableToMarkdown("Issues:", issue, headers=ISSUE_HEADERS, removeNull=True), ec, response)
+
+
+def list_create(issue, list_name, element_name):
+ """ Creates a list if parameters exist in issue.
+
+ Args:
+ issue(dict): an issue from GitHub.
+ list_name (str): the name of the list in the issue.
+ element_name (str): the field name of the element in the list.
+
+ Returns:
+ The created list or None if it does not exist.
+ """
+ if issue.get(list_name) is not None:
+ return [element.get(element_name) for element in issue.get(list_name)]
+
+    else:
+        return None
+
+
+def issue_format(issue):
+ """ Create a dictionary with selected fields representing an issue in Demisto.
+
+ Args:
+ issue (dict): An HTTP response representing an issue, formatted as a dictionary
+
+ Returns:
+        (dict): A dictionary representing an issue in Demisto.
+ """
+ closed_by = None
+ if issue.get('closed_by') is not None and issue.get('state') == 'closed':
+ closed_by = issue.get('closed_by').get('login')
+
+ form = {
+ 'ID': issue.get('number'),
+ 'Repository': REPOSITORY,
+ 'Title': issue.get('title'),
+ 'Body': issue.get('body'),
+ 'State': issue.get('state'),
+ 'Labels': list_create(issue, 'labels', 'name'),
+ 'Assignees': list_create(issue, 'assignees', 'login'),
+ 'Created_at': issue.get('created_at'),
+ 'Updated_at': issue.get('updated_at'),
+ 'Closed_at': issue.get('closed_at'),
+ 'Closed_by': closed_by
+ }
+ return form
+
+
+def create_issue_table(issue_list, response, limit):
+ """ Get an HTTP response and a list containing several issues, sends each issue to be reformatted.
+
+ Args:
+ issue_list(list): derived from the HTTP response
+ response (dict):A raw HTTP response sent for 'Contents' field in context
+
+ Returns:
+ The issues are sent to Demisto as a list.
+ """
+ issue_list.reverse()
+ issue_table = []
+ issue_count = 0
+ for issue in issue_list:
+ issue_table.append(issue_format(issue))
+ issue_count = issue_count + 1
+ if issue_count == limit:
+ break
+
+ context_create_issue(response, issue_table)
+
+
+def format_commit_outputs(commit: dict = {}) -> dict:
+ """Take GitHub API commit data and format to expected context outputs
+
+ Args:
+ commit (dict): commit data returned from GitHub API
+
+ Returns:
+ (dict): commit object formatted to expected context outputs
+ """
+ author = commit.get('author', {})
+ ec_author = {
+ 'Date': author.get('date'),
+ 'Name': author.get('name'),
+ 'Email': author.get('email')
+ }
+ committer = commit.get('committer', {})
+ ec_committer = {
+ 'Date': committer.get('date'),
+ 'Name': committer.get('name'),
+ 'Email': committer.get('email')
+ }
+ parents = commit.get('parents', [])
+ formatted_parents = [{'SHA': parent.get('sha')} for parent in parents]
+
+ verification = commit.get('verification', {})
+ ec_verification = {
+ 'Verified': verification.get('verified'),
+ 'Reason': verification.get('reason'),
+ 'Signature': verification.get('signature'),
+ 'Payload': verification.get('payload')
+ }
+
+ ec_object = {
+ 'SHA': commit.get('sha'),
+ 'Author': ec_author,
+ 'Committer': ec_committer,
+ 'Message': commit.get('message'),
+ 'Parent': formatted_parents,
+ 'TreeSHA': commit.get('tree', {}).get('sha'),
+ 'Verification': ec_verification
+ }
+ return ec_object
+
+
+def format_label_outputs(label: dict = {}) -> dict:
+ """Take GitHub API label data and format to expected context outputs
+
+ Args:
+ label (dict): label data returned from GitHub API
+
+ Returns:
+ (dict): label object formatted to expected context outputs
+ """
+ ec_object = {
+ 'ID': label.get('id'),
+ 'NodeID': label.get('node_id'),
+ 'Name': label.get('name'),
+ 'Description': label.get('description'),
+        'Color': label.get('color'),
+ 'Default': label.get('default')
+ }
+ return ec_object
+
+
+def format_user_outputs(user: dict = {}) -> dict:
+ """Take GitHub API user data and format to expected context outputs
+
+ Args:
+ user (dict): user data returned from GitHub API
+
+ Returns:
+ (dict): user object formatted to expected context outputs
+ """
+ ec_user = {
+ 'Login': user.get('login'),
+ 'ID': user.get('id'),
+ 'NodeID': user.get('node_id'),
+ 'Type': user.get('type'),
+ 'SiteAdmin': user.get('site_admin')
+ }
+ return ec_user
+
+
+def format_pr_review_comment_outputs(review_comment: dict = {}) -> dict:
+ """Take GitHub API pr review comment data and format to expected context outputs
+
+ Args:
+        review_comment (dict): PR review comment data returned from GitHub API
+
+ Returns:
+ (dict): pr review comment object formatted to expected context outputs
+ """
+ ec_pr_review_comment = {
+ 'ID': review_comment.get('id'),
+ 'NodeID': review_comment.get('node_id'),
+ 'PullRequestReviewID': review_comment.get('pull_request_review_id'),
+ 'DiffHunk': review_comment.get('diff_hunk'),
+ 'Path': review_comment.get('path'),
+ 'Position': review_comment.get('position'),
+ 'OriginalPosition': review_comment.get('original_position'),
+ 'CommitID': review_comment.get('commit_id'),
+ 'OriginalCommitID': review_comment.get('original_commit_id'),
+ 'InReplyToID': review_comment.get('in_reply_to_id'),
+ 'User': format_user_outputs(review_comment.get('user', {})),
+ 'Body': review_comment.get('body'),
+ 'CreatedAt': review_comment.get('created_at'),
+ 'UpdatedAt': review_comment.get('updated_at'),
+ 'AuthorAssociation': review_comment.get('author_association')
+ }
+ return ec_pr_review_comment
+
+
+def format_team_outputs(team: dict = {}) -> dict:
+ """Take GitHub API team data and format to expected context outputs
+
+ Args:
+ team (dict): team data returned from GitHub API
+
+ Returns:
+ (dict): team object formatted to expected context outputs
+ """
+ ec_team = {
+ 'ID': team.get('id'),
+ 'NodeID': team.get('node_id'),
+ 'Name': team.get('name'),
+ 'Slug': team.get('slug'),
+ 'Description': team.get('description'),
+ 'Privacy': team.get('privacy'),
+ 'Permission': team.get('permission'),
+ 'Parent': team.get('parent')
+ }
+ return ec_team
+
+
+def format_head_or_base_outputs(head_or_base: dict = {}) -> dict:
+ """Take GitHub API head or base branch data and format to expected context outputs
+
+ Args:
+ head_or_base (dict): head or base branch data returned from GitHub API
+
+ Returns:
+ (dict): head or base branch object formatted to expected context outputs
+ """
+ head_or_base_user = head_or_base.get('user', {})
+ ec_head_or_base_user = format_user_outputs(head_or_base_user)
+ head_or_base_repo = head_or_base.get('repo', {})
+ head_or_base_repo_owner = head_or_base_repo.get('owner', {})
+ ec_head_or_base_repo_owner = format_user_outputs(head_or_base_repo_owner)
+ ec_head_repo = {
+ 'ID': head_or_base_repo.get('id'),
+ 'NodeID': head_or_base_repo.get('node_id'),
+ 'Name': head_or_base_repo.get('name'),
+ 'FullName': head_or_base_repo.get('full_name'),
+ 'Owner': ec_head_or_base_repo_owner,
+ 'Private': head_or_base_repo.get('private'),
+ 'Description': head_or_base_repo.get('description'),
+ 'Fork': head_or_base_repo.get('fork'),
+ 'Language': head_or_base_repo.get('language'),
+ 'ForksCount': head_or_base_repo.get('forks_count'),
+ 'StargazersCount': head_or_base_repo.get('stargazers_count'),
+ 'WatchersCount': head_or_base_repo.get('watchers_count'),
+ 'Size': head_or_base_repo.get('size'),
+ 'DefaultBranch': head_or_base_repo.get('default_branch'),
+ 'OpenIssuesCount': head_or_base_repo.get('open_issues_count'),
+ 'Topics': head_or_base_repo.get('topics'),
+ 'HasIssues': head_or_base_repo.get('has_issues'),
+ 'HasProjects': head_or_base_repo.get('has_projects'),
+ 'HasWiki': head_or_base_repo.get('has_wiki'),
+ 'HasPages': head_or_base_repo.get('has_pages'),
+ 'HasDownloads': head_or_base_repo.get('has_downloads'),
+ 'Archived': head_or_base_repo.get('archived'),
+ 'Disabled': head_or_base_repo.get('disabled'),
+ 'PushedAt': head_or_base_repo.get('pushed_at'),
+ 'CreatedAt': head_or_base_repo.get('created_at'),
+ 'UpdatedAt': head_or_base_repo.get('updated_at'),
+ 'AllowRebaseMerge': head_or_base_repo.get('allow_rebase_merge'),
+ 'AllowSquashMerge': head_or_base_repo.get('allow_squash_merge'),
+ 'AllowMergeCommit': head_or_base_repo.get('allow_merge_commit'),
+        'SubscribersCount': head_or_base_repo.get('subscribers_count')
+ }
+ ec_head_or_base = {
+ 'Label': head_or_base.get('label'),
+ 'Ref': head_or_base.get('ref'),
+ 'SHA': head_or_base.get('sha'),
+ 'User': ec_head_or_base_user,
+ 'Repo': ec_head_repo,
+ }
+ return ec_head_or_base
+
+
+def format_pr_outputs(pull_request: dict = {}) -> dict:
+ """Take GitHub API Pull Request data and format to expected context outputs
+
+ Args:
+ pull_request (dict): Pull Request data returned from GitHub API
+
+ Returns:
+ (dict): Pull Request object formatted to expected context outputs
+ """
+ user_data = safe_get(pull_request, 'user', {})
+ ec_user = format_user_outputs(user_data)
+
+ labels_data = safe_get(pull_request, 'labels', [])
+ ec_labels = [format_label_outputs(label) for label in labels_data]
+
+ milestone_data = safe_get(pull_request, 'milestone', {})
+ creator = safe_get(milestone_data, 'creator', {})
+ ec_creator = format_user_outputs(creator)
+ ec_milestone = {
+ 'ID': milestone_data.get('id'),
+ 'NodeID': milestone_data.get('node_id'),
+ 'Number': milestone_data.get('number'),
+ 'State': milestone_data.get('state'),
+ 'Title': milestone_data.get('title'),
+ 'Description': milestone_data.get('description'),
+ 'OpenIssues': milestone_data.get('open_issues'),
+ 'ClosedIssues': milestone_data.get('closed_issues'),
+ 'CreatedAt': milestone_data.get('created_at'),
+ 'UpdatedAt': milestone_data.get('updated_at'),
+ 'ClosedAt': milestone_data.get('closed_at'),
+ 'DueOn': milestone_data.get('due_on'),
+ }
+ if creator:
+ ec_milestone['Creator'] = ec_creator
+
+ assignees_data = safe_get(pull_request, 'assignees', [])
+ ec_assignee = [format_user_outputs(assignee) for assignee in assignees_data]
+
+ requested_reviewers_data = safe_get(pull_request, 'requested_reviewers', [])
+ ec_requested_reviewer = [format_user_outputs(requested_reviewer) for requested_reviewer in requested_reviewers_data]
+
+ requested_teams_data = safe_get(pull_request, 'requested_teams', [])
+ ec_requested_team = [format_team_outputs(requested_team) for requested_team in requested_teams_data]
+
+ head_data = safe_get(pull_request, 'head', {})
+ ec_head = format_head_or_base_outputs(head_data)
+
+ base_data = safe_get(pull_request, 'base', {})
+ ec_base = format_head_or_base_outputs(base_data)
+
+ merged_by_data = safe_get(pull_request, 'merged_by', {})
+ ec_merged_by = format_user_outputs(merged_by_data)
+
+ ec_object = {
+ 'ID': pull_request.get('id'),
+ 'NodeID': pull_request.get('node_id'),
+ 'Number': pull_request.get('number'),
+ 'State': pull_request.get('state'),
+ 'Locked': pull_request.get('locked'),
+ 'Body': pull_request.get('body'),
+ 'ActiveLockReason': pull_request.get('active_lock_reason'),
+ 'CreatedAt': pull_request.get('created_at'),
+ 'UpdatedAt': pull_request.get('updated_at'),
+ 'ClosedAt': pull_request.get('closed_at'),
+ 'MergedAt': pull_request.get('merged_at'),
+ 'MergeCommitSHA': pull_request.get('merge_commit_sha'),
+ 'AuthorAssociation': pull_request.get('author_association'),
+ 'Draft': pull_request.get('draft'),
+ 'Merged': pull_request.get('merged'),
+ 'Mergeable': pull_request.get('mergeable'),
+ 'Rebaseable': pull_request.get('rebaseable'),
+ 'MergeableState': pull_request.get('mergeable_state'),
+ 'Comments': pull_request.get('comments'),
+ 'ReviewComments': pull_request.get('review_comments'),
+ 'MaintainerCanModify': pull_request.get('maintainer_can_modify'),
+ 'Commits': pull_request.get('commits'),
+ 'Additions': pull_request.get('additions'),
+ 'Deletions': pull_request.get('deletions'),
+ 'ChangedFiles': pull_request.get('changed_files')
+ }
+ if user_data:
+ ec_object['User'] = ec_user
+ if labels_data:
+ ec_object['Label'] = ec_labels
+ if assignees_data:
+ ec_object['Assignee'] = ec_assignee
+ if requested_reviewers_data:
+ ec_object['RequestedReviewer'] = ec_requested_reviewer
+ if requested_teams_data:
+ ec_object['RequestedTeam'] = ec_requested_team
+ if head_data:
+ ec_object['Head'] = ec_head
+ if base_data:
+ ec_object['Base'] = ec_base
+ if merged_by_data:
+ ec_object['MergedBy'] = ec_merged_by
+ if milestone_data:
+ ec_object['Milestone'] = ec_milestone
+ return ec_object
+
+
+def format_comment_outputs(comment: dict, issue_number: Union[int, str]) -> dict:
+ """Take GitHub API Comment data and format to expected context outputs
+
+ Args:
+ comment (dict): Comment data returned from GitHub API
+ issue_number (int): The number of the issue to which the comment belongs
+
+ Returns:
+ (dict): Comment object formatted to expected context outputs
+ """
+ ec_object = {
+ 'IssueNumber': int(issue_number) if isinstance(issue_number, str) else issue_number,
+ 'ID': comment.get('id'),
+ 'NodeID': comment.get('node_id'),
+ 'Body': comment.get('body'),
+ 'User': format_user_outputs(comment.get('user', {}))
+ }
+ return ec_object
+
+
+''' COMMANDS '''
+
+
+def test_module():
+ http_request(method='GET', url_suffix=ISSUE_SUFFIX, params={'state': 'all'})
+ demisto.results("ok")
+
+
+def create_pull_request(create_vals: dict = {}) -> dict:
+ suffix = PULLS_SUFFIX
+ response = http_request('POST', url_suffix=suffix, data=create_vals)
+ return response
+
+
+def create_pull_request_command():
+ args = demisto.args()
+ create_vals = {key: val for key, val in args.items()}
+ maintainer_can_modify = args.get('maintainer_can_modify')
+ if maintainer_can_modify:
+ create_vals['maintainer_can_modify'] = maintainer_can_modify == 'true'
+ draft = args.get('draft')
+ if draft:
+ create_vals['draft'] = draft == 'true'
+ response = create_pull_request(create_vals)
+
+ ec_object = format_pr_outputs(response)
+ ec = {
+ 'GitHub.PR(val.Number === obj.Number)': ec_object
+ }
+ human_readable = tableToMarkdown(f'Created Pull Request #{response.get("number")}', ec_object, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=response)
+
+
+def is_pr_merged(pull_number: Union[int, str]):
+ suffix = PULLS_SUFFIX + f'/{pull_number}/merge'
+ response = http_request('GET', url_suffix=suffix)
+ return response
+
+
+def is_pr_merged_command():
+ args = demisto.args()
+ pull_number = args.get('pull_number')
+
+ # raises 404 not found error if the pr was not merged
+ is_pr_merged(pull_number)
+ demisto.results(f'Pull Request #{pull_number} was Merged')
+
+
+def update_pull_request(pull_number: Union[int, str], update_vals: dict = {}) -> dict:
+ suffix = PULLS_SUFFIX + f'/{pull_number}'
+ response = http_request('PATCH', url_suffix=suffix, data=update_vals)
+ return response
+
+
+def update_pull_request_command():
+ args = demisto.args()
+ pull_number = args.get('pull_number')
+ update_vals = {key: val for key, val in args.items() if key != 'pull_number'}
+ if not update_vals:
+ return_error('You must provide a value for at least one of the command\'s arguments "title", "body", "state",'
+ ' "base" or "maintainer_can_modify" that you would like to update the pull request with')
+ maintainer_can_modify = update_vals.get('maintainer_can_modify')
+ if maintainer_can_modify:
+ update_vals['maintainer_can_modify'] = maintainer_can_modify == 'true'
+ response = update_pull_request(pull_number, update_vals)
+
+ ec_object = format_pr_outputs(response)
+ ec = {
+ 'GitHub.PR(val.Number === obj.Number)': ec_object
+ }
+ human_readable = tableToMarkdown(f'Updated Pull Request #{pull_number}', ec_object, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=response)
+
+
+def list_teams(organization: str) -> list:
+ suffix = f'/orgs/{organization}/teams'
+ response = http_request('GET', url_suffix=suffix)
+ return response
+
+
+def list_teams_command():
+ args = demisto.args()
+ organization = args.get('organization')
+ response = list_teams(organization)
+
+ ec_object = [format_team_outputs(team) for team in response]
+ ec = {
+ 'GitHub.Team(val.ID === obj.ID)': ec_object
+ }
+ human_readable = tableToMarkdown(f'Teams for Organization "{organization}"', ec_object, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=response)
+
+
+def get_pull_request(pull_number: Union[int, str]):
+ suffix = PULLS_SUFFIX + f'/{pull_number}'
+ response = http_request('GET', url_suffix=suffix)
+ return response
+
+
+def get_pull_request_command():
+ args = demisto.args()
+ pull_number = args.get('pull_number')
+ response = get_pull_request(pull_number)
+
+ ec_object = format_pr_outputs(response)
+ ec = {
+ 'GitHub.PR(val.Number === obj.Number)': ec_object
+ }
+ human_readable = tableToMarkdown(f'Pull Request #{pull_number}', ec_object, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=response)
+
+
+def add_label(issue_number: Union[int, str], labels: list):
+ suffix = ISSUE_SUFFIX + f'/{issue_number}/labels'
+ response = http_request('POST', url_suffix=suffix, data={'labels': labels})
+ return response
+
+
+def add_label_command():
+ args = demisto.args()
+ issue_number = args.get('issue_number')
+ labels = argToList(args.get('labels'))
+ add_label(issue_number, labels)
+ labels_for_msg = [f'"{label}"' for label in labels]
+ msg = f'{" and ".join(labels_for_msg)} Successfully Added to Issue #{issue_number}'
+ msg = 'Labels ' + msg if 'and' in msg else 'Label ' + msg
+ demisto.results(msg)
+
+
+def get_commit(commit_sha: str) -> dict:
+ suffix = USER_SUFFIX + f'/git/commits/{commit_sha}'
+ response = http_request('GET', url_suffix=suffix)
+ return response
+
+
+def get_commit_command():
+ args = demisto.args()
+ commit_sha = args.get('commit_sha')
+ response = get_commit(commit_sha)
+
+ ec_object = format_commit_outputs(response)
+ ec = {
+ 'GitHub.Commit(val.SHA === obj.SHA)': ec_object
+ }
+ human_readable = tableToMarkdown(f'Commit *{commit_sha[:10]}*', ec_object, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=response)
+
+
+def list_pr_reviews(pull_number: Union[int, str]) -> list:
+ suffix = PULLS_SUFFIX + f'/{pull_number}/reviews'
+ response = http_request('GET', url_suffix=suffix)
+ return response
+
+
+def list_pr_reviews_command():
+ args = demisto.args()
+ pull_number = args.get('pull_number')
+ response = list_pr_reviews(pull_number)
+
+ formatted_pr_reviews = [
+ {
+ 'ID': pr_review.get('id'),
+ 'NodeID': pr_review.get('node_id'),
+ 'Body': pr_review.get('body'),
+ 'CommitID': pr_review.get('commit_id'),
+ 'State': pr_review.get('state'),
+ 'User': format_user_outputs(pr_review.get('user', {}))
+ }
+ for pr_review in response
+ ]
+ ec_object = {
+ 'Number': pull_number,
+ 'Review': formatted_pr_reviews
+ }
+ ec = {
+ 'GitHub.PR(val.Number === obj.Number)': ec_object
+ }
+ human_readable = tableToMarkdown(f'Pull Request Reviews for #{pull_number}', formatted_pr_reviews, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=response)
+
+
+def list_pr_files(pull_number: Union[int, str]) -> list:
+ suffix = PULLS_SUFFIX + f'/{pull_number}/files'
+ response = http_request('GET', url_suffix=suffix)
+ return response
+
+
+def list_pr_files_command():
+ args = demisto.args()
+ pull_number = args.get('pull_number')
+ response = list_pr_files(pull_number)
+
+ formatted_pr_files = [
+ {
+ 'SHA': pr_file.get('sha'),
+ 'Name': pr_file.get('filename'),
+ 'Status': pr_file.get('status'),
+ 'Additions': pr_file.get('additions'),
+ 'Deletions': pr_file.get('deletions'),
+ 'Changes': pr_file.get('changes')
+ }
+ for pr_file in response
+ ]
+ ec_object = {
+ 'Number': pull_number,
+ 'File': formatted_pr_files
+ }
+ ec = {
+ 'GitHub.PR(val.Number === obj.Number)': ec_object
+ }
+ human_readable = tableToMarkdown(f'Pull Request Files for #{pull_number}', formatted_pr_files, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=response)
+
+
+def list_pr_review_comments(pull_number: Union[int, str]) -> list:
+ suffix = PULLS_SUFFIX + f'/{pull_number}/comments'
+ response = http_request('GET', url_suffix=suffix)
+ return response
+
+
+def list_pr_review_comments_command():
+ args = demisto.args()
+ pull_number = args.get('pull_number')
+ response = list_pr_review_comments(pull_number)
+
+ formatted_pr_review_comments = [format_pr_review_comment_outputs(review_comment) for review_comment in response]
+ ec_object = {
+ 'Number': pull_number,
+ 'ReviewComment': formatted_pr_review_comments
+ }
+ ec = {
+ 'GitHub.PR(val.Number === obj.Number)': ec_object
+ }
+ human_readable = tableToMarkdown(f'Pull Request Review Comments for #{pull_number}', formatted_pr_review_comments,
+ removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=response)
+
+
+def list_issue_comments(issue_number: Union[int, str]) -> list:
+ suffix = ISSUE_SUFFIX + f'/{issue_number}/comments'
+ response = http_request('GET', url_suffix=suffix)
+ return response
+
+
+def list_issue_comments_command():
+ args = demisto.args()
+ issue_number = args.get('issue_number')
+ response = list_issue_comments(issue_number)
+
+ ec_object = [format_comment_outputs(comment, issue_number) for comment in response]
+ ec = {
+ 'GitHub.Comment(val.IssueNumber === obj.IssueNumber && val.ID === obj.ID)': ec_object
+ }
+ human_readable = tableToMarkdown(f'Comments for Issue #{issue_number}', ec_object, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=response)
+
+
+def create_comment(issue_number: Union[int, str], msg: str) -> dict:
+ suffix = ISSUE_SUFFIX + f'/{issue_number}/comments'
+ response = http_request('POST', url_suffix=suffix, data={'body': msg})
+ return response
+
+
+def create_comment_command():
+ args = demisto.args()
+ issue_number = args.get('issue_number')
+ body = args.get('body')
+ response = create_comment(issue_number, body)
+
+ ec_object = format_comment_outputs(response, issue_number)
+ ec = {
+ 'GitHub.Comment(val.IssueNumber === obj.IssueNumber && val.ID === obj.ID)': ec_object
+ }
+ human_readable = tableToMarkdown('Created Comment', ec_object, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=response)
+
+
+def request_review(pull_number: Union[int, str], reviewers: list) -> dict:
+ """Make an API call to GitHub to request reviews from a list of users for a given PR
+
+ Args:
+ pull_number (int): The number of the PR for which the review request(s) is/are being made
+ reviewers (list): The list of GitHub usernames from which you wish to request a review
+
+ Returns:
+ dict: API response
+
+ Raises:
+ Exception: An exception will be raised if one or more of the requested reviewers is not
+ a collaborator of the repo and therefore the API call returns a 'Status: 422 Unprocessable Entity'
+ """
+ suffix = PULLS_SUFFIX + f'/{pull_number}/requested_reviewers'
+ response = http_request('POST', url_suffix=suffix, data={'reviewers': reviewers})
+ return response
+
+
+def request_review_command():
+ args = demisto.args()
+ pull_number = args.get('pull_number')
+ reviewers = argToList(args.get('reviewers'))
+ response = request_review(pull_number, reviewers)
+
+ requested_reviewers = response.get('requested_reviewers', [])
+ formatted_requested_reviewers = [format_user_outputs(reviewer) for reviewer in requested_reviewers]
+ ec_object = {
+ 'Number': response.get('number'),
+ 'RequestedReviewer': formatted_requested_reviewers
+ }
+ ec = {
+ 'GitHub.PR(val.Number === obj.Number)': ec_object
+ }
+ human_readable = tableToMarkdown(f'Requested Reviewers for #{response.get("number")}',
+ formatted_requested_reviewers, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=response)
+
+
+def get_team_membership(team_id: Union[int, str], user_name: str) -> dict:
+ suffix = f'/teams/{team_id}/memberships/{user_name}'
+ response = http_request('GET', url_suffix=suffix)
+ return response
+
+
+def get_team_membership_command():
+ args = demisto.args()
+ team_id = args.get('team_id')
+ try:
+ team_id = int(team_id)
+ except ValueError as e:
+ return_error('"team_id" command argument must be an integer value.', e)
+ user_name = args.get('user_name')
+ response = get_team_membership(team_id, user_name)
+
+ ec_object = {
+ 'ID': team_id,
+ 'Member': {
+ 'Login': user_name,
+ 'Role': response.get('role'),
+ 'State': response.get('state')
+ }
+ }
+ ec = {
+ 'GitHub.Team': ec_object
+ }
+ human_readable = tableToMarkdown(f'Team Membership of {user_name}', ec_object, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=response)
+
+
+def get_branch(branch: str) -> dict:
+ suffix = USER_SUFFIX + f'/branches/{branch}'
+ response = http_request('GET', url_suffix=suffix)
+ return response
+
+
+def get_branch_command():
+ args = demisto.args()
+ branch_name = args.get('branch_name')
+ response = get_branch(branch_name)
+
+ commit = response.get('commit', {})
+ author = commit.get('author', {})
+ parents = commit.get('parents', [])
+ ec_object = {
+ 'Name': response.get('name'),
+ 'CommitSHA': commit.get('sha'),
+ 'CommitNodeID': commit.get('node_id'),
+ 'CommitAuthorID': author.get('id'),
+ 'CommitAuthorLogin': author.get('login'),
+ 'CommitParentSHA': [parent.get('sha') for parent in parents],
+ 'Protected': response.get('protected')
+ }
+ ec = {
+ 'GitHub.Branch(val.Name === obj.Name && val.CommitSHA === obj.CommitSHA)': ec_object
+ }
+ human_readable = tableToMarkdown(f'Branch "{branch_name}"', ec_object, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=response)
+
+
+def create_branch(name: str, sha: str) -> dict:
+ suffix = USER_SUFFIX + '/git/refs'
+ data = {
+ 'ref': f'refs/heads/{name}',
+ 'sha': sha
+ }
+ response = http_request('POST', url_suffix=suffix, data=data)
+ return response
+
+
+def create_branch_command():
+ args = demisto.args()
+ branch_name = args.get('branch_name')
+ commit_sha = args.get('commit_sha')
+ create_branch(branch_name, commit_sha)
+ msg = f'Branch "{branch_name}" Created Successfully'
+ demisto.results(msg)
+
+
+def delete_branch(name: str):
+ suffix = USER_SUFFIX + f'/git/refs/heads/{name}'
+ http_request('DELETE', url_suffix=suffix)
+
+
+def delete_branch_command():
+ args = demisto.args()
+ branch_name = args.get('branch_name')
+ delete_branch(branch_name)
+ msg = f'Branch "{branch_name}" Deleted Successfully'
+ demisto.results(msg)
+
+
+def get_stale_prs(stale_time: str, label: str) -> list:
+ time_range_start, _ = parse_date_range(stale_time)
+    # regex to strip the microseconds from the end of the isoformat timestamp, which the API does not accept
+    timestamp_regex = re.compile(r'\.\d{6}$')
+ timestamp, _ = timestamp_regex.subn('', time_range_start.isoformat())
+ query = f'repo:{USER}/{REPOSITORY} is:open updated:<{timestamp} is:pr'
+ if label:
+ query += f' label:{label}'
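+    # Illustrative final search string (hypothetical repo and label):
+    #   repo:demisto/content is:open updated:<2019-09-30T00:00:00 is:pr label:bug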
+ matching_issues = search_issue(query).get('items', [])
+ relevant_prs = [get_pull_request(issue.get('number')) for issue in matching_issues]
+ return relevant_prs
+
+
+def get_stale_prs_command():
+ args = demisto.args()
+ stale_time = args.get('stale_time', '3 days')
+ label = args.get('label')
+ results = get_stale_prs(stale_time, label)
+ if results:
+ formatted_results = []
+ for pr in results:
+ requested_reviewers = [
+ requested_reviewer.get('login') for requested_reviewer in pr.get('requested_reviewers', [])
+ ]
+ formatted_pr = {
+ 'URL': f'<{pr.get("html_url")}>',
+ 'Number': pr.get('number'),
+ 'RequestedReviewer': requested_reviewers
+ }
+ formatted_results.append(formatted_pr)
+ ec = {
+ 'GitHub.PR(val.Number === obj.Number)': formatted_results
+ }
+ human_readable = tableToMarkdown('Stale PRs', formatted_results, removeNull=True)
+ return_outputs(readable_output=human_readable, outputs=ec, raw_response=results)
+ else:
+ demisto.results('No stale external PRs found')
+
+
+def create_issue(title, body, labels, assignees):
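+ # 'Create an issue' endpoint: POST /repos/{owner}/{repo}/issues.
+ # ISSUE_SUFFIX (defined earlier in the file) is assumed to resolve to that path.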
+ data = data_formatting(title=title,
+ body=body,
+ labels=labels,
+ assignees=assignees,
+ state=None)
+
+ response = http_request(method='POST',
+ url_suffix=ISSUE_SUFFIX,
+ data=data)
+ return response
+
+
+def create_command():
+ args = demisto.args()
+ response = create_issue(args.get('title'), args.get('body'),
+ args.get('labels'), args.get('assignees'))
+ issue = issue_format(response)
+ context_create_issue(response, issue)
+
+
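+# GitHub has no dedicated close endpoint; an issue is closed by PATCHing its state.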
+def close_issue(issue_id):
+ response = http_request(method='PATCH',
+ url_suffix=ISSUE_SUFFIX + f'/{issue_id}',
+ data={'state': 'closed'})
+ return response
+
+
+def close_command():
+ issue_id = demisto.args().get('ID')
+ response = close_issue(issue_id)
+ issue = issue_format(response)
+ context_create_issue(response, issue)
+
+
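+# Only fields with values survive data_formatting (defined earlier in the file),
+# so arguments omitted by the user presumably leave the issue's current values untouched.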
+def update_issue(issue_id, title, body, state, labels, assign):
+ data = data_formatting(title=title,
+ body=body,
+ labels=labels,
+ assignees=assign,
+ state=state)
+
+ response = http_request(method='PATCH',
+ url_suffix=ISSUE_SUFFIX + f'/{issue_id}',
+ data=data)
+ return response
+
+
+def update_command():
+ args = demisto.args()
+ response = update_issue(args.get('ID'), args.get('title'), args.get('body'), args.get('state'),
+ args.get('labels'), args.get('assignees'))
+ issue = issue_format(response)
+ context_create_issue(response, issue)
+
+
+def list_all_issue(state):
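+ # 'List repository issues' endpoint: GET /repos/{owner}/{repo}/issues?state=<state>.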
+ params = {'state': state}
+ response = http_request(method='GET',
+ url_suffix=ISSUE_SUFFIX,
+ params=params)
+ return response
+
+
+def list_all_command():
+ state = demisto.args().get('state')
+ limit = int(demisto.args().get('limit', '50'))
+ if limit > 200:
+ limit = 200
+
+ response = list_all_issue(state)
+ create_issue_table(response, response, limit)
+
+
+def search_issue(query):
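+ # 'Search issues and pull requests' endpoint: GET /search/issues; matches are
+ # returned under the 'items' key of the response.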
+ response = http_request(method='GET',
+ url_suffix='/search/issues',
+ params={'q': query})
+ return response
+
+
+def search_command():
+ q = demisto.args().get('query')
+ limit = int(demisto.args().get('limit', '50'))
+ if limit > 200:
+ limit = 200
+
+ response = search_issue(q)
+ create_issue_table(response['items'], response, limit)
+
+
+def get_download_count():
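+ # GET /repos/{owner}/{repo}/releases; the per-release count is the sum of the
+ # download_count of every asset attached to that release.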
+ response = http_request(method='GET',
+ url_suffix=RELEASE_SUFFIX)
+
+ count_per_release = []
+ for release in response:
+ total_download_count = 0
+ for asset in release.get('assets', []):
+ total_download_count = total_download_count + asset['download_count']
+
+ release_info = {
+ 'ID': release.get('id'),
+ 'Download_count': total_download_count,
+ 'Name': release.get('name'),
+ 'Body': release.get('body'),
+ 'Created_at': release.get('created_at'),
+ 'Published_at': release.get('published_at')
+ }
+ count_per_release.append(release_info)
+
+ ec = {
+ 'GitHub.Release(val.ID == obj.ID)': count_per_release
+ }
+ return_outputs(tableToMarkdown('Releases:', count_per_release, headers=RELEASE_HEADERS, removeNull=True), ec,
+ response)
+
+
+def fetch_incidents_command():
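+ # The fetch checkpoint is persisted via demisto.setLastRun; on the first run it
+ # falls back to FETCH_TIME days ago. Issues created after the checkpoint become incidents.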
+ last_run = demisto.getLastRun()
+ if last_run and 'start_time' in last_run:
+ start_time = datetime.strptime(last_run.get('start_time'), '%Y-%m-%dT%H:%M:%SZ')
+
+ else:
+ start_time = datetime.now() - timedelta(days=int(FETCH_TIME))
+
+ last_time = start_time
+ issue_list = http_request(method='GET',
+ url_suffix=ISSUE_SUFFIX,
+ params={'state': 'all'})
+
+ incidents = []
+ for issue in issue_list:
+ created_at_str = issue.get('created_at')
+ created_at = datetime.strptime(created_at_str, '%Y-%m-%dT%H:%M:%SZ')
+ if created_at > start_time:
+ inc = {
+ 'name': issue.get('url'),
+ 'occurred': created_at_str,
+ 'rawJSON': json.dumps(issue)
+ }
+ incidents.append(inc)
+ if created_at > last_time:
+ last_time = created_at
+
+ demisto.setLastRun({'start_time': datetime.strftime(last_time, '%Y-%m-%dT%H:%M:%SZ')})
+ demisto.incidents(incidents)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
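+# Dispatch table mapping each integration command to its handler; main() looks up
+# demisto.command() here.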
+COMMANDS = {
+ 'test-module': test_module,
+ 'fetch-incidents': fetch_incidents_command,
+ 'GitHub-create-issue': create_command,
+ 'GitHub-close-issue': close_command,
+ 'GitHub-update-issue': update_command,
+ 'GitHub-list-all-issues': list_all_command,
+ 'GitHub-search-issues': search_command,
+ 'GitHub-get-download-count': get_download_count,
+ 'GitHub-get-stale-prs': get_stale_prs_command,
+ 'GitHub-get-branch': get_branch_command,
+ 'GitHub-create-branch': create_branch_command,
+ 'GitHub-get-team-membership': get_team_membership_command,
+ 'GitHub-request-review': request_review_command,
+ 'GitHub-create-comment': create_comment_command,
+ 'GitHub-list-issue-comments': list_issue_comments_command,
+ 'GitHub-list-pr-files': list_pr_files_command,
+ 'GitHub-list-pr-reviews': list_pr_reviews_command,
+ 'GitHub-get-commit': get_commit_command,
+ 'GitHub-add-label': add_label_command,
+ 'GitHub-get-pull-request': get_pull_request_command,
+ 'GitHub-list-teams': list_teams_command,
+ 'GitHub-delete-branch': delete_branch_command,
+ 'GitHub-list-pr-review-comments': list_pr_review_comments_command,
+ 'GitHub-update-pull-request': update_pull_request_command,
+ 'GitHub-is-pr-merged': is_pr_merged_command,
+ 'GitHub-create-pull-request': create_pull_request_command
+}
+
+
+'''EXECUTION'''
+
+
+def main():
+ handle_proxy()
+ cmd = demisto.command()
+ LOG(f'command is {cmd}')
+ try:
+ if cmd in COMMANDS:
+ COMMANDS[cmd]()
+ except Exception as e:
+ return_error(str(e))
+
+
+# Python 2 loads integration scripts as __builtin__; Python 3 uses builtins.
+if __name__ in ('__builtin__', 'builtins'):
+ main()
diff --git a/Integrations/GitHub/GitHub.yml b/Integrations/GitHub/GitHub.yml
new file mode 100644
index 000000000000..62b819243b93
--- /dev/null
+++ b/Integrations/GitHub/GitHub.yml
@@ -0,0 +1,2585 @@
+category: Authentication
+commonfields:
+ id: GitHub
+ version: -1
+configuration:
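+# The numeric "type" codes follow the platform convention: 0 = short text,
+# 4 = encrypted, 8 = boolean, 13 = incident type.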
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: API Token
+ name: token
+ required: true
+ type: 4
+- display: 'Username of the repository owner, for example: api.github.com/repos/{_owner_}/{repo}/issues'
+ name: user
+ required: false
+ type: 0
+- display: The name of the requested repository
+ name: repository
+ required: false
+ type: 0
+- defaultvalue: 3
+ display: First fetch interval (in days)
+ name: fetch_time
+ required: false
+ type: 0
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ required: false
+ type: 8
+- defaultvalue: 'false'
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: ""
+ display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+description: An integration with the GitHub API.
+display: GitHub
+name: GitHub
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The title of the issue.
+ isArray: false
+ name: title
+ required: true
+ secret: false
+ - default: false
+ description: The contents of the issue.
+ isArray: false
+ name: body
+ required: false
+ secret: false
+ - default: false
+ description: Labels to associate with this issue.
+ isArray: true
+ name: labels
+ required: false
+ secret: false
+ - default: false
+ description: Logins for Users to assign to this issue.
+ isArray: true
+ name: assignees
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates an issue in GitHub.
+ execution: false
+ name: GitHub-create-issue
+ outputs:
+ - contextPath: GitHub.Issue.ID
+ description: The ID of the created issue.
+ type: Number
+ - contextPath: GitHub.Issue.Repository
+ description: The repository of the created issue.
+ type: String
+ - contextPath: GitHub.Issue.Title
+ description: The title of the created issue.
+ type: String
+ - contextPath: GitHub.Issue.Body
+ description: The body of the created issue.
+ type: Unknown
+ - contextPath: GitHub.Issue.State
+ description: The state of the created issue.
+ type: String
+ - contextPath: GitHub.Issue.Labels
+ description: Labels applied to the issue.
+ type: String
+ - contextPath: GitHub.Issue.Assignees
+ description: Users assigned to this issue.
+ type: String
+ - contextPath: GitHub.Issue.Created_at
+ description: Date when the issue was created.
+ type: Date
+ - contextPath: GitHub.Issue.Updated_at
+ description: Date when the issue was last updated.
+ type: Date
+ - contextPath: GitHub.Issue.Closed_at
+ description: Date when the issue was closed.
+ type: Date
+ - contextPath: GitHub.Issue.Closed_by
+ description: User who closed the issue.
+ type: String
+ - arguments:
+ - default: false
+ description: The number of the issue to close.
+ isArray: false
+ name: ID
+ required: true
+ secret: false
+ deprecated: false
+ description: Closes an existing issue.
+ execution: false
+ name: GitHub-close-issue
+ outputs:
+ - contextPath: GitHub.Issue.ID
+ description: The ID of the closed issue.
+ type: Number
+ - contextPath: GitHub.Issue.Repository
+ description: The repository of the closed issue.
+ type: String
+ - contextPath: GitHub.Issue.Title
+ description: The title of the closed issue.
+ type: String
+ - contextPath: GitHub.Issue.Body
+ description: The body of the closed issue.
+ type: Unknown
+ - contextPath: GitHub.Issue.State
+ description: The state of the closed issue.
+ type: String
+ - contextPath: GitHub.Issue.Labels
+ description: Labels applied to the issue.
+ type: String
+ - contextPath: GitHub.Issue.Assignees
+ description: Users assigned to the issue.
+ type: String
+ - contextPath: GitHub.Issue.Created_at
+ description: Date when the issue was created.
+ type: Date
+ - contextPath: GitHub.Issue.Updated_at
+ description: Date when the issue was last updated.
+ type: Date
+ - contextPath: GitHub.Issue.Closed_at
+ description: Date when the issue was closed.
+ type: Date
+ - contextPath: GitHub.Issue.Closed_by
+ description: User who closed the issue.
+ type: String
+ - arguments:
+ - default: false
+ description: The number of the issue to update.
+ isArray: false
+ name: ID
+ required: true
+ secret: false
+ - default: false
+ description: The title of the issue.
+ isArray: false
+ name: title
+ required: false
+ secret: false
+ - default: false
+ description: The contents of the issue.
+ isArray: false
+ name: body
+ required: false
+ secret: false
+ - default: false
+ description: State of the issue. Either open or closed.
+ isArray: false
+ name: state
+ required: false
+ secret: false
+ - default: false
+ description: 'Labels to apply to this issue. Pass one or more Labels to replace
+ the set of Labels on this Issue. Send an empty array ([]) to clear all Labels
+ from the Issue. '
+ isArray: true
+ name: labels
+ required: false
+ secret: false
+ - default: false
+ description: Logins for Users to assign to this issue. Pass one or more user
+ logins to replace the set of assignees on this Issue. Send an empty array
+ ([]) to clear all assignees from the Issue.
+ isArray: true
+ name: assignees
+ required: false
+ secret: false
+ deprecated: false
+ description: Updates the parameters of a specified issue.
+ execution: false
+ name: GitHub-update-issue
+ outputs:
+ - contextPath: GitHub.Issue.ID
+ description: The ID of the updated issue.
+ type: Number
+ - contextPath: GitHub.Issue.Repository
+ description: The repository of the updated issue.
+ type: String
+ - contextPath: GitHub.Issue.Title
+ description: The title of the updated issue.
+ type: String
+ - contextPath: GitHub.Issue.Body
+ description: The body of the updated issue.
+ type: Unknown
+ - contextPath: GitHub.Issue.State
+ description: The state of the updated issue.
+ type: String
+ - contextPath: GitHub.Issue.Labels
+ description: Labels applied to the issue.
+ type: String
+ - contextPath: GitHub.Issue.Assignees
+ description: Users assigned to the issue.
+ type: String
+ - contextPath: GitHub.Issue.Created_at
+ description: Date when the issue was created.
+ type: Date
+ - contextPath: GitHub.Issue.Updated_at
+ description: Date when the issue was last updated.
+ type: Date
+ - contextPath: GitHub.Issue.Closed_at
+ description: Date when the issue was closed.
+ type: Date
+ - contextPath: GitHub.Issue.Closed_by
+ description: User who closed the issue.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: open
+ description: The state of the issues to return. Can be 'open', 'closed', or
+ 'all'. Default is 'open'.
+ isArray: false
+ name: state
+ predefined:
+ - open
+ - closed
+ - all
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '50'
+ description: The number of issues to return. Default is 50. Maximum is 200.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Lists all issues that the user has access to view.
+ execution: false
+ name: GitHub-list-all-issues
+ outputs:
+ - contextPath: GitHub.Issue.ID
+ description: The ID of the issue.
+ type: Number
+ - contextPath: GitHub.Issue.Repository
+ description: The repository of the issue.
+ type: String
+ - contextPath: GitHub.Issue.Title
+ description: The title of the issue.
+ type: String
+ - contextPath: GitHub.Issue.Body
+ description: The body of the issue.
+ type: Unknown
+ - contextPath: GitHub.Issue.State
+ description: The state of the issue.
+ type: String
+ - contextPath: GitHub.Issue.Labels
+ description: Labels applied to the issue.
+ type: String
+ - contextPath: GitHub.Issue.Assignees
+ description: Users assigned to the issue.
+ type: String
+ - contextPath: GitHub.Issue.Created_at
+ description: Date when the issue was created.
+ type: Date
+ - contextPath: GitHub.Issue.Updated_at
+ description: Date when the issue was last updated.
+ type: Date
+ - contextPath: GitHub.Issue.Closed_at
+ description: Date when the issue was closed.
+ type: Date
+ - contextPath: GitHub.Issue.Closed_by
+ description: User who closed the issue.
+ type: String
+ - arguments:
+ - default: false
+ description: The query line for the search. See the [GitHub documentation](https://help.github.com/en/articles/searching-issues-and-pull-requests)
+ for more information.
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '50'
+ description: The number of issues to return. Default is 50. Maximum is 200.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for and returns issues that match a given query.
+ execution: false
+ name: GitHub-search-issues
+ outputs:
+ - contextPath: GitHub.Issue.ID
+ description: The ID of the issue.
+ type: Number
+ - contextPath: GitHub.Issue.Repository
+ description: The repository of the issue.
+ type: String
+ - contextPath: GitHub.Issue.Title
+ description: The title of the issue.
+ type: String
+ - contextPath: GitHub.Issue.Body
+ description: The body of the issue.
+ type: Unknown
+ - contextPath: GitHub.Issue.State
+ description: The state of the issue.
+ type: String
+ - contextPath: GitHub.Issue.Labels
+ description: Labels applied to the issue.
+ type: String
+ - contextPath: GitHub.Issue.Assignees
+ description: Users assigned to the issue.
+ type: String
+ - contextPath: GitHub.Issue.Created_at
+ description: Date when the issue was created.
+ type: Date
+ - contextPath: GitHub.Issue.Updated_at
+ description: Date when the issue was last updated.
+ type: Date
+ - contextPath: GitHub.Issue.Closed_at
+ description: Date when the issue was closed.
+ type: Date
+ - contextPath: GitHub.Issue.Closed_by
+ description: User who closed the issue.
+ type: String
+ - deprecated: false
+ description: Returns the total number of downloads for all releases for the specified
+ repository.
+ execution: false
+ name: GitHub-get-download-count
+ outputs:
+ - contextPath: GitHub.Release.ID
+ description: ID of the release.
+ type: Number
+ - contextPath: GitHub.Release.Download_count
+ description: Download count for the release.
+ type: Number
+ - contextPath: GitHub.Release.Name
+ description: Name of the release.
+ type: String
+ - contextPath: GitHub.Release.Body
+ description: Body of the release.
+ type: String
+ - contextPath: GitHub.Release.Created_at
+ description: Date when the release was created.
+ type: Date
+ - contextPath: GitHub.Release.Published_at
+ description: Date when the release was published.
+ type: Date
+ - arguments:
+ - default: true
+ defaultValue: 3 days
+ description: Time of inactivity after which a PR is considered stale
+ isArray: false
+ name: stale_time
+ required: true
+ secret: false
+ - default: false
+ description: The label used to identify PRs of interest
+ isArray: false
+ name: label
+ required: false
+ secret: false
+ deprecated: false
+ description: Get inactive pull requests
+ execution: false
+ name: GitHub-get-stale-prs
+ outputs:
+ - contextPath: GitHub.PR.URL
+ description: The html URL of the PR
+ type: String
+ - contextPath: GitHub.PR.Number
+ description: The GitHub pull request number
+ type: Number
+ - contextPath: GitHub.PR.RequestedReviewer
+ description: A list of the PR's requested reviewers
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The name of the branch to retrieve
+ isArray: false
+ name: branch_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Get a branch
+ execution: false
+ name: GitHub-get-branch
+ outputs:
+ - contextPath: GitHub.Branch.Name
+ description: The name of the branch
+ type: String
+ - contextPath: GitHub.Branch.CommitSHA
+ description: The SHA of the commit the branch references
+ type: String
+ - contextPath: GitHub.Branch.CommitNodeID
+ description: The Node ID of the commit the branch references
+ type: String
+ - contextPath: GitHub.Branch.CommitAuthorID
+ description: The GitHub ID number of the author of the commit the branch references
+ type: Number
+ - contextPath: GitHub.Branch.CommitAuthorLogin
+ description: The GitHub login of the author of the commit the branch references
+ type: String
+ - contextPath: GitHub.Branch.CommitParentSHA
+ description: The SHAs of parent commits
+ type: String
+ - contextPath: GitHub.Branch.Protected
+ description: Whether the branch is a protected one or not
+ type: Boolean
+ - arguments:
+ - default: false
+ description: The name for the new branch
+ isArray: false
+ name: branch_name
+ required: true
+ secret: false
+ - default: false
+ description: The SHA hash of the commit to reference. Try executing the 'GitHub-get-branch'
+ command to find a commit SHA hash to reference.
+ isArray: false
+ name: commit_sha
+ required: true
+ secret: false
+ deprecated: false
+ description: Create a new branch
+ execution: false
+ name: GitHub-create-branch
+ - arguments:
+ - default: false
+ description: The ID number by which the team is identified. Try executing the
+ 'GitHub-list-teams' command to find team IDs to reference.
+ isArray: false
+ name: team_id
+ required: true
+ secret: false
+ - default: false
+ description: The login of the user whose membership you wish to check
+ isArray: false
+ name: user_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieve a user's membership status with a team
+ execution: false
+ name: GitHub-get-team-membership
+ outputs:
+ - contextPath: GitHub.Team.Member.Role
+ description: The user's role on a team
+ type: String
+ - contextPath: GitHub.Team.Member.State
+ description: The user's state for a team
+ type: String
+ - contextPath: GitHub.Team.ID
+ description: The ID number of the team
+ type: Number
+ - contextPath: GitHub.Team.Member.Login
+ description: The login of the team member
+ type: String
+ - arguments:
+ - default: false
+ description: The number of the Pull Request for which you wish to request review
+ isArray: false
+ name: pull_number
+ required: true
+ secret: false
+ - default: false
+ description: A CSV list of GitHub users to request review from for a Pull Request
+ isArray: true
+ name: reviewers
+ required: true
+ secret: false
+ deprecated: false
+ description: Request reviews from GitHub users for a given Pull Request
+ execution: false
+ name: GitHub-request-review
+ outputs:
+ - contextPath: GitHub.PR.Number
+ description: The number of the Pull Request
+ type: Number
+ - contextPath: GitHub.PR.RequestedReviewer.Login
+ description: The login of the user requested for review
+ type: String
+ - contextPath: GitHub.PR.RequestedReviewer.ID
+ description: The ID of the user requested for review
+ type: Number
+ - contextPath: GitHub.PR.RequestedReviewer.NodeID
+ description: The node ID of the user requested for review
+ type: String
+ - contextPath: GitHub.PR.RequestedReviewer.Type
+ description: The type of the user requested for review
+ type: String
+ - contextPath: GitHub.PR.RequestedReviewer.SiteAdmin
+ description: Whether the user requested for review is a site admin or not
+ type: Boolean
+ - arguments:
+ - default: false
+ description: The number of the issue to comment on
+ isArray: false
+ name: issue_number
+ required: true
+ secret: false
+ - default: false
+ description: The contents of the comment
+ isArray: false
+ name: body
+ required: true
+ secret: false
+ deprecated: false
+ description: Create a comment for a given issue
+ execution: false
+ name: GitHub-create-comment
+ outputs:
+ - contextPath: GitHub.Comment.IssueNumber
+ description: The number of the issue to which the comment belongs
+ type: Number
+ - contextPath: GitHub.Comment.ID
+ description: The ID of the comment
+ type: Number
+ - contextPath: GitHub.Comment.NodeID
+ description: The node ID of the comment
+ type: String
+ - contextPath: GitHub.Comment.Body
+ description: The body content of the comment
+ type: String
+ - contextPath: GitHub.Comment.User.Login
+ description: The login of the user who commented
+ type: String
+ - contextPath: GitHub.Comment.User.ID
+ description: The ID of the user who commented
+ type: Number
+ - contextPath: GitHub.Comment.User.NodeID
+ description: The node ID of the user who commented
+ type: String
+ - contextPath: GitHub.Comment.User.Type
+ description: The type of the user who commented
+ type: String
+ - contextPath: GitHub.Comment.User.SiteAdmin
+ description: Whether the user who commented is a site admin or not
+ type: Boolean
+ - arguments:
+ - default: false
+ description: The number of the issue to list comments for
+ isArray: false
+ name: issue_number
+ required: true
+ secret: false
+ deprecated: false
+ description: List comments on an issue
+ execution: false
+ name: GitHub-list-issue-comments
+ outputs:
+ - contextPath: GitHub.Comment.IssueNumber
+ description: The number of the issue to which the comment belongs
+ type: Number
+ - contextPath: GitHub.Comment.ID
+ description: The ID of the comment
+ type: Number
+ - contextPath: GitHub.Comment.NodeID
+ description: The node ID of the comment
+ type: String
+ - contextPath: GitHub.Comment.Body
+ description: The body content of the comment
+ type: String
+ - contextPath: GitHub.Comment.User.Login
+ description: The login of the user who commented
+ type: String
+ - contextPath: GitHub.Comment.User.ID
+ description: The ID of the user who commented
+ type: Number
+ - contextPath: GitHub.Comment.User.NodeID
+ description: The node ID of the user who commented
+ type: String
+ - contextPath: GitHub.Comment.User.Type
+ description: The type of the user who commented
+ type: String
+ - contextPath: GitHub.Comment.User.SiteAdmin
+ description: Whether the user who commented is a site admin or not
+ type: Boolean
+ - arguments:
+ - default: false
+ description: The number of the pull request.
+ isArray: false
+ name: pull_number
+ required: true
+ secret: false
+ deprecated: false
+ description: Lists the pull request files.
+ execution: false
+ name: GitHub-list-pr-files
+ outputs:
+ - contextPath: GitHub.PR.Number
+ description: The number of the pull request
+ type: Number
+ - contextPath: GitHub.PR.File.SHA
+ description: The SHA hash of the last commit involving the file.
+ type: String
+ - contextPath: GitHub.PR.File.Name
+ description: The name of the file.
+ type: String
+ - contextPath: GitHub.PR.File.Status
+ description: The status of the file.
+ type: String
+ - contextPath: GitHub.PR.File.Additions
+ description: The number of additions to the file.
+ type: Number
+ - contextPath: GitHub.PR.File.Deletions
+ description: The number of deletions in the file.
+ type: Number
+ - contextPath: GitHub.PR.File.Changes
+ description: The number of changes made in the file.
+ type: Number
+ - arguments:
+ - default: false
+ description: The number of the pull request
+ isArray: false
+ name: pull_number
+ required: true
+ secret: false
+ deprecated: false
+ description: List reviews on a pull request
+ execution: false
+ name: GitHub-list-pr-reviews
+ outputs:
+ - contextPath: GitHub.PR.Number
+ description: The number of the pull request
+ type: Number
+ - contextPath: GitHub.PR.Review.ID
+ description: The ID of the review
+ type: Number
+ - contextPath: GitHub.PR.Review.NodeID
+ description: The node ID of the review
+ type: String
+ - contextPath: GitHub.PR.Review.Body
+ description: The content of the review
+ type: String
+ - contextPath: GitHub.PR.Review.CommitID
+ description: The ID of the commit for which the review is applicable
+ type: String
+ - contextPath: GitHub.PR.Review.State
+ description: The state of the review
+ type: String
+ - contextPath: GitHub.PR.Review.User.Login
+ description: The reviewer's user login
+ type: String
+ - contextPath: GitHub.PR.Review.User.ID
+ description: The reviewer's user ID
+ type: Number
+ - contextPath: GitHub.PR.Review.User.NodeID
+ description: The reviewer's user node ID
+ type: String
+ - contextPath: GitHub.PR.Review.User.Type
+ description: The reviewer user type
+ type: String
+ - contextPath: GitHub.PR.Review.User.SiteAdmin
+ description: Whether the reviewer is a site admin or not
+ type: Boolean
+ - arguments:
+ - default: false
+ description: The SHA hash of the commit. Try executing the 'GitHub-get-branch'
+ command to find a commit SHA hash to reference.
+ isArray: false
+ name: commit_sha
+ required: true
+ secret: false
+ deprecated: false
+ description: Get a commit
+ execution: false
+ name: GitHub-get-commit
+ outputs:
+ - contextPath: GitHub.Commit.SHA
+ description: The SHA hash of the commit
+ type: String
+ - contextPath: GitHub.Commit.Author.Date
+ description: The commit author date
+ type: String
+ - contextPath: GitHub.Commit.Author.Name
+ description: The name of the author
+ type: String
+ - contextPath: GitHub.Commit.Author.Email
+ description: The email of the author
+ type: String
+ - contextPath: GitHub.Commit.Committer.Date
+ description: The date the committer committed
+ type: String
+ - contextPath: GitHub.Commit.Committer.Name
+ description: The name of the committer
+ type: String
+ - contextPath: GitHub.Commit.Committer.Email
+ description: The email of the committer
+ type: String
+ - contextPath: GitHub.Commit.Message
+ description: The message associated with the commit
+ type: String
+ - contextPath: GitHub.Commit.Parent
+ description: List of parent SHA hashes
+ type: Unknown
+ - contextPath: GitHub.Commit.TreeSHA
+ description: The SHA hash of the commit's tree
+ type: String
+ - contextPath: GitHub.Commit.Verification.Verified
+ description: Whether the commit was verified or not
+ type: Boolean
+ - contextPath: GitHub.Commit.Verification.Reason
+ description: The reason why the commit was or was not verified
+ type: String
+ - contextPath: GitHub.Commit.Verification.Signature
+ description: The commit verification signature
+ type: Unknown
+ - contextPath: GitHub.Commit.Verification.Payload
+ description: The commit verification payload
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The number of the issue to add labels to
+ isArray: false
+ name: issue_number
+ required: true
+ secret: false
+ - default: false
+ description: A CSV list of labels to add to an issue
+ isArray: true
+ name: labels
+ required: true
+ secret: false
+ deprecated: false
+ description: Add labels to an issue
+ execution: false
+ name: GitHub-add-label
+ - arguments:
+ - default: false
+ description: The number of the pull request to retrieve
+ isArray: false
+ name: pull_number
+ required: true
+ secret: false
+ deprecated: false
+ description: Get a pull request
+ execution: false
+ name: GitHub-get-pull-request
+ outputs:
+ - contextPath: GitHub.PR.ID
+ description: The ID number of the pull request
+ type: Number
+ - contextPath: GitHub.PR.NodeID
+ description: The node ID of the pull request
+ type: String
+ - contextPath: GitHub.PR.Number
+ description: The issue number of the pull request
+ type: Number
+ - contextPath: GitHub.PR.State
+ description: The state of the pull request
+ type: String
+ - contextPath: GitHub.PR.Locked
+ description: Whether the pull request is locked or not
+ type: Boolean
+ - contextPath: GitHub.PR.Title
+ description: The title of the pull request
+ type: String
+ - contextPath: GitHub.PR.User.Login
+ description: The login of the user who opened the pull request
+ type: String
+ - contextPath: GitHub.PR.User.ID
+ description: The ID of the user who opened the pull request
+ type: Number
+ - contextPath: GitHub.PR.User.NodeID
+ description: The node ID of the user who opened the pull request
+ type: String
+ - contextPath: GitHub.PR.User.Type
+ description: The type of the user who opened the pull request
+ type: String
+ - contextPath: GitHub.PR.User.SiteAdmin
+ description: Whether the user who opened the pull request is a site admin or
+ not
+ type: Boolean
+ - contextPath: GitHub.PR.Body
+ description: The body content of the pull request
+ type: String
+ - contextPath: GitHub.PR.Label.ID
+ description: The ID of the label
+ type: Number
+ - contextPath: GitHub.PR.Label.NodeID
+ description: The node ID of the label
+ type: String
+ - contextPath: GitHub.PR.Label.Name
+ description: The name of the label
+ type: String
+ - contextPath: GitHub.PR.Label.Description
+ description: The description of the label
+ type: String
+ - contextPath: GitHub.PR.Label.Color
+ description: The hex color value of the label
+ type: String
+ - contextPath: GitHub.PR.Label.Default
+ description: Whether the label is a default or not
+ type: Boolean
+ - contextPath: GitHub.PR.Milestone.ID
+ description: The ID of the milestone
+ type: Number
+ - contextPath: GitHub.PR.Milestone.NodeID
+ description: The node ID of the milestone
+ type: String
+ - contextPath: GitHub.PR.Milestone.Number
+ description: The number of the milestone
+ type: Number
+ - contextPath: GitHub.PR.Milestone.State
+ description: The state of the milestone
+ type: String
+ - contextPath: GitHub.PR.Milestone.Title
+ description: The title of the milestone
+ type: String
+ - contextPath: GitHub.PR.Milestone.Description
+ description: The description of the milestone
+ type: String
+ - contextPath: GitHub.PR.Milestone.Creator.Login
+ description: The login of the milestone creator
+ type: String
+ - contextPath: GitHub.PR.Milestone.Creator.ID
+ description: The ID of the milestone creator
+ type: Number
+ - contextPath: GitHub.PR.Milestone.Creator.NodeID
+ description: The node ID of the milestone creator
+ type: String
+ - contextPath: GitHub.PR.Milestone.Creator.Type
+ description: The type of the milestone creator
+ type: String
+ - contextPath: GitHub.PR.Milestone.Creator.SiteAdmin
+ description: Whether the milestone creator is a site admin or not
+ type: Boolean
+ - contextPath: GitHub.PR.Milestone.OpenIssues
+ description: The number of open issues with this milestone
+ type: Number
+ - contextPath: GitHub.PR.Milestone.ClosedIssues
+ description: The number of closed issues with this milestone
+ type: Number
+ - contextPath: GitHub.PR.Milestone.CreatedAt
+ description: The date the milestone was created
+ type: String
+ - contextPath: GitHub.PR.Milestone.UpdatedAt
+ description: The date the milestone was updated
+ type: String
+ - contextPath: GitHub.PR.Milestone.ClosedAt
+ description: The date the milestone was closed
+ type: String
+ - contextPath: GitHub.PR.Milestone.DueOn
+ description: The due date for the milestone
+ type: String
+ - contextPath: GitHub.PR.ActiveLockReason
+ description: The reason the pull request is locked
+ type: String
+ - contextPath: GitHub.PR.CreatedAt
+ description: The date the pull request was created
+ type: String
+ - contextPath: GitHub.PR.UpdatedAt
+ description: The date the pull request was updated
+ type: String
+ - contextPath: GitHub.PR.ClosedAt
+ description: The date the pull request was closed
+ type: String
+ - contextPath: GitHub.PR.MergedAt
+ description: The date the pull request was merged
+ type: String
+ - contextPath: GitHub.PR.MergeCommitSHA
+ description: The SHA hash of the pull request's merge commit
+ type: String
+ - contextPath: GitHub.PR.Assignee.Login
+ description: The login of the user assigned to the pull request
+ type: String
+ - contextPath: GitHub.PR.Assignee.ID
+ description: The ID of the user assigned to the pull request
+ type: Number
+ - contextPath: GitHub.PR.Assignee.NodeID
+ description: The node ID of the user assigned to the pull request
+ type: String
+ - contextPath: GitHub.PR.Assignee.Type
+ description: The type of the user assigned to the pull request
+ type: String
+ - contextPath: GitHub.PR.Assignee.SiteAdmin
+ description: Whether the user assigned to the pull request is a site admin or
+ not
+ type: Boolean
+ - contextPath: GitHub.PR.RequestedReviewer.Login
+ description: The login of the user requested for review
+ type: String
+ - contextPath: GitHub.PR.RequestedReviewer.ID
+ description: The ID of the user requested for review
+ type: Number
+ - contextPath: GitHub.PR.RequestedReviewer.NodeID
+ description: The node ID of the user requested for review
+ type: String
+ - contextPath: GitHub.PR.RequestedReviewer.Type
+ description: The type of the user requested for review
+ type: String
+ - contextPath: GitHub.PR.RequestedReviewer.SiteAdmin
+ description: Whether the user requested for review is a site admin or not
+ type: Boolean
+ - contextPath: GitHub.PR.RequestedTeam.ID
+ description: The ID of the team requested for review
+ type: Number
+ - contextPath: GitHub.PR.RequestedTeam.NodeID
+ description: The node ID of the team requested for review
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Name
+ description: The name of the team requested for review
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Slug
+ description: The slug of the team requested for review
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Description
+ description: The description of the team requested for review
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Privacy
+ description: The privacy setting of the team requested for review
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Permission
+ description: The permissions of the team requested for review
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Parent
+ description: The parent of the team requested for review
+ type: Unknown
+ - contextPath: GitHub.PR.Head.Label
+ description: The label of the branch that HEAD points to
+ type: String
+ - contextPath: GitHub.PR.Head.Ref
+ description: The reference of the branch that HEAD points to
+ type: String
+ - contextPath: GitHub.PR.Head.SHA
+ description: The SHA hash of the commit that HEAD points to
+ type: String
+ - contextPath: GitHub.PR.Head.User.Login
+ description: The login of the committer of the HEAD commit of the checked out
+ branch
+ type: String
+ - contextPath: GitHub.PR.Head.User.ID
+ description: The ID of the committer of the HEAD commit of the checked out branch
+ type: Number
+ - contextPath: GitHub.PR.Head.User.NodeID
+ description: The node ID of the committer of the HEAD commit of the checked
+ out branch
+ type: String
+ - contextPath: GitHub.PR.Head.User.Type
+ description: The type of the committer of the HEAD commit of the checked out
+ branch
+ type: String
+ - contextPath: GitHub.PR.Head.User.SiteAdmin
+ description: Whether the committer of the HEAD commit of the checked out branch
+ is a site admin or not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.ID
+ description: The ID of the repository of the checked out branch
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.NodeID
+ description: The node ID of the repository of the checked out branch
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Name
+ description: The name of the repository of the checked out branch
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.FullName
+ description: The full name of the repository of the checked out branch
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Owner.Login
+ description: The user login of the owner of the repository of the checked out
+ branch
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Owner.ID
+ description: The user ID of the owner of the repository of the checked out branch
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.Owner.NodeID
+ description: The user node ID of the owner of the repository of the checked
+ out branch
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Owner.Type
+ description: The user type of the owner of the repository of the checked out
+ branch
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Owner.SiteAdmin
+ description: Whether the owner of the repository of the checked out branch is
+ a site admin or not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Private
+ description: Whether the repository of the checked out branch is private or
+ not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Description
+ description: The description of the repository of the checked out branch
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Fork
+ description: Whether the repository of the checked out branch is a fork or not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Language
+ description: The language of the repository of the checked out branch
+ type: Unknown
+ - contextPath: GitHub.PR.Head.Repo.ForksCount
+ description: The number of forks of the repository of the checked out branch
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.StargazersCount
+ description: The number of stars of the repository of the checked out branch
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.WatchersCount
+ description: The number of entities watching the repository of the checked out
+ branch
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.Size
+ description: The size of the repository of the checked out branch
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.DefaultBranch
+ description: The default branch of the repository of the checked out branch
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.OpenIssuesCount
+ description: The open issues of the repository of the checked out branch
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.Topics
+ description: Topics listed for the repository of the checked out branch
+ type: Unknown
+ - contextPath: GitHub.PR.Head.Repo.HasIssues
+ description: Whether the repository of the checked out branch has issues or
+ not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.HasProjects
+ description: Whether the repository of the checked out branch has projects or
+ not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.HasWiki
+ description: Whether the repository of the checked out branch has a wiki or
+ not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.HasPages
+ description: Whether the repository of the checked out branch has pages or not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.HasDownloads
+ description: Whether the repository of the checked out branch has downloads
+ or not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Archived
+ description: Whether the repository of the checked out branch has been archived
+ or not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Disabled
+ description: Whether the repository of the checked out branch has been disabled
+ or not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.PushedAt
+ description: The date of the latest push to the repository of the checked out
+ branch
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.CreatedAt
+ description: The date of creation of the repository of the checked out branch
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.UpdatedAt
+ description: The date the repository of the checked out branch was last updated
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.AllowRebaseMerge
+ description: Whether the repository of the checked out branch permits rebase-style
+ merges or not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.AllowSquashMerge
+ description: Whether the repository of the checked out branch permits squash
+ merges or not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.AllowMergeCommit
+ description: Whether the repository of the checked out branch permits merge
+ commits or not
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.SubscribersCount
+ description: The number of entities subscribing to the repository of the checked
+ out branch
+ type: Number
+ - contextPath: GitHub.PR.Base.Label
+ description: The label of the base branch
+ type: String
+ - contextPath: GitHub.PR.Base.Ref
+ description: The reference of the base branch
+ type: String
+ - contextPath: GitHub.PR.Base.SHA
+ description: The SHA hash of the base branch
+ type: String
+ - contextPath: GitHub.PR.Base.User.Login
+ description: The login of the committer of the commit that the base branch points
+ to
+ type: String
+ - contextPath: GitHub.PR.Base.User.ID
+ description: The ID of the committer of the commit that the base branch points
+ to
+ type: Number
+ - contextPath: GitHub.PR.Base.User.NodeID
+ description: The node ID of the committer of the commit that the base branch
+ points to
+ type: String
+ - contextPath: GitHub.PR.Base.User.Type
+ description: The user type of the committer of the commit that the base branch
+ points to
+ type: String
+ - contextPath: GitHub.PR.Base.User.SiteAdmin
+ description: Whether the committer of the commit that the base branch points
+ to is a site admin or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.ID
+ description: The ID of the repository that the base branch belongs to
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.NodeID
+ description: The node ID of the repository that the base branch belongs to
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Name
+ description: The name of the repository that the base branch belongs to
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.FullName
+ description: The full name of the repository that the base branch belongs to
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Owner.Login
+ description: The user login of the owner of the repository that the base branch
+ belongs to
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Owner.ID
+ description: The user ID of the owner of the repository that the base branch
+ belongs to
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.Owner.NodeID
+ description: The user node ID of the owner of the repository that the base branch
+ belongs to
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Owner.Type
+ description: The user type of the owner of the repository that the base branch
+ belongs to
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Owner.SiteAdmin
+ description: Whether the owner of the repository that the base branch belongs
+ to is a site admin or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Private
+ description: Whether the repository that the base branch belongs to is private
+ or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Description
+ description: The description of the repository that the base branch belongs
+ to
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Fork
+ description: Whether the repository that the base branch belongs to is a fork
+ or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Language
+ description: The language of the repository that the base branch belongs to
+ type: Unknown
+ - contextPath: GitHub.PR.Base.Repo.ForksCount
+ description: The number of times that the repository that the base branch belongs
+ to has been forked
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.StargazersCount
+ description: The number of times that the repository that the base branch belongs
+ to has been starred
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.WatchersCount
+ description: The number of entities watching the repository that the base branch
+ belongs to
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.Size
+ description: The size of the repository that the base branch belongs to
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.DefaultBranch
+ description: The default branch of the repository that the base branch belongs
+ to
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.OpenIssuesCount
+ description: The number of open issues in the repository that the base branch
+ belongs to
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.Topics
+ description: Topics listed for the repository that the base branch belongs to
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.HasIssues
+ description: Whether the repository that the base branch belongs to has issues
+ or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.HasProjects
+ description: Whether the repository that the base branch belongs to has projects
+ or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.HasWiki
+ description: Whether the repository that the base branch belongs to has a wiki
+ or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.HasPages
+ description: Whether the repository that the base branch belongs to has pages
+ or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.HasDownloads
+ description: Whether the repository that the base branch belongs to has downloads
+ or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Archived
+ description: Whether the repository that the base branch belongs to is archived
+ or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Disabled
+ description: Whether the repository that the base branch belongs to is disabled
+ or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.PushedAt
+ description: The date that the repository that the base branch belongs to was
+ last pushed to
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.CreatedAt
+ description: The date of creation of the repository that the base branch belongs
+ to
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.UpdatedAt
+ description: The date that the repository that the base branch belongs to was
+ last updated
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.AllowRebaseMerge
+ description: Whether the repository that the base branch belongs to allows rebase-style
+ merges or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.AllowSquashMerge
+ description: Whether the repository that the base branch belongs to allows squash
+ merges or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.AllowMergeCommit
+ description: Whether the repository that the base branch belongs to allows merge
+ commits or not
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.SubscribersCount
+ description: The number of entities that subscribe to the repository that the
+ base branch belongs to
+ type: Number
+ - contextPath: GitHub.PR.AuthorAssociation
+ description: The pull request author association
+ type: String
+ - contextPath: GitHub.PR.Draft
+ description: Whether the pull request is a draft or not
+ type: Boolean
+ - contextPath: GitHub.PR.Merged
+ description: Whether the pull request is merged or not
+ type: Boolean
+ - contextPath: GitHub.PR.Mergeable
+ description: Whether the pull request is mergeable or not
+ type: Boolean
+ - contextPath: GitHub.PR.Rebaseable
+ description: Whether the pull request is rebaseable or not
+ type: Boolean
+ - contextPath: GitHub.PR.MergeableState
+ description: The mergeable state of the pull request
+ type: String
+ - contextPath: GitHub.PR.MergedBy.Login
+ description: The login of the user who merged the pull request
+ type: String
+ - contextPath: GitHub.PR.MergedBy.ID
+ description: The ID of the user who merged the pull request
+ type: Number
+ - contextPath: GitHub.PR.MergedBy.NodeID
+ description: The node ID of the user who merged the pull request
+ type: String
+ - contextPath: GitHub.PR.MergedBy.Type
+ description: The type of the user who merged the pull request
+ type: String
+ - contextPath: GitHub.PR.MergedBy.SiteAdmin
+ description: Whether the user who merged the pull request is a site admin or
+ not
+ type: Boolean
+ - contextPath: GitHub.PR.Comments
+ description: The number of comments on the pull request
+ type: Number
+ - contextPath: GitHub.PR.ReviewComments
+ description: The number of review comments on the pull request
+ type: Number
+ - contextPath: GitHub.PR.MaintainerCanModify
+ description: Whether the maintainer can modify the pull request or not
+ type: Boolean
+ - contextPath: GitHub.PR.Commits
+ description: The number of commits in the pull request
+ type: Number
+ - contextPath: GitHub.PR.Additions
+ description: The number of additions in the pull request
+ type: Number
+ - contextPath: GitHub.PR.Deletions
+ description: The number of deletions in the pull request
+ type: Number
+ - contextPath: GitHub.PR.ChangedFiles
+ description: The number of changed files in the pull request
+ type: Number
+ - arguments:
+ - default: false
+ description: The name of the organization
+ isArray: false
+ name: organization
+ required: true
+ secret: false
+ deprecated: false
+ description: List the teams for an organization. Note that this API call is only
+ available to authenticated members of the organization.
+ execution: false
+ name: GitHub-list-teams
+ outputs:
+ - contextPath: GitHub.Team.ID
+ description: The ID of the team
+ type: Number
+ - contextPath: GitHub.Team.NodeID
+ description: The node ID of the team
+ type: String
+ - contextPath: GitHub.Team.Name
+ description: The name of the team
+ type: String
+ - contextPath: GitHub.Team.Slug
+ description: The slug of the team
+ type: String
+ - contextPath: GitHub.Team.Description
+ description: The description of the team
+ type: String
+ - contextPath: GitHub.Team.Privacy
+ description: The privacy setting of the team
+ type: String
+ - contextPath: GitHub.Team.Permission
+ description: The permissions of the team
+ type: String
+ - contextPath: GitHub.Team.Parent
+ description: The parent of the team
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The name of the branch to delete
+ isArray: false
+ name: branch_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Delete a branch
+ execution: false
+ name: GitHub-delete-branch
+ - arguments:
+ - default: false
+ description: The issue number of the pull request.
+ isArray: false
+ name: pull_number
+ required: true
+ secret: false
+ deprecated: false
+ description: Lists all the review comments for a pull request.
+ execution: false
+ name: GitHub-list-pr-review-comments
+ outputs:
+ - contextPath: GitHub.PR.Number
+ description: The issue number of the pull request.
+ type: Number
+ - contextPath: GitHub.PR.ReviewComment.ID
+ description: The ID number of the pull request review comment.
+ type: Number
+ - contextPath: GitHub.PR.ReviewComment.NodeID
+ description: The Node ID of the pull request review comment.
+ type: String
+ - contextPath: GitHub.PR.ReviewComment.PullRequestReviewID
+ description: The ID of the pull request review.
+ type: Number
+ - contextPath: GitHub.PR.ReviewComment.DiffHunk
+ description: The diff hunk for which the review comment applies.
+ type: String
+ - contextPath: GitHub.PR.ReviewComment.Path
+ description: The file path of the proposed file changes for which the review comment
+ applies.
+ type: String
+ - contextPath: GitHub.PR.ReviewComment.Position
+ description: The position of the change for which the review comment applies.
+ type: Number
+ - contextPath: GitHub.PR.ReviewComment.OriginalPosition
+ description: The original position of the change for which the review comment applies.
+ type: Number
+ - contextPath: GitHub.PR.ReviewComment.CommitID
+ description: The commit ID of the proposed change.
+ type: String
+ - contextPath: GitHub.PR.ReviewComment.OriginalCommitID
+ description: The commit ID of the commit before the proposed change.
+ type: String
+ - contextPath: GitHub.PR.ReviewComment.InReplyToID
+ description: The reply ID of the comment for which the review comment applies.
+ type: Number
+ - contextPath: GitHub.PR.ReviewComment.User.Login
+ description: The login of the user who created the review comment.
+ type: String
+ - contextPath: GitHub.PR.ReviewComment.User.ID
+ description: The ID of the user who created the review comment.
+ type: Number
+ - contextPath: GitHub.PR.ReviewComment.User.NodeID
+ description: The Node ID of the user who created the review comment.
+ type: String
+ - contextPath: GitHub.PR.ReviewComment.User.Type
+ description: The type of the user who created the review comment.
+ type: String
+ - contextPath: GitHub.PR.ReviewComment.User.SiteAdmin
+ description: Whether the user who created the review comment is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.ReviewComment.Body
+ description: The body content of the review comment.
+ type: String
+ - contextPath: GitHub.PR.ReviewComment.CreatedAt
+ description: The time the review comment was created.
+ type: String
+ - contextPath: GitHub.PR.ReviewComment.UpdatedAt
+ description: The time the review comment was updated.
+ type: String
+ - contextPath: GitHub.PR.ReviewComment.AuthorAssociation
+ description: The association of the user who created the review comment.
+ type: String
+ - arguments:
+ - default: false
+ description: The new title of the pull request.
+ isArray: false
+ name: title
+ required: false
+ secret: false
+ - default: false
+ description: The new body content of the pull request.
+ isArray: false
+ name: body
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The new state of the pull request. Can be "open", or "closed".'
+ isArray: false
+ name: state
+ predefined:
+ - open
+ - closed
+ required: false
+ secret: false
+ - default: false
+ description: The name of the branch you want your changes pulled into, which
+ must be an existing branch in the current repository. You cannot update the
+ base branch of a pull request to point to another repository.
+ isArray: false
+ name: base
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether maintainers can modify the pull request.
+ isArray: false
+ name: maintainer_can_modify
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: The issue number of the pull request to modify.
+ isArray: false
+ name: pull_number
+ required: true
+ secret: false
+ deprecated: false
+ description: Updates a pull request in a repository.
+ execution: false
+ name: GitHub-update-pull-request
+ outputs:
+ - contextPath: GitHub.PR.ID
+ description: The ID number of the pull request.
+ type: Number
+ - contextPath: GitHub.PR.NodeID
+ description: The Node ID of the pull request.
+ type: String
+ - contextPath: GitHub.PR.Number
+ description: The issue number of the pull request.
+ type: Number
+ - contextPath: GitHub.PR.State
+ description: The state of the pull request.
+ type: String
+ - contextPath: GitHub.PR.Locked
+ description: Whether the pull request is locked.
+ type: Boolean
+ - contextPath: GitHub.PR.Title
+ description: The title of the pull request.
+ type: String
+ - contextPath: GitHub.PR.User.Login
+ description: The login of the user who opened the pull request.
+ type: String
+ - contextPath: GitHub.PR.User.ID
+ description: The ID of the user who opened the pull request.
+ type: Number
+ - contextPath: GitHub.PR.User.NodeID
+ description: The Node ID of the user who opened the pull request.
+ type: String
+ - contextPath: GitHub.PR.User.Type
+ description: The type of the user who opened the pull request.
+ type: String
+ - contextPath: GitHub.PR.User.SiteAdmin
+ description: Whether the user who opened the pull request is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Body
+ description: The body content of the pull request.
+ type: String
+ - contextPath: GitHub.PR.Label.ID
+ description: The ID of the label.
+ type: Number
+ - contextPath: GitHub.PR.Label.NodeID
+ description: The Node ID of the label.
+ type: String
+ - contextPath: GitHub.PR.Label.Name
+ description: The name of the label.
+ type: String
+ - contextPath: GitHub.PR.Label.Description
+ description: The description of the label.
+ type: String
+ - contextPath: GitHub.PR.Label.Color
+ description: The hex color value of the label.
+ type: String
+ - contextPath: GitHub.PR.Label.Default
+ description: Whether the label is a default.
+ type: Boolean
+ - contextPath: GitHub.PR.Milestone.ID
+ description: The ID of the milestone.
+ type: Number
+ - contextPath: GitHub.PR.Milestone.NodeID
+ description: The Node ID of the milestone.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Number
+ description: The number of the milestone.
+ type: Number
+ - contextPath: GitHub.PR.Milestone.State
+ description: The state of the milestone.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Title
+ description: The title of the milestone.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Description
+ description: The description of the milestone.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Creator.Login
+ description: The login of the milestone creator.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Creator.ID
+ description: The ID of the milestone creator.
+ type: Number
+ - contextPath: GitHub.PR.Milestone.Creator.NodeID
+ description: The Node ID of the milestone creator.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Creator.Type
+ description: The type of the milestone creator.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Creator.SiteAdmin
+ description: Whether the milestone creator is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Milestone.OpenIssues
+ description: The number of open issues with this milestone.
+ type: Number
+ - contextPath: GitHub.PR.Milestone.ClosedIssues
+ description: The number of closed issues with this milestone.
+ type: Number
+ - contextPath: GitHub.PR.Milestone.CreatedAt
+ description: The date the milestone was created.
+ type: String
+ - contextPath: GitHub.PR.Milestone.UpdatedAt
+ description: The date the milestone was updated.
+ type: String
+ - contextPath: GitHub.PR.Milestone.ClosedAt
+ description: The date the milestone was closed.
+ type: String
+ - contextPath: GitHub.PR.Milestone.DueOn
+ description: The due date for the milestone.
+ type: String
+ - contextPath: GitHub.PR.ActiveLockReason
+ description: The reason the pull request is locked.
+ type: String
+ - contextPath: GitHub.PR.CreatedAt
+ description: The date the pull request was created.
+ type: String
+ - contextPath: GitHub.PR.UpdatedAt
+ description: The date the pull request was updated.
+ type: String
+ - contextPath: GitHub.PR.ClosedAt
+ description: The date the pull request was closed.
+ type: String
+ - contextPath: GitHub.PR.MergedAt
+ description: The date the pull request was merged.
+ type: String
+ - contextPath: GitHub.PR.MergeCommitSHA
+ description: The SHA hash of the pull request's merge commit.
+ type: String
+ - contextPath: GitHub.PR.Assignee.Login
+ description: The login of the user assigned to the pull request.
+ type: String
+ - contextPath: GitHub.PR.Assignee.ID
+ description: The ID of the user assigned to the pull request.
+ type: Number
+ - contextPath: GitHub.PR.Assignee.NodeID
+ description: The Node ID of the user assigned to the pull request.
+ type: String
+ - contextPath: GitHub.PR.Assignee.Type
+ description: The type of the user assigned to the pull request.
+ type: String
+ - contextPath: GitHub.PR.Assignee.SiteAdmin
+ description: Whether the user assigned to the pull request is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.RequestedReviewer.Login
+ description: The login of the user requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedReviewer.ID
+ description: The ID of the user requested for review.
+ type: Number
+ - contextPath: GitHub.PR.RequestedReviewer.NodeID
+ description: The Node ID of the user requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedReviewer.Type
+ description: The type of the user requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedReviewer.SiteAdmin
+ description: Whether the user requested for review is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.RequestedTeam.ID
+ description: The ID of the team requested for review.
+ type: Number
+ - contextPath: GitHub.PR.RequestedTeam.NodeID
+ description: The Node ID of the team requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Name
+ description: The name of the team requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Slug
+ description: The slug of the team requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Description
+ description: The description of the team requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Privacy
+ description: The privacy setting of the team requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Permission
+ description: The permissions of the team requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Parent
+ description: The parent of the team requested for review.
+ type: Unknown
+ - contextPath: GitHub.PR.Head.Label
+ description: The label of the branch to which the HEAD points.
+ type: String
+ - contextPath: GitHub.PR.Head.Ref
+ description: The reference of the branch to which the HEAD points.
+ type: String
+ - contextPath: GitHub.PR.Head.SHA
+ description: The SHA hash of the commit to which the HEAD points.
+ type: String
+ - contextPath: GitHub.PR.Head.User.Login
+ description: The committer login of the HEAD commit of the checked out
+ branch.
+ type: String
+ - contextPath: GitHub.PR.Head.User.ID
+ description: The committer ID of the HEAD commit of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.User.NodeID
+ description: The Node ID of the committer of the HEAD commit of the checked
+ out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.User.Type
+ description: The committer type of the HEAD commit of the checked out
+ branch.
+ type: String
+ - contextPath: GitHub.PR.Head.User.SiteAdmin
+ description: Whether the committer of the HEAD commit of the checked out branch
+ is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.ID
+ description: The ID of the repository of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.NodeID
+ description: The Node ID of the repository of the checked out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Name
+ description: The name of the repository of the checked out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.FullName
+ description: The full name of the repository of the checked out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Owner.Login
+ description: The user login of the owner of the repository of the checked out
+ branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Owner.ID
+ description: The user ID of the owner of the repository of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.Owner.NodeID
+ description: The user Node ID of the owner of the repository of the checked
+ out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Owner.Type
+ description: The user type of the owner of the repository of the checked out
+ branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Owner.SiteAdmin
+ description: Whether the owner of the repository of the checked out branch is
+ a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Private
+ description: Whether the repository of the checked out branch is private.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Description
+ description: The description of the repository of the checked out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Fork
+ description: Whether the repository of the checked out branch is a fork.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Language
+ description: The language of the repository of the checked out branch.
+ type: Unknown
+ - contextPath: GitHub.PR.Head.Repo.ForksCount
+ description: The number of forks of the repository of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.StargazersCount
+ description: The number of stars of the repository of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.WatchersCount
+ description: The number of entities watching the repository of the checked out
+ branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.Size
+ description: The size of the repository of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.DefaultBranch
+ description: The default branch of the repository of the checked out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.OpenIssuesCount
+ description: The open issues of the repository of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.Topics
+ description: Topics listed for the repository of the checked out branch.
+ type: Unknown
+ - contextPath: GitHub.PR.Head.Repo.HasIssues
+ description: Whether the repository of the checked out branch has issues.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.HasProjects
+ description: Whether the repository of the checked out branch has projects.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.HasWiki
+ description: Whether the repository of the checked out branch has a wiki.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.HasPages
+ description: Whether the repository of the checked out branch has pages.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.HasDownloads
+ description: Whether the repository of the checked out branch has downloads.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Archived
+ description: Whether the repository of the checked out branch has been archived.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Disabled
+ description: Whether the repository of the checked out branch has been disabled.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.PushedAt
+ description: The date of the latest push to the repository of the checked out
+ branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.CreatedAt
+ description: The date of creation of the repository of the checked out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.UpdatedAt
+ description: The date the repository of the checked out branch was last updated.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.AllowRebaseMerge
+ description: Whether the repository of the checked out branch permits rebase-style
+ merges.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.AllowSquashMerge
+ description: Whether the repository of the checked out branch permits squash
+ merges.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.AllowMergeCommit
+ description: Whether the repository of the checked out branch permits merge
+ commits.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.SubscribersCount
+ description: The number of entities subscribing to the repository of the checked
+ out branch.
+ type: Number
+ - contextPath: GitHub.PR.Base.Label
+ description: The label of the base branch.
+ type: String
+ - contextPath: GitHub.PR.Base.Ref
+ description: The reference of the base branch.
+ type: String
+ - contextPath: GitHub.PR.Base.SHA
+ description: The SHA hash of the base branch.
+ type: String
+ - contextPath: GitHub.PR.Base.User.Login
+ description: The committer login of the commit to which the base branch points.
+ type: String
+ - contextPath: GitHub.PR.Base.User.ID
+ description: The ID of the committer of the commit to which the base branch points.
+ type: Number
+ - contextPath: GitHub.PR.Base.User.NodeID
+ description: The committer Node ID of the commit to which the base branch points.
+ type: String
+ - contextPath: GitHub.PR.Base.User.Type
+ description: The type of the committer of the commit to which the base branch
+ points.
+ type: String
+ - contextPath: GitHub.PR.Base.User.SiteAdmin
+ description: Whether the committer of the commit to which the base branch points
+ is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.ID
+ description: The ID of the repository to which the base branch belongs.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.NodeID
+ description: The Node ID of the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Name
+ description: The name of the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.FullName
+ description: The full name of the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Owner.Login
+ description: The user login of the owner of the repository to which the base branch
+ belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Owner.ID
+ description: The user ID of the owner of the repository to which the base branch
+ belongs.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.Owner.NodeID
+ description: The user Node ID of the owner of the repository to which the base branch
+ belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Owner.Type
+ description: The user type of the owner of the repository to which the base branch
+ belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Owner.SiteAdmin
+ description: Whether the owner of the repository to which the base branch belongs
+ is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Private
+ description: Whether the repository to which the base branch belongs is private.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Description
+ description: The description of the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Fork
+ description: Whether the repository to which the base branch belongs is a fork.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Language
+ description: The language of the repository to which the base branch belongs.
+ type: Unknown
+ - contextPath: GitHub.PR.Base.Repo.ForksCount
+ description: The number of times the repository to which the base branch belongs
+ has been forked.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.StargazersCount
+ description: The number of times the repository to which the base branch belongs
+ has been starred.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.WatchersCount
+ description: The number of entities watching the repository to which the base branch
+ belongs.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.Size
+ description: The size of the repository to which the base branch belongs.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.DefaultBranch
+ description: The default branch of the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.OpenIssuesCount
+ description: The number of open issues in the repository to which the base branch
+ belongs.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.Topics
+ description: Topics listed for the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.HasIssues
+ description: Whether the repository to which the base branch belongs has issues.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.HasProjects
+ description: Whether the repository to which the base branch belongs has projects.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.HasWiki
+ description: Whether the repository to which the base branch belongs has a wiki.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.HasPages
+ description: Whether the repository to which the base branch belongs has pages.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.HasDownloads
+ description: Whether the repository to which the base branch belongs has downloads.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Archived
+ description: Whether the repository to which the base branch belongs is archived.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Disabled
+ description: Whether the repository to which the base branch belongs is disabled.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.PushedAt
+ description: The date the repository to which the base branch belongs was last
+ pushed.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.CreatedAt
+ description: The date of creation of the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.UpdatedAt
+ description: The date the repository to which the base branch belongs was last
+ updated.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.AllowRebaseMerge
+ description: Whether the repository to which the base branch belongs allows rebase-style
+ merges.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.AllowSquashMerge
+ description: Whether the repository to which the base branch belongs allows squash
+ merges.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.AllowMergeCommit
+ description: Whether the repository to which the base branch belongs allows merge
+ commits.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.SubscribersCount
+ description: The number of entities that subscribe to the repository to which the
+ base branch belongs.
+ type: Number
+ - contextPath: GitHub.PR.AuthorAssociation
+ description: The pull request author association.
+ type: String
+ - contextPath: GitHub.PR.Draft
+ description: Whether the pull request is a draft.
+ type: Boolean
+ - contextPath: GitHub.PR.Merged
+ description: Whether the pull request is merged.
+ type: Boolean
+ - contextPath: GitHub.PR.Mergeable
+ description: Whether the pull request is mergeable.
+ type: Boolean
+ - contextPath: GitHub.PR.Rebaseable
+ description: Whether the pull request is rebaseable.
+ type: Boolean
+ - contextPath: GitHub.PR.MergeableState
+ description: The mergeable state of the pull request.
+ type: String
+ - contextPath: GitHub.PR.MergedBy.Login
+ description: The login of the user who merged the pull request.
+ type: String
+ - contextPath: GitHub.PR.MergedBy.ID
+ description: The ID of the user who merged the pull request.
+ type: Number
+ - contextPath: GitHub.PR.MergedBy.NodeID
+ description: The Node ID of the user who merged the pull request.
+ type: String
+ - contextPath: GitHub.PR.MergedBy.Type
+ description: The type of the user who merged the pull request.
+ type: String
+ - contextPath: GitHub.PR.MergedBy.SiteAdmin
+ description: Whether the user who merged the pull request is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Comments
+ description: The number of comments on the pull request.
+ type: Number
+ - contextPath: GitHub.PR.ReviewComments
+ description: The number of review comments on the pull request.
+ type: Number
+ - contextPath: GitHub.PR.MaintainerCanModify
+ description: Whether the maintainer can modify the pull request.
+ type: Boolean
+ - contextPath: GitHub.PR.Commits
+ description: The number of commits in the pull request.
+ type: Number
+ - contextPath: GitHub.PR.Additions
+ description: The number of additions in the pull request.
+ type: Number
+ - contextPath: GitHub.PR.Deletions
+ description: The number of deletions in the pull request.
+ type: Number
+ - contextPath: GitHub.PR.ChangedFiles
+ description: The number of changed files in the pull request.
+ type: Number
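+ # Example War Room invocation (a hedged sketch; the argument values are hypothetical):
+ #   !GitHub-update-pull-request pull_number=1 title="New title" state=open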
+ - arguments:
+ - default: false
+ description: The issue number of the pull request to check.
+ isArray: false
+ name: pull_number
+ required: true
+ secret: false
+ deprecated: false
+ description: 'Checks whether a pull request has been merged. If the pull request
+ has been merged, the API returns ''Status: 204 No Content''. If the pull request
+ has not been merged, the API returns ''Status: 404 Not Found''.'
+ execution: false
+ name: GitHub-is-pr-merged
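+ # Example War Room invocation (hypothetical pull request number):
+ #   !GitHub-is-pr-merged pull_number=1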
+ - arguments:
+ - default: false
+ description: The title of the pull request.
+ isArray: false
+ name: title
+ required: true
+ secret: false
+ - default: false
+ description: The name of the branch where the changes are made.
+ isArray: false
+ name: head
+ required: true
+ secret: false
+ - default: false
+ description: The name of the branch you want the changes pulled into, which must
+ be an existing branch on the current repository.
+ isArray: false
+ name: base
+ required: true
+ secret: false
+ - default: false
+ description: The contents of the pull request.
+ isArray: false
+ name: body
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether maintainers can modify the pull request.
+ isArray: false
+ name: maintainer_can_modify
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates whether the pull request is a draft. For more information, see https://help.github.com/en/articles/about-pull-requests#draft-pull-requests.
+ isArray: false
+ name: draft
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new pull request.
+ execution: false
+ name: GitHub-create-pull-request
+ outputs:
+ - contextPath: GitHub.PR.ID
+ description: The ID number of the pull request.
+ type: Number
+ - contextPath: GitHub.PR.NodeID
+ description: The Node ID of the pull request.
+ type: String
+ - contextPath: GitHub.PR.Number
+ description: The issue number of the pull request.
+ type: Number
+ - contextPath: GitHub.PR.State
+ description: The state of the pull request.
+ type: String
+ - contextPath: GitHub.PR.Locked
+ description: Whether the pull request is locked.
+ type: Boolean
+ - contextPath: GitHub.PR.Title
+ description: The title of the pull request.
+ type: String
+ - contextPath: GitHub.PR.User.Login
+ description: The login of the user who opened the pull request.
+ type: String
+ - contextPath: GitHub.PR.User.ID
+ description: The ID of the user who opened the pull request.
+ type: Number
+ - contextPath: GitHub.PR.User.NodeID
+ description: The Node ID of the user who opened the pull request.
+ type: String
+ - contextPath: GitHub.PR.User.Type
+ description: The type of the user who opened the pull request.
+ type: String
+ - contextPath: GitHub.PR.User.SiteAdmin
+ description: Whether the user who opened the pull request is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Body
+ description: The body content of the pull request.
+ type: String
+ - contextPath: GitHub.PR.Label.ID
+ description: The ID of the label.
+ type: Number
+ - contextPath: GitHub.PR.Label.NodeID
+ description: The Node ID of the label.
+ type: String
+ - contextPath: GitHub.PR.Label.Name
+ description: The name of the label.
+ type: String
+ - contextPath: GitHub.PR.Label.Description
+ description: The description of the label.
+ type: String
+ - contextPath: GitHub.PR.Label.Color
+ description: The hex color value of the label.
+ type: String
+ - contextPath: GitHub.PR.Label.Default
+ description: Whether the label is a default.
+ type: Boolean
+ - contextPath: GitHub.PR.Milestone.ID
+ description: The ID of the milestone.
+ type: Number
+ - contextPath: GitHub.PR.Milestone.NodeID
+ description: The Node ID of the milestone.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Number
+ description: The number of the milestone.
+ type: Number
+ - contextPath: GitHub.PR.Milestone.State
+ description: The state of the milestone.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Title
+ description: The title of the milestone.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Description
+ description: The description of the milestone.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Creator.Login
+ description: The login of the milestone creator.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Creator.ID
+ description: The ID of the milestone creator.
+ type: Number
+ - contextPath: GitHub.PR.Milestone.Creator.NodeID
+ description: The Node ID of the milestone creator.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Creator.Type
+ description: The type of the milestone creator.
+ type: String
+ - contextPath: GitHub.PR.Milestone.Creator.SiteAdmin
+ description: Whether the milestone creator is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Milestone.OpenIssues
+ description: The number of open issues with this milestone.
+ type: Number
+ - contextPath: GitHub.PR.Milestone.ClosedIssues
+ description: The number of closed issues with this milestone.
+ type: Number
+ - contextPath: GitHub.PR.Milestone.CreatedAt
+ description: The date the milestone was created.
+ type: String
+ - contextPath: GitHub.PR.Milestone.UpdatedAt
+ description: The date the milestone was updated.
+ type: String
+ - contextPath: GitHub.PR.Milestone.ClosedAt
+ description: The date the milestone was closed.
+ type: String
+ - contextPath: GitHub.PR.Milestone.DueOn
+ description: The due date for the milestone.
+ type: String
+ - contextPath: GitHub.PR.ActiveLockReason
+ description: The reason the pull request is locked.
+ type: String
+ - contextPath: GitHub.PR.CreatedAt
+ description: The date the pull request was created.
+ type: String
+ - contextPath: GitHub.PR.UpdatedAt
+ description: The date the pull request was updated.
+ type: String
+ - contextPath: GitHub.PR.ClosedAt
+ description: The date the pull request was closed.
+ type: String
+ - contextPath: GitHub.PR.MergedAt
+ description: The date the pull request was merged.
+ type: String
+ - contextPath: GitHub.PR.MergeCommitSHA
+ description: The SHA hash of the pull request's merge commit.
+ type: String
+ - contextPath: GitHub.PR.Assignee.Login
+ description: The login of the user assigned to the pull request.
+ type: String
+ - contextPath: GitHub.PR.Assignee.ID
+ description: The ID of the user assigned to the pull request.
+ type: Number
+ - contextPath: GitHub.PR.Assignee.NodeID
+ description: The Node ID of the user assigned to the pull request.
+ type: String
+ - contextPath: GitHub.PR.Assignee.Type
+ description: The type of the user assigned to the pull request.
+ type: String
+ - contextPath: GitHub.PR.Assignee.SiteAdmin
+ description: Whether the user assigned to the pull request is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.RequestedReviewer.Login
+ description: The login of the user requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedReviewer.ID
+ description: The ID of the user requested for review.
+ type: Number
+ - contextPath: GitHub.PR.RequestedReviewer.NodeID
+ description: The Node ID of the user requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedReviewer.Type
+ description: The type of the user requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedReviewer.SiteAdmin
+ description: Whether the user requested for review is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.RequestedTeam.ID
+ description: The ID of the team requested for review.
+ type: Number
+ - contextPath: GitHub.PR.RequestedTeam.NodeID
+ description: The Node ID of the team requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Name
+ description: The name of the team requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Slug
+ description: The slug of the team requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Description
+ description: The description of the team requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Privacy
+ description: The privacy setting of the team requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Permission
+ description: The permissions of the team requested for review.
+ type: String
+ - contextPath: GitHub.PR.RequestedTeam.Parent
+ description: The parent of the team requested for review.
+ type: Unknown
+ - contextPath: GitHub.PR.Head.Label
+ description: The label of the branch to which the HEAD points.
+ type: String
+ - contextPath: GitHub.PR.Head.Ref
+ description: The reference of the branch to which the HEAD points.
+ type: String
+ - contextPath: GitHub.PR.Head.SHA
+ description: The SHA hash of the commit to which the HEAD points.
+ type: String
+ - contextPath: GitHub.PR.Head.User.Login
+ description: The committer login of the HEAD commit of the checked out
+ branch.
+ type: String
+ - contextPath: GitHub.PR.Head.User.ID
+ description: The committer ID of the HEAD commit of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.User.NodeID
+ description: The Node ID of the committer of the HEAD commit of the checked
+ out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.User.Type
+ description: The committer type of the HEAD commit of the checked out
+ branch.
+ type: String
+ - contextPath: GitHub.PR.Head.User.SiteAdmin
+ description: Whether the committer of the HEAD commit of the checked out branch
+ is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.ID
+ description: The ID of the repository of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.NodeID
+ description: The Node ID of the repository of the checked out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Name
+ description: The name of the repository of the checked out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.FullName
+ description: The full name of the repository of the checked out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Owner.Login
+ description: The user login of the owner of the repository of the checked out
+ branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Owner.ID
+ description: The user ID of the owner of the repository of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.Owner.NodeID
+ description: The user Node ID of the owner of the repository of the checked
+ out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Owner.Type
+ description: The user type of the owner of the repository of the checked out
+ branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Owner.SiteAdmin
+ description: Whether the owner of the repository of the checked out branch is
+ a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Private
+ description: Whether the repository of the checked out branch is private.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Description
+ description: The description of the repository of the checked out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.Fork
+ description: Whether the repository of the checked out branch is a fork.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Language
+ description: The language of the repository of the checked out branch.
+ type: Unknown
+ - contextPath: GitHub.PR.Head.Repo.ForksCount
+ description: The number of forks of the repository of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.StargazersCount
+ description: The number of stars of the repository of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.WatchersCount
+ description: The number of entities watching the repository of the checked out
+ branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.Size
+ description: The size of the repository of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.DefaultBranch
+ description: The default branch of the repository of the checked out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.OpenIssuesCount
+ description: The open issues of the repository of the checked out branch.
+ type: Number
+ - contextPath: GitHub.PR.Head.Repo.Topics
+ description: Topics listed for the repository of the checked out branch.
+ type: Unknown
+ - contextPath: GitHub.PR.Head.Repo.HasIssues
+ description: Whether the repository of the checked out branch has issues.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.HasProjects
+ description: Whether the repository of the checked out branch has projects.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.HasWiki
+ description: Whether the repository of the checked out branch has a wiki.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.HasPages
+ description: Whether the repository of the checked out branch has pages.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.HasDownloads
+ description: Whether the repository of the checked out branch has downloads.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Archived
+ description: Whether the repository of the checked out branch has been archived.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.Disabled
+ description: Whether the repository of the checked out branch has been disabled.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.PushedAt
+ description: The date of the latest push to the repository of the checked out
+ branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.CreatedAt
+ description: The date of creation of the repository of the checked out branch.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.UpdatedAt
+ description: The date the repository of the checked out branch was last updated.
+ type: String
+ - contextPath: GitHub.PR.Head.Repo.AllowRebaseMerge
+ description: Whether the repository of the checked out branch permits rebase-style
+ merges.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.AllowSquashMerge
+ description: Whether the repository of the checked out branch permits squash
+ merges.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.AllowMergeCommit
+ description: Whether the repository of the checked out branch permits merge
+ commits.
+ type: Boolean
+ - contextPath: GitHub.PR.Head.Repo.SubscribersCount
+ description: The number of entities subscribing to the repository of the checked
+ out branch.
+ type: Number
+ - contextPath: GitHub.PR.Base.Label
+ description: The label of the base branch.
+ type: String
+ - contextPath: GitHub.PR.Base.Ref
+ description: The reference of the base branch.
+ type: String
+ - contextPath: GitHub.PR.Base.SHA
+ description: The SHA hash of the base branch.
+ type: String
+ - contextPath: GitHub.PR.Base.User.Login
+ description: The committer login of the commit to which the base branch points.
+ type: String
+ - contextPath: GitHub.PR.Base.User.ID
+ description: The ID of the committer of the commit to which the base branch points.
+ type: Number
+ - contextPath: GitHub.PR.Base.User.NodeID
+ description: The committer Node ID of the commit to which the base branch points.
+ type: String
+ - contextPath: GitHub.PR.Base.User.Type
+ description: The type of the committer of the commit to which the base branch
+ points.
+ type: String
+ - contextPath: GitHub.PR.Base.User.SiteAdmin
+ description: Whether the committer of the commit to which the base branch points
+ is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.ID
+ description: The ID of the repository to which the base branch belongs.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.NodeID
+ description: The Node ID of the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Name
+ description: The name of the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.FullName
+ description: The full name of the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Owner.Login
+ description: The user login of the owner of the repository to which the base branch
+ belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Owner.ID
+ description: The user ID of the owner of the repository to which the base branch
+ belongs.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.Owner.NodeID
+ description: The user Node ID of the owner of the repository to which the base branch
+ belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Owner.Type
+ description: The user type of the owner of the repository to which the base branch
+ belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Owner.SiteAdmin
+ description: Whether the owner of the repository to which the base branch belongs
+ is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Private
+ description: Whether the repository to which the base branch belongs is private.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Description
+ description: The description of the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.Fork
+ description: Whether the repository to which the base branch belongs is a fork.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Language
+ description: The language of the repository to which the base branch belongs.
+ type: Unknown
+ - contextPath: GitHub.PR.Base.Repo.ForksCount
+ description: The number of times the repository to which the base branch belongs
+ has been forked.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.StargazersCount
+ description: The number of times the repository to which the base branch belongs
+ has been starred.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.WatchersCount
+ description: The number of entities watching the repository to which the base branch
+ belongs.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.Size
+ description: The size of the repository to which the base branch belongs.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.DefaultBranch
+ description: The default branch of the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.OpenIssuesCount
+ description: The number of open issues in the repository to which the base branch
+ belongs.
+ type: Number
+ - contextPath: GitHub.PR.Base.Repo.Topics
+ description: Topics listed for the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.HasIssues
+ description: Whether the repository to which the base branch belongs has issues.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.HasProjects
+ description: Whether the repository to which the base branch belongs has projects.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.HasWiki
+ description: Whether the repository to which the base branch belongs has a wiki.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.HasPages
+ description: Whether the repository to which the base branch belongs has pages.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.HasDownloads
+ description: Whether the repository to which the base branch belongs has downloads.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Archived
+ description: Whether the repository to which the base branch belongs is archived.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.Disabled
+ description: Whether the repository to which the base branch belongs is disabled.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.PushedAt
+ description: The date the repository to which the base branch belongs was last
+ pushed.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.CreatedAt
+ description: The date of creation of the repository to which the base branch belongs.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.UpdatedAt
+ description: The date the repository to which the base branch belongs was last
+ updated.
+ type: String
+ - contextPath: GitHub.PR.Base.Repo.AllowRebaseMerge
+ description: Whether the repository to which the base branch belongs allows rebase-style
+ merges.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.AllowSquashMerge
+ description: Whether the repository to which the base branch belongs allows squash
+ merges.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.AllowMergeCommit
+ description: Whether the repository to which the base branch belongs allows merge
+ commits.
+ type: Boolean
+ - contextPath: GitHub.PR.Base.Repo.SubscribersCount
+ description: The number of entities that subscribe to the repository to which the
+ base branch belongs.
+ type: Number
+ - contextPath: GitHub.PR.AuthorAssociation
+ description: The pull request author association.
+ type: String
+ - contextPath: GitHub.PR.Draft
+ description: Whether the pull request is a draft.
+ type: Boolean
+ - contextPath: GitHub.PR.Merged
+ description: Whether the pull request is merged.
+ type: Boolean
+ - contextPath: GitHub.PR.Mergeable
+ description: Whether the pull request is mergeable.
+ type: Boolean
+ - contextPath: GitHub.PR.Rebaseable
+ description: Whether the pull request is rebaseable.
+ type: Boolean
+ - contextPath: GitHub.PR.MergeableState
+ description: The mergeable state of the pull request.
+ type: String
+ - contextPath: GitHub.PR.MergedBy.Login
+ description: The login of the user who merged the pull request.
+ type: String
+ - contextPath: GitHub.PR.MergedBy.ID
+ description: The ID of the user who merged the pull request.
+ type: Number
+ - contextPath: GitHub.PR.MergedBy.NodeID
+ description: The Node ID of the user who merged the pull request.
+ type: String
+ - contextPath: GitHub.PR.MergedBy.Type
+ description: The type of the user who merged the pull request.
+ type: String
+ - contextPath: GitHub.PR.MergedBy.SiteAdmin
+ description: Whether the user who merged the pull request is a site administrator.
+ type: Boolean
+ - contextPath: GitHub.PR.Comments
+ description: The number of comments on the pull request.
+ type: Number
+ - contextPath: GitHub.PR.ReviewComments
+ description: The number of review comments on the pull request.
+ type: Number
+ - contextPath: GitHub.PR.MaintainerCanModify
+ description: Whether the maintainer can modify the pull request.
+ type: Boolean
+ - contextPath: GitHub.PR.Commits
+ description: The number of commits in the pull request.
+ type: Number
+ - contextPath: GitHub.PR.Additions
+ description: The number of additions in the pull request.
+ type: Number
+ - contextPath: GitHub.PR.Deletions
+ description: The number of deletions in the pull request.
+ type: Number
+ - contextPath: GitHub.PR.ChangedFiles
+ description: The number of changed files in the pull request.
+ type: Number
+ dockerimage: demisto/python3:3.7.3.286
+ subtype: python3
+ isfetch: true
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
diff --git a/Integrations/GitHub/GitHub_description.md b/Integrations/GitHub/GitHub_description.md
new file mode 100644
index 000000000000..71fe132988ff
--- /dev/null
+++ b/Integrations/GitHub/GitHub_description.md
@@ -0,0 +1,18 @@
+
+ To access the requested repository, you need to provide
+ the username, repository name, and API key as parameters for the
+ integration.
+
+ To generate your personal access token, visit:
+ https://github.com/settings/tokens
+
+ The integration supports the following workflows:
+ - Create an issue
+ - Update an issue
+ - Close an issue
+ - List all issues
+ - Search for specific issues
+ - Get the download count for each release in a repository
+
+
+
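+ For example, once an instance is configured, the commands this integration
+ adds can be run from the War Room (a hedged sketch; the branch names and
+ pull request number below are placeholders):
+
+ ```
+ !GitHub-create-pull-request title="Fix typo" head=typo-fix base=master
+ !GitHub-is-pr-merged pull_number=42
+ ```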
diff --git a/Integrations/GitHub/GitHub_image.png b/Integrations/GitHub/GitHub_image.png
new file mode 100644
index 000000000000..a696c09ff234
Binary files /dev/null and b/Integrations/GitHub/GitHub_image.png differ
diff --git a/Integrations/Gmail/.pylintrc b/Integrations/Gmail/.pylintrc
new file mode 100644
index 000000000000..3ee928719ba1
--- /dev/null
+++ b/Integrations/Gmail/.pylintrc
@@ -0,0 +1,6 @@
+[TYPECHECK]
+
+# List of class names for which member attributes should not be checked
+# (useful for classes with dynamically set attributes). This supports
+# qualified names.
+ignored-classes=Resource
diff --git a/Integrations/Gmail/CHANGELOG.md b/Integrations/Gmail/CHANGELOG.md
new file mode 100644
index 000000000000..e921a2dc3cf4
--- /dev/null
+++ b/Integrations/Gmail/CHANGELOG.md
@@ -0,0 +1,17 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+-
+
+## [19.9.1] - 2019-09-18
+ - Added 7 commands:
+ - ***gmail-hide-user-in-directory***
+ - ***gmail-set-password***
+ - ***gmail-get-autoreply***
+ - ***gmail-set-autoreply***
+ - ***gmail-delegate-user-mailbox***
+ - ***gmail-remove-delegated-mailbox***
+ - ***send-mail***
+ - Fixed an issue where emails from different timezones occasionally did not create incidents. This may cause duplicate incidents shortly after upgrading.
+
diff --git a/Integrations/Gmail/Gmail.py b/Integrations/Gmail/Gmail.py
new file mode 100644
index 000000000000..5a0ad2236242
--- /dev/null
+++ b/Integrations/Gmail/Gmail.py
@@ -0,0 +1,1785 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+import re
+import json
+import base64
+from datetime import datetime, timedelta
+import httplib2
+import urlparse
+from distutils.util import strtobool
+import sys
+from HTMLParser import HTMLParser, HTMLParseError
+from htmlentitydefs import name2codepoint
+from email.mime.audio import MIMEAudio
+from email.mime.base import MIMEBase
+from email.mime.image import MIMEImage
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+from email.header import Header
+import mimetypes
+import random
+import string
+from apiclient import discovery
+from oauth2client import service_account
+import itertools as it
+
+
+''' GLOBAL VARS '''
+ADMIN_EMAIL = None
+PRIVATE_KEY_CONTENT = None
+GAPPS_ID = None
+SCOPES = ['https://www.googleapis.com/auth/admin.directory.user.readonly']
+PROXY = demisto.params().get('proxy')
+DISABLE_SSL = demisto.params().get('insecure', False)
+FETCH_TIME = demisto.params().get('fetch_time', '1 days')
+
+''' HELPER FUNCTIONS '''
+
+
+class TextExtractHtmlParser(HTMLParser):
+ def __init__(self):
+ HTMLParser.__init__(self)
+ self._texts = [] # type: list
+ self._ignore = False
+
+ def handle_starttag(self, tag, attrs):
+ if tag in ('p', 'br') and not self._ignore:
+ self._texts.append('\n')
+ elif tag in ('script', 'style'):
+ self._ignore = True
+
+ def handle_startendtag(self, tag, attrs):
+ if tag in ('br', 'tr') and not self._ignore:
+ self._texts.append('\n')
+
+ def handle_endtag(self, tag):
+ if tag in ('p', 'tr'):
+ self._texts.append('\n')
+ elif tag in ('script', 'style'):
+ self._ignore = False
+
+ def handle_data(self, data):
+ if data and not self._ignore:
+ stripped = data.strip()
+ if stripped:
+ self._texts.append(re.sub(r'\s+', ' ', stripped))
+
+ def handle_entityref(self, name):
+ if not self._ignore and name in name2codepoint:
+ self._texts.append(unichr(name2codepoint[name]))
+
+ def handle_charref(self, name):
+ if not self._ignore:
+ if name.startswith('x'):
+ c = unichr(int(name[1:], 16))
+ else:
+ c = unichr(int(name))
+ self._texts.append(c)
+
+ def get_text(self):
+ return "".join(self._texts)
+
+
+def html_to_text(html):
+ parser = TextExtractHtmlParser()
+ try:
+ parser.feed(html)
+ parser.close()
+ except HTMLParseError:
+ pass
+ return parser.get_text()
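+# A minimal usage sketch (the snippet below is illustrative only):
+#   html_to_text('<p>Hello world</p>')  ->  '\nHello world\n'
+# <p>, <br> and <tr> tags become newlines; <script>/<style> bodies are dropped.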
+
+
+def get_http_client_with_proxy():
+ proxies = handle_proxy()
+ if not proxies or not proxies['https']:
+ raise Exception('https proxy value is empty. Check Demisto server configuration')
+ https_proxy = proxies['https']
+ if not https_proxy.startswith('https') and not https_proxy.startswith('http'):
+ https_proxy = 'https://' + https_proxy
+ parsed_proxy = urlparse.urlparse(https_proxy)
+ proxy_info = httplib2.ProxyInfo(
+ proxy_type=httplib2.socks.PROXY_TYPE_HTTP, # disable-secrets-detection
+ proxy_host=parsed_proxy.hostname,
+ proxy_port=parsed_proxy.port,
+ proxy_user=parsed_proxy.username,
+ proxy_pass=parsed_proxy.password)
+ return httplib2.Http(proxy_info=proxy_info, disable_ssl_certificate_validation=DISABLE_SSL)
+
+
+def get_credentials(additional_scopes=None, delegated_user=None):
+ """Gets valid user credentials from storage.
+
+ If nothing has been stored, or if the stored credentials are invalid,
+ the OAuth2 flow is completed to obtain the new credentials.
+
+ Returns:
+ Credentials, the obtained credential.
+ """
+ if not delegated_user or delegated_user == 'me':
+ delegated_user = ADMIN_EMAIL
+ scopes = SCOPES
+ if additional_scopes is not None:
+ scopes += additional_scopes
+
+ cred = service_account.ServiceAccountCredentials.from_json_keyfile_dict(json.loads(PRIVATE_KEY_CONTENT), # type: ignore
+ scopes=scopes)
+
+ return cred.create_delegated(delegated_user)
+
+
+def get_service(serviceName, version, additional_scopes=None, delegated_user=None):
+ credentials = get_credentials(additional_scopes=additional_scopes, delegated_user=delegated_user)
+ if PROXY or DISABLE_SSL:
+ http_client = credentials.authorize(get_http_client_with_proxy())
+ return discovery.build(serviceName, version, http=http_client)
+ return discovery.build(serviceName, version, credentials=credentials)
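+# Usage sketch (hedged): building a delegated Gmail client; the scope URL and
+# mailbox address below are illustrative placeholders, not integration defaults.
+#   service = get_service('gmail', 'v1',
+#                         additional_scopes=['https://www.googleapis.com/auth/gmail.readonly'],
+#                         delegated_user='analyst@example.com')
+#   service.users().messages().list(userId='me').execute()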
+
+
+def parse_mail_parts(parts):
+ body = u''
+ html = u''
+ attachments = [] # type: list
+ for part in parts:
+ if 'multipart' in part['mimeType']:
+ part_body, part_html, part_attachments = parse_mail_parts(
+ part['parts'])
+ body += part_body
+ html += part_html
+ attachments.extend(part_attachments)
+ elif len(part['filename']) == 0:
+ text = unicode(base64.urlsafe_b64decode(
+ part['body'].get('data', '').encode('ascii')), 'utf-8')
+ if 'text/html' in part['mimeType']:
+ html += text
+ else:
+ body += text
+
+ else:
+ if part['body'].get('attachmentId') is not None:
+ attachments.append({
+ 'ID': part['body']['attachmentId'],
+ 'Name': part['filename']
+ })
+
+ return body, html, attachments
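+# Worked example (shape only, assuming a typical two-part payload): for
+#   [{'mimeType': 'text/plain', 'filename': '', 'body': {'data': '<b64 text>'}},
+#    {'mimeType': 'application/pdf', 'filename': 'a.pdf', 'body': {'attachmentId': '1'}}]
+# the function returns (decoded plain text, u'', [{'ID': '1', 'Name': 'a.pdf'}]).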
+
+
+def localization_extract(time_from_mail):
+ if time_from_mail is None or len(time_from_mail) < 5:
+ return '-0000', 0
+
+ utc = time_from_mail[-5:]
+ if utc[0] != '-' and utc[0] != '+':
+ return '-0000', 0
+
+ for ch in utc[1:]:
+ if not ch.isdigit():
+ return '-0000', 0
+
+ delta_in_seconds = int(utc[0] + utc[1:3]) * 3600 + int(utc[0] + utc[3:]) * 60
+ return utc, delta_in_seconds
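+# Worked example: localization_extract('Mon, 26 Aug 2019 14:40:04 +0300') returns
+# ('+0300', 10800); a '-0530' suffix yields ('-0530', -19800); any string without
+# a trailing +/-HHMM offset falls back to ('-0000', 0).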
+
+
+def create_base_time(internal_date_timestamp, header_date):
+ """
+ Args:
+ internal_date_timestamp: The timestamp from the Gmail API response.
+ header_date: The date string from the email payload.
+
+ Returns: A date string in the sender's local time, in the format "Mon, 26 Aug 2019 14:40:04 +0300"
+
+ """
+ # internalDate timestamp has 13 digits, but an epoch timestamp counts the seconds since Jan 1st 1970
+ # (which currently has fewer than 13 digits), thus the need to cut the timestamp down to size.
+ timestamp_len = len(str(int(time.time())))
+ if len(str(internal_date_timestamp)) > timestamp_len:
+ internal_date_timestamp = int(str(internal_date_timestamp)[:timestamp_len])
+
+ utc, delta_in_seconds = localization_extract(header_date)
+ base_time = datetime.utcfromtimestamp(internal_date_timestamp) + \
+ timedelta(seconds=delta_in_seconds)
+ base_time = str(base_time.strftime('%a, %d %b %Y %H:%M:%S')) + " " + utc
+ return base_time
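+# Worked example: a 13-digit internalDate such as 1566822004000 is first cut down
+# to seconds (1566822004) to match len(str(int(time.time()))), then shifted by the
+# header's UTC offset, e.g. rendering as 'Mon, 26 Aug 2019 15:20:04 +0300'.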
+
+
+def get_email_context(email_data, mailbox):
+ context_headers = email_data.get('payload', {}).get('headers', [])
+ context_headers = [{'Name': v['name'], 'Value': v['value']}
+ for v in context_headers]
+ headers = dict([(h['Name'].lower(), h['Value']) for h in context_headers])
+ body = demisto.get(email_data, 'payload.body.data')
+ body = body.encode('ascii') if body is not None else ''
+ parsed_body = base64.urlsafe_b64decode(body)
+ if email_data.get('internalDate') is not None:
+ base_time = create_base_time(email_data.get('internalDate'), str(headers.get('date', '')))
+
+ else:
+ # in case no internalDate field exists, revert to extracting the date from the email payload itself.
+ # Note: this should not happen in any command other than gmail-move-mail, which returns neither the
+ # email payload nor internalDate
+ demisto.info("No InternalDate timestamp found - getting Date from mail payload - msg ID:" + str(email_data['id']))
+ base_time = str(headers.get('date', ''))
+
+ context_gmail = {
+ 'Type': 'Gmail',
+ 'Mailbox': ADMIN_EMAIL if mailbox == 'me' else mailbox,
+ 'ID': email_data['id'],
+ 'ThreadId': email_data['threadId'],
+ 'Labels': ', '.join(email_data['labelIds']),
+ 'Headers': context_headers,
+ 'Attachments': email_data.get('payload', {}).get('filename', ''),
+ # only for format 'raw'
+ 'RawData': email_data.get('raw'),
+ # only for format 'full' and 'metadata'
+ 'Format': headers.get('content-type', '').split(';')[0],
+ 'Subject': headers.get('subject'),
+ 'From': headers.get('from'),
+ 'To': headers.get('to'),
+ # only for format 'full'
+ 'Body': unicode(parsed_body, 'utf-8'),
+
+ # only for incident
+ 'Cc': headers.get('cc', []),
+ 'Bcc': headers.get('bcc', []),
+ 'Date': base_time,
+ 'Html': None,
+ }
+
+ context_email = {
+ 'ID': email_data['id'],
+ 'Headers': context_headers,
+ 'Attachments': {'entryID': email_data.get('payload', {}).get('filename', '')},
+ # only for format 'raw'
+ 'RawData': email_data.get('raw'),
+ # only for format 'full' and 'metadata'
+ 'Format': headers.get('content-type', '').split(';')[0],
+ 'Subject': headers.get('subject'),
+ 'From': headers.get('from'),
+ 'To': headers.get('to'),
+ # only for format 'full'
+ 'Body/Text': unicode(parsed_body, 'utf-8'),
+
+ 'CC': headers.get('cc', []),
+ 'BCC': headers.get('bcc', []),
+ 'Date': base_time,
+ 'Body/HTML': None,
+ }
+
+ if 'text/html' in context_gmail['Format']: # type: ignore
+ context_gmail['Html'] = context_gmail['Body']
+ context_gmail['Body'] = html_to_text(context_gmail['Body'])
+ context_email['Body/HTML'] = context_gmail['Html']
+ context_email['Body/Text'] = context_gmail['Body']
+
+ if 'multipart' in context_gmail['Format']: # type: ignore
+ context_gmail['Body'], context_gmail['Html'], context_gmail['Attachments'] = parse_mail_parts(
+ email_data.get('payload', {}).get('parts', []))
+ context_gmail['Attachment Names'] = ', '.join(
+ [attachment['Name'] for attachment in context_gmail['Attachments']]) # type: ignore
+ context_email['Body/Text'], context_email['Body/HTML'], context_email['Attachments'] = parse_mail_parts(
+ email_data.get('payload', {}).get('parts', []))
+ context_email['Attachment Names'] = ', '.join(
+ [attachment['Name'] for attachment in context_email['Attachments']]) # type: ignore
+
+ return context_gmail, headers, context_email
+
+
+TIME_REGEX = re.compile(r'^([\w,\d: ]*) (([+-]{1})(\d{2}):?(\d{2}))?[\s\w\(\)]*$')
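+# e.g. TIME_REGEX.findall('Mon, 26 Aug 2019 14:40:04 +0300 (IDT)') returns
+# [('Mon, 26 Aug 2019 14:40:04', '+0300', '+', '03', '00')]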
+
+
+def move_to_gmt(t):
+ # there is only one time reference in the string
+ base_time, _, sign, hours, minutes = TIME_REGEX.findall(t)[0]
+ if all([sign, hours, minutes]):
+ seconds = -1 * (int(sign + hours) * 3600 + int(sign + minutes) * 60)
+ parsed_time = datetime.strptime(
+ base_time, '%a, %d %b %Y %H:%M:%S') + timedelta(seconds=seconds)
+ return parsed_time.isoformat() + 'Z'
+ else:
+ return datetime.strptime(base_time, '%a, %d %b %Y %H:%M:%S').isoformat() + 'Z'
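+# Worked example: move_to_gmt('Mon, 26 Aug 2019 14:40:04 +0300') subtracts the
+# +0300 offset and returns '2019-08-26T11:40:04Z'.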
+
+
+def create_incident_labels(parsed_msg, headers):
+ labels = [
+ {'type': 'Email/ID', 'value': parsed_msg['ID']},
+ {'type': 'Email/subject', 'value': parsed_msg['Subject']},
+ {'type': 'Email/text', 'value': parsed_msg['Body']},
+ {'type': 'Email/from', 'value': parsed_msg['From']},
+ {'type': 'Email/html', 'value': parsed_msg['Html']},
+ ]
+ labels.extend([{'type': 'Email/to', 'value': to}
+ for to in headers.get('To', '').split(',')])
+ labels.extend([{'type': 'Email/cc', 'value': cc}
+ for cc in headers.get('Cc', '').split(',')])
+ labels.extend([{'type': 'Email/bcc', 'value': bcc}
+ for bcc in headers.get('Bcc', '').split(',')])
+ for key, val in headers.items():
+ labels.append({'type': 'Email/Header/' + key, 'value': val})
+
+ return labels
+
+
+def emails_to_entry(title, raw_emails, format_data, mailbox):
+ gmail_emails = []
+ emails = []
+ for email_data in raw_emails:
+ context_gmail, _, context_email = get_email_context(email_data, mailbox)
+ gmail_emails.append(context_gmail)
+ emails.append(context_email)
+
+ headers = {
+ 'minimal': ['Mailbox', 'ID', 'Labels', 'Attachment Names'],
+ 'raw': ['Mailbox', 'ID', 'Labels', 'Attachment Names', 'RawData'],
+ 'metadata': ['Mailbox', 'ID', 'Subject', 'From', 'To', 'Labels', 'Attachment Names', 'Format'],
+ 'full': ['Mailbox', 'ID', 'Subject', 'From', 'To', 'Labels', 'Attachment Names', 'Format', 'Body'],
+ }
+
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': raw_emails,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, gmail_emails, headers[format_data], removeNull=True),
+ 'EntryContext': {
+ 'Gmail(val.ID && val.ID == obj.ID)': gmail_emails,
+ 'Email(val.ID && val.ID == obj.ID)': emails
+ }
+ }
+
+
+def mail_to_incident(msg, service, user_key):
+ parsed_msg, headers, _ = get_email_context(msg, user_key)
+
+ file_names = []
+ command_args = {
+ 'messageId': parsed_msg['ID'],
+ 'userId': user_key,
+ }
+
+ for attachment in parsed_msg['Attachments']:
+ command_args['id'] = attachment['ID']
+ result = service.users().messages().attachments().get(**command_args).execute()
+ file_data = base64.urlsafe_b64decode(result['data'].encode('ascii'))
+
+ # save the attachment
+ file_result = fileResult(attachment['Name'], file_data)
+
+ # check for error
+ if file_result['Type'] == entryTypes['error']:
+ demisto.error(file_result['Contents'])
+ raise Exception(file_result['Contents'])
+
+ file_names.append({
+ 'path': file_result['FileID'],
+ 'name': attachment['Name'],
+ })
+ # the incident date is set to GMT; Demisto converts it to local time for display
+ gmt_time = move_to_gmt(parsed_msg['Date'])
+
+ incident = {
+ 'type': 'Gmail',
+ 'name': parsed_msg['Subject'],
+ 'details': parsed_msg['Body'],
+ 'labels': create_incident_labels(parsed_msg, headers),
+ 'occurred': gmt_time,
+ 'attachment': file_names,
+ 'rawJSON': json.dumps(parsed_msg),
+ }
+ return incident
+
+
+def organization_format(org_list):
+ if org_list:
+ return ','.join(str(org.get('name')) for org in org_list if org.get('name'))
+ else:
+ return None
+
+
+def users_to_entry(title, response):
+ context = []
+
+ for user_data in response:
+ name_data = user_data.get('name') or {}
+ username = name_data.get('givenName')
+ display = name_data.get('fullName')
+
+ context.append({
+ 'Type': 'Google',
+ 'ID': user_data.get('id'),
+ 'UserName': username,
+ 'Username': username, # adding to fit the new context standard
+ 'DisplayName': display,
+ 'Email': {'Address': user_data.get('primaryEmail')},
+ 'Gmail': {'Address': user_data.get('primaryEmail')},
+ 'Group': user_data.get('kind'),
+ 'Groups': user_data.get('kind'), # adding to fit the new context standard
+ 'CustomerId': user_data.get('customerId'),
+ 'Domain': user_data.get('primaryEmail').split('@')[1],
+ 'VisibleInDirectory': user_data.get('includeInGlobalAddressList'),
+
+ })
+ headers = ['Type', 'ID', 'Username',
+ 'DisplayName', 'Groups', 'CustomerId', 'Domain', 'OrganizationUnit', 'Email', 'VisibleInDirectory']
+
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, context, headers, removeNull=True),
+ 'EntryContext': {'Account(val.ID && val.Type && val.ID == obj.ID && val.Type == obj.Type)': context}
+ }
+
+
+def autoreply_to_entry(title, response, user_id):
+ autoreply_context = []
+ for autoreply_data in response:
+ autoreply_context.append({
+ 'EnableAutoReply': autoreply_data.get('enableAutoReply'),
+ 'ResponseBody': autoreply_data.get('responseBodyPlainText'),
+ 'ResponseSubject': autoreply_data.get('responseSubject'),
+ 'RestrictToContact': autoreply_data.get('restrictToContacts'),
+ 'RestrictToDomain': autoreply_data.get('restrictToDomain'),
+
+ })
+ headers = ['EnableAutoReply', 'ResponseBody',
+ 'ResponseSubject', 'RestrictToContact', 'RestrictToDomain']
+
+ account_context = {
+ "Address": user_id,
+ "AutoReply": autoreply_context
+ }
+
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': autoreply_context,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, autoreply_context, headers, removeNull=True),
+ 'EntryContext': {
+ 'Account.Gmail(val.Address == obj.Address)': account_context
+ }
+ }
+
+
+def sent_mail_to_entry(title, response, to, emailfrom, cc, bcc, bodyHtml, body, subject):
+ gmail_context = []
+ for mail_results_data in response:
+ gmail_context.append({
+ 'Type': "Gmail",
+ 'ID': mail_results_data.get('id'),
+ 'Labels': mail_results_data.get('labelIds'),
+ 'ThreadId': mail_results_data.get('threadId'),
+ 'To': ','.join(to),
+ 'From': emailfrom,
+ 'Cc': ','.join(cc) if len(cc) > 0 else None,
+ 'Bcc': ','.join(bcc) if len(bcc) > 0 else None,
+ 'Subject': subject,
+ 'Body': body,
+ 'Mailbox': ','.join(to)
+ })
+
+ headers = ['Type', 'ID', 'To', 'From', 'Cc', 'Bcc', 'Subject', 'Body', 'Labels',
+ 'ThreadId']
+
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, gmail_context, headers, removeNull=True),
+ 'EntryContext': {'Gmail.SentMail(val.ID && val.Type && val.ID == obj.ID && val.Type == obj.Type)': gmail_context}
+ }
+
+
+def roles_to_entry(title, response):
+ context = []
+ for role_data in response:
+ context.append({
+ 'ID': role_data['roleId'],
+ 'AssignedTo': role_data['assignedTo'],
+ 'RoleAssignmentId': role_data['roleAssignmentId'],
+ 'ScopeType': role_data['scopeType'],
+ 'Kind': role_data['kind'],
+ 'OrgUnitId': role_data.get('orgUnitId', ''),
+ })
+ headers = ['ID', 'AssignedTo', 'RoleAssignmentId',
+ 'ScopeType', 'Kind', 'OrgUnitId']
+
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, context, headers, removeNull=True),
+ 'EntryContext': {'Gmail.Role(val.ID && val.ID == obj.ID)': context}
+ }
+
+
+def tokens_to_entry(title, response):
+ context = []
+ for token_data in response:
+ context.append({
+ 'DisplayText': token_data.get('displayText'),
+ 'ClientId': token_data.get('clientId'),
+ 'Kind': token_data.get('kind'),
+ 'Scopes': token_data.get('scopes', []),
+ 'UserKey': token_data.get('userKey'),
+ })
+
+ headers = ['DisplayText', 'ClientId', 'Kind', 'Scopes', 'UserKey']
+
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, context, headers, removeNull=True),
+ 'EntryContext': {'Tokens(val.ClientId && val.ClientId == obj.ClientId)': context}
+ }
+
+
+def filters_to_entry(title, mailbox, response):
+ context = []
+ for filter_data in response:
+ context.append({
+ 'ID': filter_data.get('id'),
+ 'Mailbox': mailbox,
+ 'Criteria': filter_data.get('criteria'),
+ 'Action': filter_data.get('action'),
+ })
+
+ headers = ['ID', 'Criteria', 'Action', ]
+
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, context, headers, removeNull=True),
+ 'EntryContext': {'GmailFilter(val.ID && val.ID == obj.ID)': context}
+ }
+
+
+''' FUNCTIONS '''
+
+
+def list_users_command():
+ args = demisto.args()
+ domain = args.get('domain', ADMIN_EMAIL.split('@')[1]) # type: ignore
+ customer = args.get('customer')
+ event = args.get('event')
+ view_type = args.get('view-type-public-domain', 'admin_view')
+ query = args.get('query')
+ sort_order = args.get('sort-order')
+ max_results = args.get('max-results', 100)
+ show_deleted = bool(strtobool(args.get('show-deleted', 'false')))
+ projection = args.get('projection', 'basic')
+ custom_field_mask = args.get(
+ 'custom_field_mask') if projection == 'custom' else None
+
+ users = list_users(domain, customer, event, query, sort_order, view_type,
+ show_deleted, max_results, projection, custom_field_mask)
+ return users_to_entry('Users:', users)
+
+
+def list_users(domain, customer=None, event=None, query=None, sort_order=None, view_type='admin_view',
+ show_deleted=False, max_results=100, projection='basic', custom_field_mask=None):
+ command_args = {
+ 'domain': domain,
+ 'customer': customer,
+ 'event': event,
+ 'viewType': view_type,
+ 'query': query,
+ 'sortOrder': sort_order,
+ 'projection': projection,
+ 'showDeleted': show_deleted,
+ 'maxResults': max_results,
+ }
+ if projection == 'custom':
+ command_args['customFieldMask'] = custom_field_mask
+
+ service = get_service('admin', 'directory_v1')
+ result = service.users().list(**command_args).execute()
+
+ return result['users']
+
+
+def get_user_command():
+ args = demisto.args()
+ user_key = args.get('user-id')
+ view_type = args.get('view-type-public-domain')
+ projection = args.get('projection')
+ customer_field_mask = args.get('customer-field-mask')
+
+ result = get_user(user_key, view_type, projection, customer_field_mask)
+ return users_to_entry('User {}:'.format(user_key), [result])
+
+
+def get_user(user_key, view_type, projection, customer_field_mask=None):
+ command_args = {
+ 'userKey': user_key if user_key != 'me' else ADMIN_EMAIL,
+ 'projection': projection,
+ 'viewType': view_type,
+ }
+ if projection == 'custom':
+ command_args['customFieldMask'] = customer_field_mask
+
+ service = get_service('admin', 'directory_v1')
+ result = service.users().get(**command_args).execute()
+
+ return result
+
+
+def hide_user_command():
+ args = demisto.args()
+ user_key = args.get('user-id')
+ hide_value = args.get('visible-globally')
+ result = hide_user(user_key, hide_value)
+
+ return users_to_entry('User {}:'.format(user_key,), [result])
+
+
+def hide_user(user_key, hide_value):
+ command_args = {
+ 'userKey': user_key if user_key != 'me' else ADMIN_EMAIL,
+ 'body': {
+ 'includeInGlobalAddressList': hide_value,
+ }}
+
+ service = get_service('admin', 'directory_v1',
+ additional_scopes=['https://www.googleapis.com/auth/admin.directory.user'])
+ result = service.users().update(**command_args).execute()
+
+ return result
+
+
+def set_user_password_command():
+ args = demisto.args()
+ user_key = args.get('user-id')
+ password = args.get('password')
+ result = set_user_password(user_key, password)
+ return result
+
+
+def set_user_password(user_key, password):
+ command_args = {
+ 'userKey': user_key if user_key != 'me' else ADMIN_EMAIL,
+ 'body': {
+ 'password': password,
+ }}
+
+ service = get_service('admin', 'directory_v1',
+ additional_scopes=['https://www.googleapis.com/auth/admin.directory.user'])
+ service.users().update(**command_args).execute()
+
+ return 'User {} password has been set.'.format(command_args['userKey'])
+
+
+def get_autoreply_command():
+ args = demisto.args()
+ user_id = args.get('user-id', ADMIN_EMAIL)
+
+ autoreply_message = get_autoreply(user_id)
+
+ return autoreply_to_entry('User {}:'.format(user_id), [autoreply_message], user_id)
+
+
+def get_autoreply(user_id):
+ command_args = {
+ 'userId': user_id
+ }
+
+ service = get_service('gmail', 'v1',
+ additional_scopes=['https://mail.google.com', 'https://www.googleapis.com/auth/gmail.modify',
+ 'https://www.googleapis.com/auth/gmail.readonly',
+ 'https://www.googleapis.com/auth/gmail.settings.basic'],
+ delegated_user=user_id)
+ result = service.users().settings().getVacation(**command_args).execute()
+
+ return result
+
+
+def set_autoreply_command():
+ args = demisto.args()
+
+ user_id = args.get('user-id')
+ enable_autoreply = args.get('enable-autoReply')
+ response_subject = args.get('response-subject')
+ response_body_plain_text = args.get('response-body')
+
+ autoreply_message = set_autoreply(user_id, enable_autoreply, response_subject, response_body_plain_text)
+
+ return autoreply_to_entry('User {}:'.format(user_id), [autoreply_message], user_id)
+
+
+def set_autoreply(user_id, enable_autoreply, response_subject, response_body_plain_text):
+ command_args = {
+ 'userId': user_id if user_id != 'me' else ADMIN_EMAIL,
+ 'body': {
+ 'enableAutoReply': enable_autoreply,
+ 'responseSubject': response_subject,
+ 'responseBodyPlainText': response_body_plain_text,
+ }}
+
+ service = get_service('gmail', 'v1', additional_scopes=['https://www.googleapis.com/auth/gmail.settings.basic'])
+ result = service.users().settings().updateVacation(**command_args).execute()
+ return result
+
+
+def remove_delegate_user_mailbox_command():
+ args = demisto.args()
+ user_id = args.get('user-id')
+ delegate_email = args.get('removed-mail')
+ return delegate_user_mailbox(user_id, delegate_email, False)
+
+
+def delegate_user_mailbox_command():
+ args = demisto.args()
+ user_id = args.get('user-id')
+ delegate_email = args.get('delegate-email')
+ return delegate_user_mailbox(user_id, delegate_email, True)
+
+
+def delegate_user_mailbox(user_id, delegate_email, delegate_token):
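+ # delegate_token=True creates the delegation, False removes it; both the delegate and
+ # remove-delegate commands above route through here under the gmail.settings.sharing scope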
+ service = get_service('gmail', 'v1', additional_scopes=['https://www.googleapis.com/auth/gmail.settings.sharing'])
+ if delegate_token: # guardrails-disable-line
+ command_args = {
+ 'userId': user_id if user_id != 'me' else ADMIN_EMAIL,
+ 'body': {
+ 'delegateEmail': delegate_email,
+ }
+ }
+
+ service.users().settings().delegates().create(**command_args).execute()
+ return 'Email {} has been delegated'.format(delegate_email)
+
+ else:
+ command_args = {
+ 'userId': user_id if user_id != 'me' else ADMIN_EMAIL,
+ 'delegateEmail': delegate_email
+ }
+
+ service.users().settings().delegates().delete(**command_args).execute()
+ return 'Email {} has been removed from delegation'.format(delegate_email)
+
+
+def create_user_command():
+ args = demisto.args()
+ primary_email = args['email']
+ first_name = args['first-name']
+ family_name = args['family-name']
+ password = args.get('password', '')
+
+ if len(password) > 100 or len(password) < 8:
+ raise ValueError('password must be between 8 and 100 characters')
+
+ result = create_user(primary_email, first_name, family_name, password)
+ return users_to_entry('New User:', [result])
+
+
+def create_user(primary_email, first_name, family_name, password):
+ command_args = {
+ 'primaryEmail': primary_email,
+ 'name': {
+ 'givenName': first_name,
+ 'familyName': family_name,
+ 'fullName': '%s %s' % (first_name, family_name, ),
+ },
+ 'password': password
+ }
+
+ service = get_service(
+ 'admin',
+ 'directory_v1',
+ ['https://www.googleapis.com/auth/admin.directory.user'])
+ result = service.users().insert(body=command_args).execute()
+
+ return result
+
+
+def delete_user_command():
+ args = demisto.args()
+ user_key = args.get('user-id')
+
+ return delete_user(user_key)
+
+
+def delete_user(user_key):
+ command_args = {
+ 'userKey': user_key,
+ }
+
+ service = get_service(
+ 'admin',
+ 'directory_v1',
+ ['https://www.googleapis.com/auth/admin.directory.user'])
+ service.users().delete(**command_args).execute()
+
+ return 'User {} has been deleted.'.format(command_args['userKey'])
+
+
+def get_user_role_command():
+ args = demisto.args()
+ user_key = args['user-id']
+ user_key = ADMIN_EMAIL if user_key == 'me' else user_key
+
+ if GAPPS_ID is None:
+ raise ValueError('Must provide Immutable GoogleApps Id')
+
+ roles = get_user_role(user_key, GAPPS_ID)
+ return roles_to_entry('User Roles of %s:' % (user_key, ), roles)
+
+
+def get_user_role(user_key, customer):
+ command_args = {
+ 'customer': customer,
+ 'maxResults': 100,
+ }
+
+ service = get_service(
+ 'admin',
+ 'directory_v1',
+ ['https://www.googleapis.com/auth/admin.directory.rolemanagement.readonly',
+ 'https://www.googleapis.com/auth/admin.directory.rolemanagement'])
+ result = service.roleAssignments().list(**command_args).execute()
+
+ user_data = service.users().get(userKey=user_key).execute()
+
+ return [role for role in result['items'] if role['assignedTo'] == user_data['id']]
+
+
+def revoke_user_roles_command():
+ args = demisto.args()
+
+ user_key = args.get('user-id')
+ role_assignment_id = args['role-assignment-id']
+
+ revoke_user_roles(user_key, role_assignment_id)
+ return 'Role has been deleted.'
+
+
+def revoke_user_roles(user_id, role_assignment_id):
+ command_args = {
+ 'customer': GAPPS_ID,
+ 'roleAssignmentId': role_assignment_id,
+ }
+
+ if GAPPS_ID is None:
+ raise ValueError('Must provide Immutable GoogleApps Id')
+
+ service = get_service(
+ 'admin',
+ 'directory_v1',
+ ['https://www.googleapis.com/auth/admin.directory.rolemanagement'])
+ return service.roleAssignments().delete(**command_args).execute()
+
+
+def get_user_tokens_command():
+ args = demisto.args()
+ user_id = args.get('user-id')
+ user_id = ADMIN_EMAIL if user_id == 'me' else user_id
+
+ tokens = get_user_tokens(user_id)
+
+ return tokens_to_entry('Tokens:', tokens)
+
+
+def get_user_tokens(user_id):
+ command_args = {
+ 'userKey': user_id,
+ }
+
+ service = get_service(
+ 'admin',
+ 'directory_v1',
+ ['https://www.googleapis.com/auth/admin.directory.user.security'])
+ result = service.tokens().list(**command_args).execute()
+
+ return result.get('items', [])
+
+
+def search_all_mailboxes():
+ command_args = {
+ 'maxResults': 100,
+ 'domain': ADMIN_EMAIL.split('@')[1], # type: ignore
+ }
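+ # note: there is no page-token handling here, so only the first 100 directory users
+ # (maxResults above) have their mailboxes searched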
+
+ service = get_service('admin', 'directory_v1')
+ result = service.users().list(**command_args).execute()
+
+ entries = [search_command(user['primaryEmail'])
+ for user in result['users']]
+ return entries
+
+
+def search_command(mailbox=None):
+ args = demisto.args()
+
+ user_id = args.get('user-id') if mailbox is None else mailbox
+ mailbox = ADMIN_EMAIL if user_id == 'me' else user_id
+ subject = args.get('subject', '')
+ _from = args.get('from', '')
+ to = args.get('to', '')
+ before = args.get('before', '')
+ after = args.get('after', '')
+ filename = args.get('filename', '')
+ _in = args.get('in', '')
+
+ query = args.get('query', '')
+ fields = args.get('fields') # TODO
+ label_ids = [lbl for lbl in args.get('labels-ids', '').split(',') if lbl != '']
+ max_results = int(args.get('max-results', 100))
+ page_token = args.get('page-token')
+ include_spam_trash = args.get('include-spam-trash', False)
+ has_attachments = args.get('has-attachments')
+ has_attachments = None if has_attachments is None else bool(
+ strtobool(has_attachments))
+
+ if max_results > 500:
+ raise ValueError(
+ 'maxResults must be 500 or lower, got %s' % (max_results, ))
+
+ mails, q = search(user_id, subject, _from, to, before, after, filename, _in, query,
+ fields, label_ids, max_results, page_token, include_spam_trash, has_attachments)
+
+ res = emails_to_entry('Search in {}:\nquery: "{}"'.format(mailbox, q), mails, 'full', mailbox)
+ return res
+
+
+def search(user_id, subject='', _from='', to='', before='', after='', filename='', _in='', query='',
+ fields=None, label_ids=None, max_results=100, page_token=None, include_spam_trash=False,
+ has_attachments=None):
+ query_values = {
+ 'subject': subject,
+ 'from': _from,
+ 'to': to,
+ 'before': before,
+ 'after': after,
+ 'filename': filename,
+ 'in': _in,
+ 'has': 'attachment' if has_attachments else ''
+ }
+ q = ' '.join('%s:%s ' % (name, value, )
+ for name, value in query_values.iteritems() if value != '')
+ q = ('%s %s' % (q, query, )).strip()
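+ # e.g. subject='alert' and _from='john' produce q like 'subject:alert  from:john'
+ # (field order may vary since Python 2 dicts are unordered; the extra spaces are
+ # harmless to the Gmail API); any free-form `query` text is appended at the end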
+
+ command_args = {
+ 'userId': user_id,
+ 'q': q,
+ 'maxResults': max_results,
+ 'fields': fields,
+ 'labelIds': label_ids,
+ 'pageToken': page_token,
+ 'includeSpamTrash': include_spam_trash,
+ }
+ service = get_service(
+ 'gmail',
+ 'v1',
+ ['https://www.googleapis.com/auth/gmail.readonly'],
+ command_args['userId'])
+ result = service.users().messages().list(**command_args).execute()
+
+ return [get_mail(user_id, mail['id'], 'full') for mail in result.get('messages', [])], q
+
+
+def get_mail_command():
+ args = demisto.args()
+ user_id = args.get('user-id', ADMIN_EMAIL)
+ _id = args.get('message-id')
+ _format = args.get('format')
+
+ mail = get_mail(user_id, _id, _format)
+ return emails_to_entry('Email:', [mail], _format, user_id)
+
+
+def get_mail(user_id, _id, _format):
+ command_args = {
+ 'userId': user_id,
+ 'id': _id,
+ 'format': _format,
+ }
+
+ service = get_service(
+ 'gmail',
+ 'v1',
+ ['https://www.googleapis.com/auth/gmail.readonly'],
+ delegated_user=command_args['userId'])
+ result = service.users().messages().get(**command_args).execute()
+
+ return result
+
+
+def get_attachments_command():
+ args = demisto.args()
+ user_id = args.get('user-id')
+ _id = args.get('message-id')
+
+ attachments = get_attachments(user_id, _id)
+
+ return [fileResult(name, data) for name, data in attachments]
+
+
+def get_attachments(user_id, _id):
+ mail_args = {
+ 'userId': user_id,
+ 'id': _id,
+ 'format': 'full',
+ }
+ service = get_service(
+ 'gmail',
+ 'v1',
+ ['https://www.googleapis.com/auth/gmail.readonly'],
+ delegated_user=mail_args['userId'])
+ result = service.users().messages().get(**mail_args).execute()
+ result = get_email_context(result, user_id)[0]
+
+ command_args = {
+ 'userId': user_id,
+ 'messageId': _id,
+ }
+ files = []
+ for attachment in result['Attachments']:
+ command_args['id'] = attachment['ID']
+ result = service.users().messages().attachments().get(**command_args).execute()
+ file_data = base64.urlsafe_b64decode(result['data'].encode('ascii'))
+ files.append((attachment['Name'], file_data))
+
+ return files
+
+
+def move_mail_command():
+ args = demisto.args()
+ user_id = args.get('user-id')
+ _id = args.get('message-id')
+ add_labels = [lbl for lbl in args.get('add-labels', '').split(',') if lbl != '']
+ remove_labels = [lbl for lbl in args.get(
+ 'remove-labels', '').split(',') if lbl != '']
+
+ mail = move_mail(user_id, _id, add_labels, remove_labels)
+ return emails_to_entry('Email:', [mail], 'full', user_id)
+
+
+def move_mail(user_id, _id, add_labels, remove_labels):
+ command_args = {
+ 'userId': user_id,
+ 'id': _id,
+ 'body': {
+ 'addLabelIds': add_labels,
+ 'removeLabelIds': remove_labels,
+ }
+
+ }
+ service = get_service(
+ 'gmail',
+ 'v1',
+ ['https://www.googleapis.com/auth/gmail.modify'],
+ delegated_user=user_id)
+ result = service.users().messages().modify(**command_args).execute()
+
+ return result
+
+
+def move_mail_to_mailbox_command():
+ args = demisto.args()
+ src_user_id = args.get('src-user-id')
+ message_id = args.get('message-id')
+ dst_user_id = args.get('dst-user-id')
+
+ new_mail_id = move_mail_to_mailbox(src_user_id, message_id, dst_user_id)
+
+ mail = get_mail(dst_user_id, new_mail_id, 'full')
+ return emails_to_entry('Email:', [mail], 'full', dst_user_id)
+
+
+def move_mail_to_mailbox(src_mailbox, message_id, dst_mailbox):
+ # get the original mail
+ mail = get_mail(src_mailbox, message_id, 'raw')
+
+ # import the mail to the destination mailbox
+ command_args = {
+ 'userId': dst_mailbox,
+ 'body': {
+ 'raw': mail['raw'],
+ }
+ }
+ service = get_service(
+ 'gmail',
+ 'v1',
+ ['https://www.googleapis.com/auth/gmail.modify'],
+ delegated_user=dst_mailbox)
+ result = service.users().messages().import_(**command_args).execute()
+
+ # delete the original mail
+ delete_mail(src_mailbox, message_id, True)
+
+ return result['id']
+
+
+def delete_mail_command():
+ args = demisto.args()
+ user_id = args['user-id']
+ _id = args['message-id']
+ permanent = bool(strtobool(args.get('permanent', 'false')))
+
+ return delete_mail(user_id, _id, permanent)
+
+
+def delete_mail(user_id, _id, permanent):
+ command_args = {
+ 'userId': user_id,
+ 'id': _id,
+ }
+
+ service = get_service(
+ 'gmail',
+ 'v1',
+ ['https://mail.google.com',
+ 'https://www.googleapis.com/auth/gmail.modify'],
+ delegated_user=command_args['userId'])
+ if permanent:
+ service.users().messages().delete(**command_args).execute()
+ return 'Email has been successfully deleted.'
+ else:
+ service.users().messages().trash(**command_args).execute()
+ return 'Email has been successfully moved to trash.'
+
+
+def get_thread_command():
+ args = demisto.args()
+
+ user_id = args.get('user-id', ADMIN_EMAIL)
+ _id = args.get('thread-id')
+ _format = args.get('format')
+
+ messages = get_thread(user_id, _id, _format)
+
+ return emails_to_entry('Emails of Thread:', messages, _format, user_id)
+
+
+def get_thread(user_id, _id, _format):
+ command_args = {
+ 'userId': user_id,
+ 'id': _id,
+ 'format': _format
+ }
+
+ service = get_service(
+ 'gmail',
+ 'v1',
+ ['https://www.googleapis.com/auth/gmail.readonly'],
+ delegated_user=user_id)
+ result = service.users().threads().get(**command_args).execute()
+
+ return result['messages']
+
+
+def add_delete_filter_command():
+ args = demisto.args()
+
+ user_id = args.get('user-id', ADMIN_EMAIL)
+ user_id = user_id if user_id.lower() != 'me' else ADMIN_EMAIL
+ _from = args.get('email-address')
+
+ _filter = add_filter(user_id, _from=_from, add_labels=['TRASH', ])
+
+ return filters_to_entry('New filter:', user_id, [_filter])
+
+
+def add_filter_command():
+ args = demisto.args()
+
+ user_id = args.get('user-id', ADMIN_EMAIL)
+ user_id = user_id if user_id.lower() != 'me' else ADMIN_EMAIL
+ _from = args.get('from')
+ to = args.get('to')
+ subject = args.get('subject')
+ query = args.get('query')
+ has_attachments = args.get('has-attachments')
+ size = args.get('size')
+ size_comparison = args.get('size-comparison')
+ forward = args.get('forward')
+ add_labels = args.get('add-labels', '').split(',')
+ add_labels = add_labels if any(add_labels) else None
+ remove_labels = args.get('remove-labels', '').split(',')
+ remove_labels = remove_labels if any(remove_labels) else None
+
+ _filter = add_filter(user_id,
+ _from=_from,
+ to=to,
+ subject=subject,
+ query=query,
+ has_attachments=has_attachments,
+ size=size,
+ size_comparison=size_comparison,
+ forward=forward,
+ add_labels=add_labels,
+ remove_labels=remove_labels,
+ )
+
+ return filters_to_entry('New filter:', user_id, [_filter])
+
+
+def add_filter(user_id, _from=None, to=None, subject=None, query=None, has_attachments=None, size=None,
+ size_comparison=None, forward=None, add_labels=None, remove_labels=None):
+ command_args = {
+ 'userId': user_id,
+ 'body': {
+ 'criteria': {},
+ 'action': {},
+ }
+ }
+
+ if _from is not None:
+ command_args['body']['criteria']['from'] = _from
+ if to is not None:
+ command_args['body']['criteria']['to'] = to
+ if subject is not None:
+ command_args['body']['criteria']['subject'] = subject
+ if query is not None:
+ command_args['body']['criteria']['query'] = query
+ if has_attachments is not None:
+ command_args['body']['criteria']['hasAttachment'] = has_attachments
+ if size is not None:
+ command_args['body']['criteria']['size'] = size
+ if size_comparison is not None:
+ command_args['body']['criteria']['sizeComparison'] = size_comparison
+ if add_labels is not None:
+ command_args['body']['action']['addLabelIds'] = add_labels
+ if remove_labels is not None:
+ command_args['body']['action']['removeLabelIds'] = remove_labels
+ if forward is not None:
+ command_args['body']['action']['forward'] = forward
+
+ service = get_service(
+ 'gmail',
+ 'v1',
+ ['https://www.googleapis.com/auth/gmail.settings.basic'],
+ delegated_user=user_id)
+ result = service.users().settings().filters().create(**command_args).execute()
+
+ return result
+
+
+def list_filters_command():
+ args = demisto.args()
+
+ user_id = args.get('user-id', ADMIN_EMAIL)
+ user_id = user_id if user_id.lower() != 'me' else ADMIN_EMAIL
+ address = args.get('address')
+ limit = int(args.get('limit', 100))
+
+ filters = list_filters(
+ user_id,
+ address=address,
+ limit=limit)
+
+ return filters_to_entry('filters:', user_id, filters)
+
+
+def list_filters(user_id, address=None, limit=100):
+ command_args = {
+ 'userId': user_id,
+ }
+ service = get_service(
+ 'gmail',
+ 'v1',
+ ['https://www.googleapis.com/auth/gmail.settings.basic'],
+ delegated_user=user_id)
+ result = service.users().settings().filters().list(**command_args).execute()
+ filters = result.get('filter', [])
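+ # keep only filters whose criteria name the address as sender ('from') or recipient ('to')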
+ if address is not None:
+ filters = [f for f in filters if address in {f['criteria'].get('from'), f['criteria'].get('to')}]
+
+ return filters[:limit]
+
+
+def remove_filter_command():
+ args = demisto.args()
+
+ user_id = args.get('user-id', ADMIN_EMAIL)
+ ids = args.get('filter_ids', '')
+ if isinstance(ids, STRING_TYPES): # alternatively it could be an array
+ ids = ids.split(',')
+
+ for _id in ids:
+ remove_filter(user_id, _id)
+
+ return 'Filters were removed successfully.'
+
+
+def remove_filter(user_id, _id):
+ command_args = {
+ 'userId': user_id,
+ 'id': _id
+ }
+
+ service = get_service(
+ 'gmail',
+ 'v1',
+ ['https://www.googleapis.com/auth/gmail.settings.basic'],
+ delegated_user=user_id)
+ result = service.users().settings().filters().delete(**command_args).execute()
+
+ return result
+
+
+'''MAIL SENDER FUNCTIONS'''
+
+
+def randomword(length):
+ """
+ Generate a random string of given length
+ """
+ letters = string.ascii_lowercase
+ return ''.join(random.choice(letters) for i in range(length))
+
+
+def header(s):
+ if not s:
+ return None
+
+ s_no_newlines = ' '.join(s.splitlines())
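+ # Header(..., 'utf-8') RFC 2047-encodes non-ASCII values so subjects and recipient
+ # names survive transport; newlines are flattened above to avoid header folding issues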
+ return Header(s_no_newlines, 'utf-8')
+
+
+def template_params(paramsStr):
+ """
+ Translate the template params if they exist from the context
+ """
+ actualParams = {}
+ if paramsStr:
+ try:
+ params = json.loads(paramsStr)
+
+ except ValueError as e:
+ return_error('Unable to parse templateParams: {}'.format(str(e)))
+ # Build a simple key/value
+
+ for p in params:
+ if params[p].get('value'):
+ actualParams[p] = params[p]['value']
+
+ elif params[p].get('key'):
+ actualParams[p] = demisto.dt(demisto.context(), params[p]['key'])
+
+ return actualParams
+
+ else:
+ return None
+
+
+def transient_attachments(transientFile, transientFileContent, transientFileCID):
+ if transientFile is None or len(transientFile) == 0:
+ return []
+
+ if transientFileContent is None:
+ transientFileContent = []
+
+ if transientFileCID is None:
+ transientFileCID = []
+
+ attachments = []
+ for file_name, file_data, file_cid in it.izip_longest(transientFile, transientFileContent, transientFileCID):
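+ # izip_longest pads the shorter lists with None so names, contents and CIDs stay
+ # aligned; iteration stops at the first missing file name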
+ if file_name is None:
+ break
+
+ content_type, encoding = mimetypes.guess_type(file_name)
+ if content_type is None or encoding is not None:
+ content_type = 'application/octet-stream'
+
+ main_type, sub_type = content_type.split('/', 1)
+
+ attachments.append({
+ 'name': file_name,
+ 'maintype': main_type,
+ 'subtype': sub_type,
+ 'data': file_data,
+ 'cid': file_cid
+ })
+
+ return attachments
+
+
+def handle_html(htmlBody):
+ """
+ Extract all data-url content from within the html and return as separate attachments.
+ Due to security implications, we support only images here
+ We might not have Beautiful Soup so just do regex search
+ """
+ attachments = []
+ cleanBody = ''
+ lastIndex = 0
+ for i, m in enumerate(
+ re.finditer(r'<img.+?src=\"(data:(image\/.+?);base64,([a-zA-Z0-9+/=\r\n]+?))\"', htmlBody, re.I)):
+ maintype, subtype = m.group(2).split('/', 1)
+ att = {
+ 'maintype': maintype,
+ 'subtype': subtype,
+ 'data': base64.b64decode(m.group(3)),
+ 'name': 'image%d.%s' % (i, subtype)
+ }
+ att['cid'] = '%s@%s.%s' % (hash(att['data']), randomword(8), randomword(8))
+ attachments.append(att)
+ # swap the inline data-url for a cid: reference to the extracted attachment
+ cleanBody += htmlBody[lastIndex:m.start(1)] + 'cid:' + att['cid']
+ lastIndex = m.end() - 1
+
+ cleanBody += htmlBody[lastIndex:]
+ return cleanBody, attachments
+
+
+def collect_inline_attachments(attach_cids):
+ """
+ Collects all inline attachments, used only when the email has an HTML body
+ """
+ inline_attachment = []
+ if attach_cids is not None and len(attach_cids) > 0:
+ for cid in attach_cids:
+ file = demisto.getFilePath(cid)
+ file_path = file['path']
+
+ content_type, encoding = mimetypes.guess_type(file_path)
+ if content_type is None or encoding is not None:
+ content_type = 'application/octet-stream'
+ main_type, sub_type = content_type.split('/', 1)
+
+ fp = open(file_path, 'rb')
+ data = fp.read()
+ fp.close()
+
+ inline_attachment.append({
+ 'ID': cid,
+ 'name': file['name'],
+ 'maintype': main_type,
+ 'subtype': sub_type,
+ 'data': data,
+ 'cid': cid
+ })
+
+ return inline_attachment
+
+
+def collect_manual_attachments():
+ attachments = []
+ for attachment in demisto.getArg('manualAttachObj') or []:
+ res = demisto.getFilePath(os.path.basename(attachment['RealFileName']))
+
+ path = res['path']
+ content_type, encoding = mimetypes.guess_type(path)
+ if content_type is None or encoding is not None:
+ content_type = 'application/octet-stream'
+ maintype, subtype = content_type.split('/', 1)
+
+ if maintype == 'text':
+ with open(path) as fp:
+ data = fp.read()
+ else:
+ with open(path, 'rb') as fp:
+ data = fp.read()
+ attachments.append({
+ 'name': attachment['FileName'],
+ 'maintype': maintype,
+ 'subtype': subtype,
+ 'data': data,
+ 'cid': None
+ })
+
+ return attachments
+
+
+def collect_attachments(entry_ids, file_names):
+ """
+ Creates a dictionary containing all the info about all attachments
+ """
+ attachments = []
+ entry_number = 0
+ if entry_ids is not None and len(entry_ids) > 0:
+ for entry_id in entry_ids:
+ file = demisto.getFilePath(entry_id)
+ file_path = file['path']
+ if file_names is not None and len(file_names) > entry_number and file_names[entry_number] is not None:
+ file_name = file_names[entry_number]
+
+ else:
+ file_name = file['name']
+
+ content_type, encoding = mimetypes.guess_type(file_path)
+ if content_type is None or encoding is not None:
+ content_type = 'application/octet-stream'
+
+ main_type, sub_type = content_type.split('/', 1)
+
+ fp = open(file_path, 'rb')
+ data = fp.read()
+ fp.close()
+ attachments.append({
+ 'ID': entry_id,
+ 'name': file_name,
+ 'maintype': main_type,
+ 'subtype': sub_type,
+ 'data': data,
+ 'cid': None
+ })
+ entry_number += 1
+ return attachments
+
+
+def attachment_handler(message, attachments):
+ """
+ Adds the attachments to the email message
+ """
+ for att in attachments:
+ if att['maintype'] == 'text':
+ msg_txt = MIMEText(att['data'], att['subtype'], 'utf-8')
+ if att['cid'] is not None:
+ msg_txt.add_header('Content-Disposition', 'inline', filename=att['name'])
+ msg_txt.add_header('Content-ID', '<' + att['name'] + '>')
+
+ else:
+ msg_txt.add_header('Content-Disposition', 'attachment', filename=att['name'])
+ message.attach(msg_txt)
+
+ elif att['maintype'] == 'image':
+ msg_img = MIMEImage(att['data'], att['subtype'])
+ if att['cid'] is not None:
+ msg_img.add_header('Content-Disposition', 'inline', filename=att['name'])
+ msg_img.add_header('Content-ID', '<' + att['name'] + '>')
+
+ else:
+ msg_img.add_header('Content-Disposition', 'attachment', filename=att['name'])
+ message.attach(msg_img)
+
+ elif att['maintype'] == 'audio':
+ msg_aud = MIMEAudio(att['data'], att['subtype'])
+ if att['cid'] is not None:
+ msg_aud.add_header('Content-Disposition', 'inline', filename=att['name'])
+ msg_aud.add_header('Content-ID', '<' + att['name'] + '>')
+
+ else:
+ msg_aud.add_header('Content-Disposition', 'attachment', filename=att['name'])
+ message.attach(msg_aud)
+
+ else:
+ msg_base = MIMEBase(att['maintype'], att['subtype'])
+ msg_base.set_payload(att['data'])
+ if att['cid'] is not None:
+ msg_base.add_header('Content-Disposition', 'inline', filename=att['name'])
+ msg_base.add_header('Content-ID', '<' + att['name'] + '>')
+
+ else:
+ msg_base.add_header('Content-Disposition', 'attachment', filename=att['name'])
+ message.attach(msg_base)
+
+
+def send_mail(emailto, emailfrom, subject, body, entry_ids, cc, bcc, htmlBody, replyTo, file_names, attach_cid,
+ transientFile, transientFileContent, transientFileCID, additional_headers, templateParams):
+ message = MIMEMultipart()
+ message['to'] = header(','.join(emailto))
+ message['cc'] = header(','.join(cc))
+ message['bcc'] = header(','.join(bcc))
+ message['from'] = header(emailfrom)
+ message['subject'] = header(subject)
+ message['reply-to'] = header(replyTo)
+
+ templateParams = template_params(templateParams)
+ if templateParams is not None:
+ if body is not None:
+ body = body.format(**templateParams)
+
+ if htmlBody is not None:
+ htmlBody = htmlBody.format(**templateParams)
+
+ if additional_headers is not None and len(additional_headers) > 0:
+ for h in additional_headers:
+ # split only on the first '=' so header values may themselves contain '='
+ header_name_and_value = h.split('=', 1)
+ message[header_name_and_value[0]] = header(header_name_and_value[1])
+
+ msg = MIMEText(body, 'plain', 'utf-8')
+ message.attach(msg)
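+ # the message is multipart/mixed with the plain-text part attached first; when
+ # htmlBody is set, an HTML part and any inline CID images are attached below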
+ htmlAttachments = [] # type: list
+ inlineAttachments = [] # type: list
+
+ if htmlBody is not None:
+ htmlBody, htmlAttachments = handle_html(htmlBody)
+ msg = MIMEText(htmlBody, 'html', 'utf-8')
+ message.attach(msg)
+ if attach_cid is not None and len(attach_cid) > 0:
+ inlineAttachments = collect_inline_attachments(attach_cid)
+
+ else:
+ # without an HTML body there is nothing to reference CIDs from, so drop transient CIDs
+ transientFileCID = None
+
+ attachments = collect_attachments(entry_ids, file_names)
+ manual_attachments = collect_manual_attachments()
+ transientAttachments = transient_attachments(transientFile, transientFileContent, transientFileCID)
+
+ attachments = attachments + htmlAttachments + transientAttachments + inlineAttachments + manual_attachments
+ attachment_handler(message, attachments)
+
+ encoded_message = base64.urlsafe_b64encode(message.as_string())
+ command_args = {
+ 'userId': emailfrom,
+ 'body': {
+ 'raw': encoded_message,
+ }
+ }
+ service = get_service('gmail', 'v1', additional_scopes=['https://www.googleapis.com/auth/gmail.compose',
+ 'https://www.googleapis.com/auth/gmail.send'])
+ result = service.users().messages().send(**command_args).execute()
+ return result
+
+
+def send_mail_command():
+ args = demisto.args()
+ emailto = argToList(args.get('to'))
+ emailfrom = args.get('from')
+ body = args.get('body')
+ subject = args.get('subject')
+ entry_ids = argToList(args.get('attachIDs'))
+ cc = argToList(args.get('cc'))
+ bcc = argToList(args.get('bcc'))
+ htmlBody = args.get('htmlBody')
+ replyTo = args.get('replyTo')
+ file_names = argToList(args.get('attachNames'))
+ attchCID = argToList(args.get('attachCIDs'))
+ transientFile = argToList(args.get('transientFile'))
+ transientFileContent = argToList(args.get('transientFileContent'))
+ transientFileCID = argToList(args.get('transientFileCID'))
+ additional_headers = argToList(args.get('additionalHeader'))
+ template_param = args.get('templateParams')
+
+ if emailfrom is None:
+ emailfrom = ADMIN_EMAIL
+
+ result = send_mail(emailto, emailfrom, subject, body, entry_ids, cc, bcc, htmlBody,
+ replyTo, file_names, attchCID, transientFile, transientFileContent,
+ transientFileCID, additional_headers, template_param)
+ return sent_mail_to_entry('Email sent:', [result], emailto, emailfrom, cc, bcc, htmlBody, body, subject)
+
+
+'''FETCH INCIDENTS'''
+
+
+def fetch_incidents():
+ params = demisto.params()
+ user_key = params.get('queryUserKey')
+ user_key = user_key if user_key else ADMIN_EMAIL
+ query = '' if params['query'] is None else params['query']
+ last_run = demisto.getLastRun()
+ last_fetch = last_run.get('gmt_time')
+ # handle first time fetch - start FETCH_TIME (default 1 day) back from the current GMT time
+ if last_fetch is None:
+ last_fetch, _ = parse_date_range(date_range=FETCH_TIME, utc=True, to_timestamp=False)
+ last_fetch = str(last_fetch.isoformat()).split('.')[0] + 'Z'
+
+ last_fetch = datetime.strptime(last_fetch, '%Y-%m-%dT%H:%M:%SZ')
+ current_fetch = last_fetch
+ service = get_service(
+ 'gmail',
+ 'v1',
+ ['https://www.googleapis.com/auth/gmail.readonly'],
+ user_key)
+
+ query += last_fetch.strftime(' after:%Y/%m/%d')
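+ # Gmail's 'after:' operator has day granularity, so this query over-fetches; the
+ # per-message timestamp comparison below filters out already-fetched mails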
+ LOG('GMAIL: fetch parameters:\nuser: %s\nquery=%s\nfetch time: %s' %
+ (user_key, query, last_fetch, ))
+
+ result = service.users().messages().list(
+ userId=user_key, maxResults=100, q=query).execute()
+
+ incidents = []
+ # so far, so good
+ LOG('GMAIL: possible new incidents are %s' % (result, ))
+ for msg in result.get('messages', []):
+ msg_result = service.users().messages().get(
+ id=msg['id'], userId=user_key).execute()
+ incident = mail_to_incident(msg_result, service, user_key)
+ temp_date = datetime.strptime(
+ incident['occurred'], '%Y-%m-%dT%H:%M:%SZ')
+ # update last run
+ if temp_date > last_fetch:
+ last_fetch = temp_date + timedelta(seconds=1)
+
+ # avoid duplication due to weak time query
+ if temp_date > current_fetch:
+ incidents.append(incident)
+
+ demisto.info('extracted {} incidents'.format(len(incidents)))
+ demisto.setLastRun({'gmt_time': last_fetch.isoformat().split('.')[0] + 'Z'})
+ return incidents
+
+
+def main():
+ global ADMIN_EMAIL, PRIVATE_KEY_CONTENT, GAPPS_ID
+ ADMIN_EMAIL = demisto.params()['adminEmail'].get('identifier', '')
+ PRIVATE_KEY_CONTENT = demisto.params()['adminEmail'].get('password', '{}')
+ GAPPS_ID = demisto.params().get('gappsID')
+ ''' EXECUTION CODE '''
+ COMMANDS = {
+ 'gmail-list-users': list_users_command,
+ 'gmail-get-user': get_user_command,
+ 'gmail-create-user': create_user_command,
+ 'gmail-delete-user': delete_user_command,
+ 'gmail-get-user-roles': get_user_role_command,
+ 'gmail-revoke-user-role': revoke_user_roles_command,
+ 'gmail-get-tokens-for-user': get_user_tokens_command,
+ 'gmail-search-all-mailboxes': search_all_mailboxes,
+ 'gmail-search': search_command,
+ 'gmail-get-mail': get_mail_command,
+ 'gmail-get-attachments': get_attachments_command,
+ 'gmail-move-mail': move_mail_command,
+ 'gmail-move-mail-to-mailbox': move_mail_to_mailbox_command,
+ 'gmail-delete-mail': delete_mail_command,
+ 'gmail-get-thread': get_thread_command,
+ 'gmail-add-filter': add_filter_command,
+ 'gmail-add-delete-filter': add_delete_filter_command,
+ 'gmail-list-filters': list_filters_command,
+ 'gmail-remove-filter': remove_filter_command,
+ 'gmail-hide-user-in-directory': hide_user_command,
+ 'gmail-set-password': set_user_password_command,
+ 'gmail-get-autoreply': get_autoreply_command,
+ 'gmail-set-autoreply': set_autoreply_command,
+ 'gmail-delegate-user-mailbox': delegate_user_mailbox_command,
+ 'gmail-remove-delegated-mailbox': remove_delegate_user_mailbox_command,
+ 'send-mail': send_mail_command
+ }
+ command = demisto.command()
+ LOG('GMAIL: command is %s' % (command, ))
+ try:
+ if command == 'test-module':
+ list_users(ADMIN_EMAIL.split('@')[1])
+ demisto.results('ok')
+ sys.exit(0)
+
+ if command == 'fetch-incidents':
+ demisto.incidents(fetch_incidents())
+ sys.exit(0)
+
+ cmd_func = COMMANDS.get(command)
+ if cmd_func is None:
+ raise NotImplementedError(
+ 'Command "{}" is not implemented.'.format(command))
+
+ else:
+ demisto.results(cmd_func()) # type: ignore
+
+ except Exception as e:
+ import traceback
+ if command == 'fetch-incidents':
+ LOG(traceback.format_exc())
+ LOG.print_log()
+ raise
+
+ else:
+ return_error('GMAIL: {}'.format(str(e)), traceback.format_exc())
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/Gmail/Gmail.yml b/Integrations/Gmail/Gmail.yml
new file mode 100644
index 000000000000..ba02e733f8ad
--- /dev/null
+++ b/Integrations/Gmail/Gmail.yml
@@ -0,0 +1,1323 @@
+commonfields:
+ id: Gmail
+ version: -1
+name: Gmail
+display: Gmail
+category: Authentication
+description: Gmail API and user management (This integration replaces the Gmail functionality
+ in the GoogleApps API and G Suite integration).
+configuration:
+- display: Email of user with admin privileges (the Password refers to the Service Account private key)
+ name: adminEmail
+ defaultvalue: ""
+ type: 9
+ required: true
+- display: Immutable Google Apps Id
+ name: gappsID
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Events query (e.g. "from:example@demisto.com")
+ name: query
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Events user key (e.g. example@demisto.com)
+ name: queryUserKey
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+- display: First fetch timestamp, in days.
+ name: fetch_time
+ defaultvalue: 1 days
+ type: 0
+ required: false
+script:
+ script: '-'
+ type: python
+ subtype: python2
+ commands:
+ - name: gmail-delete-user
+ arguments:
+ - name: user-id
+ required: true
+ default: true
+ description: The user's email address. The "me" special value can be used to indicate
+ the authenticated user.
+ description: Deletes a Gmail user.
+ - name: gmail-get-tokens-for-user
+ arguments:
+ - name: user-id
+ required: true
+ default: true
+ description: The user's email address. The "me" special value can be used to indicate
+ the authenticated user.
+ description: Lists all tokens associated with a specified user's applications.
+ - name: gmail-get-user
+ arguments:
+ - name: user-id
+ required: true
+ default: true
+ description: The user's email address. The "me" special value can be used to indicate
+ the authenticated user.
+ - name: projection
+ auto: PREDEFINED
+ predefined:
+ - basic
+ - custom
+ - full
+ description: 'The subset of fields to fetch for the user. Can be: "basic": Do not include any custom fields for the user (default), "custom":
+ Includes custom fields from schemas requested in custom-field-mask, "full": Includes all fields associated with the user.'
+ defaultValue: basic
+ - name: view-type-public-domain
+ auto: PREDEFINED
+ predefined:
+ - admin_view
+ - domain_public
+ description: 'Whether to fetch the administrator or public view of the user. Can be "admin_view" (default), which includes both administrator and domain-public fields; or "domain_public", which includes user fields
+ that are publicly visible to other users in the domain.'
+ defaultValue: admin_view
+ - name: custom-field-mask
+ description: A comma separated list of schema names. All fields from these schemas
+ are fetched. This should only be set when projection=custom.
+ outputs:
+ - contextPath: Account.Type
+ description: The account type. For example, "AD", "LocalOS", "Google", "AppleID", and so on.
+ type: String
+ - contextPath: Account.ID
+ description: The unique ID for the account (integration specific). For AD accounts
+ this is the Distinguished Name (DN).
+ type: String
+ - contextPath: Account.DisplayName
+ description: The display name.
+ type: string
+ - contextPath: Account.Gmail.Address
+ description: Email assigned with the current account.
+ type: string
+ - contextPath: Account.Email.Address
+ description: The email address of the account.
+ type: String
+ - contextPath: Account.Groups
+ description: Groups to which the account belongs (integration specific). For
+ example, for AD, these are the groups in which the account is a member.
+ type: String
+ - contextPath: Account.Domain
+ description: The domain of the account.
+ type: String
+ - contextPath: Account.Username
+ description: The account username in the relevant system.
+ type: String
+ - contextPath: Account.OrganizationUnit
+ description: The Organization Unit (OU) of the account.
+ type: String
+ description: Gets information for a specified user.
+ - name: gmail-get-user-roles
+ arguments:
+ - name: user-id
+ required: true
+ default: true
+ description: The user's email address. The "me" special value can be used to indicate
+ the authenticated user.
+ outputs:
+ - contextPath: GoogleApps.Role.RoleAssignmentId
+ description: The unique ID of the role assignment.
+ type: string
+ - contextPath: GoogleApps.Role.ScopeType
+ description: The scope type of the role.
+ type: string
+ - contextPath: GoogleApps.Role.Kind
+ description: The kind of the Role.
+ type: string
+ - contextPath: GoogleApps.Role.OrgUnitId
+ description: Organization in which user was assigned.
+ type: string
+ - contextPath: GoogleApps.Role.ID
+ description: The inner role ID.
+ type: string
+ - contextPath: GoogleApps.Role.AssignedTo
+ description: User ID who was assigned to the role.
+ type: string
+ description: Retrieves a list of all Google roles for a specified user.
+ - name: gmail-get-attachments
+ arguments:
+ - name: message-id
+ required: true
+ description: The ID of the message to retrieve.
+ - name: user-id
+ required: true
+ description: The user's email address. The "me" special value can be used to indicate
+ the authenticated user.
+ description: Retrieves attachments from a sent Gmail message.
+ - name: gmail-get-mail
+ arguments:
+ - name: user-id
+ required: true
+ default: true
+ description: The user's email address. The special value me can be used to indicate
+ the authenticated user.
+ - name: message-id
+ required: true
+ description: The ID of the message to retrieve.
+ - name: format
+ auto: PREDEFINED
+ predefined:
+ - full
+ - metadata
+ - minimal
+ - raw
+ description: 'The format in which to return the message. Can be: "full":
+ Returns the full email message data with body content parsed in the payload
+ field; the raw field is not used. (default) / "metadata": Returns only the email
+ message ID, labels, and email headers / "minimal": Returns only the email message
+ ID and labels; does not return the email headers, body, or payload / "raw":
+ Returns the full email message data with body content in the raw field as
+ a base64url encoded string; the payload field is not used.'
+ defaultValue: full
+ outputs:
+ - contextPath: Gmail.ID
+ description: Inner ID of the Gmail message.
+ type: String
+ - contextPath: Gmail.ThreadId
+ description: The thread ID.
+ type: string
+ - contextPath: Gmail.Format
+ description: MIME type of email.
+ type: string
+ - contextPath: Gmail.Labels
+ description: Labels of the specific email.
+ type: string
+ - contextPath: Gmail.To
+ description: Email Address of the receiver.
+ type: String
+ - contextPath: Gmail.From
+ description: Email Address of the sender.
+ type: String
+ - contextPath: Gmail.Cc
+ description: Additional recipient email address (CC).
+ type: string
+ - contextPath: Gmail.Bcc
+ description: Additional recipient email address (BCC).
+ type: string
+ - contextPath: Gmail.Subject
+ description: Subject of the email.
+ type: string
+ - contextPath: Gmail.Body
+ description: The content of the email.
+ type: string
+ - contextPath: Gmail.Attachments
+ description: The attachments of the email. Attachment IDs are separated by ','.
+ type: unknown
+ - contextPath: Gmail.Headers
+ description: All headers of the specific email (list).
+ type: unknown
+ - contextPath: Gmail.Mailbox
+ description: The email mailbox.
+ type: string
+ - contextPath: Email.To
+ description: The recipient of the email.
+ type: String
+ - contextPath: Email.From
+ description: The sender of the email.
+ type: String
+ - contextPath: Email.CC
+ description: Additional recipient email address (CC).
+ type: String
+ - contextPath: Email.BCC
+ description: Additional recipient email address (BCC).
+ type: String
+ - contextPath: Email.Format
+ description: The format of the email.
+ type: String
+ - contextPath: Email.Body/HTML
+ description: The HTML version of the email.
+ type: String
+ - contextPath: Email.Body/Text
+ description: The plain-text version of the email.
+ type: String
+ - contextPath: Email.Subject
+ description: The subject of the email.
+ type: String
+ - contextPath: Email.Headers
+ description: The headers of the email.
+ type: String
+ - contextPath: Email.Attachments.entryID
+ description: Attachment IDs, separated by ','.
+ type: Unknown
+ - contextPath: Email.Date
+ description: The date the email was received.
+ type: String
+ description: Retrieves the Gmail message sent to a specified user.
+ - name: gmail-search
+ arguments:
+ - name: user-id
+ required: true
+ default: true
+ description: The user's email address. The "me" special value can be used to indicate
+ the authenticated user.
+ - name: query
+ description: 'Returns messages matching the specified query. Supports the
+ same query format as the Gmail search box. For example, "from:someuser@example.com
+ rfc822msgid: is:unread". For more syntax information see "https://support.google.com/mail/answer/7190?hl=en"'
+ - name: max-results
+ description: Maximum number of results to return. Default is 100. Maximum is
+ 500. Can be 1 to 500, inclusive.
+ defaultValue: "100"
+ - name: fields
+ description: Enables partial responses to be retrieved, separated by commas. For more information, see https://developers.google.com/gdata/docs/2.0/basics#PartialResponse.
+ - name: labels-ids
+ description: Only returns messages with labels that match all of the specified
+ label IDs in a comma separated list.
+ - name: page-token
+ description: Page token to retrieve a specific page of results in the list.
+ - name: include-spam-trash
+ auto: PREDEFINED
+ predefined:
+ - "False"
+ - "True"
+ description: 'Include messages from SPAM and TRASH in the results. (Default:
+ false)'
+ defaultValue: "False"
+ - name: from
+ description: 'Specify the sender. For example, "john"'
+ - name: to
+ description: 'Specify the receiver. For example, "john"'
+ - name: subject
+ description: 'Words in the subject line. For example, "alert"'
+ - name: filename
+ description: 'Attachments with a certain name or file type. For example, "pdf"
+ or "report.pdf"'
+ - name: in
+ description: 'Messages in any folder, including Spam and Trash. For example:
+ shopping'
+ - name: after
+ description: 'Search for messages sent after a certain time period. For example:
+ 2018/05/06'
+ - name: before
+ description: 'Search for messages sent before a certain time period. for example:
+ 2018/05/09'
+ - name: has-attachments
+ auto: PREDEFINED
+ predefined:
+ - "True"
+ - "False"
+ description: Whether to search for messages sent with attachments (boolean value).
+ outputs:
+ - contextPath: Gmail.ID
+ description: Inner ID of the Gmail message.
+ type: string
+ - contextPath: Gmail.ThreadId
+ description: The thread ID.
+ type: string
+ - contextPath: Gmail.Format
+ description: MIME type of email.
+ type: string
+ - contextPath: Gmail.Labels
+ description: Labels of the specific email.
+ type: string
+ - contextPath: Gmail.To
+ description: Email Address of the receiver.
+ type: string
+ - contextPath: Gmail.From
+ description: Email Address of the sender.
+ type: string
+ - contextPath: Gmail.Cc
+ description: Additional recipient email address (CC).
+ type: string
+ - contextPath: Gmail.Bcc
+ description: Additional recipient email address (BCC).
+ type: string
+ - contextPath: Gmail.Subject
+ description: Subject of the specific email.
+ type: string
+ - contextPath: Gmail.Body
+ description: The content of the email.
+ type: string
+ - contextPath: Gmail.Attachments
+ description: Attachment details. Attachment IDs are separated by ','.
+ type: unknown
+ - contextPath: Gmail.Headers
+ description: All headers of a specific email (list).
+ type: unknown
+ - contextPath: Gmail.Mailbox
+ description: The email mailbox.
+ type: string
+ - contextPath: Email.To
+ description: The recipient of the email.
+ type: String
+ - contextPath: Email.From
+ description: The sender of the email.
+ type: String
+ - contextPath: Email.CC
+ description: Additional recipient email address (CC).
+ type: String
+ - contextPath: Email.BCC
+ description: Additional recipient email address (BCC).
+ type: String
+ - contextPath: Email.Format
+ description: The format of the email.
+ type: String
+ - contextPath: Email.Body/HTML
+ description: The HTML version of the email.
+ type: String
+ - contextPath: Email.Body/Text
+ description: The plain-text version of the email.
+ type: String
+ - contextPath: Email.Subject
+ description: The subject of the email.
+ type: String
+ - contextPath: Email.Headers
+ description: The headers of the email.
+ type: String
+ - contextPath: Email.Attachments.entryID
+ description: Email Attachment IDs. Separated by ','
+ type: Unknown
+ - contextPath: Email.Date
+ description: The date the email was received.
+ type: String
+ description: Searches for Gmail records of a specified Google user.
+ - name: gmail-search-all-mailboxes
+ arguments:
+ - name: query
+ description: 'Returns messages matching the specified query. Supports the
+ same query format as the Gmail search box. For example, "from:someuser@example.com
+ rfc822msgid: is:unread". For more syntax information,see "https://support.google.com/mail/answer/7190?hl=en"'
+ - name: max-results
+ description: Maximum number of results to return. Default is 100. Maximum is
+ 500. Acceptable values are 1 to 500, inclusive.
+ defaultValue: "100"
+ - name: fields
+ description: Enables partial responses to be retrieved in a comma separated list. For more information, see https://developers.google.com/gdata/docs/2.0/basics#PartialResponse.
+ - name: labels-ids
+ description: Only returns messages with labels that match all of the specified
+ label IDs in a comma separated list.
+ - name: page-token
+ description: Page token to retrieve a specific page of results in the list.
+ - name: include-spam-trash
+ auto: PREDEFINED
+ predefined:
+ - "False"
+ - "True"
+ description: 'Includes messages from SPAM and TRASH in the results. (Default:
+ false)'
+ defaultValue: "False"
+ - name: from
+ description: 'Specifies the sender. For example, "john"'
+ - name: to
+ description: 'Specifies the receiver. For example, "john"'
+ - name: subject
+ description: 'Words in the subject line. For example, "alert"'
+ - name: filename
+ description: 'Attachments with a certain name or file type. For example, "pdf"
+ or "report.pdf"'
+ - name: in
+ description: 'Messages in any folder, including Spam and Trash. For example,
+ shopping'
+ - name: after
+ description: 'Search for messages sent after a certain time period. For example,
+ 2018/05/06'
+ - name: before
+ description: 'Search for messages sent before a certain time period. For example,
+ 2018/05/09'
+ - name: has-attachments
+ auto: PREDEFINED
+ predefined:
+ - "False"
+ - "True"
+ description: 'Whether to search for messages sent with attachments.'
+ outputs:
+ - contextPath: Gmail.ID
+ description: Inner ID of the Gmail message.
+ type: string
+ - contextPath: Gmail.ThreadId
+ description: The thread ID.
+ type: string
+ - contextPath: Gmail.Format
+ description: MIME type of the email.
+ type: string
+ - contextPath: Gmail.Labels
+ description: Labels of a specific email.
+ type: string
+ - contextPath: Gmail.To
+ description: Email address of the receiver.
+ type: string
+ - contextPath: Gmail.From
+ description: Email address of the sender.
+ type: string
+ - contextPath: Gmail.Cc
+ description: Additional recipient email address (CC).
+ type: string
+ - contextPath: Gmail.Bcc
+ description: Additional recipient email address (BCC).
+ type: string
+ - contextPath: Gmail.Subject
+ description: Subject of the specific email.
+ type: string
+ - contextPath: Gmail.Body
+ description: The content of the email.
+ type: string
+ - contextPath: Gmail.Attachments
+ description: The attachments of the email. IDs are separated by ','.
+ type: unknown
+ - contextPath: Gmail.Headers
+ description: All headers of a specific email (list).
+ type: unknown
+ - contextPath: Gmail.Mailbox
+ description: The Gmail Mailbox.
+ type: string
+ - contextPath: Email.To
+ description: The recipient of the email.
+ type: String
+ - contextPath: Email.From
+ description: The sender of the email.
+ type: String
+ - contextPath: Email.CC
+ description: Additional recipient email address (CC).
+ type: String
+ - contextPath: Email.BCC
+ description: Additional recipient email address (BCC).
+ type: String
+ - contextPath: Email.Format
+ description: The format of the email.
+ type: String
+ - contextPath: Email.Body/HTML
+ description: The HTML version of the email.
+ type: String
+ - contextPath: Email.Body/Text
+ description: The plain-text version of the email.
+ type: String
+ - contextPath: Email.Subject
+ description: The subject of the email.
+ type: String
+ - contextPath: Email.Headers
+ description: The headers of the email.
+ type: String
+ - contextPath: Email.Attachments.entryID
+ description: Email Attachments. IDs are separated by ','.
+ type: Unknown
+ - contextPath: Email.Date
+ description: The date the email was received.
+ type: String
+ description: Searches the Gmail records for all Google users.
+ - name: gmail-list-users
+ arguments:
+ - name: projection
+ auto: PREDEFINED
+ predefined:
+ - basic
+ - custom
+ - full
+ description: 'The subset of fields to fetch for the user. Can be "basic" (default):
+ do not include any custom fields for the user, "custom": include custom fields
+ from schemas requested in customFieldMask, or "full": include all fields associated
+ with the user.'
+ - name: domain
+ default: true
+ description: The domain name. Use this field to get fields from only one domain.
+ To return all domains for a customer account, use the customer query parameter.
+ - name: customer
+ description: The unique ID for the customer's Google account. Default is the value
+ specified in the integration configuration. For a multi-domain account,
+ to fetch all groups for a customer, use this field instead of domain.
+ - name: event
+ auto: PREDEFINED
+ predefined:
+ - add
+ - delete
+ - makeAdmin
+ - undelete
+ - update
+ description: 'The event on which the subscription is intended (if subscribing). Can be "add", "delete", "makeAdmin", "undelete", or "update".'
+ - name: max-results
+ description: Maximum number of results to return. Default is 100. Maximum is
+ 500. Can be 1 to 500, inclusive.
+ - name: custom-field-mask
+ description: A comma-separated list of schema names. All fields from these schemas
+ are fetched. Must be set when projection=custom.
+ - name: query
+ description: Query string for searching user fields. Complete documentation
+ is at https://developers.google.com/admin-sdk/directory/v1/guides/search-users
+ - name: show-deleted
+ auto: PREDEFINED
+ predefined:
+ - "False"
+ - "True"
+ description: If true, retrieves the list of deleted users. Default is
+ false.
+ - name: sort-order
+ auto: PREDEFINED
+ predefined:
+ - ASCENDING
+ - DESCENDING
+ description: 'Whether to return results in ascending or descending order. Can be "ASCENDING" or "DESCENDING".'
+ - name: token
+ description: Token to authorize and authenticate the action.
+ - name: view-type-public-domain
+ auto: PREDEFINED
+ predefined:
+ - admin_view
+ - domain_public
+ description: 'Whether to fetch the administrator or public view of the user.
+ Can be "admin_view" (default), which includes both administrator and domain-public
+ fields, or "domain_public", which includes only the fields that are publicly
+ visible to other users in the domain.'
+ defaultValue: admin_view
+ outputs:
+ - contextPath: Account.Type
+ description: 'The account type. For example, "AD", "LocalOS", "Google", "AppleID", and so on.'
+ type: String
+ - contextPath: Account.ID
+ description: The unique ID for the account (integration specific). For AD accounts
+ this is the Distinguished Name (DN).
+ type: String
+ - contextPath: Account.DisplayName
+ description: The display name.
+ type: String
+ - contextPath: Account.Gmail.Address
+ description: The email address assigned to the current account.
+ type: string
+ - contextPath: Account.Email.Address
+ description: The email address of the account.
+ type: String
+ - contextPath: Account.Groups
+ description: Groups to which the account belongs (integration specific). For
+ example, for AD these are the groups in which the account is a member.
+ type: String
+ - contextPath: Account.Domain
+ description: The domain of the account.
+ type: String
+ - contextPath: Account.Username
+ description: The username of the account.
+ type: String
+ - contextPath: Account.OrganizationUnit
+ description: The Organization Unit (OU) of the account.
+ type: String
+ description: Lists all Google users in a domain.
+ - name: gmail-revoke-user-role
+ arguments:
+ - name: user-id
+ description: The user's email address. The special value me can be used to indicate
+ the authenticated user.
+ - name: role-assignment-id
+ required: true
+ description: The immutable ID of the role assignment.
+ description: Revokes a role for a specified Google user.
+ - name: gmail-create-user
+ arguments:
+ - name: email
+ required: true
+ default: true
+ description: The user's primary email address. The primary email address must be unique
+ and cannot be an alias of another user.
+ - name: first-name
+ required: true
+ description: The user's first name.
+ - name: family-name
+ required: true
+ description: The user's last name.
+ - name: password
+ required: true
+ description: Stores the password for the user account. A password can contain
+ any combination of ASCII characters. A minimum of 8 characters is required.
+ The maximum length is 100 characters.
+ outputs:
+ - contextPath: Account.Type
+ description: 'The account type. For example, "AD", "LocalOS", "Google", "AppleID", and so on.'
+ type: String
+ - contextPath: Account.ID
+ description: The unique ID for the account (integration specific). For AD accounts
+ this is the Distinguished Name (DN).
+ type: String
+ - contextPath: Account.DisplayName
+ description: The display name.
+ type: string
+ - contextPath: Account.Gmail.Address
+ description: The email address assigned to the current account.
+ type: string
+ - contextPath: Account.Email.Address
+ description: The email address of the account.
+ type: String
+ - contextPath: Account.Username
+ description: The username of the account.
+ type: String
+ - contextPath: Account.Groups
+ description: Groups to which the account belongs (integration specific). For
+ example, for AD these are groups in which the account is a member.
+ type: String
+ - contextPath: Account.Domain
+ description: The domain of the account.
+ type: String
+ - contextPath: Account.OrganizationUnit
+ description: The Organization Unit (OU) of the account.
+ type: String
+ description: Creates a new Gmail user.
+ - name: gmail-delete-mail
+ arguments:
+ - name: user-id
+ required: true
+ default: true
+ description: The user's email address. The special value me can be used to indicate
+ the authenticated user.
+ - name: message-id
+ required: true
+ description: The ID of the message to delete.
+ - name: permanent
+ auto: PREDEFINED
+ predefined:
+ - "False"
+ - "True"
+ description: Whether to delete the email permanently or move it to trash (default).
+ description: Deletes an email in the user's mailbox.
+ - name: gmail-get-thread
+ arguments:
+ - name: user-id
+ required: true
+ default: true
+ description: The user's email address. The special value me can be used to indicate
+ the authenticated user.
+ - name: thread-id
+ required: true
+ description: The ID of the thread to retrieve.
+ - name: format
+ auto: PREDEFINED
+ predefined:
+ - full
+ - metadata
+ - minimal
+ - raw
+ description: 'The format in which to return the message. Can be: "full":
+ Returns the full email message data with body content parsed in the payload
+ field; the raw field is not used. (default) / "metadata": Returns only email
+ message ID, labels, and email headers / "minimal": Returns only email message
+ ID and labels; does not return the email headers, body, or payload / "raw":
+ Returns the full email message data with body content in the raw field as
+ a base64url encoded string; the payload field is not used'
+ defaultValue: full
+ outputs:
+ - contextPath: Gmail.ID
+ description: Inner ID of the Gmail message.
+ type: string
+ - contextPath: Gmail.ThreadId
+ description: The thread ID.
+ type: string
+ - contextPath: Gmail.Format
+ description: MIME type of the email.
+ type: string
+ - contextPath: Gmail.Labels
+ description: Labels of the specific email.
+ type: string
+ - contextPath: Gmail.To
+ description: Email address of the receiver.
+ type: string
+ - contextPath: Gmail.From
+ description: Email address of the sender.
+ type: string
+ - contextPath: Gmail.Cc
+ description: Additional recipient email address (CC).
+ type: string
+ - contextPath: Gmail.Bcc
+ description: Additional recipient email address (BCC).
+ type: string
+ - contextPath: Gmail.Subject
+ description: Subject of a specific email.
+ type: string
+ - contextPath: Gmail.Body
+ description: The content of the email.
+ type: string
+ - contextPath: Gmail.Attachments
+ description: The attachments of the email. IDs are separated by ','.
+ type: unknown
+ - contextPath: Gmail.Headers
+ description: All headers of the specific email (list).
+ type: unknown
+ - contextPath: Gmail.Mailbox
+ description: The Gmail Mailbox.
+ type: string
+ - contextPath: Email.To
+ description: The recipient of the email.
+ type: String
+ - contextPath: Email.From
+ description: The sender of the email.
+ type: String
+ - contextPath: Email.CC
+ description: Additional recipient email address (CC).
+ type: String
+ - contextPath: Email.BCC
+ description: Additional recipient email address (BCC).
+ type: String
+ - contextPath: Email.Format
+ description: The format of the email.
+ type: String
+ - contextPath: Email.Body/HTML
+ description: The HTML version of the email.
+ type: String
+ - contextPath: Email.Body/Text
+ description: The plain-text version of the email.
+ type: String
+ - contextPath: Email.Subject
+ description: The subject of the email.
+ type: String
+ - contextPath: Email.Headers
+ description: The headers of the email.
+ type: String
+ - contextPath: Email.Attachments.entryID
+ description: Email Attachments. IDs are separated by ','.
+ type: Unknown
+ - contextPath: Email.Date
+ description: The date the email was received.
+ type: String
+ description: Returns all messages in a thread.
+ - name: gmail-move-mail
+ arguments:
+ - name: user-id
+ required: true
+ default: true
+ description: The user's email address. The special value me can be used to indicate
+ the authenticated user.
+ - name: message-id
+ required: true
+ description: The ID of the message to retrieve.
+ - name: add-labels
+ description: Comma-separated list of labels to add to the email.
+ isArray: true
+ - name: remove-labels
+ description: Comma-separated list of labels to remove from the email.
+ isArray: true
+ outputs:
+ - contextPath: Gmail.ID
+ description: Inner ID of the Gmail message.
+ type: string
+ - contextPath: Gmail.ThreadId
+ description: The thread ID.
+ type: string
+ - contextPath: Gmail.Format
+ description: MIME type of the email.
+ type: string
+ - contextPath: Gmail.Labels
+ description: Labels of the specific email.
+ type: string
+ - contextPath: Gmail.To
+ description: Gmail address of the receiver.
+ type: string
+ - contextPath: Gmail.From
+ description: Gmail address of the sender.
+ type: string
+ - contextPath: Gmail.Cc
+ description: Additional recipient email address (CC).
+ type: string
+ - contextPath: Gmail.Bcc
+ description: Additional recipient email address (BCC).
+ type: string
+ - contextPath: Gmail.Subject
+ description: Subject of the specific email.
+ type: string
+ - contextPath: Gmail.Body
+ description: The content of the email.
+ type: string
+ - contextPath: Gmail.Attachments
+ description: The attachments of the email. IDs are separated by ','.
+ type: unknown
+ - contextPath: Gmail.Headers
+ description: All headers of the specific email (list).
+ type: unknown
+ - contextPath: Gmail.Mailbox
+ description: The Gmail mailbox.
+ type: string
+ - contextPath: Email.To
+ description: The recipient of the email.
+ type: String
+ - contextPath: Email.From
+ description: The sender of the email.
+ type: String
+ - contextPath: Email.CC
+ description: Additional recipient email address (CC).
+ type: String
+ - contextPath: Email.BCC
+ description: Additional recipient email address (BCC).
+ type: String
+ - contextPath: Email.Format
+ description: The format of the email.
+ type: String
+ - contextPath: Email.Body/HTML
+ description: The HTML version of the email.
+ type: String
+ - contextPath: Email.Body/Text
+ description: The plain-text version of the email.
+ type: String
+ - contextPath: Email.Subject
+ description: The subject of the email.
+ type: String
+ - contextPath: Email.Headers
+ description: The headers of the email.
+ type: String
+ - contextPath: Email.Attachments.entryID
+ description: Email attachments. IDs are separated by ','.
+ type: Unknown
+ - contextPath: Email.Date
+ description: The date the email was received.
+ type: String
+ description: Moves an email to a different folder.
+ - name: gmail-move-mail-to-mailbox
+ arguments:
+ - name: src-user-id
+ required: true
+ default: true
+ description: The source user's email address. The special value me can be used
+ to indicate the authenticated user.
+ - name: message-id
+ required: true
+ description: The ID of the message to retrieve.
+ - name: dst-user-id
+ required: true
+ description: The destination user's email address. The special value "me" can
+ be used to indicate the authenticated user.
+ outputs:
+ - contextPath: Gmail.ID
+ description: Inner ID of the Gmail message.
+ type: string
+ - contextPath: Gmail.ThreadId
+ description: The thread ID.
+ type: string
+ - contextPath: Gmail.Format
+ description: MIME type of the email.
+ type: string
+ - contextPath: Gmail.Labels
+ description: Labels of the specific email.
+ type: string
+ - contextPath: Gmail.To
+ description: Gmail address of the receiver.
+ type: string
+ - contextPath: Gmail.From
+ description: Gmail address of the sender.
+ type: string
+ - contextPath: Gmail.Cc
+ description: Additional recipient email address (CC).
+ type: string
+ - contextPath: Gmail.Bcc
+ description: Additional recipient email address (BCC).
+ type: string
+ - contextPath: Gmail.Subject
+ description: Subject of the specific email.
+ type: string
+ - contextPath: Gmail.Body
+ description: The content of the email.
+ type: string
+ - contextPath: Gmail.Attachments
+ description: The attachments of the email. IDs are separated by ','.
+ type: unknown
+ - contextPath: Gmail.Headers
+ description: All headers of the specific email (list).
+ type: unknown
+ - contextPath: Gmail.Mailbox
+ description: The Gmail mailbox.
+ type: string
+ - contextPath: Email.To
+ description: The recipient of the email.
+ type: String
+ - contextPath: Email.From
+ description: The sender of the email.
+ type: String
+ - contextPath: Email.CC
+ description: Additional recipient email address (CC).
+ type: String
+ - contextPath: Email.BCC
+ description: Additional recipient email address (BCC).
+ type: String
+ - contextPath: Email.Format
+ description: The format of the email.
+ type: String
+ - contextPath: Email.Body/HTML
+ description: The HTML version of the email.
+ type: String
+ - contextPath: Email.Body/Text
+ description: The plain-text version of the email.
+ type: String
+ - contextPath: Email.Subject
+ description: The subject of the email.
+ type: String
+ - contextPath: Email.Headers
+ description: The headers of the email.
+ type: String
+ - contextPath: Email.Attachments.entryID
+ description: Email attachments. IDs are separated by ','.
+ type: Unknown
+ - contextPath: Email.Date
+ description: The date the email was received.
+ type: String
+ description: Moves an email to a different mailbox.
+ - name: gmail-add-delete-filter
+ arguments:
+ - name: user-id
+ required: true
+ default: true
+ description: The user's email address. The special value "me" can be used to indicate
+ the authenticated user.
+ - name: email-address
+ required: true
+ description: The email address from which to block incoming messages.
+ description: Adds a rule for email deletion by address.
+ - name: gmail-add-filter
+ arguments:
+ - name: user-id
+ required: true
+ default: true
+ description: The user's email address. The special value "me" can be used to indicate
+ the authenticated user.
+ - name: from
+ description: The sender's display name or email address.
+ - name: to
+ description: The recipient's display name or email address. Includes recipients
+ in the "to", "cc", and "bcc" header fields. You can use the local part
+ of the email address. For example, "example" and "example@" both match "example@gmail.com".
+ This field is case-insensitive.
+ - name: subject
+ description: The email subject.
+ - name: query
+ description: Returns messages matching the specified query. Supports the
+ same query format as the Gmail search box. For example, "from:someuser@example.com
+ is:unread".
+ - name: has-attachments
+ description: Whether the message has any attachments.
+ - name: size
+ description: The size of the entire RFC822 message in bytes, including all headers
+ and attachments.
+ - name: add-labels
+ description: Comma-separated list of labels to add to the message.
+ - name: remove-labels
+ description: Comma-separated list of labels to remove from the message.
+ - name: forward
+ description: The email address to which the message should be forwarded. The address
+ must first be configured as a forwarding address, see https://support.google.com/mail/answer/10957?hl=en#null.
+ - name: size-comparison
+ auto: PREDEFINED
+ predefined:
+ - larger
+ - smaller
+ description: Whether the message is larger or smaller than the size specified in the size argument.
+ outputs:
+ - contextPath: GmailFilter.ID
+ description: Filter ID.
+ type: string
+ - contextPath: GmailFilter.Mailbox
+ description: Mailbox containing the filter.
+ type: string
+ - contextPath: GmailFilter.Criteria
+ description: Filter Criteria.
+ type: Unknown
+ - contextPath: GmailFilter.Action
+ description: Filter Action.
+ type: Unknown
+ description: Adds a new filter.
+ - name: gmail-list-filters
+ arguments:
+ - name: user-id
+ required: true
+ description: User's email address. The "me" special value can be used to indicate
+ the authenticated user.
+ - name: limit
+ description: Limit of the results list. Default is 100.
+ - name: address
+ description: List filters associated with the email address.
+ outputs:
+ - contextPath: GmailFilter.ID
+ description: Filter ID.
+ type: string
+ - contextPath: GmailFilter.Mailbox
+ description: Mailbox containing the filter.
+ type: string
+ - contextPath: GmailFilter.Criteria
+ description: Filter Criteria.
+ type: Unknown
+ - contextPath: GmailFilter.Action
+ description: Filter Action.
+ type: Unknown
+ description: Lists all filters in a user's mailbox.
+ - name: gmail-remove-filter
+ arguments:
+ - name: user-id
+ required: true
+ description: User's email address. The "me" special value can be used to indicate
+ the authenticated user.
+ - name: filter_ids
+ required: true
+ description: Comma-separated list of filter IDs (can be retrieved using the
+ `gmail-list-filters` command).
+ isArray: true
+ description: Removes a filter.
+ - name: gmail-hide-user-in-directory
+ arguments:
+ - name: user-id
+ required: true
+ description: The user's email address. The "me" special value can be used to indicate the authenticated user.
+ - name: visible-globally
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Whether the user is visible in the Global Directory. Set to false to hide the user, or true (default) to show the user in the directory.
+ defaultValue: "true"
+ outputs:
+ - contextPath: Account.Type
+ description: 'The account type. For example, "AD", "LocalOS", "Google", "AppleID", and so on.'
+ type: String
+ - contextPath: Account.ID
+ description: The unique ID for the account (integration specific). For AD accounts
+ this is the Distinguished Name (DN).
+ type: String
+ - contextPath: Account.DisplayName
+ description: The display name.
+ type: String
+ - contextPath: Account.Email.Address
+ description: The email address of the account.
+ type: String
+ - contextPath: Account.Gmail.Address
+ description: The email address assigned to the current account.
+ type: Unknown
+ - contextPath: Account.Domain
+ description: The domain of the account.
+ type: String
+ - contextPath: Account.Username
+ description: The username of the account.
+ type: String
+ - contextPath: Account.OrganizationUnit
+ description: The Organization Unit (OU) of the account.
+ type: String
+ - contextPath: Account.VisibleInDirectory
+ description: Whether the account is visible in the Global Directory.
+ type: Boolean
+ - contextPath: Account.Groups
+ description: Groups to which the account belongs (integration specific). For
+ example, for AD these are the groups in which the account is a member.
+ type: String
+ description: Hides a user's contact information, such as email address and profile information, in the Global Directory.
+ - name: gmail-set-password
+ arguments:
+ - name: user-id
+ required: true
+ description: The user's email address. The special value me can be used to indicate
+ the authenticated user.
+ - name: password
+ required: true
+ description: A string-formatted password for the user. Must comply with the
+ organization's password policy.
+ description: Sets the password for the user.
+ - name: gmail-get-autoreply
+ arguments:
+ - name: user-id
+ required: true
+ description: The user's email address. The special value me can be used to indicate
+ the authenticated user.
+ outputs:
+ - contextPath: Account.Gmail.AutoReply.EnableAutoReply
+ description: Flag that controls whether Gmail automatically replies to messages.
+ type: Boolean
+ - contextPath: Account.Gmail.AutoReply.ResponseBody
+ description: Response body in plain text format.
+ type: String
+ - contextPath: Account.Gmail.AutoReply.ResponseSubject
+ description: Optional text to add to the subject line in vacation responses.
+ To enable auto-replies, either the response subject or the response
+ body must not be empty.
+ type: String
+ - contextPath: Account.Gmail.AutoReply.RestrictToContact
+ description: Flag that determines whether responses are sent to recipients who
+ are not in the user's list of contacts.
+ type: String
+ - contextPath: Account.Gmail.AutoReply.RestrictToDomain
+ description: Flag that determines whether responses are sent to recipients who
+ are outside of the user's domain. This feature is only available for G Suite
+ users.
+ type: String
+ - contextPath: Account.Gmail.Address
+ description: The email address assigned to the current account.
+ type: String
+ description: Returns the auto-reply message set for the user account.
+ - name: gmail-set-autoreply
+ arguments:
+ - name: user-id
+ required: true
+ description: The user's email address. The "me" special value me can be used to indicate
+ the authenticated user.
+ - name: enable-autoReply
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Whether Gmail automatically replies to messages. Set to true to automatically reply (default).
+ defaultValue: "true"
+ - name: response-subject
+ description: Optional text to add to the subject line in vacation responses.
+ To enable auto-replies, either the response subject or the response
+ body must not be empty.
+ - name: response-body
+ description: Response body in plain text format.
+ outputs:
+ - contextPath: Account.Gmail.AutoReply.EnableAutoReply
+ description: Flag that controls whether Gmail automatically replies to messages.
+ type: Boolean
+ - contextPath: Account.Gmail.AutoReply.ResponseBody
+ description: Response body in plain text format.
+ type: String
+ - contextPath: Account.Gmail.AutoReply.ResponseSubject
+ description: Optional text to add to the subject line in vacation responses.
+ To enable auto-replies, either the response subject or the response
+ body must not be empty.
+ type: String
+ - contextPath: Account.Gmail.AutoReply.RestrictToContact
+ description: Determines whether responses are sent to recipients who
+ are not in the user's list of contacts.
+ type: String
+ - contextPath: Account.Gmail.AutoReply.RestrictToDomain
+ description: Determines whether responses are sent to recipients who
+ are outside of the user's domain. This feature is only available for G Suite
+ users.
+ type: String
+ - contextPath: Account.Gmail.Address
+ description: The email address assigned to the current account.
+ type: String
+ description: 'Sets the auto-reply for the user. Note: if the body is not set, the current
+ body will be deleted.'
+ - name: gmail-delegate-user-mailbox
+ arguments:
+ - name: user-id
+ required: true
+ description: The user's email address. The "me" special value can be used to indicate
+ the authenticated user.
+ - name: delegate-email
+ required: true
+ description: The email address of the delegate.
+ description: Adds a delegate to the mailbox, without sending any verification
+ email. The delegate user must be a member of the same G Suite organization as
+ the delegator user and must be added using their primary email address, and not an email alias.
+ - name: send-mail
+ arguments:
+ - name: to
+ required: true
+ description: Email addresses of the recipients.
+ isArray: true
+ - name: from
+ description: Email address of the sender.
+ - name: body
+ description: The contents (body) of the email to be sent in plain text.
+ - name: subject
+ required: true
+ description: Subject for the email to be sent.
+ - name: attachIDs
+ description: A comma-separated list of IDs of War Room entries that contain
+ the files that need to be attached to the email.
+ isArray: true
+ - name: cc
+ description: Additional recipient email address (CC).
+ isArray: true
+ - name: bcc
+ description: Additional recipient email address (BCC).
+ isArray: true
+ - name: htmlBody
+ description: The contents (body) of the email to be sent in HTML format.
+ - name: replyTo
+ description: The address that should be used to reply to the message.
+ - name: attachNames
+ description: |-
+ A comma-separated list of new names for the attachments, corresponding to the order in which they were attached to the email.
+ For example, to rename the first and third files: attachNames=new_fileName1,,new_fileName3
+ To rename the second and fifth files: attachNames=,new_fileName2,,,new_fileName5
+ isArray: true
+ - name: attachCIDs
+ description: |-
+ A comma-separated list of CID images to embed attachments inside the email.
+ isArray: true
+ - name: transientFile
+ description: |-
+ Textual name for an attached file. Multiple files are supported as a
+ comma-separated list. For example, transientFile="t1.txt,temp.txt,t3.txt" transientFileContent="test
+ 2,temporary file content,third file content" transientFileCID="t1.txt@xxx.yyy,t2.txt@xxx.zzz".
+ isArray: true
+ - name: transientFileContent
+ description: |-
+ Content for the attached file. Multiple files are supported as a comma-separated
+ list. For example, transientFile="t1.txt,temp.txt,t3.txt" transientFileContent="test
+ 2,temporary file content,third file content" transientFileCID="t1.txt@xxx.yyy,t2.txt@xxx.zzz".
+ isArray: true
+ - name: transientFileCID
+ description: |-
+ CID image for an attached file to include within the email body. Multiple files are
+ supported as a comma-separated list. For example, transientFile="t1.txt,temp.txt,t3.txt"
+ transientFileContent="test 2,temporary file content,third file content" transientFileCID="t1.txt@xxx.yyy,t2.txt@xxx.zzz".
+ isArray: true
+ - name: additionalHeader
+ description: 'A CSV list of additional headers in the format: headerName=headerValue.
+ For example: "headerName1=headerValue1,headerName2=headerValue2".'
+ isArray: true
+ - name: templateParams
+ description: |-
+ Replaces {varname} variables with values from this parameter. Expected
+ values are in the form of a JSON document. For example, {"varname": {"value": "some
+ value", "key": "context key"}}. Each var name can either be provided with
+ the value or a context key to retrieve the value.
+ outputs:
+ - contextPath: Gmail.SentMail.ID
+ description: The immutable ID of the message.
+ type: String
+ - contextPath: Gmail.SentMail.Labels
+ description: List of IDs of labels applied to this message.
+ type: String
+ - contextPath: Gmail.SentMail.ThreadId
+ description: The ID of the thread to which the message belongs.
+ type: String
+ - contextPath: Gmail.SentMail.To
+ description: The recipient of the email.
+ type: String
+ - contextPath: Gmail.SentMail.From
+ description: The sender of the email.
+ type: Unknown
+ - contextPath: Gmail.SentMail.Cc
+ description: Additional recipient email address (CC).
+ type: String
+ - contextPath: Gmail.SentMail.Bcc
+ description: Additional recipient email address (BCC).
+ type: String
+ - contextPath: Gmail.SentMail.Subject
+ description: The subject of the email.
+ type: String
+ - contextPath: Gmail.SentMail.Body
+ description: The plain-text version of the email.
+ type: Unknown
+ - contextPath: Gmail.SentMail.MailBox
+ description: The mailbox from which the mail was sent.
+ type: String
+ description: Sends mail using Gmail.
+ - name: gmail-remove-delegated-mailbox
+ arguments:
+ - name: user-id
+ required: true
+ description: The user's email address. The "me" special value can be used to indicate
+ the authenticated user.
+ - name: removed-mail
+ required: true
+ description: The email address to remove from delegation.
+ description: Removes a delegate from the mailbox, without sending any verification
+ email. The delegate user must be a member of the same G Suite organization as
+ the delegator user, and must be specified using their primary email address, not an email alias.
+ dockerimage: demisto/google-api:1.0
+ isfetch: true
+ runonce: false
+tests:
+- Gmail Convert Html Test
+- GmailTest
diff --git a/Integrations/Gmail/Gmail_description.md b/Integrations/Gmail/Gmail_description.md
new file mode 100644
index 000000000000..09955b296906
--- /dev/null
+++ b/Integrations/Gmail/Gmail_description.md
@@ -0,0 +1,56 @@
+This integration uses the Google Admin API. To enable it, you will need to create a service account private key JSON file and copy its contents to the **Password** parameter.
+
+Follow the steps here to create the private key and authorize the API for use: [https://developers.google.com/admin-sdk/directory/v1/guides/delegation](https://developers.google.com/admin-sdk/directory/v1/guides/delegation)
+
+The following APIs must be authorized for that service account:
+[https://www.googleapis.com/auth/admin.directory.user.readonly](https://www.googleapis.com/auth/admin.directory.user.readonly)
+
+In order to fetch user roles, authorize this API: [https://www.googleapis.com/auth/admin.directory.rolemanagement.readonly](https://www.googleapis.com/auth/admin.directory.rolemanagement.readonly)
+
+In order to revoke user roles, authorize this API: [https://www.googleapis.com/auth/admin.directory.rolemanagement](https://www.googleapis.com/auth/admin.directory.rolemanagement)
+
+In order to search user mailboxes, authorize this API: [https://www.googleapis.com/auth/gmail.readonly](https://www.googleapis.com/auth/gmail.readonly)
+
+In order to delete emails from a user's mailbox, authorize these APIs:
+[https://mail.google.com](https://mail.google.com),
+[https://www.googleapis.com/auth/gmail.modify](https://www.googleapis.com/auth/gmail.modify)
+
+In order to fetch user security tokens, authorize this API: [https://www.googleapis.com/auth/admin.directory.user.security](https://www.googleapis.com/auth/admin.directory.user.security)
+
+In order to fetch mobile info, authorize this API: [https://www.googleapis.com/auth/admin.directory.device.mobile.readonly](https://www.googleapis.com/auth/admin.directory.device.mobile.readonly)
+
+In order to perform actions on mobile devices, authorize this API: [https://www.googleapis.com/auth/admin.directory.device.mobile.action](https://www.googleapis.com/auth/admin.directory.device.mobile.action)
+
+In order to perform actions on Chrome devices, authorize this API: [https://www.googleapis.com/auth/admin.directory.device.chromeos](https://www.googleapis.com/auth/admin.directory.device.chromeos)
+
+In order to block email addresses, authorize this API:
+[https://www.googleapis.com/auth/gmail.settings.basic](https://www.googleapis.com/auth/gmail.settings.basic)
+
+In order to get auto-reply messages from a user, authorize these APIs:
+[https://mail.google.com](https://mail.google.com),
+[https://www.googleapis.com/auth/gmail.modify](https://www.googleapis.com/auth/gmail.modify),
+[https://www.googleapis.com/auth/gmail.readonly](https://www.googleapis.com/auth/gmail.readonly)
+and [https://www.googleapis.com/auth/gmail.settings.basic](https://www.googleapis.com/auth/gmail.settings.basic)
+
+In order to set auto-reply messages, authorize this API: [https://www.googleapis.com/auth/gmail.settings.basic](https://www.googleapis.com/auth/gmail.settings.basic)
+
+In order to hide users from the global directory, authorize this API: [https://www.googleapis.com/auth/admin.directory.user](https://www.googleapis.com/auth/admin.directory.user)
+
+In order to add a delegate to a mailbox, or remove a delegated address from a mailbox, also authorize this API: [https://www.googleapis.com/auth/gmail.settings.sharing](https://www.googleapis.com/auth/gmail.settings.sharing)
+
+In order to set a user's password, authorize this API: [https://www.googleapis.com/auth/admin.directory.user](https://www.googleapis.com/auth/admin.directory.user)
+
+In order to send mail, authorize these APIs:
+[https://www.googleapis.com/auth/gmail.compose](https://www.googleapis.com/auth/gmail.compose) and [https://www.googleapis.com/auth/gmail.send](https://www.googleapis.com/auth/gmail.send)
+
+For the email user parameter, choose a user with admin permissions, and make sure you follow the steps to perform Google Apps Domain-Wide Delegation of Authority.
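+
+As a rough illustration, this is how a delegated client for one of the scopes above is typically built with the oauth2client and google-api-python-client packages from this integration's Pipfile (a minimal sketch; the key-file name, admin address, and scope below are placeholder assumptions, not values from this integration):
+
+```python
+import httplib2
+from oauth2client.service_account import ServiceAccountCredentials
+from googleapiclient.discovery import build
+
+# Placeholders - substitute your own service account key file and an admin user.
+SCOPES = ['https://www.googleapis.com/auth/gmail.readonly']
+creds = ServiceAccountCredentials.from_json_keyfile_name('service-account.json', scopes=SCOPES)
+
+# Domain-wide delegation lets the service account impersonate a real user.
+delegated = creds.create_delegated('admin@example.com')
+service = build('gmail', 'v1', http=delegated.authorize(httplib2.Http()))
+
+# List a few message IDs from the impersonated user's mailbox.
+print(service.users().messages().list(userId='me', maxResults=10).execute())
+```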
+
+In order to revoke or fetch a user role, you will need the Immutable Google Apps ID parameter.
+To get an Immutable Google Apps ID (or customerId):
+1. Go to [https://admin.google.com](https://admin.google.com)
+2. Security -> Set up single sign-on (SSO)
+
+There you will see URLs in the format:
+[https://accounts.google.com/o/saml2/idp?idpid=Cxxxxxxxx](https://accounts.google.com/o/saml2/idp?idpid=Cxxxxxxxx)
+Cxxxxxxxx is your Immutable Google Apps ID (customerId).
+
diff --git a/Integrations/Gmail/Gmail_image.png b/Integrations/Gmail/Gmail_image.png
new file mode 100644
index 000000000000..b7a1847495de
Binary files /dev/null and b/Integrations/Gmail/Gmail_image.png differ
diff --git a/Integrations/Gmail/Gmail_test.py b/Integrations/Gmail/Gmail_test.py
new file mode 100644
index 000000000000..0e6279b46f00
--- /dev/null
+++ b/Integrations/Gmail/Gmail_test.py
@@ -0,0 +1,20 @@
+
+def test_timestamp_to_date():
+ from Gmail import create_base_time
+ valid_timestamp = '1566819604000'
+ valid_header_date = "Mon, 26 Aug 2019 14:40:04 +0300"
+ # this does not contain the UTC offset or the day of the week
+ invalid_header_date = "25 Aug 2019 06:25:38"
+ # this contains the UTC offset but not the day of the week
+ semi_valid_header_date = "26 Aug 2019 14:40:04 +0300"
+ assert str(create_base_time(valid_timestamp, valid_header_date)) == "Mon, 26 Aug 2019 14:40:04 +0300"
+ assert str(create_base_time(valid_timestamp, semi_valid_header_date)) == "Mon, 26 Aug 2019 14:40:04 +0300"
+ assert str(create_base_time(valid_timestamp, invalid_header_date)) == "Mon, 26 Aug 2019 11:40:04 -0000"
+
+
+def test_move_to_gmt():
+ from Gmail import move_to_gmt
+ valid_header_date = "Mon, 26 Aug 2019 14:40:04 +0300"
+ no_utc_header_date = "Mon, 26 Aug 2019 14:40:04 -0000"
+ assert str(move_to_gmt(valid_header_date)) == "2019-08-26T11:40:04Z"
+ assert str(move_to_gmt(no_utc_header_date)) == "2019-08-26T14:40:04Z"
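+
+
+# A typical local run of these tests (assuming pytest is installed, as pinned
+# in this integration's Pipfile):
+#   pytest Integrations/Gmail/Gmail_test.py -v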
diff --git a/Integrations/Gmail/Pipfile b/Integrations/Gmail/Pipfile
new file mode 100644
index 000000000000..12758759c35a
--- /dev/null
+++ b/Integrations/Gmail/Pipfile
@@ -0,0 +1,21 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+flake8 = "*"
+
+[packages]
+oauth2client = "*"
+google-api-python-client = "*"
+google-auth-httplib2 = "*"
+google-auth-oauthlib = "*"
+httplib2 = "*"
+
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/Gmail/Pipfile.lock b/Integrations/Gmail/Pipfile.lock
new file mode 100644
index 000000000000..76e3da67da24
--- /dev/null
+++ b/Integrations/Gmail/Pipfile.lock
@@ -0,0 +1,426 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "11476e8a3b68c68081e308554590b12cc69fa7c485514441b1be7bce166b8301"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "cachetools": {
+ "hashes": [
+ "sha256:219b7dc6024195b6f2bc3d3f884d1fef458745cd323b04165378622dcc823852",
+ "sha256:9efcc9fab3b49ab833475702b55edd5ae07af1af7a4c627678980b45e459c460"
+ ],
+ "version": "==3.1.0"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5",
+ "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"
+ ],
+ "version": "==2019.3.9"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "google-api-python-client": {
+ "hashes": [
+ "sha256:06907006ed5ce831018f03af3852d739c0b2489cdacfda6971bcc2075c762858",
+ "sha256:937eabdc3940977f712fa648a096a5142766b6d0a0f58bc603e2ac0687397ef0"
+ ],
+ "index": "pypi",
+ "version": "==1.7.8"
+ },
+ "google-auth": {
+ "hashes": [
+ "sha256:0f7c6a64927d34c1a474da92cfc59e552a5d3b940d3266606c6a28b72888b9e4",
+ "sha256:20705f6803fd2c4d1cc2dcb0df09d4dfcb9a7d51fd59e94a3a28231fd93119ed"
+ ],
+ "version": "==1.6.3"
+ },
+ "google-auth-httplib2": {
+ "hashes": [
+ "sha256:098fade613c25b4527b2c08fa42d11f3c2037dda8995d86de0745228e965d445",
+ "sha256:f1c437842155680cf9918df9bc51c1182fda41feef88c34004bd1978c8157e08"
+ ],
+ "index": "pypi",
+ "version": "==0.0.3"
+ },
+ "google-auth-oauthlib": {
+ "hashes": [
+ "sha256:a0470c19130ddf90c2b07c0c701d72890a7335090903aeb709f003a66416380f",
+ "sha256:c57303d85199fdba00bc7b8fb21ccf6c2b9d3e69d6830fd69ff951c64cf2c1d6"
+ ],
+ "index": "pypi",
+ "version": "==0.3.0"
+ },
+ "httplib2": {
+ "hashes": [
+ "sha256:23914b5487dfe8ef09db6656d6d63afb0cf3054ad9ebc50868ddc8e166b5f8e8",
+ "sha256:a18121c7c72a56689efbf1aef990139ad940fee1e64c6f2458831736cd593600"
+ ],
+ "index": "pypi",
+ "version": "==0.12.3"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "oauth2client": {
+ "hashes": [
+ "sha256:b8a81cc5d60e2d364f0b1b98f958dbd472887acaf1a5b05e21c28c31a2d6d3ac",
+ "sha256:d486741e451287f69568a4d26d70d9acd73a2bbfa275746c535b4209891cccc6"
+ ],
+ "index": "pypi",
+ "version": "==4.1.3"
+ },
+ "oauthlib": {
+ "hashes": [
+ "sha256:0ce32c5d989a1827e3f1148f98b9085ed2370fc939bf524c9c851d8714797298",
+ "sha256:3e1e14f6cde7e5475128d30e97edc3bfb4dc857cb884d8714ec161fdbb3b358e"
+ ],
+ "version": "==3.0.1"
+ },
+ "pyasn1": {
+ "hashes": [
+ "sha256:da2420fe13a9452d8ae97a0e478adde1dee153b11ba832a95b223a2ba01c10f7",
+ "sha256:da6b43a8c9ae93bc80e2739efb38cc776ba74a886e3e9318d65fe81a8b8a2c6e"
+ ],
+ "version": "==0.4.5"
+ },
+ "pyasn1-modules": {
+ "hashes": [
+ "sha256:ef721f68f7951fab9b0404d42590f479e30d9005daccb1699b0a51bb4177db96",
+ "sha256:f309b6c94724aeaf7ca583feb1cc70430e10d7551de5e36edfc1ae6909bcfb3c"
+ ],
+ "version": "==0.2.5"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
+ "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
+ ],
+ "version": "==2.21.0"
+ },
+ "requests-oauthlib": {
+ "hashes": [
+ "sha256:bd6533330e8748e94bf0b214775fed487d309b8b8fe823dc45641ebcd9a32f57",
+ "sha256:d3ed0c8f2e3bbc6b344fa63d6f933745ab394469da38db16bdddb461c7e25140"
+ ],
+ "version": "==1.2.0"
+ },
+ "rsa": {
+ "hashes": [
+ "sha256:14ba45700ff1ec9eeb206a2ce76b32814958a98e372006c8fb76ba820211be66",
+ "sha256:1a836406405730121ae9823e19c6e806c62bbad73f890574fff50efa4122c487"
+ ],
+ "version": "==4.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "uritemplate": {
+ "hashes": [
+ "sha256:01c69f4fe8ed503b2951bef85d996a9d22434d2431584b5b107b2981ff416fbd",
+ "sha256:1b9c467a940ce9fb9f50df819e8ddd14696f89b9a8cc87ac77952ba416e0a8fd",
+ "sha256:c02643cebe23fc8adb5e6becffe201185bf06c40bda5c0b4028a93f1527d011d"
+ ],
+ "version": "==3.0.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0",
+ "sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3"
+ ],
+ "version": "==1.24.2"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==1.5"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==3.7.4"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661",
+ "sha256:a796a115208f5c03b18f332f7c11729812c8c3ded6c46319c59b53efd3819da8"
+ ],
+ "index": "pypi",
+ "version": "==3.7.7"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "functools32": {
+ "hashes": [
+ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0",
+ "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.3.post2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265",
+ "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.0"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:01cb7e1ca5e6c5b3f235f0385057f70558b70d2f00320208825fa62887292f43",
+ "sha256:268067462aed7eb2a1e237fcb287852f22077de3fb07964e87e00f829eea2d1a"
+ ],
+ "version": "==4.3.17"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33",
+ "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39",
+ "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019",
+ "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088",
+ "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b",
+ "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e",
+ "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6",
+ "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b",
+ "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5",
+ "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff",
+ "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd",
+ "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7",
+ "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff",
+ "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d",
+ "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2",
+ "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35",
+ "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4",
+ "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514",
+ "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252",
+ "sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109",
+ "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f",
+ "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c",
+ "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92",
+ "sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577",
+ "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d",
+ "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d",
+ "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f",
+ "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a",
+ "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"
+ ],
+ "version": "==1.3.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1",
+ "sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==2.0.0"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742",
+ "sha256:5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.3"
+ },
+ "pbr": {
+ "hashes": [
+ "sha256:6901995b9b686cb90cceba67a0f6d4d14ae003cd59bc12beb61549bdfbe3bc89",
+ "sha256:d950c64aeea5456bbd147468382a5bb77fe692c13c9f00f0219814ce5b642755"
+ ],
+ "version": "==5.2.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f",
+ "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746"
+ ],
+ "version": "==0.9.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:02c2b6d268695a8b64ad61847f92e611e6afcff33fd26c3a2125370c4662905d",
+ "sha256:ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93"
+ ],
+ "index": "pypi",
+ "version": "==1.9.4"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:3773f4c235918987d51daf1db66d51c99fac654c81d6f2f709a046ab446d5e5d",
+ "sha256:b7802283b70ca24d7119b32915efa7c409982f59913c1a6c0640aacf118b95f5"
+ ],
+ "index": "pypi",
+ "version": "==4.4.1"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typing": {
+ "hashes": [
+ "sha256:4027c5f6127a6267a435201981ba156de91ad0d1d98e9ddc2aa173453453492d",
+ "sha256:57dcf675a99b74d64dacf6fba08fb17cf7e3d5fdff53d4a30ea2a5e7e52543d4",
+ "sha256:a4c8473ce11a65999c8f59cb093e70686b6c84c98df58c1dae9b3b196089858a"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==3.6.6"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/GoogleBigQuery/GoogleBigQuery.py b/Integrations/GoogleBigQuery/GoogleBigQuery.py
new file mode 100644
index 000000000000..67cc7c5f3cdf
--- /dev/null
+++ b/Integrations/GoogleBigQuery/GoogleBigQuery.py
@@ -0,0 +1,205 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+""" IMPORTS """
+
+import os
+import json
+import requests
+from google.cloud import bigquery
+
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+
+''' GLOBALS/PARAMS '''
+
+TEST_QUERY = ('SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` '
+ 'WHERE state = "TX" '
+ 'LIMIT 100')
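+# The test query above runs against a BigQuery public dataset, so the
+# `test-module` check only needs valid credentials and reads no customer data.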
+
+
+''' HELPER FUNCTIONS '''
+
+
+def represents_bool(string_var):
+ return string_var.lower() == 'false' or string_var.lower() == 'true'
+
+
+def str_to_bool(str_representing_bool):
+ return str_representing_bool.lower() == "true"
+
+
+def bool_arg_set_to_true(arg):
+ return arg and str_to_bool(arg)
+
+
+def start_and_return_bigquery_client(google_service_creds_json_string):
+ cur_directory_path = os.getcwd()
+ creds_file_name = '{0}.json'.format(demisto.uniqueFile())
+ path_to_save_creds_file = os.path.join(cur_directory_path, creds_file_name)
+ with open(path_to_save_creds_file, "w") as creds_file:
+ json.dump(json.loads(google_service_creds_json_string), creds_file)
+ os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = path_to_save_creds_file
+ creds_file.close()
+ bigquery_client = bigquery.Client()
+ return bigquery_client
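+
+# A hypothetical usage sketch of the helper above (the JSON string normally
+# comes from the integration's google_service_creds parameter):
+#   client = start_and_return_bigquery_client('{"type": "service_account", ...}')
+#   for row in client.query('SELECT 1').result():
+#       print(row)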
+
+
+def validate_args_for_query_job_config(allow_large_results, priority, use_query_cache, use_legacy_sql, dry_run,
+ destination_table, write_disposition):
+ if allow_large_results and not represents_bool(allow_large_results):
+ return_error("Error: allow_large_results must have a boolean value.")
+ if bool_arg_set_to_true(allow_large_results) and not destination_table:
+ return_error("Error: allow_large_results could only be set to True if a destination table is provided as well.")
+ if bool_arg_set_to_true(allow_large_results) and not bool_arg_set_to_true(use_legacy_sql):
+ return_error("Error: allow_large_results could be set to True only if use_legacy_sql is set to True.")
+ if use_query_cache and not represents_bool(use_query_cache):
+ return_error("Error: use_query_cache must have a boolean value.")
+ if bool_arg_set_to_true(use_query_cache) and destination_table:
+ return_error("Error: use_query_cache cannot be set to True if a destination_table is set")
+ if use_legacy_sql and not represents_bool(use_legacy_sql):
+ return_error("Error: use_legacy_sql must have a boolean value.")
+ if dry_run and not represents_bool(dry_run):
+ return_error("Error: dry_run must have a boolean value.")
+ if priority and not (priority == 'INTERACTIVE' or priority == 'BATCH'):
+ return_error("Error: priority must have a value of INTERACTIVE or BATCH.")
+ if write_disposition and not (write_disposition == 'WRITE_TRUNCATE' or write_disposition == 'WRITE_APPEND'
+ or write_disposition == 'WRITE_EMPTY'):
+ return_error("Error: write_disposition must have a value of WRITE_TRUNCATE, WRITE_APPEND or WRITE_EMPTY.")
+
+
+def build_query_job_config(allow_large_results, default_dataset_string, destination_table, dry_run, priority,
+ use_query_cache, use_legacy_sql, kms_key_name, write_disposition):
+ validate_args_for_query_job_config(allow_large_results, priority, use_query_cache, use_legacy_sql, dry_run,
+ destination_table, write_disposition)
+ query_job_config = bigquery.QueryJobConfig()
+ if allow_large_results:
+ query_job_config.allow_large_results = str_to_bool(allow_large_results)
+ if default_dataset_string:
+ query_job_config.default_dataset = default_dataset_string
+ if destination_table:
+ query_job_config.destination = destination_table
+ if kms_key_name:
+ query_job_config.destination_encryption_configuration = bigquery.table.EncryptionConfiguration(kms_key_name)
+ if dry_run:
+ query_job_config.dry_run = str_to_bool(dry_run)
+ if use_legacy_sql:
+ query_job_config.use_legacy_sql = str_to_bool(use_legacy_sql)
+ if use_query_cache:
+ query_job_config.use_query_cache = str_to_bool(use_query_cache)
+ if priority:
+ query_job_config.priority = priority
+ if write_disposition:
+ query_job_config.write_disposition = write_disposition
+
+ return query_job_config
+
+
+def convert_to_string_if_datetime(object_that_may_be_datetime):
+ if isinstance(object_that_may_be_datetime, datetime):
+ return object_that_may_be_datetime.strftime("%m/%d/%Y %H:%M:%S")
+ else:
+ return object_that_may_be_datetime
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def query(query_string, project_id, location, allow_large_results, default_dataset, destination, kms_key_name, dry_run,
+ priority, use_query_cache, use_legacy_sql,
+ google_service_creds, job_id, write_disposition):
+ bigquery_client = start_and_return_bigquery_client(google_service_creds)
+ job_config = build_query_job_config(allow_large_results, default_dataset, destination, dry_run, priority,
+ use_query_cache, use_legacy_sql, kms_key_name, write_disposition)
+ query_job = bigquery_client.query(query=query_string, job_config=job_config, location=location,
+ job_id=job_id, project=project_id)
+ if not (dry_run and str_to_bool(dry_run)):
+ query_results = query_job.result()
+ return query_results
+ else:
+ # if dry run is activated, the results (number of bytes the query will process) are returned in the job itself
+ return query_job
+
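
To make the dry-run branch concrete: with the google-cloud-bigquery client, a dry-run job fetches no rows, and the byte estimate lives on the job object itself. A minimal sketch (assumes GOOGLE_APPLICATION_CREDENTIALS is already set, for example by start_and_return_bigquery_client above; the public dataset is only an example):

```python
from google.cloud import bigquery

client = bigquery.Client()
job_config = bigquery.QueryJobConfig()
job_config.dry_run = True
job_config.use_query_cache = False  # a cache hit would report 0 bytes processed

query_job = client.query(
    "SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` LIMIT 10",
    job_config=job_config,
)
# No rows are fetched; the estimate is carried on the job.
print("This query will process {} bytes".format(query_job.total_bytes_processed))
```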
+
+def query_command():
+ args = demisto.args()
+ query_to_run = args['query']
+ project_id = args.get('project_id', None)
+ location = args.get('location', None)
+ allow_large_results = args.get('allow_large_results', None)
+ default_dataset = args.get('default_dataset', None)
+ destination_table = args.get('destination_table', None)
+ kms_key_name = args.get('kms_key_name', None)
+ dry_run = args.get('dry_run', None)
+ priority = args.get('priority', None)
+ use_query_cache = args.get('use_query_cache', None)
+ use_legacy_sql = args.get('use_legacy_sql', None)
+ google_service_creds = demisto.params()['google_service_creds']
+ job_id = args.get('job_id', None)
+ write_disposition = args.get('write_disposition', None)
+ query_results = query(query_to_run, project_id, location, allow_large_results, default_dataset,
+ destination_table, kms_key_name, dry_run, priority, use_query_cache, use_legacy_sql,
+ google_service_creds, job_id, write_disposition)
+
+ context = {}
+ rows_contexts = []
+ human_readable = 'No results found.'
+ if dry_run and str_to_bool(dry_run):
+ human_readable = '### Dry run results: \n This query will process {0} ' \
+ 'bytes'.format(query_results.total_bytes_processed)
+
+ else:
+
+ for row in query_results:
+ row_context = {underscoreToCamelCase(k): convert_to_string_if_datetime(v) for k, v in row.items()}
+ rows_contexts.append(row_context)
+
+ if rows_contexts:
+
+ context['BigQuery(val.Query && val.Query == obj.Query)'] = {
+ 'Query': args['query'],
+ 'Row': rows_contexts
+ }
+ title = 'BigQuery Query Results'
+ human_readable = tableToMarkdown(title, rows_contexts, removeNull=True)
+
+ return_outputs(
+ readable_output=human_readable,
+ outputs=context,
+ raw_response=rows_contexts
+ )
+
+
+def test_module():
+    """
+    Performs a test query to verify that the given credentials are valid.
+    """
+    try:
+        bigquery_client = start_and_return_bigquery_client(demisto.params()['google_service_creds'])
+        query_job = bigquery_client.query(TEST_QUERY)
+        query_results = query_job.result()
+        results_rows_iterator = iter(query_results)
+        next(results_rows_iterator)
+        demisto.results("ok")
+    except Exception as ex:
+        return_error("Authentication error: the credentials JSON provided is invalid.\nException received: "
+                     "{}".format(ex))
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('Command being called is %s' % (demisto.command()))
+
+try:
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() == 'bigquery-query':
+ query_command()
+
+
+except Exception as e:
+ LOG(str(e))
+ LOG.print_log()
+ raise
diff --git a/Integrations/GoogleBigQuery/GoogleBigQuery.yml b/Integrations/GoogleBigQuery/GoogleBigQuery.yml
new file mode 100644
index 000000000000..f49ac8bd4387
--- /dev/null
+++ b/Integrations/GoogleBigQuery/GoogleBigQuery.yml
@@ -0,0 +1,94 @@
+commonfields:
+ id: Google BigQuery
+ version: -1
+name: Google BigQuery
+display: Google BigQuery
+category: Database
+description: Integration for Google BigQuery, a data warehouse for querying and analyzing
+ large databases.
+ In all commands, for any argument not specified, the BigQuery default value for that argument will be applied.
+configuration:
+- display: Google service account JSON (a credentials JSON generated from Google API
+ Manager or from GCP console)
+ name: google_service_creds
+ defaultvalue: ""
+ type: 4
+ required: true
+script:
+ script: ''
+ type: python
+ commands:
+ - name: bigquery-query
+ arguments:
+ - name: query
+ required: true
+ description: A query string (in BigQuery query syntax).
+ For more information about the standard syntax, see the BigQuery
+ documentation - https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax.
+ - name: location
+ description: The geographic location where the job should run. Required for locations other than US and EU.
+ - name: allow_large_results
+ auto: PREDEFINED
+ predefined:
+ - "True"
+ - "False"
+      description: Allow query results tables larger than 128 MB compressed (legacy SQL only).
+ - name: default_dataset
+ description: A string of the fully-qualified dataset ID in standard SQL format.
+ The value must include a project ID and dataset ID, separated by periods.
+ - name: destination_table
+ description: The table to which the results are written. Default value is "None".
+ - name: kms_key_name
+ description: Custom encryption configuration for the destination table.
+ - name: dry_run
+ auto: PREDEFINED
+ predefined:
+ - "True"
+ - "False"
+ description: If "true," BigQuery doesn't run the job. Instead, if the query
+ is valid, BigQuery returns statistics about the job, such as how many bytes
+ would be processed. If the query is invalid, an error is returned. The default
+ value is "false".
+ - name: priority
+ auto: PREDEFINED
+ predefined:
+ - BATCH
+ - INTERACTIVE
+      description: Priority of the query ("INTERACTIVE" or "BATCH").
+        A query set as INTERACTIVE will be run on-demand, at the next possible time. A query set as BATCH
+        will start as soon as idle resources are available, and is changed to INTERACTIVE priority if
+        it wasn't started within 24 hours. The default value is "INTERACTIVE".
+ - name: use_query_cache
+ auto: PREDEFINED
+ predefined:
+ - "True"
+ - "False"
+ description: Whether to look for the query results in the cache.
+ - name: use_legacy_sql
+ auto: PREDEFINED
+ predefined:
+ - "True"
+ - "False"
+ description: Whether to use legacy SQL syntax.
+ - name: job_id
+ description: The ID of the job. The ID must contain only letters (a-z, A-Z),
+ numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024
+ characters.
+ - name: write_disposition
+ auto: PREDEFINED
+ predefined:
+ - WRITE_EMPTY
+ - WRITE_TRUNCATE
+ - WRITE_APPEND
+ description: Specifies the action that occurs if the destination table already exists.
+ outputs:
+ - contextPath: BigQuery.Query
+ description: The query performed.
+ type: String
+ - contextPath: BigQuery.Row
+ description: The table rows the given query returned.
+ type: Unknown
+ description: Performs a query on BigQuery.
+ dockerimage: demisto/bigquery:1.0.0.239
+ subtype: python3
+ runonce: false
diff --git a/Integrations/GoogleBigQuery/GoogleBigQuery_description.md b/Integrations/GoogleBigQuery/GoogleBigQuery_description.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Integrations/GoogleBigQuery/GoogleBigQuery_image.png b/Integrations/GoogleBigQuery/GoogleBigQuery_image.png
new file mode 100644
index 000000000000..4c8932657d04
Binary files /dev/null and b/Integrations/GoogleBigQuery/GoogleBigQuery_image.png differ
diff --git a/Integrations/GoogleBigQuery/GoogleBigQuery_test.py b/Integrations/GoogleBigQuery/GoogleBigQuery_test.py
new file mode 100644
index 000000000000..97024f997553
--- /dev/null
+++ b/Integrations/GoogleBigQuery/GoogleBigQuery_test.py
@@ -0,0 +1,11 @@
+import datetime
+from GoogleBigQuery import convert_to_string_if_datetime
+
+
+def test_convert_to_string_if_datetime():
+ test_conversion_for_none = convert_to_string_if_datetime(None)
+ assert test_conversion_for_none is None
+ now = datetime.datetime.now()
+    assert convert_to_string_if_datetime(now) == now.strftime("%m/%d/%Y %H:%M:%S")
+ test_conversion_for_empty_string = convert_to_string_if_datetime("")
+ assert test_conversion_for_empty_string == ""
diff --git a/Integrations/GoogleBigQuery/Pipfile b/Integrations/GoogleBigQuery/Pipfile
new file mode 100644
index 000000000000..41c7519a7a9f
--- /dev/null
+++ b/Integrations/GoogleBigQuery/Pipfile
@@ -0,0 +1,14 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/GoogleBigQuery/Pipfile.lock b/Integrations/GoogleBigQuery/Pipfile.lock
new file mode 100644
index 000000000000..643d94a1edbe
--- /dev/null
+++ b/Integrations/GoogleBigQuery/Pipfile.lock
@@ -0,0 +1,174 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "030517bfcc68d7e2f82fb5831e88abe2f6540ec99eefed71048ae95c58697218"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:18c796c2cd35eb1a1d3f012a214a542790a1aed95e29768bdcb9f2197eccbd0b",
+ "sha256:96151fca2c6e736503981896495d344781b60d18bfda78dc11b290c6125ebdb6"
+ ],
+ "version": "==4.3.15"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33",
+ "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39",
+ "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019",
+ "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088",
+ "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b",
+ "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e",
+ "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6",
+ "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b",
+ "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5",
+ "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff",
+ "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd",
+ "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7",
+ "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff",
+ "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d",
+ "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2",
+ "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35",
+ "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4",
+ "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514",
+ "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252",
+ "sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109",
+ "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f",
+ "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c",
+ "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92",
+ "sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577",
+ "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d",
+ "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d",
+ "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f",
+ "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a",
+ "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"
+ ],
+ "version": "==1.3.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:0125e8f60e9e031347105eb1682cef932f5e97d7b9a1a28d9bf00c22a5daef40",
+ "sha256:590044e3942351a1bdb1de960b739ff4ce277960f2425ad4509446dbace8d9d1"
+ ],
+ "markers": "python_version > '2.7'",
+ "version": "==6.0.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f",
+ "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746"
+ ],
+ "version": "==0.9.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:592eaa2c33fae68c7d75aacf042efc9f77b27c08a6224a4f59beab8d9a420523",
+ "sha256:ad3ad5c450284819ecde191a654c09b0ec72257a2c711b9633d677c71c9850c4"
+ ],
+ "index": "pypi",
+ "version": "==4.3.1"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:4d0d06d173eecf172703219a71dbd4ade0e13904e6bbce1ce660e2e0dc78b5c4",
+ "sha256:bfdf02789e3d197bd682a758cae0a4a18706566395fbe2803badcd1335e0173e"
+ ],
+ "index": "pypi",
+ "version": "==1.10.1"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:035a54ede6ce1380599b2ce57844c6554666522e376bd111eb940fbc7c3dad23",
+ "sha256:037c35f2741ce3a9ac0d55abfcd119133cbd821fffa4461397718287092d9d15",
+ "sha256:049feae7e9f180b64efacbdc36b3af64a00393a47be22fa9cb6794e68d4e73d3",
+ "sha256:19228f7940beafc1ba21a6e8e070e0b0bfd1457902a3a81709762b8b9039b88d",
+ "sha256:2ea681e91e3550a30c2265d2916f40a5f5d89b59469a20f3bad7d07adee0f7a6",
+ "sha256:3a6b0a78af298d82323660df5497bcea0f0a4a25a0b003afd0ce5af049bd1f60",
+ "sha256:5385da8f3b801014504df0852bf83524599df890387a3c2b17b7caa3d78b1773",
+ "sha256:606d8afa07eef77280c2bf84335e24390055b478392e1975f96286d99d0cb424",
+ "sha256:69245b5b23bbf7fb242c9f8f08493e9ecd7711f063259aefffaeb90595d62287",
+ "sha256:6f6d839ab09830d59b7fa8fb6917023d8cb5498ee1f1dbd82d37db78eb76bc99",
+ "sha256:730888475f5ac0e37c1de4bd05eeb799fdb742697867f524dc8a4cd74bcecc23",
+ "sha256:9819b5162ffc121b9e334923c685b0d0826154e41dfe70b2ede2ce29034c71d8",
+ "sha256:9e60ef9426efab601dd9aa120e4ff560f4461cf8442e9c0a2b92548d52800699",
+ "sha256:af5fbdde0690c7da68e841d7fc2632345d570768ea7406a9434446d7b33b0ee1",
+ "sha256:b64efdbdf3bbb1377562c179f167f3bf301251411eb5ac77dec6b7d32bcda463",
+ "sha256:bac5f444c118aeb456fac1b0b5d14c6a71ea2a42069b09c176f75e9bd4c186f6",
+ "sha256:bda9068aafb73859491e13b99b682bd299c1b5fd50644d697533775828a28ee0",
+ "sha256:d659517ca116e6750101a1326107d3479028c5191f0ecee3c7203c50f5b915b0",
+ "sha256:eddd3fb1f3e0f82e5915a899285a39ee34ce18fd25d89582bc89fc9fb16cd2c6"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.3.1"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/GoogleCloudCompute/GoogleCloudCompute.py b/Integrations/GoogleCloudCompute/GoogleCloudCompute.py
new file mode 100644
index 000000000000..b89c721956a9
--- /dev/null
+++ b/Integrations/GoogleCloudCompute/GoogleCloudCompute.py
@@ -0,0 +1,4597 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+""" IMPORTS """
+
+
+from googleapiclient import discovery
+from google.oauth2 import service_account
+import json
+import time
+
+# disable weak-typing warnings by pylint.
+# See: https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/iam/api-client/quickstart.py#L36
+# pylint: disable=no-member
+
+""" GLOBALS/PARAMS """
+
+# Params for assembling object of the Service Account Credentials File Contents
+SERVICE_ACCOUNT_FILE = demisto.params().get('service')
+SERVICE_ACT_PROJECT_ID = None
+
+
+# Params for constructing googleapiclient service object
+API_VERSION = 'v1'
+GSERVICE = 'compute'
+SCOPE = ['https://www.googleapis.com/auth/cloud-platform']
+SERVICE = None  # placeholder; build_and_authenticate() returns the authenticated service object
+
+
+"""
+HELPER FUNCTIONS
+"""
+
+
+def parse_resource_ids(resource_id):
+ """
+ Split the resource ids to a list
+    Split the resource ids into a list
+ Return the resource_ids as a list
+ """
+ id_list = resource_id.replace(" ", "")
+ resource_ids = id_list.split(",")
+ return resource_ids
+
+
+def parse_firewall_rule(rule_str):
+    """
+    Transforms a string of multiple inputs into a list of dictionaries
+    parameter: (string) rules
+        A firewall rule in the specified project
+    Return firewall rules as a list of dictionaries
+    """
+ rules = []
+ regex = re.compile(r'ipprotocol=([\w\d_:.-]+),ports=([ /\w\d@_,.\*-]+)', flags=re.I)
+ for f in rule_str.split(';'):
+ match = regex.match(f)
+ if match is None:
+ raise ValueError('Could not parse field: %s' % (f,))
+
+ rules.append({'IPProtocol': match.group(1), 'ports': match.group(2).split(',')})
+
+ return rules
+
+
+def parse_metadata_items(tags_str):
+ """
+    Transforms a string of multiple inputs into a list of dictionaries
+    parameter: (string) metadata_items
+
+    Return metadata items as a list of dictionaries
+ """
+ tags = []
+ regex = re.compile(r'key=([\w\d_:.-]+),value=([ /\w\d@_,.\*-]+)', flags=re.I)
+ for f in tags_str.split(';'):
+ match = regex.match(f)
+ if match is None:
+ raise ValueError('Could not parse field: %s' % (f,))
+
+ tags.append({'key': match.group(1), 'value': match.group(2)})
+
+ return tags
+
+
+def parse_named_ports(tags_str):
+ """
+    Transforms a string of multiple inputs into a list of dictionaries
+    parameter: (string) namedPorts
+
+    Return named ports as a list of dictionaries
+ """
+ tags = []
+ regex = re.compile(r'name=([\w\d_:.-]+),port=([ /\w\d@_,.\*-]+)', flags=re.I)
+ for f in tags_str.split(';'):
+ match = regex.match(f)
+ if match is None:
+ raise ValueError('Could not parse field: %s' % (f,))
+
+ tags.append({'name': match.group(1).lower(), 'port': match.group(2)})
+
+ return tags
+
+
+def parse_labels(tags_str):
+ """
+    Transforms a string of multiple inputs into a dictionary
+    parameter: (string) labels
+
+    Return labels as a dictionary
+ """
+ tags = {}
+ regex = re.compile(r'key=([\w\d_:.-]+),value=([ /\w\d@_,.\*-]+)', flags=re.I)
+ for f in tags_str.split(';'):
+ match = regex.match(f)
+ if match is None:
+ raise ValueError('Could not parse field: ' + f)
+
+ tags.update({match.group(1).lower(): match.group(2).lower()})
+
+ return tags
+
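
The four parse_* helpers above share one wire format: semicolon-separated entries of comma-separated key=value pairs. A short sketch of the expected inputs and outputs (assumes GoogleCloudCompute.py is importable; all values are illustrative):

```python
from GoogleCloudCompute import parse_firewall_rule, parse_labels

rules = parse_firewall_rule("ipprotocol=tcp,ports=80,443;ipprotocol=udp,ports=53")
# [{'IPProtocol': 'tcp', 'ports': ['80', '443']},
#  {'IPProtocol': 'udp', 'ports': ['53']}]

labels = parse_labels("key=env,value=prod;key=team,value=security")
# {'env': 'prod', 'team': 'security'}
# Note: parse_labels returns a single dict, while the other helpers
# return lists of dicts.
```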
+
+def build_and_authenticate(googleservice):
+    """
+    Return a service object through which we can call the Compute API.
+
+    Uses the service account credentials file generated in the Google Cloud
+    Platform console to build the Google Compute Engine API service object.
+
+    returns: service
+        Google Compute Engine API service object through which commands in the
+        integration make API calls
+    """
+
+ global SERVICE_ACT_PROJECT_ID
+ auth_json_string = str(SERVICE_ACCOUNT_FILE).replace("\'", "\"").replace("\\\\", "\\")
+ service_account_info = json.loads(auth_json_string)
+ SERVICE_ACT_PROJECT_ID = service_account_info.get('project_id')
+ service_credentials = service_account.Credentials.from_service_account_info(
+ service_account_info, scopes=SCOPE
+ )
+ service = discovery.build(GSERVICE, API_VERSION, credentials=service_credentials)
+ return service
+
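
The returned object is a standard googleapiclient resource, so every command below follows the same build-request/execute shape; the module-level `compute` object the commands reference is presumably created from this helper further down the file (outside this hunk). A minimal usage sketch (project and zone are placeholders):

```python
compute = build_and_authenticate(GSERVICE)

# Requests are built lazily and only sent on .execute().
request = compute.instances().list(project='my-project', zone='us-central1-a')
response = request.execute()
for instance in response.get('items', []):
    print(instance['name'])
```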
+
+def wait_for_zone_operation(args):
+ """
+ This command will block until an operation has been marked as complete.
+
+ parameter: (string) zone
+ Name of the zone for this request.
+ parameter: (string) name
+ Name of the operations resource.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ zone = args.get('zone')
+ name = args.get('name')
+ while True:
+ result = (
+ compute.zoneOperations()
+ .get(project=project, zone=zone, operation=name)
+ .execute()
+ )
+ if result.get('status') == 'DONE':
+ if 'error' in result:
+ raise Exception(result['error'])
+ operation = result
+ data_res = {
+ 'status': operation.get('status'),
+ 'kind': operation.get('kind'),
+ 'name': operation.get('name'),
+ 'id': operation.get('id'),
+ 'progress': operation.get('progress'),
+ 'startTime': operation.get('startTime'),
+ 'operationType': operation.get('operationType')
+ }
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': operation}
+ return_outputs(
+ tableToMarkdown(
+ 'Google Cloud Compute Operations', data_res, removeNull=True
+ )
+ if data_res
+ else 'No results were found',
+ ec
+ )
+ break
+
+ time.sleep(2)
+
+
+def wait_for_region_operation(args):
+ """
+ This command will block until an operation has been marked as complete.
+
+ parameter: (string) region
+ Name of the region for this request.
+ parameter: (string) name
+ Name of the operations resource.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ region = args.get('region')
+ name = args.get('name')
+ while True:
+ result = (
+ compute.regionOperations()
+ .get(project=project, region=region, operation=name)
+ .execute()
+ )
+ if result.get('status') == 'DONE':
+ if 'error' in result:
+ raise Exception(result['error'])
+ operation = result
+ data_res = {
+ 'status': operation.get('status'),
+ 'kind': operation.get('kind'),
+ 'name': operation.get('name'),
+ 'id': operation.get('id'),
+ 'progress': operation.get('progress'),
+ 'startTime': operation.get('startTime'),
+ 'operationType': operation.get('operationType')
+ }
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': operation}
+ return_outputs(
+ tableToMarkdown(
+ 'Google Cloud Compute Operations', data_res, removeNull=True
+ ),
+ ec
+ )
+ break
+
+ time.sleep(2)
+
+
+def wait_for_global_operation(args):
+ """
+ This command will block until an operation has been marked as complete.
+
+ parameter: (string) name
+ Name of the operations resource.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ name = args.get('name')
+ while True:
+ result = (
+ compute.globalOperations().get(project=project, operation=name).execute()
+ )
+ if result.get('status') == 'DONE':
+ if 'error' in result:
+ raise Exception(result['error'])
+
+ operation = result
+ data_res = {
+ 'status': operation.get('status'),
+ 'kind': operation.get('kind'),
+ 'name': operation.get('name'),
+ 'id': operation.get('id'),
+ 'progress': operation.get('progress'),
+ 'startTime': operation.get('startTime'),
+ 'operationType': operation.get('operationType')
+ }
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': operation}
+
+ return_outputs(
+ tableToMarkdown(
+ 'Google Cloud Compute Operations', data_res, removeNull=True
+ ),
+ ec
+ )
+ break
+
+ time.sleep(2)
+
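
All three wait_for_*_operation commands poll every 2 seconds with no upper bound, so an operation that never reaches DONE would block the command forever. If a cap is ever wanted, a bounded variant of the same loop could look like this (sketch only; the 600-second timeout is an arbitrary example):

```python
import time


def wait_with_timeout(get_operation, timeout_seconds=600, interval=2):
    """Polls get_operation() until its status is DONE or the timeout elapses."""
    deadline = time.time() + timeout_seconds
    while time.time() < deadline:
        result = get_operation()
        if result.get('status') == 'DONE':
            if 'error' in result:
                raise Exception(result['error'])
            return result
        time.sleep(interval)
    raise Exception('Timed out waiting for operation to complete')
```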
+
+def test_module():
+
+ build_and_authenticate(GSERVICE)
+ demisto.results('ok')
+
+
+# instances()
+def create_instance(args):
+ """
+ Creates an instance resource in the specified project using the data included in the request.
+ parameter: (string) name
+ parameter: (string) description
+ parameter: (boolean) canIpForward (true/false)
+ parameter: (list) tags
+ parameter: (string) tagsFingerprint
+ parameter: (string) zone
+ parameter: (string) machine_type
+ parameter: (string) network
+ parameter: (string) sub_network
+ parameter: (string) networkIP
+ parameter: (string) networkInterfacesfingerprint
+ parameter: (boolean) externalInternetAccess (true/false)
+ parameter: (string) externalNatIP
+ parameter: (boolean) setPublicPtr (true/false)
+ parameter: (string) publicPtrDomainName
+ parameter: (string) networkTier (PREMIUM,STANDARD)
+ parameter: (string) ipCidrRange
+ parameter: (string) subnetworkRangeName
+ parameter: (string) diskType (PERSISTENT,SCRATCH)
+ parameter: (string) diskMode (READ_WRITE,READ_ONLY)
+ parameter: (string) diskSource
+ parameter: (string) diskDeviceName
+ parameter: (boolean) diskBoot (true/false)
+ parameter: (string) initializeParamsDiskName
+ parameter: (string) initializeParamsSourceImage
+ parameter: (int) initializeParamsdiskSizeGb
+ parameter: (string) initializeParamsDiskType
+ parameter: (string) initializeParamsSourceImageEncryptionKeyRawKey
+ parameter: (string) initializeParamsSourceImageEncryptionKeykmsKeyName
+ parameter: (string) initializeParamsDiskLabels
+ parameter: (string) initializeParamsDiskDescription
+ parameter: (boolean) diskAutodelete (true/false)
+ parameter: (string) diskInterface (SCSI,NVME)
+ parameter: (list) diskGuestOsFeatures
+ parameter: (string) diskEncryptionKeyRawKey
+ parameter: (string) diskEncryptionKeyKmsKeyName
+ parameter: (dict) metadataItems
+ parameter: (string) serviceAccountEmail
+ parameter: (list) serviceAccountscopes
+ parameter: (string) schedulingOnHostMaintenance (MIGRATE,TERMINATE)
+ parameter: (boolean) schedulingAutomaticRestart (true/false)
+ parameter: (boolean) schedulingPreemptible (true/false)
+ parameter: (dict) labels
+ parameter: (string) labelFingerprint
+ parameter: (string) minCpuPlatform
+ parameter: (string) guestAcceleratorsAcceleratorType
+ parameter: (integer) guestAcceleratorsAcceleratorCount
+ parameter: (boolean) deletionProtection (true/false)
+
+ Return the created instance to the war room
+ """
+ config = {}
+ if args.get('name'):
+ name = args.get('name', '')
+ name = name.lower()
+ config.update({'name': name})
+
+ if args.get('description'):
+ description = args.get('description')
+ config.update({'description': description})
+
+    if args.get('tags'):
+        tags = args.get('tags')
+        if 'tags' not in config.keys():
+            config.update({'tags': {}})
+        config['tags'].update({'items': parse_resource_ids(tags)})
+
+ if args.get('canIpForward'):
+ can_ip_forward = True if args.get('canIpForward') == 'true' else False
+ config.update({'canIpForward': can_ip_forward})
+
+    if args.get('tagsFingerprint'):
+        tags_fingerprint = args.get('tagsFingerprint')
+        if 'tags' not in config.keys():
+            config.update({'tags': {}})
+        config['tags'].update({'fingerprint': tags_fingerprint})
+
+ zone = args.get('zone')
+ machine_type = args.get('machine_type')
+
+ zone_machine_type = 'zones/' + zone + '/machineTypes/' + machine_type
+ config.update({'machineType': zone_machine_type})
+
+ if args.get('network'):
+ network = args.get('network')
+ if 'networkInterfaces' not in config.keys():
+ config.update({'networkInterfaces': [{}]})
+ config['networkInterfaces'][0].update({'network': network})
+
+ if args.get('subnetwork'):
+ sub_network = args.get('subnetwork')
+ if 'networkInterfaces' not in config.keys():
+ config.update({'networkInterfaces': [{}]})
+ config['networkInterfaces'][0].update({'subnetwork': sub_network})
+
+ if args.get('networkIP'):
+ network_ip = args.get('networkIP')
+ if 'networkInterfaces' not in config.keys():
+ config.update({'networkInterfaces': [{}]})
+ config['networkInterfaces'][0].update({'networkIP': network_ip})
+
+ if args.get('networkInterfacesfingerprint'):
+ network_interfaces_fingerprint = args.get('networkInterfacesfingerprint')
+ if 'networkInterfaces' not in config.keys():
+ config.update({'networkInterfaces': [{}]})
+ config['networkInterfaces'][0].update(
+ {'fingerprint': network_interfaces_fingerprint}
+ )
+
+ if args.get('externalInternetAccess'):
+ external_network = (
+ True if args.get('externalInternetAccess') == 'true' else False
+ )
+ if external_network:
+ if 'networkInterfaces' not in config.keys():
+ config.update({'networkInterfaces': [{}]})
+ if 'accessConfigs' not in config['networkInterfaces'][0].keys():
+ config['networkInterfaces'][0].update({'accessConfigs': [{}]})
+ config['networkInterfaces'][0]['accessConfigs'][0].update(
+ {'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT'}
+ )
+
+ if args.get('externalNatIP'):
+ nat_ip = args.get('externalNatIP')
+ if 'networkInterfaces' not in config.keys():
+ config.update({'networkInterfaces': [{}]})
+ if 'accessConfigs' not in config['networkInterfaces'][0].keys():
+ config['networkInterfaces'][0].update({'accessConfigs': [{}]})
+
+ config['networkInterfaces'][0]['accessConfigs'][0].update({'natIP': nat_ip})
+
+ if args.get('setPublicPtr'):
+ set_public_ptr = True if args.get('setPublicPtr') == 'true' else False
+ if 'networkInterfaces' not in config.keys():
+ config.update({'networkInterfaces': [{}]})
+ if 'accessConfigs' not in config['networkInterfaces'][0].keys():
+ config['networkInterfaces'][0].update({'accessConfigs': [{}]})
+
+ config['networkInterfaces'][0]['accessConfigs'][0].update(
+ {'setPublicPtr': set_public_ptr}
+ )
+
+ if args.get('publicPtrDomainName'):
+        public_ptr_domain_name = args.get('publicPtrDomainName')
+ if 'networkInterfaces' not in config.keys():
+ config.update({'networkInterfaces': [{}]})
+ if 'accessConfigs' not in config['networkInterfaces'][0].keys():
+ config['networkInterfaces'][0].update({'accessConfigs': [{}]})
+
+ config['networkInterfaces'][0]['accessConfigs'][0].update(
+ {'publicPtrDomainName': public_ptr_domain_name}
+ )
+
+ if args.get('networkTier'):
+ network_tier = args.get('networkTier')
+ if 'networkInterfaces' not in config.keys():
+ config.update({'networkInterfaces': [{}]})
+ if 'accessConfigs' not in config['networkInterfaces'][0].keys():
+ config['networkInterfaces'][0].update({'accessConfigs': [{}]})
+
+ config['networkInterfaces'][0]['accessConfigs'][0].update(
+ {'networkTier': network_tier}
+ )
+
+ if args.get('ipCidrRange'):
+ ip_cidr_range = args.get('ipCidrRange')
+ if 'networkInterfaces' not in config.keys():
+ config.update({'networkInterfaces': [{}]})
+ if 'aliasIpRanges' not in config['networkInterfaces'][0].keys():
+ config['networkInterfaces'][0].update({'aliasIpRanges': [{}]})
+
+ config['networkInterfaces'][0]['aliasIpRanges'][0].update(
+ {'ipCidrRange': ip_cidr_range}
+ )
+
+ if args.get('subnetworkRangeName'):
+ subnet_work_range_name = args.get('subnetworkRangeName')
+ if 'networkInterfaces' not in config.keys():
+ config.update({'networkInterfaces': [{}]})
+ if 'aliasIpRanges' not in config['networkInterfaces'][0].keys():
+ config['networkInterfaces'][0].update({'aliasIpRanges': [{}]})
+
+ config['networkInterfaces'][0]['aliasIpRanges'][0].update(
+ {'subnetworkRangeName': subnet_work_range_name}
+ )
+
+ if args.get('diskType'):
+ disk_type = args.get('diskType')
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ config['disks'][0].update({'type': disk_type})
+
+ if args.get('diskMode'):
+ disk_mode = args.get('diskMode')
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ config['disks'][0].update({'mode': disk_mode})
+
+ if args.get('diskSource'):
+ disk_source = args.get('diskSource')
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ config['disks'][0].update({'source': disk_source})
+
+ if args.get('diskDeviceName'):
+ disk_device_name = args.get('diskDeviceName')
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ config['disks'][0].update({'deviceName': disk_device_name})
+
+ if args.get('diskBoot') is not None:
+ disk_boot = True if args.get('diskBoot') == 'true' else False
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ config['disks'][0].update({'boot': disk_boot})
+
+ if args.get('initializeParamsDiskName'):
+ initialize_params_disk_name = args.get('initializeParamsDiskName')
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ if 'initializeParams' not in config['disks'][0].keys():
+ config['disks'][0].update({'initializeParams': {}})
+ config['disks'][0]['initializeParams'].update(
+ {'diskName': initialize_params_disk_name}
+ )
+
+ if args.get('initializeParamsSourceImage'):
+ image = args.get('initializeParamsSourceImage')
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ if 'initializeParams' not in config['disks'][0].keys():
+ config['disks'][0].update({'initializeParams': {}})
+ config['disks'][0]['initializeParams'].update({'sourceImage': image})
+
+ if args.get('initializeParamsdiskSizeGb'):
+ initialize_params_disk_size_gb = args.get('initializeParamsdiskSizeGb')
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ if 'initializeParams' not in config['disks'][0].keys():
+ config['disks'][0].update({'initializeParams': {}})
+ config['disks'][0]['initializeParams'].update(
+ {'diskSizeGb': int(initialize_params_disk_size_gb)}
+ )
+
+ if args.get('initializeParamsDiskType'):
+ initialize_params_disk_type = args.get('initializeParamsDiskType')
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ if 'initializeParams' not in config['disks'][0].keys():
+ config['disks'][0].update({'initializeParams': {}})
+ config['disks'][0]['initializeParams'].update(
+ {'diskType': initialize_params_disk_type}
+ )
+
+ if args.get('initializeParamsSourceImageEncryptionKeyRawKey'):
+ initialize_params_source_image_encryption_key_raw_key = args.get(
+ 'initializeParamsSourceImageEncryptionKeyRawKey'
+ )
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ if 'initializeParams' not in config['disks'][0].keys():
+ config['disks'][0].update({'initializeParams': {}})
+ if (
+ 'sourceImageEncryptionKey'
+ not in config['disks'][0]['initializeParams'].keys()
+ ):
+ config['disks'][0]['initializeParams'].update(
+ {'sourceImageEncryptionKey': {}}
+ )
+ config['disks'][0]['initializeParams']['sourceImageEncryptionKey'].update(
+ {'rawKey': initialize_params_source_image_encryption_key_raw_key}
+ )
+
+ if args.get('initializeParamsSourceImageEncryptionKeykmsKeyName'):
+ initialize_params_source_image_encryption_key_kms_key_name = args.get(
+ 'initializeParamsSourceImageEncryptionKeykmsKeyName'
+ )
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ if 'initializeParams' not in config['disks'][0].keys():
+ config['disks'][0].update({'initializeParams': {}})
+ if (
+ 'sourceImageEncryptionKey'
+ not in config['disks'][0]['initializeParams'].keys()
+ ):
+ config['disks'][0]['initializeParams'].update(
+ {'sourceImageEncryptionKey': {}}
+ )
+ config['disks'][0]['initializeParams']['sourceImageEncryptionKey'].update(
+ {'kmsKeyName': initialize_params_source_image_encryption_key_kms_key_name}
+ )
+
+ if args.get('initializeParamsDiskLabels'):
+ initialize_params_disk_labels = args.get('initializeParamsDiskLabels')
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ if 'initializeParams' not in config['disks'][0].keys():
+ config['disks'][0].update({'initializeParams': {}})
+ config['disks'][0]['initializeParams'].update(
+ {'labels': parse_labels(initialize_params_disk_labels)}
+ )
+
+ if args.get('initializeParamsDiskDescription'):
+ initialize_params_disk_description = args.get('initializeParamsDiskDescription')
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ if 'initializeParams' not in config['disks'][0].keys():
+ config['disks'][0].update({'initializeParams': {}})
+ config['disks'][0]['initializeParams'].update(
+ {'description': initialize_params_disk_description}
+ )
+
+ if args.get('diskAutodelete'):
+ disk_auto_delete = True if args.get('diskAutodelete') == 'true' else False
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ config['disks'][0].update({'autoDelete': disk_auto_delete})
+
+ if args.get('diskInterface'):
+ disk_interface = args.get('diskInterface')
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ config['disks'][0].update({'interface': disk_interface})
+
+ if args.get('diskGuestOsFeatures'):
+ disk_guest_os_features = args.get('diskGuestOsFeatures')
+ disk_guest_os_features = parse_resource_ids(disk_guest_os_features)
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ config['disks'][0].update({'guestOsFeatures': []})
+ for f in disk_guest_os_features:
+ config['disks'][0]['guestOsFeatures'].append({'type': f})
+
+ if args.get('diskEncryptionKeyRawKey'):
+ disk_encryption_key_raw_key = args.get('diskEncryptionKeyRawKey')
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ if 'diskEncryptionKey' not in config['disks'][0].keys():
+ config['disks'][0].update({'diskEncryptionKey': {}})
+ config['disks'][0]['diskEncryptionKey'].update(
+ {'rawKey': disk_encryption_key_raw_key}
+ )
+
+ if args.get('diskEncryptionKeyKmsKeyName'):
+ disk_encryption_key_kms_key_name = args.get('diskEncryptionKeyKmsKeyName')
+ if 'disks' not in config.keys():
+ config.update({'disks': [{}]})
+ if 'diskEncryptionKey' not in config['disks'][0].keys():
+ config['disks'][0].update({'diskEncryptionKey': {}})
+ config['disks'][0]['diskEncryptionKey'].update(
+ {'kmsKeyName': disk_encryption_key_kms_key_name}
+ )
+
+ meta_data = {}
+ if args.get('metadataItems'):
+ meta_data.update({'items': parse_metadata_items(args.get('metadataItems'))})
+ config.update({'metadata': meta_data})
+
+    if (
+        args.get('serviceAccountEmail') is not None
+        and args.get('serviceAccountscopes') is not None
+    ):
+        service_accounts = [
+            {
+                'email': args.get('serviceAccountEmail'),
+                'scopes': parse_resource_ids(args.get('serviceAccountscopes')),
+            }
+        ]
+        config.update({'serviceAccounts': service_accounts})
+
+ if args.get('schedulingOnHostMaintenance'):
+ scheduling_on_host_maintenance = args.get('schedulingOnHostMaintenance')
+ if 'scheduling' not in config.keys():
+ config.update({'scheduling': {}})
+ config['scheduling'].update(
+ {'onHostMaintenance': scheduling_on_host_maintenance}
+ )
+
+ if args.get('schedulingAutomaticRestart'):
+ scheduling_automatic_restart = (
+ True if args.get('schedulingAutomaticRestart') == 'true' else False
+ )
+ if 'scheduling' not in config.keys():
+ config.update({'scheduling': {}})
+ config['scheduling'].update({'automaticRestart': scheduling_automatic_restart})
+
+ if args.get('schedulingPreemptible'):
+ scheduling_preemptible = (
+ True if args.get('schedulingPreemptible') == 'true' else False
+ )
+ if 'scheduling' not in config.keys():
+ config.update({'scheduling': {}})
+ config['scheduling'].update({'preemptible': scheduling_preemptible})
+
+ if args.get('labels'):
+ labels = args.get('labels')
+ config.update({'labels': parse_labels(labels)})
+
+ if args.get('labelFingerprint'):
+ label_fingerprint = args.get('labelFingerprint')
+ config.update({'labelFingerprint': label_fingerprint})
+
+ if args.get('minCpuPlatform'):
+ min_cpu_platform = args.get('minCpuPlatform')
+ config.update({'minCpuPlatform': min_cpu_platform})
+
+ if args.get('guestAcceleratorsAcceleratorType'):
+ guest_accelerators_accelerator_type = args.get(
+ 'guestAcceleratorsAcceleratorType'
+ )
+ if 'guestAccelerators' not in config.keys():
+ config.update({'guestAccelerators': [{}]})
+ config['guestAccelerators'][0].update(
+ {'acceleratorType': guest_accelerators_accelerator_type}
+ )
+
+ if args.get('guestAcceleratorsAcceleratorCount'):
+ guest_accelerators_accelerator_count = args.get(
+ 'guestAcceleratorsAcceleratorCount'
+ )
+ if 'guestAccelerators' not in config.keys():
+ config.update({'guestAccelerators': [{}]})
+ config['guestAccelerators'][0].update(
+ {'acceleratorCount': int(guest_accelerators_accelerator_count)}
+ )
+
+ if args.get('deletionProtection'):
+ deletion_protection = (
+ True if args.get('deletionProtection') == 'true' else False
+ )
+ config.update({'deletionProtection': deletion_protection})
+
+ project = SERVICE_ACT_PROJECT_ID
+
+ operation = (
+ compute.instances().insert(project=project, zone=zone, body=config).execute()
+ )
+
+ data_res = {
+ 'status': operation.get('status'),
+ 'kind': operation.get('kind'),
+ 'name': operation.get('name'),
+ 'id': operation.get('id'),
+ 'progress': operation.get('progress'),
+ 'startTime': operation.get('startTime'),
+ 'operationType': operation.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': operation}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ operation,
+ )
+
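
Since almost every argument above is optional, the smallest useful call may help reviewers follow the config assembly. A hypothetical invocation (all values are placeholders; assumes the authenticated `compute` client is in place):

```python
create_instance({
    'name': 'demo-instance',
    'zone': 'us-central1-a',
    'machine_type': 'n1-standard-1',
    'network': 'global/networks/default',
    'externalInternetAccess': 'true',
    'diskBoot': 'true',
    'initializeParamsSourceImage': 'projects/debian-cloud/global/images/family/debian-9',
    'diskAutodelete': 'true',
})
```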
+
+def list_instances(args):
+ """
+ Retrieves the list of instances contained within the specified zone.
+
+ parameter: (string) zone
+ Name of the zone for request.
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+ parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ zone = args.get('zone')
+    max_results = int(args.get('maxResults', 500))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ request = compute.instances().list(
+ project=project,
+ zone=zone,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token,
+ )
+ output = []
+ data_res = []
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for instance in response['items']:
+ output.append(instance)
+ data_res_item = {
+ 'id': instance.get('id'),
+ 'name': instance.get('name'),
+ 'machineType': instance.get('machineType'),
+ 'zone': instance.get('zone'),
+ }
+ data_res.append(data_res_item)
+
+ request = compute.instances().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Instances(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Instances', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
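
The list()/list_next() pair above is googleapiclient's standard pagination idiom, so all pages are drained before anything is returned. A hypothetical call showing Compute's filter and ordering grammar (values are illustrative):

```python
list_instances({
    'zone': 'us-central1-a',
    'maxResults': '100',
    'filters': 'status = "RUNNING"',
    'orderBy': 'creationTimestamp desc',
})
```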
+
+def aggregated_list_instances(args):
+ """
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+    max_results = int(args.get('maxResults', 500))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+
+ request = compute.instances().aggregatedList(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token,
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for name, instances_scoped_list in response['items'].items():
+ if 'warning' not in instances_scoped_list.keys():
+ for inst in instances_scoped_list.get('instances', []):
+ output.append(inst)
+ data_res_item = {
+ 'id': inst.get('id'),
+ 'name': inst.get('name'),
+ 'machineType': inst.get('machineType'),
+ 'zone': inst.get('zone'),
+ }
+ data_res.append(data_res_item)
+
+ request = compute.instances().aggregatedList_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Instances(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Instances', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def set_instance_metadata(args):
+ """
+ Sets metadata for the specified instance to the data included in the request.
+
+ parameter: (string) zone
+ Name of the zone for request.
+ parameter: (string) instance
+ Name of the instance scoping this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ zone = args.get('zone')
+ instance = args.get('instance')
+
+ meta_data = {}
+ if args.get('metadataFingerprint'):
+ meta_data.update({'fingerprint': args.get('metadataFingerprint')})
+ if args.get('metadataItems'):
+ meta_data.update({'items': parse_metadata_items(args.get('metadataItems'))})
+
+ request = compute.instances().setMetadata(
+ project=project, zone=zone, instance=instance, body=meta_data
+ )
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def get_instance(args):
+ """
+ Returns the specified Instance resource.
+
+ parameter: (string) zone
+ Name of the zone for request.
+ parameter: (string) instance
+ Name of the instance scoping this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ instance = args.get('instance')
+ zone = args.get('zone')
+
+ request = compute.instances().get(project=project, zone=zone, instance=instance)
+ response = request.execute()
+ data_res = {
+ 'id': response.get('id'),
+ 'name': response.get('name'),
+ 'machineType': response.get('machineType'),
+ 'zone': response.get('zone'),
+ }
+
+ ec = {'GoogleCloudCompute.Instances(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Instances', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def delete_instance(args):
+ """
+ Deletes the specified Instance resource
+
+ parameter: (string) zone
+ Name of the zone for request.
+ parameter: (string) instance
+ Name of the instance scoping this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ instance = args.get('instance')
+ zone = args.get('zone')
+
+ request = compute.instances().delete(project=project, zone=zone, instance=instance)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def start_instance(args):
+ """
+ Starts an instance that was stopped using the instances().stop method.
+
+ parameter: (string) zone
+ Name of the zone for request.
+ parameter: (string) instance
+ Name of the instance scoping this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ instance = args.get('instance')
+ zone = args.get('zone')
+
+ request = compute.instances().start(project=project, zone=zone, instance=instance)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def stop_instance(args):
+ """
+    Stops a running instance, shutting it down cleanly, and allows you to restart it at a later time.
+
+ parameter: (string) zone
+ Name of the zone for request.
+ parameter: (string) instance
+ Name of the instance scoping this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ instance = args.get('instance')
+ zone = args.get('zone')
+
+ request = compute.instances().stop(project=project, zone=zone, instance=instance)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def reset_instance(args):
+ """
+ Performs a reset on the instance.
+
+ parameter: (string) zone
+ Name of the zone for request.
+ parameter: (string) instance
+ Name of the instance scoping this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ instance = args.get('instance')
+ zone = args.get('zone')
+
+ request = compute.instances().reset(project=project, zone=zone, instance=instance)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def set_instance_labels(args):
+ """
+ Sets labels on an instance
+
+ parameter: (string) zone
+ Name of the zone for request.
+ parameter: (string) instance
+ Name of the instance scoping this request.
+ parameter: (dict) labels
+ An object containing a list of 'key': value pairs
+ parameter: (string) labelFingerprint
+ Fingerprint of the previous set of labels for this resource.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ instance = args.get('instance')
+ zone = args.get('zone')
+ labels = args.get('labels')
+
+ labels = parse_labels(labels)
+ body = {'labels': labels}
+
+ if args.get('labelFingerprint'):
+ body.update({'labelFingerprint': args.get('labelFingerprint')})
+
+ request = compute.instances().setLabels(
+ project=project, zone=zone, instance=instance, body=body
+ )
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def set_instance_machine_type(args):
+ """
+ Changes the machine type for a stopped instance to the machine type specified in the request.
+
+ parameter: (string) zone
+ Name of the zone for request.
+ parameter: (string) instance
+ Name of the instance scoping this request.
+ parameter: (string) machine_type
+ Full or partial URL of the machine type resource.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ instance = args.get('instance')
+ zone = args.get('zone')
+ machine_type = args.get('machineType')
+
+ body = {'machineType': machine_type}
+
+ request = compute.instances().setMachineType(
+ project=project, zone=zone, instance=instance, body=body
+ )
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+# images()
+def get_image(args):
+ """
+ Returns the specified image.
+
+ parameter: (string) project
+ Project ID for this request.
+ parameter: (string) image
+ Name of the image resource to return.
+ """
+ if args.get('project') is not None:
+ project = args.get('project')
+ else:
+ project = SERVICE_ACT_PROJECT_ID
+
+ image = args.get('image')
+
+ request = compute.images().get(project=project, image=image)
+ response = request.execute()
+
+ data_res = {'id': response.get('id'), 'name': response.get('name')}
+
+ ec = {'GoogleCloudCompute.Images(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Images', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def get_image_from_family(args):
+ """
+ Returns the latest image that is part of an image family and is not deprecated.
+
+ parameter: (string) project
+ Project ID for this request.
+ parameter: (string) family
+ Name of the image family to search for.
+ """
+ project = args.get('project')
+ family = args.get('family')
+
+ request = compute.images().getFromFamily(project=project, family=family)
+ response = request.execute()
+
+ data_res = {
+ 'id': response.get('id'),
+ 'name': response.get('name'),
+ 'family': response.get('family'),
+ }
+
+ ec = {'GoogleCloudCompute.Images(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Images', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def list_images(args):
+ """
+ parameter: (string) project
+ Project ID for this request.
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+ parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ return: demisto entry (list)
+
+ """
+    if args.get('project'):
+        project = args.get('project')
+    else:
+        project = SERVICE_ACT_PROJECT_ID
+
+
+    max_results = int(args.get('maxResults', 500))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.images().list(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token,
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for image in response['items']:
+ output.append(image)
+ data_res_item = {'id': image.get('id'), 'name': image.get('name')}
+ data_res.append(data_res_item)
+
+ request = compute.images().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Images(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Images', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def delete_image(args):
+ """
+ Deletes the specified image.
+
+ parameter: (string) image
+ Name of the image resource to delete.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ image = args.get('image')
+
+ request = compute.images().delete(project=project, image=image)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def set_image_labels(args):
+ """
+ Sets the labels on an image.
+
+ parameter: (string) image
+        Name of the image resource on which to set labels.
+ parameter: (dict) labels
+ A list of labels to apply for this resource.
+ parameter: (string) labelFingerprint
+ The fingerprint of the previous set of labels for this resource.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ image = args.get('image')
+ labels = args.get('labels')
+ label_fingerprint = args.get('labelFingerprint')
+
+ labels = parse_labels(labels)
+ body = {'labels': labels, 'labelFingerprint': label_fingerprint}
+
+ request = compute.images().setLabels(project=project, resource=image, body=body)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def insert_image(args):
+ """
+ Creates an image in the specified project using the data included in the request.
+ """
+ config = {}
+ if args.get('name'):
+ name = args.get('name', '')
+ name = name.lower()
+ config.update({'name': name})
+
+ force_create = False
+ if args.get('forceCreate'):
+ force_create = True if args.get('forceCreate') == 'true' else False
+
+ if args.get('description'):
+ description = args.get('description')
+ config.update({'description': description})
+
+ if args.get('rawDiskSource'):
+ raw_disk_source = args.get('rawDiskSource')
+ config.update({'rawDisk': {}})
+ config['rawDisk'].update({'source': raw_disk_source})
+
+ if args.get('rawDiskSha1Checksum'):
+ raw_disk_sha1_checksum = args.get('rawDiskSha1Checksum')
+ if 'rawDisk' not in config.keys():
+ config.update({'rawDisk': {}})
+ config['rawDisk'].update({'sha1Checksum': raw_disk_sha1_checksum})
+
+ if args.get('rawDiskContainerType'):
+ raw_disk_container_type = args.get('rawDiskContainerType')
+ if 'rawDisk' not in config.keys():
+ config.update({'rawDisk': {}})
+ config['rawDisk'].update({'containerType': raw_disk_container_type})
+
+ if args.get('deprecatedState'):
+ deprecated_state = args.get('deprecatedState')
+ config.update({'deprecated': {}})
+ config['deprecated'].update({'state': deprecated_state})
+
+ if args.get('deprecatedReplacement'):
+ deprecated_replacement = args.get('deprecatedReplacement')
+ if 'deprecated' not in config.keys():
+ config.update({'deprecated': {}})
+ config['deprecated'].update({'replacement': deprecated_replacement})
+
+ if args.get('archiveSizeBytes'):
+ archive_size_bytes = args.get('archiveSizeBytes')
+ config.update({'archiveSizeBytes': int(archive_size_bytes)})
+
+ if args.get('diskSizeGb'):
+ disk_size_gb = args.get('diskSizeGb')
+ config.update({'diskSizeGb': int(disk_size_gb)})
+
+ if args.get('sourceDisk'):
+ source_disk = args.get('sourceDisk')
+ config.update({'sourceDisk': source_disk})
+
+ if args.get('licenses'):
+ licenses = args.get('licenses')
+ config.update({'licenses': parse_resource_ids(licenses)})
+
+ if args.get('family'):
+ family = args.get('family')
+ config.update({'family': family})
+
+ if args.get('imageEncryptionKeyRawKey'):
+ image_encryption_key_raw_key = args.get('imageEncryptionKeyRawKey')
+ config.update({'imageEncryptionKey': {'rawKey': image_encryption_key_raw_key}})
+
+ if args.get('imageEncryptionKeyKmsKeyName'):
+ image_encryption_key_kms_key_name = args.get('imageEncryptionKeyKmsKeyName')
+ if 'imageEncryptionKey' not in config.keys():
+ config.update({'imageEncryptionKey': {}})
+ config['imageEncryptionKey'].update(
+ {'kmsKeyName': image_encryption_key_kms_key_name}
+ )
+
+ if args.get('sourceDiskEncryptionKeyRawKey'):
+ source_disk_encryption_key_raw_key = args.get('sourceDiskEncryptionKeyRawKey')
+ if 'sourceDiskEncryptionKey' not in config.keys():
+ config.update({'sourceDiskEncryptionKey': {}})
+ config['sourceDiskEncryptionKey'].update(
+ {'rawKey': source_disk_encryption_key_raw_key}
+ )
+
+ if args.get('sourceDiskEncryptionKeyKmsKeyName'):
+ source_disk_encryption_key_kms_key_name = args.get(
+ 'sourceDiskEncryptionKeyKmsKeyName'
+ )
+ if 'sourceDiskEncryptionKey' not in config.keys():
+ config.update({'sourceDiskEncryptionKey': {}})
+ config['sourceDiskEncryptionKey'].update(
+ {'kmsKeyName': source_disk_encryption_key_kms_key_name}
+ )
+
+ if args.get('labels'):
+ labels = args.get('labels')
+ config.update({'labels': parse_labels(labels)})
+
+ if args.get('labelFingerprint'):
+ label_fingerprint = args.get('labelFingerprint')
+ config.update({'labelFingerprint': label_fingerprint})
+
+ if args.get('guestOsFeatures'):
+ guest_os_features = args.get('guestOsFeatures')
+ guest_os_features = parse_resource_ids(guest_os_features)
+ config.update({'guestOsFeatures': []})
+ for f in guest_os_features:
+ config['guestOsFeatures'].append({'type': f})
+
+ if args.get('licenseCodes'):
+ license_codes = args.get('licenseCodes')
+ config.update({'licenseCodes': parse_resource_ids(license_codes)})
+
+ if args.get('sourceImage'):
+ source_image = args.get('sourceImage')
+ config.update({'sourceImage': source_image})
+
+    if args.get('sourceImageEncryptionKeyRawKey'):
+        source_image_encryption_key_raw_key = args.get(
+            'sourceImageEncryptionKeyRawKey'
+        )
+        config.update(
+            {'sourceImageEncryptionKey': {'rawKey': source_image_encryption_key_raw_key}}
+        )
+
+ if args.get('sourceImageEncryptionKeyKmsKeyName'):
+ source_image_encryption_key_kms_key_name = args.get(
+ 'sourceImageEncryptionKeyKmsKeyName'
+ )
+ if 'sourceImageEncryptionKey' not in config.keys():
+ config.update({'sourceImageEncryptionKey': {}})
+ config['sourceImageEncryptionKey'].update(
+ {'kmsKeyName': source_image_encryption_key_kms_key_name}
+ )
+
+ if args.get('sourceSnapshot'):
+ source_snapshot = args.get('sourceSnapshot')
+ config.update({'sourceSnapshot': source_snapshot})
+
+ if args.get('sourceSnapshotEncryptionKeyRawKey'):
+ source_snapshot_encryption_key_raw_key = args.get(
+ 'sourceSnapshotEncryptionKeyRawKey'
+ )
+ if 'sourceSnapshotEncryptionKey' not in config.keys():
+ config.update({'sourceSnapshotEncryptionKey': {}})
+ config['sourceSnapshotEncryptionKey'].update(
+ {'rawKey': source_snapshot_encryption_key_raw_key}
+ )
+
+ if args.get('sourceSnapshotEncryptionKeyKmsKeyName'):
+ source_snapshot_encryption_key_kms_key_name = args.get(
+ 'sourceSnapshotEncryptionKeyKmsKeyName'
+ )
+ if 'sourceSnapshotEncryptionKey' not in config.keys():
+ config.update({'sourceSnapshotEncryptionKey': {}})
+ config['sourceSnapshotEncryptionKey'].update(
+ {'kmsKeyName': source_snapshot_encryption_key_kms_key_name}
+ )
+
+ project = SERVICE_ACT_PROJECT_ID
+ response = (
+ compute.images()
+ .insert(project=project, forceCreate=force_create, body=config)
+ .execute()
+ )
+
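+    # images().insert returns a long-running Operation; surface its key fields.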
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def networks_add_peering(args):
+ """
+ Adds a peering to the specified network.
+ """
+ config = {}
+ network = args.get('network')
+
+ if args.get('name'):
+ name = args.get('name', '')
+ name = name.lower()
+ config.update({'name': name})
+
+ if args.get('peerNetwork'):
+ peer_network = args.get('peerNetwork')
+ config.update({'peerNetwork': peer_network})
+
+ if args.get('autoCreateRoutes'):
+        auto_create_routes = args.get('autoCreateRoutes') == 'true'
+ config.update({'autoCreateRoutes': auto_create_routes})
+
+ if args.get('networkPeeringName'):
+ network_peering_name = args.get('networkPeeringName')
+ config.update({'networkPeering': {}})
+ config['networkPeering'].update({'name': network_peering_name})
+
+ if args.get('networkPeeringNetwork'):
+ network_peering_network = args.get('networkPeeringNetwork')
+ if 'networkPeering' not in config.keys():
+ config.update({'networkPeering': {}})
+ config['networkPeering'].update({'network': network_peering_network})
+
+ if args.get('networkPeeringExchangeSubnetRoutes'):
+        network_peering_exchange_subnet_routes = (
+            args.get('networkPeeringExchangeSubnetRoutes', '').lower() == 'true'
+        )
+ if 'networkPeering' not in config.keys():
+ config.update({'networkPeering': {}})
+ config['networkPeering'].update(
+ {'exchangeSubnetRoutes': network_peering_exchange_subnet_routes}
+ )
+
+ project = SERVICE_ACT_PROJECT_ID
+ response = (
+ compute.networks()
+ .addPeering(project=project, network=network, body=config)
+ .execute()
+ )
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def delete_network(args):
+ """
+ Deletes the specified network.
+
+ parameter: (string) network
+ Name of the network to delete.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ network = args.get('network')
+
+ request = compute.networks().delete(project=project, network=network)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def get_network(args):
+ """
+    Returns the specified network.
+
+ parameter: (string) network
+ Name of the network to return.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ network = args.get('network')
+
+ request = compute.networks().get(project=project, network=network)
+ response = request.execute()
+
+ data_res = {'name': response.get('name'), 'id': response.get('id')}
+
+ ec = {'GoogleCloudCompute.Networks(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Networks', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def insert_network(args):
+ """
+ Creates a network in the specified project using the data included in the request.
+ """
+ config = {}
+
+ if args.get('name'):
+ name = args.get('name', '')
+ name = name.lower()
+ config.update({'name': name})
+
+ if args.get('description'):
+ description = args.get('description')
+ config.update({'description': description})
+
+ if args.get('autoCreateSubnetworks'):
+        auto_create_sub_networks = args.get('autoCreateSubnetworks') == 'true'
+ config.update({'autoCreateSubnetworks': auto_create_sub_networks})
+
+ if args.get('routingConfigRoutingMode'):
+ routing_config_routing_mode = args.get('routingConfigRoutingMode')
+ config.update({'routingConfig': {'routingMode': routing_config_routing_mode}})
+
+ project = SERVICE_ACT_PROJECT_ID
+ response = compute.networks().insert(project=project, body=config).execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def list_networks(args):
+ """
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.networks().list(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token,
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for item in response['items']:
+ output.append(item)
+ data_res_item = {'name': item.get('name'), 'id': item.get('id')}
+ data_res.append(data_res_item)
+
+ request = compute.networks().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Networks(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Networks', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def networks_removepeering(args):
+ """
+ Removes a peering from the specified network.
+
+ parameter: (string) network
+ Name of the network resource to remove peering from.
+ parameter: (string) name
+ Name of the peering.
+ """
+ config = {}
+ network = args.get('network')
+
+ if args.get('name'):
+ name = args.get('name', '')
+ config.update({'name': name})
+
+ project = SERVICE_ACT_PROJECT_ID
+ response = (
+ compute.networks()
+ .removePeering(project=project, network=network, body=config)
+ .execute()
+ )
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def get_global_operation(args):
+ """
+ Retrieves the specified Operations resource.
+
+ parameter: (string) name
+ Name of the Operations resource to return.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ operation = args.get('name')
+
+ request = compute.globalOperations().get(project=project, operation=operation)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def get_zone_operation(args):
+ """
+ Retrieves the specified zone-specific Operations resource.
+
+ parameter: (string) name
+ Name of the Operations resource to return.
+ parameter: (string) zone
+ Name of the zone for this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ name = args.get('name')
+ zone = args.get('zone')
+
+ request = compute.zoneOperations().get(project=project, zone=zone, operation=name)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def get_region_operation(args):
+ """
+ Retrieves the specified region-specific Operations resource.
+
+ parameter: (string) name
+ Name of the Operations resource to return.
+ parameter: (string) region
+ Name of the region for this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ name = args.get('name')
+ region = args.get('region')
+
+ request = compute.regionOperations().get(
+ project=project, region=region, operation=name
+ )
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def list_zone_operation(args):
+ """
+ parameter: (string) zone
+        Name of the zone for this request.
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ zone = args.get('zone')
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.zoneOperations().list(
+ project=project,
+ zone=zone,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token,
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for operation in response['items']:
+ output.append(operation)
+ data_res_item = {
+ 'status': operation.get('status'),
+ 'kind': operation.get('kind'),
+ 'name': operation.get('name'),
+ 'id': operation.get('id'),
+ 'progress': operation.get('progress'),
+ 'startTime': operation.get('startTime'),
+ 'operationType': operation.get('operationType'),
+ }
+ data_res.append(data_res_item)
+
+ request = compute.zoneOperations().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def delete_zone_operation(args):
+ """
+ Deletes the specified zone-specific Operations resource.
+
+ parameter: (string) name
+ Name of the Operations resource to delete.
+ parameter: (string) zone
+ Name of the zone for this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ name = args.get('name')
+ zone = args.get('zone')
+
+ request = compute.zoneOperations().delete(
+ project=project, zone=zone, operation=name
+ )
+ request.execute()
+
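+    # zoneOperations().delete returns an empty body, so there is no operation
+    # result to surface; report a plain status string instead.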
+ return 'success'
+
+
+def list_region_operation(args):
+ """
+ parameter: (string) region
+ Name of the region for this request.
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ region = args.get('region')
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.regionOperations().list(
+ project=project,
+ region=region,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token,
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for operation in response['items']:
+ output.append(operation)
+ data_res_item = {
+ 'status': operation.get('status'),
+ 'kind': operation.get('kind'),
+ 'name': operation.get('name'),
+ 'id': operation.get('id'),
+ 'progress': operation.get('progress'),
+ 'startTime': operation.get('startTime'),
+ 'operationType': operation.get('operationType'),
+ }
+ data_res.append(data_res_item)
+
+ request = compute.regionOperations().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def delete_region_operation(args):
+ """
+ Deletes the specified region-specific Operations resource.
+
+ parameter: (string) name
+ Name of the Operations resource to delete.
+ parameter: (string) region
+ Name of the region for this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ name = args.get('name')
+ region = args.get('region')
+
+ request = compute.regionOperations().delete(
+ project=project, region=region, operation=name
+ )
+ request.execute()
+
+ return 'success'
+
+
+def list_global_operation(args):
+ """
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.globalOperations().list(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token,
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for operation in response['items']:
+ output.append(operation)
+ data_res_item = {
+ 'status': operation.get('status'),
+ 'kind': operation.get('kind'),
+ 'name': operation.get('name'),
+ 'id': operation.get('id'),
+ 'progress': operation.get('progress'),
+ 'startTime': operation.get('startTime'),
+ 'operationType': operation.get('operationType'),
+ }
+ data_res.append(data_res_item)
+
+ request = compute.globalOperations().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def delete_global_operation(args):
+    """
+    Deletes the specified Operations resource.
+
+    parameter: (string) name
+        Name of the Operations resource to delete.
+    """
+    project = SERVICE_ACT_PROJECT_ID
+    name = args.get('name')
+
+ request = compute.globalOperations().delete(project=project, operation=name)
+ request.execute()
+
+ return 'success'
+
+
+def aggregated_list_addresses(args):
+ """
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.addresses().aggregatedList(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token,
+ )
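+    # An aggregated list keys 'items' by scope (e.g. regions/us-central1);
+    # scopes with no matching resources carry a 'warning' entry and are skipped.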
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for name, instances_scoped_list in response['items'].items():
+ if 'warning' not in instances_scoped_list.keys():
+                    for addr in instances_scoped_list.get('addresses', []):
+ output.append(addr)
+ data_res_item = {
+ 'id': addr.get('id'),
+ 'name': addr.get('name'),
+ 'address': addr.get('address'),
+ }
+ data_res.append(data_res_item)
+
+ request = compute.addresses().aggregatedList_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Addresses(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Addresses', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def delete_address(args):
+ """
+ Deletes the specified address resource.
+
+ parameter: (string) address
+ Name of the address resource to delete.
+ parameter: (string) region
+ Name of the region for this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ address = args.get('address')
+ region = args.get('region')
+
+ request = compute.addresses().delete(
+ project=project, region=region, address=address
+ )
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def get_address(args):
+ """
+ Returns the specified address resource.
+
+ parameter: (string) address
+ Name of the address resource to return.
+ parameter: (string) region
+ Name of the region for this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ address = args.get('address')
+ region = args.get('region')
+
+ request = compute.addresses().get(project=project, region=region, address=address)
+ response = request.execute()
+ data_res = {
+ 'id': response.get('id'),
+ 'name': response.get('name'),
+ 'address': response.get('address'),
+ }
+ ec = {'GoogleCloudCompute.Addresses(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Addresses', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def insert_address(args):
+ """
+ Creates an address resource in the specified project using the data included in the request.
+ """
+ config = {}
+ if args.get('name'):
+ name = args.get('name', '')
+ name = name.lower()
+ config.update({'name': name})
+
+ region = args.get('region')
+
+ if args.get('description'):
+ description = args.get('description')
+ config.update({'description': description})
+
+ if args.get('address'):
+ address = args.get('address')
+ config.update({'address': address})
+
+ if args.get('prefixLength'):
+        prefix_length = args.get('prefixLength')
+        config.update({'prefixLength': int(prefix_length)})
+
+ if args.get('networkTier'):
+ network_tier = args.get('networkTier')
+ config.update({'networkTier': network_tier})
+
+ if args.get('addressType'):
+ address_type = args.get('addressType')
+ config.update({'addressType': address_type})
+
+ if args.get('purpose'):
+ purpose = args.get('purpose')
+ config.update({'purpose': purpose})
+
+ if args.get('subnetwork'):
+ sub_network = args.get('subnetwork')
+ config.update({'subnetwork': sub_network})
+
+ if args.get('network'):
+ network = args.get('network')
+ config.update({'network': network})
+
+ project = SERVICE_ACT_PROJECT_ID
+ response = (
+ compute.addresses()
+ .insert(project=project, region=region, body=config)
+ .execute()
+ )
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def list_addresses(args):
+ """
+ parameter: (string) region
+ Name of the region for this request.
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ region = args.get('region')
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.addresses().list(
+ project=project,
+ region=region,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token,
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for address in response['items']:
+ output.append(address)
+ data_res_item = {
+ 'id': address.get('id'),
+ 'name': address.get('name'),
+ 'address': address.get('address'),
+ }
+ data_res.append(data_res_item)
+
+ request = compute.addresses().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Addresses(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Addresses', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def delete_global_address(args):
+ """
+ Deletes the specified address resource.
+
+ parameter: (string) address
+ Name of the address resource to delete.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ address = args.get('address')
+
+ request = compute.globalAddresses().delete(project=project, address=address)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def get_global_address(args):
+ """
+    Returns the specified address resource.
+
+ parameter: (string) address
+ Name of the address resource to return.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ address = args.get('address')
+
+ request = compute.globalAddresses().get(project=project, address=address)
+ response = request.execute()
+
+ data_res = {
+ 'id': response.get('id'),
+ 'name': response.get('name'),
+ 'address': response.get('address'),
+ }
+
+ ec = {'GoogleCloudCompute.Addresses(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Addresses', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def insert_global_address(args):
+ """
+ Creates an address resource in the specified project using the data included in the request.
+ """
+ config = {}
+ if args.get('name'):
+ name = args.get('name', '')
+ name = name.lower()
+ config.update({'name': name})
+
+ if args.get('description'):
+ description = args.get('description')
+ config.update({'description': description})
+
+ if args.get('address'):
+ address = args.get('address')
+ config.update({'address': address})
+
+ if args.get('prefixLength'):
+        prefix_length = args.get('prefixLength')
+        config.update({'prefixLength': int(prefix_length)})
+
+ if args.get('networkTier'):
+        network_tier = args.get('networkTier')
+        config.update({'networkTier': network_tier})
+
+ if args.get('ipVersion'):
+ ip_version = args.get('ipVersion')
+ config.update({'ipVersion': ip_version})
+
+ if args.get('addressType'):
+ address_type = args.get('addressType')
+ config.update({'addressType': address_type})
+
+ if args.get('purpose'):
+ purpose = args.get('purpose')
+ config.update({'purpose': purpose})
+
+ if args.get('subnetwork'):
+ sub_network = args.get('subnetwork')
+ config.update({'subnetwork': sub_network})
+
+ if args.get('network'):
+ network = args.get('network')
+ config.update({'network': network})
+
+ project = SERVICE_ACT_PROJECT_ID
+ response = compute.globalAddresses().insert(project=project, body=config).execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def list_global_addresses(args):
+ """
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.globalAddresses().list(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token,
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for address in response['items']:
+ output.append(address)
+ data_res_item = {
+ 'id': address.get('id'),
+ 'name': address.get('name'),
+ 'address': address.get('address'),
+ }
+ data_res.append(data_res_item)
+
+ request = compute.globalAddresses().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Addresses(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Addresses', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+# disks()
+def aggregated_list_disks(args):
+ """
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.disks().aggregatedList(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token,
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for name, instances_scoped_list in response['items'].items():
+ if 'warning' not in instances_scoped_list.keys():
+ for disk in instances_scoped_list.get('disks', []):
+ output.append(disk)
+ data_res_item = {
+ 'id': disk.get('id'),
+ 'name': disk.get('name'),
+ 'sizeGb': disk.get('sizeGb'),
+ 'zone': disk.get('zone'),
+ 'status': disk.get('status'),
+ 'type': disk.get('type'),
+ }
+ data_res.append(data_res_item)
+
+ request = compute.disks().aggregatedList_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Disks(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Disks', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def create_disk_snapshot(args):
+ """
+ Creates a snapshot of a specified persistent disk.
+
+ parameter: (string) zone
+ The name of the zone for this request.
+ parameter: (string) disk
+ Name of the persistent disk to snapshot.
+
+ """
+
+ zone = args.get('zone')
+ disk = args.get('disk')
+
+ config = {}
+ if args.get('name'):
+ name = args.get('name', '')
+ name = name.lower()
+ config.update({'name': name})
+
+ if args.get('description'):
+ description = args.get('description')
+ config.update({'description': description})
+
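+    # Customer-supplied encryption keys may be given either as a raw key or as
+    # a Cloud KMS key name; both populate the same nested key object.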
+ if args.get('snapshotEncryptionKeyRawKey'):
+ raw_key = args.get('snapshotEncryptionKeyRawKey')
+ if 'snapshotEncryptionKey' not in config.keys():
+ config.update({'snapshotEncryptionKey': {}})
+ config['snapshotEncryptionKey'].update({'rawKey': raw_key})
+
+ if args.get('snapshotEncryptionKeyKmsKeyName'):
+ kms_key_name = args.get('snapshotEncryptionKeyKmsKeyName')
+ if 'snapshotEncryptionKey' not in config.keys():
+ config.update({'snapshotEncryptionKey': {}})
+ config['snapshotEncryptionKey'].update({'kmsKeyName': kms_key_name})
+
+ if args.get('sourceDiskEncryptionKeyRawKey'):
+ raw_key = args.get('sourceDiskEncryptionKeyRawKey')
+ if 'sourceDiskEncryptionKey' not in config.keys():
+ config.update({'sourceDiskEncryptionKey': {}})
+ config['sourceDiskEncryptionKey'].update({'rawKey': raw_key})
+
+ if args.get('sourceDiskEncryptionKeyKmsKeyName'):
+ kms_key_name = args.get('sourceDiskEncryptionKeyKmsKeyName')
+ if 'sourceDiskEncryptionKey' not in config.keys():
+ config.update({'sourceDiskEncryptionKey': {}})
+ config['sourceDiskEncryptionKey'].update({'kmsKeyName': kms_key_name})
+
+ if args.get('labels'):
+ labels = args.get('labels')
+ config.update({'labels': parse_labels(labels)})
+
+ if args.get('labelFingerprint'):
+ label_fingerprint = args.get('labelFingerprint')
+ config.update({'labelFingerprint': label_fingerprint})
+
+ project = SERVICE_ACT_PROJECT_ID
+ response = (
+ compute.disks()
+ .createSnapshot(project=project, zone=zone, disk=disk, body=config)
+ .execute()
+ )
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def delete_disk(args):
+ """
+ Deletes the specified persistent disk.
+
+ parameter: (string) disk
+ Name or id of the resource for this request.
+ parameter: (string) zone
+ Name of the zone for this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ disk = args.get('disk')
+ zone = args.get('zone')
+
+ request = compute.disks().delete(project=project, zone=zone, disk=disk)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def get_disk(args):
+ """
+ Returns a specified persistent disk.
+
+ parameter: (string) disk
+ Name or id of the resource for this request.
+ parameter: (string) zone
+ Name of the zone for this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ disk = args.get('disk')
+ zone = args.get('zone')
+
+ request = compute.disks().get(project=project, zone=zone, disk=disk)
+ response = request.execute()
+
+ data_res = {
+ 'id': response.get('id'),
+ 'name': response.get('name'),
+ 'sizeGb': response.get('sizeGb'),
+ 'zone': response.get('zone'),
+ 'status': response.get('status'),
+ 'type': response.get('type'),
+ }
+
+ ec = {'GoogleCloudCompute.Disks(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Disks', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def insert_disk(args):
+ """
+ Creates a persistent disk in the specified project using the data in the request.
+ """
+ config = {}
+ if args.get('name'):
+ name = args.get('name', '')
+ name = name.lower()
+ config.update({'name': name})
+
+    zone = args.get('zone')
+
+ if args.get('disktype'):
+ disk_type = args.get('disktype')
+ config.update({'type': disk_type})
+
+ if args.get('description'):
+ description = args.get('description')
+ config.update({'description': description})
+
+ if args.get('sizeGb'):
+ size_gb = args.get('sizeGb')
+ config.update({'sizeGb': int(size_gb)})
+
+ if args.get('sourceSnapshot'):
+        source_snapshot = args.get('sourceSnapshot')
+        config.update({'sourceSnapshot': source_snapshot})
+
+ if args.get('sourceImage'):
+ source_image = args.get('sourceImage')
+ config.update({'sourceImage': source_image})
+
+ if args.get('licenses'):
+ licenses = args.get('licenses')
+ config.update({'licenses': parse_resource_ids(licenses)})
+
+ if args.get('guestOsFeatures'):
+ guest_os_features = args.get('guestOsFeatures')
+ guest_os_features = parse_resource_ids(guest_os_features)
+ config.update({'guestOsFeatures': []})
+ for f in guest_os_features:
+ config['guestOsFeatures'].append({'type': f})
+
+ if args.get('diskEncryptionKeyRawKey'):
+ disk_encryption_key_raw_key = args.get('diskEncryptionKeyRawKey')
+ config.update({'diskEncryptionKey': {}})
+ config['diskEncryptionKey'].update({'rawKey': disk_encryption_key_raw_key})
+
+ if args.get('diskEncryptionKeyKmsKeyName'):
+ disk_encryption_key_kms_key_name = args.get('diskEncryptionKeyKmsKeyName')
+ if 'diskEncryptionKey' not in config.keys():
+ config.update({'diskEncryptionKey': {}})
+ config['diskEncryptionKey'].update(
+ {'kmsKeyName': disk_encryption_key_kms_key_name}
+ )
+
+    if args.get('sourceImageEncryptionKeyRawKey'):
+        source_image_encryption_key_raw_key = args.get(
+            'sourceImageEncryptionKeyRawKey'
+        )
+        config.update(
+            {'sourceImageEncryptionKey': {'rawKey': source_image_encryption_key_raw_key}}
+        )
+
+ if args.get('sourceImageEncryptionKeyKmsKeyName'):
+ source_image_encryption_key_kms_key_name = args.get(
+ 'sourceImageEncryptionKeyKmsKeyName'
+ )
+ if 'sourceImageEncryptionKey' not in config.keys():
+ config.update({'sourceImageEncryptionKey': {}})
+ config['sourceImageEncryptionKey'].update(
+ {'kmsKeyName': source_image_encryption_key_kms_key_name}
+ )
+
+ if args.get('sourceSnapshotEncryptionKeyRawKey'):
+ source_snapshot_encryption_key_raw_key = args.get(
+ 'sourceSnapshotEncryptionKeyRawKey'
+ )
+ if 'sourceSnapshotEncryptionKey' not in config.keys():
+ config.update({'sourceSnapshotEncryptionKey': {}})
+ config['sourceSnapshotEncryptionKey'].update(
+ {'rawKey': source_snapshot_encryption_key_raw_key}
+ )
+
+ if args.get('sourceSnapshotEncryptionKeyKmsKeyName'):
+ source_snapshot_encryption_key_kms_key_name = args.get(
+ 'sourceSnapshotEncryptionKeyKmsKeyName'
+ )
+ if 'sourceSnapshotEncryptionKey' not in config.keys():
+ config.update({'sourceSnapshotEncryptionKey': {}})
+ config['sourceSnapshotEncryptionKey'].update(
+ {'kmsKeyName': source_snapshot_encryption_key_kms_key_name}
+ )
+
+ if args.get('labels'):
+ labels = args.get('labels')
+ config.update({'labels': parse_labels(labels)})
+
+ if args.get('labelFingerprint'):
+ label_fingerprint = args.get('labelFingerprint')
+ config.update({'labelFingerprint': label_fingerprint})
+
+ if args.get('replicaZones'):
+ replica_zones = args.get('replicaZones')
+ config.update({'replicaZones': parse_resource_ids(replica_zones)})
+
+ if args.get('licenseCodes'):
+ license_codes = args.get('licenseCodes')
+ config.update({'licenseCodes': parse_resource_ids(license_codes)})
+
+ if args.get('physicalBlockSizeBytes'):
+ physical_block_size_bytes = args.get('physicalBlockSizeBytes')
+ config.update({'physicalBlockSizeBytes': int(physical_block_size_bytes)})
+
+ project = SERVICE_ACT_PROJECT_ID
+ response = compute.disks().insert(project=project, zone=zone, body=config).execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType'),
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response,
+ )
+
+
+def list_disks(args):
+ """
+ parameter: (string) zone
+ Name of the zone for this request.
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ zone = args.get('zone')
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.disks().list(
+ project=project,
+ zone=zone,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for disk in response['items']:
+ output.append(disk)
+ data_res_item = {
+ 'id': disk.get('id'),
+ 'name': disk.get('name'),
+ 'sizeGb': disk.get('sizeGb'),
+ 'zone': disk.get('zone'),
+ 'status': disk.get('status'),
+ 'type': disk.get('type')
+ }
+ data_res.append(data_res_item)
+
+ request = compute.disks().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Disks(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Disks', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def resize_disk(args):
+ """
+ Resizes the specified persistent disk.
+
+ parameter: (string) disk
+ Name or id of the resource for this request.
+ parameter: (string) zone
+ Name of the zone for this request.
+ parameter: (string) sizeGb
+ The new size of the persistent disk, which is specified in GB.
+ """
+ config = {}
+
+ disk = args.get('disk')
+ zone = args.get('zone')
+
+ if args.get('sizeGb'):
+ size_gb = args.get('sizeGb')
+ config.update({'sizeGb': int(size_gb)})
+
+ project = SERVICE_ACT_PROJECT_ID
+ response = (
+ compute.disks()
+ .resize(project=project, zone=zone, disk=disk, body=config)
+ .execute()
+ )
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType')
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def set_disk_labels(args):
+ """
+ Sets the labels on a disk.
+
+ parameter: (string) disk
+ Name or id of the resource for this request.
+ parameter: (string) zone
+ Name of the zone for this request.
+ parameter: (dict) labels
+ The labels to set for this resource.
+ parameter: (string) labelFingerprint
+ The fingerprint of the previous set of labels for this resource, used to detect conflicts.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ disk = args.get('disk')
+ zone = args.get('zone')
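+    # labelFingerprint guards against concurrent label updates; it is added to
+    # the request body below only when it was supplied.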
+    labels = args.get('labels')
+    label_fingerprint = args.get('labelFingerprint')
+
+    labels = parse_labels(labels)
+    body = {'labels': labels}
+
+    if label_fingerprint is not None:
+        body.update({'labelFingerprint': label_fingerprint})
+
+ request = compute.disks().setLabels(
+ project=project, zone=zone, resource=disk, body=body
+ )
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType')
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+# diskTypes()
+def aggregated_list_disk_types(args):
+ """
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.diskTypes().aggregatedList(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for name, instances_scoped_list in response['items'].items():
+ if 'warning' not in instances_scoped_list.keys():
+ for disktype in instances_scoped_list.get('diskTypes', []):
+ output.append(disktype)
+ data_res_item = {
+ 'name': disktype.get('name'),
+ 'validDiskSize': disktype.get('validDiskSize')
+ }
+ data_res.append(data_res_item)
+
+ request = compute.diskTypes().aggregatedList_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.DiskTypes(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute DiskTypes', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def get_disk_type(args):
+ """
+    Returns the specified disk type.
+
+    parameter: (string) zone
+        Name of the zone for this request.
+    parameter: (string) disktype
+        Name of the disk type to return.
+    """
+ project = SERVICE_ACT_PROJECT_ID
+ disk_type = args.get('disktype')
+ zone = args.get('zone')
+
+ request = compute.diskTypes().get(project=project, zone=zone, diskType=disk_type)
+ response = request.execute()
+
+ data_res = {
+ 'name': response.get('name'),
+ 'validDiskSize': response.get('validDiskSize'),
+ 'zone': response.get('zone')
+ }
+
+ ec = {'GoogleCloudCompute.DiskTypes(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute DiskTypes', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def list_disks_types(args):
+ """
+ parameter: (string) zone
+ Name of the zone for this request.
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ zone = args.get('zone')
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.diskTypes().list(
+ project=project,
+ zone=zone,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for disktype in response['items']:
+ output.append(disktype)
+ data_res_item = {
+ 'name': disktype.get('name'),
+ 'validDiskSize': disktype.get('validDiskSize')
+ }
+ data_res.append(data_res_item)
+
+ request = compute.diskTypes().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.DiskTypes(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute DiskTypes', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+# instanceGroups()
+def instance_groups_add_instances(args):
+ """
+ Adds a list of instances to the specified instance group.
+ All of the instances in the instance group must be in the same network/subnetwork.
+
+ parameter: (dict) instances
+ The list of instances to add to the instance group.
+ parameter: (string) zone
+ Name of the zone for this request.
+    parameter: (string) instanceGroup
+        The name of the instance group.
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ instance_group = args.get('instanceGroup')
+ zone = args.get('zone')
+ instances = args.get('instances')
+
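+    # addInstances expects each instance as {'instance': <instance URL>}.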
+    instances = parse_resource_ids(instances)
+    instance_list = [{'instance': inst} for inst in instances]
+
+    body = {'instances': instance_list}
+
+ request = compute.instanceGroups().addInstances(
+ project=project, zone=zone, instanceGroup=instance_group, body=body
+ )
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType')
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def aggregated_list_instance_groups(args):
+ """
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+ parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.instanceGroups().aggregatedList(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for name, instances_scoped_list in response['items'].items():
+ if 'warning' not in instances_scoped_list.keys():
+ for item in instances_scoped_list.get('instanceGroups', []):
+ output.append(item)
+ data_res_item = {
+ 'id': item.get('id'),
+ 'name': item.get('name'),
+ 'zone': item.get('zone'),
+ 'network': item.get('network')
+ }
+ data_res.append(data_res_item)
+
+ request = compute.instanceGroups().aggregatedList_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.InstanceGroups(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown(
+ 'Google Cloud Compute Instance Groups', data_res, removeNull=True
+ ),
+ ec,
+ response
+ )
+
+
+def delete_instance_group(args):
+ """
+ Deletes the specified instance group. The instances in the group are not deleted.
+
+ parameter: (string) zone
+ Name of the zone for this request.
+    parameter: (string) instanceGroup
+        The name of the instance group.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ instance_group = args.get('instanceGroup')
+ zone = args.get('zone')
+
+ request = compute.instanceGroups().delete(
+ project=project, zone=zone, instanceGroup=instance_group
+ )
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType')
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def get_instance_group(args):
+ """
+ Returns the specified instance group.
+
+ parameter: (string) zone
+ Name of the zone for this request.
+    parameter: (string) instanceGroup
+ The name of the instance group.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ instance_group = args.get('instanceGroup')
+ zone = args.get('zone')
+
+ request = compute.instanceGroups().get(
+ project=project, zone=zone, instanceGroup=instance_group
+ )
+ response = request.execute()
+ data_res = {
+ 'id': response.get('id'),
+ 'name': response.get('name'),
+ 'zone': response.get('zone'),
+ 'network': response.get('network')
+ }
+
+ ec = {'GoogleCloudCompute.InstanceGroups(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown(
+ 'Google Cloud Compute Instance Groups', data_res, removeNull=True
+ ),
+ ec,
+ response
+ )
+
+
+def insert_instance_group(args):
+ """
+ parameter: (string) zone
+ Name of the zone for this request.
+ parameter: (string) name
+ The name of the instance group.
+ parameter: (string) description
+ An optional description of this resource. Provide this property when you create the resource.
+ parameter: (object) namedPorts
+ Assigns a name to a port number
+ parameter: (string) network
+ The URL of the network to which all instances in the instance group belong.
+
+ """
+ config = {}
+ if args.get('name'):
+ name = args.get('name')
+ name = name.lower()
+ config.update({'name': name})
+
+    zone = args.get('zone')
+
+ if args.get('description'):
+ description = args.get('description')
+ config.update({'description': description})
+
+ if args.get('namedPorts'):
+ named_ports = args.get('namedPorts')
+ config.update({'namedPorts': parse_named_ports(named_ports)})
+
+ if args.get('network'):
+ network = args.get('network')
+ config.update({'network': network})
+
+ project = SERVICE_ACT_PROJECT_ID
+ response = (
+ compute.instanceGroups()
+ .insert(project=project, zone=zone, body=config)
+ .execute()
+ )
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType')
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def list_instance_groups(args):
+ """
+ parameter: (string) zone
+ Name of the zone for this request.
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ zone = args.get('zone')
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+    output = []
+    data_res = []
+ request = compute.instanceGroups().list(
+ project=project,
+ zone=zone,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token
+ )
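+    # The googleapiclient list()/list_next() pair implements pagination:
+    # list_next() returns None once the last page has been consumed, which
+    # terminates the loop below.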
+ while request:
+ response = request.execute()
+        if 'items' in response:
+            for item in response['items']:
+                output.append(item)
+                data_res.append({
+                    'id': item.get('id'),
+                    'name': item.get('name'),
+                    'zone': item.get('zone'),
+                    'network': item.get('network')
+                })
+
+ request = compute.instanceGroups().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.InstanceGroups(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown(
+            'Google Cloud Compute Instance Groups', data_res, removeNull=True
+ ),
+ ec,
+ response
+ )
+
+
+def list_instance_groups_instances(args):
+ """
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+ parameter: (enum) instanceState
+ A filter for the state of the instances in the instance group. Valid options are ALL or RUNNING
+ """
+ project = SERVICE_ACT_PROJECT_ID
+
+ zone = args.get('zone')
+ config = {}
+ instance_group = args.get('instanceGroup')
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ if args.get('instanceState'):
+ instance_state = args.get('instanceState')
+ config.update({'instanceState': instance_state})
+
+ output = []
+ data_res = []
+ request = compute.instanceGroups().listInstances(
+ project=project,
+ zone=zone,
+ instanceGroup=instance_group,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token,
+ body=config
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for item in response['items']:
+ output.append(item)
+ data_res_item = {
+ 'instance': item.get('instance'),
+ 'status': item.get('status')
+ }
+ data_res.append(data_res_item)
+
+ request = compute.instanceGroups().listInstances_next(
+ previous_request=request, previous_response=response
+ )
+ output = {'Group': instance_group, 'Instances': output}
+
+ ec = {'GoogleCloudCompute.InstanceGroupsInstances': output}
+ return_outputs(
+ tableToMarkdown(
+ 'Google Cloud Compute Instance Groups', data_res, removeNull=True
+ ),
+ ec,
+ response
+ )
+
+
+def instance_groups_remove_instances(args):
+ """
+ Removes one or more instances from the specified instance group, but does not delete those instances.
+
+ parameter: (string) zone
+ The name of the zone for this request.
+ parameter: (string) instanceGroup
+ The name of the instance group where the named ports are updated.
+ parameter: (list) instances
+ The list of instances to remove from the instance group.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ instance_group = args.get('instanceGroup')
+ zone = args.get('zone')
+ instances = args.get('instances')
+
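+    # parse_resource_ids() (defined earlier in this file) splits the
+    # comma-separated argument; each entry is then wrapped in the
+    # {'instance': <url>} objects that the removeInstances API expects.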
+    instances = parse_resource_ids(instances)
+    instance_list = []
+    for inst in instances:
+        instance_list.append({'instance': inst})
+
+    body = {'instances': instance_list}
+
+ request = compute.instanceGroups().removeInstances(
+ project=project, zone=zone, instanceGroup=instance_group, body=body
+ )
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType')
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def set_instance_group_named_ports(args):
+ """
+ Sets the named ports for the specified instance group.
+
+ parameter: (string) zone
+ The name of the zone for this request.
+ parameter: (string) instanceGroup
+ The name of the instance group where the named ports are updated.
+ parameter: (list) namedPorts
+ The list of named ports to set for this instance group.
+ parameter: (string) fingerprint
+ The fingerprint of the named ports information for this instance group.
+ """
+    config = {}
+    instance_group = args.get('instanceGroup')
+    zone = args.get('zone')
+
+ if args.get('namedPorts'):
+ named_ports = args.get('namedPorts')
+ config.update({'namedPorts': parse_named_ports(named_ports)})
+
+ if args.get('fingerprint'):
+ fingerprint = args.get('fingerprint')
+ config.update({'fingerprint': fingerprint})
+
+ project = SERVICE_ACT_PROJECT_ID
+ response = (
+ compute.instanceGroups()
+ .setNamedPorts(
+ project=project, zone=zone, instanceGroup=instance_group, body=config
+ )
+ .execute()
+ )
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType')
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+# regions()
+def get_region(args):
+ """
+ Get a specified region resource.
+
+ parameter: (string) region
+ The name of the region for this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ region = args.get('region')
+
+ request = compute.regions().get(project=project, region=region)
+ response = request.execute()
+
+ data_res = {
+ 'id': response.get('id'),
+ 'name': response.get('name'),
+ 'status': response.get('status')
+ }
+
+ ec = {'GoogleCloudCompute.Regions(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Regions', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def list_regions(args):
+ """
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.regions().list(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for item in response['items']:
+ output.append(item)
+ data_res_item = {
+ 'id': item.get('id'),
+ 'name': item.get('name'),
+ 'status': item.get('status')
+ }
+ data_res.append(data_res_item)
+
+ request = compute.regions().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Regions(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Regions', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def get_zone(args):
+    """
+ Get a specified zone resource.
+
+ parameter: (string) zone
+ The name of the zone for this request.
+ """
+
+ project = SERVICE_ACT_PROJECT_ID
+ zone = args.get('zone')
+
+ request = compute.zones().get(project=project, zone=zone)
+ response = request.execute()
+
+ data_res = {
+ 'id': response.get('id'),
+ 'name': response.get('name'),
+ 'status': response.get('status')
+ }
+
+ ec = {'GoogleCloudCompute.Zones(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Zones', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def list_zones(args):
+    """
+    Retrieves the list of zone resources available to the specified project.
+
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+
+ project = SERVICE_ACT_PROJECT_ID
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.zones().list(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for item in response['items']:
+ output.append(item)
+ data_res_item = {
+ 'id': item.get('id'),
+ 'name': item.get('name'),
+ 'status': item.get('status')
+ }
+ data_res.append(data_res_item)
+
+ request = compute.zones().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Zones(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Zones', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def aggregated_list_machine_types(args):
+    """
+    Retrieves an aggregated list of machine types across all zones in the project.
+
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+
+ project = SERVICE_ACT_PROJECT_ID
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.machineTypes().aggregatedList(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token
+ )
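+    # Unlike the plain list() calls, aggregatedList() keys 'items' by scope
+    # (e.g. 'zones/us-central1-a'); scopes with no machine types carry a
+    # 'warning' entry instead, which the loop below skips.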
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for name, instances_scoped_list in response['items'].items():
+ if 'warning' not in instances_scoped_list.keys():
+ for item in instances_scoped_list.get('machineTypes', []):
+ output.append(item)
+ data_res_item = {
+ 'id': item.get('id'),
+ 'name': item.get('name'),
+ 'memoryMb': item.get('memoryMb'),
+ 'guestCpus': item.get('guestCpus')
+ }
+ data_res.append(data_res_item)
+
+        request = compute.machineTypes().aggregatedList_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.MachineTypes(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown(
+ 'Google Cloud Compute Machine Types', data_res, removeNull=True
+ ),
+ ec,
+ response
+ )
+
+
+def get_machine_type(args):
+    """
+ Get a specified machine type.
+
+ parameter: (string) zone
+ The name of the zone for this request.
+
+ parameter: (string) machineType
+ Name of the machine type to return.
+ """
+
+ project = SERVICE_ACT_PROJECT_ID
+ machine_type = args.get('machineType')
+ zone = args.get('zone')
+
+ request = compute.machineTypes().get(
+ project=project, zone=zone, machineType=machine_type
+ )
+ response = request.execute()
+
+ data_res = {
+ 'id': response.get('id'),
+ 'name': response.get('name'),
+ 'memoryMb': response.get('memoryMb'),
+ 'guestCpus': response.get('guestCpus')
+ }
+
+ ec = {'GoogleCloudCompute.MachineTypes(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown(
+ 'Google Cloud Compute Machine Types', data_res, removeNull=True
+ ),
+ ec,
+ response
+ )
+
+
+def list_machine_types(args):
+    """
+    Retrieves the list of machine types available in the specified project and zone.
+
+    parameter: (string) zone
+ Name of the zone for this request.
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+
+ project = SERVICE_ACT_PROJECT_ID
+ zone = args.get('zone')
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.machineTypes().list(
+ project=project,
+ zone=zone,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for item in response['items']:
+ output.append(item)
+ data_res_item = {
+ 'id': item.get('id'),
+ 'name': item.get('name'),
+ 'memoryMb': item.get('memoryMb'),
+ 'guestCpus': item.get('guestCpus')
+ }
+ data_res.append(data_res_item)
+
+ request = compute.machineTypes().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.MachineTypes(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown(
+ 'Google Cloud Compute Machine Types', data_res, removeNull=True
+ ),
+ ec,
+ response
+ )
+
+
+def insert_firewall(args):
+ """
+ Creates a firewall rule in the specified project using the data included in the request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+
+ config = {}
+ if args.get('name'):
+ config.update({'name': args.get('name')})
+
+ if args.get('description'):
+ config.update({'description': args.get('description')})
+
+ if args.get('network'):
+ config.update({'network': args.get('network')})
+
+ if args.get('priority'):
+ config.update({'priority': int(args.get('priority'))})
+
+ if args.get('sourceRanges'):
+ config.update({'sourceRanges': parse_resource_ids(args.get('sourceRanges'))})
+
+ if args.get('destinationRanges'):
+ config.update(
+ {'destinationRanges': parse_resource_ids(args.get('destinationRanges'))}
+ )
+
+ if args.get('sourceTags'):
+ config.update({'sourceTags': parse_resource_ids(args.get('sourceTags'))})
+
+ if args.get('targetTags'):
+ config.update({'targetTags': parse_resource_ids(args.get('targetTags'))})
+
+ if args.get('sourceServiceAccounts'):
+ config.update(
+ {
+ 'sourceServiceAccounts': parse_resource_ids(
+ args.get('sourceServiceAccounts')
+ )
+ }
+ )
+
+ if args.get('targetServiceAccounts'):
+ config.update(
+ {
+ 'targetServiceAccounts': parse_resource_ids(
+ args.get('targetServiceAccounts')
+ )
+ }
+ )
+
+ if args.get('allowed'):
+ config.update({'allowed': parse_firewall_rule(args.get('allowed'))})
+
+ if args.get('denied'):
+ config.update({'denied': parse_firewall_rule(args.get('denied'))})
+
+ if args.get('direction'):
+ config.update({'direction': args.get('direction')})
+
+ if args.get('logConfigEnable'):
+        log_config_enable = args.get('logConfigEnable') == 'true'
+ config.update({'logConfig': {'enable': log_config_enable}})
+
+ if args.get('disabled'):
+ disabled = args.get('disabled') == 'true'
+ config.update({'disabled': disabled})
+
+ request = compute.firewalls().insert(project=project, body=config)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType')
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def patch_firewall(args):
+ """
+ Updates the specified firewall rule with the data included in the request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+
+ config = {}
+    name = args.get('name')
+    if name:
+        config.update({'name': name})
+
+ if args.get('description'):
+ config.update({'description': args.get('description')})
+
+ if args.get('network'):
+ config.update({'network': args.get('network')})
+
+ if args.get('priority'):
+ config.update({'priority': int(args.get('priority'))})
+
+ if args.get('sourceRanges'):
+ config.update({'sourceRanges': parse_resource_ids(args.get('sourceRanges'))})
+
+ if args.get('destinationRanges'):
+ config.update(
+ {'destinationRanges': parse_resource_ids(args.get('destinationRanges'))}
+ )
+
+ if args.get('sourceTags'):
+ config.update({'sourceTags': parse_resource_ids(args.get('sourceTags'))})
+
+ if args.get('targetTags'):
+ config.update({'targetTags': parse_resource_ids(args.get('targetTags'))})
+
+ if args.get('sourceServiceAccounts'):
+ config.update(
+ {
+ 'sourceServiceAccounts': parse_resource_ids(
+ args.get('sourceServiceAccounts')
+ )
+ }
+ )
+
+ if args.get('targetServiceAccounts'):
+ config.update(
+ {
+ 'targetServiceAccounts': parse_resource_ids(
+ args.get('targetServiceAccounts')
+ )
+ }
+ )
+
+ if args.get('allowed'):
+ config.update({'allowed': parse_firewall_rule(args.get('allowed'))})
+
+ if args.get('denied'):
+ config.update({'denied': parse_firewall_rule(args.get('denied'))})
+
+ if args.get('direction'):
+ config.update({'direction': args.get('direction')})
+
+ if args.get('logConfigEnable'):
+        log_config_enable = args.get('logConfigEnable') == 'true'
+ config.update({'logConfig': {'enable': log_config_enable}})
+
+ if args.get('disabled'):
+        disabled = args.get('disabled') == 'true'
+ config.update({'disabled': disabled})
+
+ request = compute.firewalls().patch(project=project, firewall=name, body=config)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType')
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def list_firewalls(args):
+    """
+    Retrieves the list of firewall rules available to the specified project.
+
+    parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+
+ project = SERVICE_ACT_PROJECT_ID
+
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.firewalls().list(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for item in response['items']:
+ output.append(item)
+ data_res_item = {
+ 'name': item.get('name'),
+ 'network': item.get('network'),
+ 'priority': item.get('priority')
+ }
+ data_res.append(data_res_item)
+
+ request = compute.firewalls().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Firewalls(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Firewalls', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def get_firewall(args):
+ """
+ Get a specified firewall rule.
+
+ parameter: (string) name
+ Name of the firewall rule to return.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ name = args.get('name')
+
+ request = compute.firewalls().get(project=project, firewall=name)
+ response = request.execute()
+
+ data_res = {
+ 'name': response.get('name'),
+ 'network': response.get('network'),
+ 'priority': response.get('priority')
+ }
+
+ ec = {'GoogleCloudCompute.Firewalls(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Firewalls', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def delete_firewall(args):
+ """
+ Delete a specified firewall.
+
+ parameter: (string) name
+ Name of the firewall rule to delete.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ name = args.get('name')
+
+ request = compute.firewalls().delete(project=project, firewall=name)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType')
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+# snapshots()
+def delete_snapshot(args):
+ """
+ Delete a specified snapshot.
+
+ parameter: (string) name
+ Name of the Snapshot resource to delete.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ name = args.get('name')
+
+ request = compute.snapshots().delete(project=project, snapshot=name)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType')
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def get_snapshot(args):
+    """
+    Returns the specified Snapshot resource.
+
+    parameter: (string) name
+        Name of the Snapshot resource to return.
+    """
+    project = SERVICE_ACT_PROJECT_ID
+    name = args.get('name')
+
+ request = compute.snapshots().get(project=project, snapshot=name)
+ response = request.execute()
+
+ data_res = {
+ 'name': response.get('name'),
+ 'status': response.get('status'),
+ 'creationTimestamp': response.get('creationTimestamp')
+ }
+
+ ec = {'GoogleCloudCompute.Snapshots(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Snapshots', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def list_snapshots(args):
+ """
+ parameter: (string) zone
+ Name of the zone for this request.
+ parameter: (number) maxResults
+ The maximum number of results per page that should be returned (Default 500).
+ parameter: (string) filters
+ A filter expression that filters resources listed in the response
+ parameter: (string) pageToken
+ Specifies a page token to use
+    parameter: (string) orderBy
+ Sorts list results by a certain order.
+ By default, results are returned in alphanumerical order based on the resource name
+
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ max_results = int(args.get('maxResults'))
+ filters = args.get('filters')
+ order_by = args.get('orderBy')
+ page_token = args.get('pageToken')
+
+ output = []
+ data_res = []
+ request = compute.snapshots().list(
+ project=project,
+ filter=filters,
+ maxResults=max_results,
+ orderBy=order_by,
+ pageToken=page_token
+ )
+ while request:
+ response = request.execute()
+ if 'items' in response.keys():
+ for item in response['items']:
+ output.append(item)
+ data_res_item = {
+                    'name': item.get('name'),
+                    'status': item.get('status'),
+                    'creationTimestamp': item.get('creationTimestamp')
+ }
+ data_res.append(data_res_item)
+
+ request = compute.snapshots().list_next(
+ previous_request=request, previous_response=response
+ )
+
+ ec = {'GoogleCloudCompute.Snapshots(val.id === obj.id)': output}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Snapshots', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+def set_snapshot_labels(args):
+ """"
+ parameter: (dict) labels
+ A list of labels to apply for this resource.
+    parameter: (string) labelFingerprint
+ The fingerprint of the previous set of labels for this resource, used to detect conflicts.
+ parameter: (string) name
+ Name or ID of the resource for this request.
+ """
+ project = SERVICE_ACT_PROJECT_ID
+ name = args.get('name')
+ labels = args.get('labels')
+
+ labels = parse_labels(labels)
+ body = {'labels': labels}
+
+ if args.get('labelFingerprint'):
+ body.update({'labelFingerprint': args.get('labelFingerprint')})
+
+ request = compute.snapshots().setLabels(project=project, resource=name, body=body)
+ response = request.execute()
+
+ data_res = {
+ 'status': response.get('status'),
+ 'kind': response.get('kind'),
+ 'name': response.get('name'),
+ 'id': response.get('id'),
+ 'progress': response.get('progress'),
+ 'operationType': response.get('operationType')
+ }
+
+ ec = {'GoogleCloudCompute.Operations(val.id === obj.id)': response}
+ return_outputs(
+ tableToMarkdown('Google Cloud Compute Operations', data_res, removeNull=True),
+ ec,
+ response
+ )
+
+
+"""
+EXECUTION CODE
+"""
+
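+# Command dispatch: demisto.command() selects the handler for the invoked
+# command; any exception raised is converted into a user-facing error by the
+# except block at the bottom of the file.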
+try:
+ compute = build_and_authenticate(GSERVICE)
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+
+ elif demisto.command() == 'gcp-compute-insert-instance':
+ create_instance(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-instance':
+ get_instance(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-delete-instance':
+ delete_instance(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-start-instance':
+ start_instance(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-stop-instance':
+ stop_instance(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-reset-instance':
+ reset_instance(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-instances':
+ list_instances(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-set-instance-labels':
+ set_instance_labels(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-set-instance-metadata':
+ set_instance_metadata(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-set-instance-machine-type':
+ set_instance_machine_type(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-aggregated-list-instances':
+ aggregated_list_instances(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-image-from-family':
+ get_image_from_family(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-image':
+ get_image(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-networks-add-peering':
+ networks_add_peering(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-delete-network':
+ delete_network(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-network':
+ get_network(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-insert-network':
+ insert_network(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-networks':
+ list_networks(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-networks-remove-peering':
+ networks_removepeering(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-global-operation':
+ get_global_operation(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-zone-operation':
+ get_zone_operation(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-region-operation':
+ get_region_operation(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-zone-operation':
+ list_zone_operation(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-global-operation':
+ list_global_operation(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-region-operation':
+ list_region_operation(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-delete-zone-operation':
+ delete_zone_operation(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-delete-global-operation':
+ delete_global_operation(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-delete-region-operation':
+ delete_region_operation(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-delete-address':
+ delete_address(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-address':
+ get_address(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-insert-address':
+ insert_address(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-addresses':
+ list_addresses(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-aggregated-list-addresses':
+ aggregated_list_addresses(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-delete-global-address':
+ delete_global_address(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-global-address':
+ get_global_address(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-insert-global-address':
+ insert_global_address(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-global-addresses':
+ list_global_addresses(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-aggregated-list-disks':
+ aggregated_list_disks(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-create-disk-snapshot':
+ create_disk_snapshot(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-delete-disk':
+ delete_disk(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-disk':
+ get_disk(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-insert-disk':
+ insert_disk(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-disks':
+ list_disks(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-resize-disk':
+ resize_disk(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-set-disk-labels':
+ set_disk_labels(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-aggregated-list-disk-types':
+ aggregated_list_disk_types(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-disk-type':
+ get_disk_type(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-disk-types':
+ list_disks_types(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-images':
+ list_images(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-delete-image':
+ delete_image(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-set-image-labels':
+ set_image_labels(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-insert-image':
+ insert_image(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-instance-groups-add-instances':
+ instance_groups_add_instances(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-aggregated-list-instance-groups':
+ aggregated_list_instance_groups(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-delete-instance-group':
+ delete_instance_group(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-instance-group':
+ get_instance_group(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-insert-instance-group':
+ insert_instance_group(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-instance-groups':
+ list_instance_groups(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-instance-group-instances':
+ list_instance_groups_instances(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-instance-groups-remove-instances':
+ instance_groups_remove_instances(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-set-group-instance-named-ports':
+ set_instance_group_named_ports(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-region':
+ get_region(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-regions':
+ list_regions(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-zone':
+ get_zone(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-zones':
+ list_zones(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-aggregated-list-machine-types':
+ aggregated_list_machine_types(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-machine-type':
+ get_machine_type(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-machine-types':
+ list_machine_types(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-wait-for-zone-operation':
+ wait_for_zone_operation(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-wait-for-region-operation':
+ wait_for_region_operation(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-wait-for-global-operation':
+ wait_for_global_operation(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-insert-firewall':
+ insert_firewall(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-patch-firewall':
+ patch_firewall(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-firewall':
+ list_firewalls(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-firewall':
+ get_firewall(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-delete-firewall':
+ delete_firewall(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-set-snapshot-labels':
+ set_snapshot_labels(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-list-snapshots':
+ list_snapshots(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-get-snapshot':
+ get_snapshot(demisto.args())
+
+ elif demisto.command() == 'gcp-compute-delete-snapshot':
+ delete_snapshot(demisto.args())
+
+except Exception as e:
+ LOG(e)
+ try:
+ response = json.loads(e.content) # type: ignore
+ response = response['error']
+ status_code = response.get('code')
+ err_message = response.get('message')
+ full_err_msg = 'error code: {}\n{}'.format(status_code, err_message)
+ return_error(full_err_msg)
+ except AttributeError:
+        return_error(str(e))
diff --git a/Integrations/GoogleCloudCompute/GoogleCloudCompute.yml b/Integrations/GoogleCloudCompute/GoogleCloudCompute.yml
new file mode 100644
index 000000000000..6ae6e2e4ff83
--- /dev/null
+++ b/Integrations/GoogleCloudCompute/GoogleCloudCompute.yml
@@ -0,0 +1,12799 @@
+category: IT Services
+commonfields:
+ id: Google Cloud Compute
+ version: -1
+configuration:
+- display: Service Account Private Key file contents (JSON)
+ name: service
+ required: true
+ type: 4
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
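+# (Integration parameter types: type 4 renders as an encrypted field and
+# type 8 as a boolean checkbox in the Demisto UI.)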
+description: Google Compute Engine delivers virtual machines running in Google's innovative
+ data centers and worldwide fiber network. Compute Engine's tooling and workflow
+ support enable scaling from single instances to global, load-balanced cloud computing.
+display: Google Cloud Compute
+name: Google Cloud Compute
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: ' The name of the resource, provided by the client when initially
+ creating the resource. The resource name must be 1-63 characters long, and
+ comply with RFC1035. Specifically, the name must be 1-63 characters long and
+ match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first
+ character must be a lowercase letter, and all following characters must be
+ a dash, lowercase letter, or digit, except the last character, which cannot
+ be a dash.'
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: Tags to apply to this instance. Tags are used to identify valid
+ sources or targets for network firewalls and are specified by the client during
+ instance creation. The tags can be later modified by the setTags method. Each
+ tag within the list must comply with RFC1035. Multiple tags can be specified
+ via the 'tags.items' field.
+ isArray: false
+ name: tags
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a fingerprint for this request, which is essentially
+ a hash of the tags' contents and used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update tags. You must always provide an up-to-date fingerprint hash
+ in order to update or change tags.
+ isArray: false
+ name: tagsFingerprint
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Allows this instance to send and receive packets with non-matching
+ destination or source IPs. This is required if you plan to use this instance
+ to forward routes. For more information, see Enabling IP Forwarding.
+ isArray: false
+ name: canIpForward
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: 'The machine type EX: n1-standard-1 or custom-4-5120'
+ isArray: false
+ name: machine_type
+ required: false
+ secret: false
+ - default: false
+ description: URL of the network resource for this instance. When creating an
+ instance, if neither the network nor the subnetwork is specified, the default
+ network global/networks/default is used; if the network is not specified but
+ the subnetwork is specified, the network is inferred.
+ isArray: false
+ name: network
+ required: false
+ secret: false
+ - default: false
+ description: The URL of the Subnetwork resource for this instance. If the network
+ resource is in legacy mode, do not provide this property. If the network is
+ in auto subnet mode, providing the subnetwork is optional. If the network
+ is in custom subnet mode, then this field should be specified. If you specify
+ this property, you can specify the subnetwork as a full or partial URL.
+ isArray: false
+ name: subnetwork
+ required: false
+ secret: false
+ - default: false
+ description: An IPv4 internal network address to assign to the instance for
+ this network interface. If not specified by the user, an unused internal IP
+ is assigned by the system.
+ isArray: false
+ name: networkIP
+ required: false
+ secret: false
+ - default: false
+ description: Fingerprint hash of contents stored in this network interface.
+ This field will be ignored when inserting an Instance or adding a NetworkInterface.
+ An up-to-date fingerprint must be provided in order to update the NetworkInterface,
+ otherwise the request will fail with error 412 conditionNotMet.
+ isArray: false
+ name: networkInterfacesfingerprint
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: This will set accessConfigs to type ONE_TO_ONE_NAT and name to
+ External NAT
+ isArray: false
+ name: externalInternetAccess
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: An external IP address associated with this instance. Specify an
+ unused static external IP address available to the project or leave this field
+ undefined to use an IP from a shared ephemeral IP address pool. If you specify
+ a static external IP address, it must live in the same region as the zone
+ of the instance.
+ isArray: false
+ name: externalNatIP
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+  description: Specifies whether a public DNS 'PTR' record should be created to
+ map the external IP address of the instance to a DNS domain name.
+ isArray: false
+ name: setPublicPtr
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: The DNS domain name for the public PTR record. This field can only
+ be set when the setPublicPtr field is enabled.
+ isArray: false
+ name: publicPtrDomainName
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'This signifies the networking tier used for configuring this access
+ configuration and can only take the following values: PREMIUM, STANDARD.'
+ isArray: false
+ name: networkTier
+ predefined:
+ - PREMIUM
+ - STANDARD
+ required: false
+ secret: false
+ - default: false
+ description: The IP CIDR range represented by this alias IP range. This IP CIDR
+ range must belong to the specified subnetwork and cannot contain IP addresses
+ reserved by system or used by other network interfaces. This range may be
+ a single IP address (e.g. 0.0.0.0), a netmask (e.g. /24) or a CIDR format
+ string (e.g. 0.0.0.0/24).
+ isArray: false
+ name: ipCidrRange
+ required: false
+ secret: false
+ - default: false
+ description: Optional subnetwork secondary range name specifying the secondary
+ range from which to allocate the IP CIDR range for this alias IP range. If
+ left unspecified, the primary range of the subnetwork will be used.
+ isArray: false
+ name: subnetworkRangeName
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Specifies the type of the disk, either SCRATCH or PERSISTENT. If
+ not specified, the default is PERSISTENT.
+ isArray: false
+ name: diskType
+ predefined:
+ - PERSISTENT
+ - SCRATCH
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The mode in which to attach this disk, either READ_WRITE or READ_ONLY.
+ If not specified, the default is to attach the disk in READ_WRITE mode.
+ isArray: false
+ name: diskMode
+ predefined:
+ - READ_WRITE
+ - READ_ONLY
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a valid partial or full URL to an existing Persistent
+ Disk resource. When creating a new instance, one of initializeParams.sourceImage
+ or disks.source is required except for local SSD. If desired, you can also
+ attach existing non-root persistent disks using this property. This field
+ is only applicable for persistent disks. Note that for InstanceTemplate,
+ specify the disk name, not the URL for the disk.
+ isArray: false
+ name: diskSource
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a unique device name of your choice that is reflected
+ into the /dev/disk/by-id/google-* tree of a Linux operating system running
+ within the instance. This name can be used to reference the device for mounting,
+ resizing, and so on, from within the instance.
+ isArray: false
+ name: diskDeviceName
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Indicates that this is a boot disk. The virtual machine will use
+ the first partition of the disk for its root filesystem.
+ isArray: false
+ name: diskBoot
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Specifies the disk name. If not specified, the default is to use
+ the name of the instance. If the disk with the instance name exists already
+ in the given zone/region, a new name will be automatically generated.
+ isArray: false
+ name: initializeParamsDiskName
+ required: false
+ secret: false
+ - default: false
+ description: 'The source image to create this disk. When creating a new instance,
+ one of initializeParams.sourceImage or disks.source is required except for
+ local SSD. To create a disk with one of the public operating system images,
+ specify the image by its family name. For example, specify family/debian-9
+ to use the latest Debian 9 image: projects/debian-cloud/global/images/family/debian-9 Alternatively,
+ use a specific version of a public operating system image: projects/debian-cloud/global/images/debian-9-stretch-vYYYYMMDD'
+ isArray: false
+ name: initializeParamsSourceImage
+ required: false
+ secret: false
+ - default: false
+ description: Specifies the size of the disk in base-2 GB.
+ isArray: false
+ name: initializeParamsdiskSizeGb
+ required: false
+ secret: false
+ - default: false
+ description: 'Specifies the disk type to use to create the instance. If not
+ specified, the default is pd-standard, specified using the full URL. For example: https://www.googleapis.com/compute/v1/projects/project/zones/zone/diskTypes/pd-standard Other
+ values include pd-ssd and local-ssd. If you define this field, you can provide
+ either the full or partial URL. '
+ isArray: false
+ name: initializeParamsDiskType
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ isArray: false
+ name: initializeParamsSourceImageEncryptionKeyRawKey
+ required: false
+ secret: false
+ - default: false
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ isArray: false
+ name: initializeParamsSourceImageEncryptionKeykmsKeyName
+ required: false
+ secret: false
+ - default: false
+ description: 'Labels to apply to this disk. These can be later modified by the
+ disks.setLabels method. This field is only applicable for persistent disks.
+ EX: key=abc,value=123;key=abc,value=123'
+ isArray: false
+ name: initializeParamsDiskLabels
+ required: false
+ secret: false
+ - default: false
+ description: An optional description. Provide this property when creating the
+ disk.
+ isArray: false
+ name: initializeParamsDiskDescription
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Specifies whether the disk will be auto-deleted when the instance
+ is deleted (but not when the disk is detached from the instance).
+ isArray: false
+ name: diskAutodelete
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Specifies the disk interface to use for attaching this disk, which
+ is either SCSI or NVME. The default is SCSI. Persistent disks must always
+ use SCSI and the request will fail if you attempt to attach a persistent disk
+ in any other format than SCSI. Local SSDs can use either NVME or SCSI. For
+ performance characteristics of SCSI over NVMe, see Local SSD performance.
+ isArray: false
+ name: diskInterface
+ predefined:
+ - SCSI
+ - NVME
+ required: false
+ secret: false
+ - default: false
+ description: A list of features to enable on the guest operating system. Applicable
+ only for bootable images. Read Enabling guest operating system features to
+ see a list of available options. comma separated.
+ isArray: false
+ name: diskGuestOsFeatures
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ isArray: false
+ name: diskEncryptionKeyRawKey
+ required: false
+ secret: false
+ - default: false
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ isArray: false
+ name: diskEncryptionKeyKmsKeyName
+ required: false
+ secret: false
+ - default: false
+ description: 'The metadata key/value pairs assigned to this instance. This includes
+ custom metadata and predefined keys. EX: key=abc,value=123;key=abc,value=123'
+ isArray: false
+ name: metadataItems
+ required: false
+ secret: false
+ - default: false
+ description: Email address of the service account.
+ isArray: false
+ name: serviceAccountEmail
+ required: false
+ secret: false
+ - default: false
+ description: The list of scopes to be made available for this service account.
+ isArray: false
+ name: serviceAccountscopes
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Defines the maintenance behavior for this instance. For standard
+ instances, the default behavior is MIGRATE. For preemptible instances, the
+ default and only possible behavior is TERMINATE. For more information, see
+ Setting Instance Scheduling Options.
+ isArray: false
+ name: schedulingOnHostMaintenance
+ predefined:
+ - MIGRATE
+ - TERMINATE
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Specifies whether the instance should be automatically restarted
+ if it is terminated by Compute Engine (not terminated by a user). You can
+ only set the automatic restart option for standard instances. Preemptible
+ instances cannot be automatically restarted. By default, this is set to true
+ so an instance is automatically restarted if it is terminated by Compute Engine.
+ isArray: false
+ name: schedulingAutomaticRestart
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Defines whether the instance is preemptible. This can only be set
+ during instance creation, it cannot be set or changed after the instance has
+ been created.
+ isArray: false
+ name: schedulingPreemptible
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Labels to apply to this instance. These can be later modified by
+ the setLabels method. key=abc,value=123;key=abc,value=123
+ isArray: false
+ name: labels
+ required: false
+ secret: false
+ - default: false
+ description: A fingerprint for this request, which is essentially a hash of
+ the label's contents and used for optimistic locking. The fingerprint is initially
+ generated by Compute Engine and changes after every request to modify or update
+ labels. You must always provide an up-to-date fingerprint hash in order to
+ update or change labels.
+ isArray: false
+ name: labelFingerprint
+ required: false
+ secret: false
+ - default: false
+ description: 'Specifies a minimum CPU platform for the VM instance. Applicable
+ values are the friendly names of CPU platforms, such as minCpuPlatform: "Intel
+ Haswell" or minCpuPlatform: "Intel Sandy Bridge".'
+ isArray: false
+ name: minCpuPlatform
+ required: false
+ secret: false
+ - default: false
+ description: 'Full or partial URL of the accelerator type resource to attach
+ to this instance. For example: projects/my-project/zones/us-central1-c/acceleratorTypes/nvidia-tesla-p100
+ If you are creating an instance template, specify only the accelerator name.
+ See GPUs on Compute Engine for a full list of accelerator types.'
+ isArray: false
+ name: guestAcceleratorsAcceleratorType
+ required: false
+ secret: false
+ - default: false
+ description: The number of the guest accelerator cards exposed to this instance.
+ isArray: false
+ name: guestAcceleratorsAcceleratorCount
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether the resource should be protected against deletion.
+ isArray: false
+ name: deletionProtection
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates an instance resource in the specified project using the data
+ included in the request.
+ execution: false
+ name: gcp-compute-insert-instance
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+    description: 'The status of the operation, which can be one of the following:
+      PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+    description: 'User who requested the operation, for example: EMAILADDRESS.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
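+ # Note: the Operations outputs above are the standard Compute Engine operation
+ # resource. endTime is only set once an operation completes; progress (0-100)
+ # is informational and, per the field description, should not be used to
+ # estimate completion time.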
+ - arguments:
+ - default: false
+ description: Project ID for this request.
+ isArray: false
+ name: project
+ required: true
+ secret: false
+ - default: false
+ description: Name of the image family to search for.
+ isArray: false
+ name: family
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the latest image that is part of an image family and is not
+ deprecated.
+ execution: false
+ name: gcp-compute-get-image-from-family
+ outputs:
+ - contextPath: GoogleCloudCompute.Images.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.name
+ description: Name of the resource; provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceType
+ description: The type of the image used to create this disk. The default and
+ only value is RAW.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.rawDisk
+ description: The parameters of the raw disk image.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.rawDisk.source
+ description: The full Google Cloud Storage URL where the disk image is stored.
+ You must provide either this property or the sourceDisk property but not both.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.rawDisk.sha1Checksum
+ description: An optional SHA1 checksum of the disk image before unpackaging;
+ provided by the client when the disk image is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.rawDisk.containerType
+ description: The format used to encode and transmit the block device, which
+ should be TAR. This is just a container and transmission format and not a
+ runtime format. Provided by the client when the disk image is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated
+ description: The deprecation status associated with this image.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.state
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations which communicate the end-of-life date for
+ an image can use ACTIVE. Operations which create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations which use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.replacement
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.deprecated
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DEPRECATED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.obsolete
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to OBSOLETE. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.deleted
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DELETED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.status
+ description: The status of the image. An image can be used to create other resources,
+ such as instances, only after the image has been successfully created and
+ the status is set to READY. Possible values are FAILED, PENDING, or READY.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.archiveSizeBytes
+ description: Size of the image tar.gz archive stored in Google Cloud Storage
+ (in bytes).
+ type: string
+ - contextPath: GoogleCloudCompute.Images.diskSizeGb
+ description: Size of the image when restored onto a persistent disk (in GB).
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDisk
+ description: 'URL of the source disk used to create this image. This can be
+ a full or valid partial URL. You must provide either this property or the
+ rawDisk.source property but not both to create an image. For example, the
+ following are valid values: https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk,
+ projects/project/zones/zone/disks/disk, zones/zone/disks/disk'
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskId
+ description: The ID value of the disk used to create this image. This value
+ may be used to determine whether the image was taken from the current or a
+ previous instance of a given disk name.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.licenses
+ description: Any applicable license URI.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.family
+ description: The name of the image family to which this image belongs. You can
+ create disks by specifying an image family instead of a specific image name.
+ The image family always returns its latest image that is not deprecated. The
+ name of the image family must comply with RFC1035.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.imageEncryptionKey
+ description: Encrypts the image using a customer-supplied encryption key. After
+ you encrypt an image with a customer-supplied key, you must provide the same
+ key if you use the image later (e.g. to create a disk from the image). Customer-supplied
+ encryption keys do not protect access to metadata of the disk. If you do not
+ provide an encryption key when creating the image, then the disk will be encrypted
+ using an automatically generated key and you do not need to provide a key
+ to use the image later.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.imageEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.imageEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.imageEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskEncryptionKey
+ description: The customer-supplied encryption key of the source disk. Required
+ if the source disk is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.labels
+ description: Labels to apply to this image. These can be later modified by the
+ setLabels method.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.labelFingerprint
+ description: A fingerprint for the labels being applied to this image, which
+ is essentially a hash of the labels used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update labels. You must always provide an up-to-date fingerprint
+ hash in order to update or change labels, otherwise the request will fail
+ with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.guestOsFeatures
+ description: A list of features to enable on the guest operating system. Applicable
+ only for bootable images. Read Enabling guest operating system features to
+ see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.guestOsFeatures.type
+ description: The ID of a supported feature. Read Enabling guest operating system
+ features to see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.licenseCodes
+ description: Integer license codes indicating which licenses are attached to
+ this image.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImage
+ description: URL of the source image used to create this image. This can be
+ a full or valid partial URL.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageId
+ description: The ID value of the image used to create this image. This value
+ may be used to determine whether the image was taken from the current or a
+ previous instance of a given image name.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageEncryptionKey
+ description: The customer-supplied encryption key of the source image. Required
+ if the source image is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshot
+ description: URL of the source snapshot used to create this image. This can
+ be a full or valid partial URL.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotId
+ description: The ID value of the snapshot used to create this image. This
+ value may be used to determine whether the snapshot was taken from the current
+ or a previous instance of a given snapshot name.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotEncryptionKey
+ description: The customer-supplied encryption key of the source snapshot. Required
+ if the source snapshot is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.kind
+ description: Type of the resource. Always compute#image for images.
+ type: string
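+ # Example war-room usage for the command above (the project and family values
+ # are placeholders, not taken from this repo):
+ #   !gcp-compute-get-image-from-family project="my-project" family="debian-9"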
+ - arguments:
+ - default: false
+ description: Name of the network to return.
+ isArray: false
+ name: network
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified network.
+ execution: false
+ name: gcp-compute-get-network
+ outputs:
+ - contextPath: GoogleCloudCompute.Networks.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.name
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.gatewayIPv4
+ description: The gateway address for default routing out of the network. This
+ value is read only and is selected by GCP.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.autoCreateSubnetworks
+ description: When set to true, the VPC network is created in "auto" mode. When
+ set to false, the VPC network is created in "custom" mode.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Networks.subnetworks
+ description: Server-defined fully-qualified URLs for all subnetworks in this
+ VPC network.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.peerings
+ description: A list of network peerings for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.peerings.name
+ description: Name of this peering. Provided by the client when the peering is
+ created. The name must comply with RFC1035. Specifically, the name must be
+ 1-63 characters long and match the regular expression [a-z]([-a-z0-9]*[a-z0-9])?
+ which means the first character must be a lowercase letter, and all the following
+ characters must be a dash, lowercase letter, or digit, except the last character,
+ which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.peerings.network
+ description: The URL of the peer network. It can be either a full or partial
+ URL. The peer network may belong to a different project. If the partial URL
+ does not contain project, it is assumed that the peer network is in the same
+ project as the current network.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.peerings.state
+ description: State for the peering.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.peerings.stateDetails
+ description: Details about the current state of the peering.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.peerings.autoCreateRoutes
+ description: This field will be deprecated soon. Prefer using exchangeSubnetRoutes
+ instead. Indicates whether full mesh connectivity is created and managed automatically.
+ When it is set to true, Google Compute Engine will automatically create and
+ manage the routes between two networks when the state is ACTIVE. Otherwise,
+ the user needs to create routes manually to route packets to the peer network.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Networks.peerings.exchangeSubnetRoutes
+ description: Whether full mesh connectivity is created and managed automatically.
+ When it is set to true, Google Compute Engine will automatically create and
+ manage the routes between two networks when the peering state is ACTIVE. Otherwise,
+ the user needs to create routes manually to route packets to the peer network.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Networks.routingConfig
+ description: The network-level routing configuration for this network. Used
+ by Cloud Router to determine what type of network-wide routing behavior to
+ enforce.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.routingConfig.routingMode
+ description: The network-wide routing mode to use. If set to REGIONAL, this
+ network's Cloud Routers will only advertise routes with subnets of this network
+ in the same region as the router. If set to GLOBAL, this network's Cloud Routers
+ will advertise routes with all subnets of this network, across regions.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.kind
+ description: Type of the resource. Always compute#network for networks.
+ type: string
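+ # Example war-room usage (the network name is a placeholder):
+ #   !gcp-compute-get-network network="default"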
+ - arguments:
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves the list of instances contained within the specified zone.
+ execution: false
+ name: gcp-compute-list-instances
+ outputs:
+ - contextPath: GoogleCloudCompute.Instances.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.name
+ description: The name of the resource, provided by the client when initially
+ creating the resource. The resource name must be 1-63 characters long, and
+ comply with RFC1035. Specifically, the name must be 1-63 characters long and
+ match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first
+ character must be a lowercase letter, and all following characters must be
+ a dash, lowercase letter, or digit, except the last character, which cannot
+ be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.tags
+ description: Tags to apply to this instance. Tags are used to identify valid
+ sources or targets for network firewalls and are specified by the client during
+ instance creation. The tags can be later modified by the setTags method. Each
+ tag within the list must comply with RFC1035. Multiple tags can be specified
+ via the tags.items field.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.tags.items
+ description: An array of tags. Each tag must be 1-63 characters long, and comply
+ with RFC1035.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.tags.fingerprint
+ description: Specifies a fingerprint for this request, which is essentially
+ a hash of the tags' contents and is used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update tags. You must always provide an up-to-date fingerprint hash
+ in order to update or change tags.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.machineType
+ description: 'Full or partial URL of the machine type resource to use for this
+ instance, in the format: zones/zone/machineTypes/machine-type. This is provided
+ by the client when the instance is created.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.status
+ description: 'The status of the instance. One of the following values: PROVISIONING,
+ STAGING, RUNNING, STOPPING, STOPPED, SUSPENDING, SUSPENDED, and TERMINATED.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.statusMessage
+ description: An optional, human-readable explanation of the status.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.zone
+ description: URL of the zone where the instance resides. You must specify this
+ field as part of the HTTP request URL. It is not settable as a field in the
+ request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.canIpForward
+ description: Allows this instance to send and receive packets with non-matching
+ destination or source IPs. This is required if you plan to use this instance
+ to forward routes. For more information, see Enabling IP Forwarding.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces
+ description: An array of network configurations for this instance. These specify
+ how interfaces are configured to interact with other network services, such
+ as connecting to the internet. Multiple interfaces are supported per instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.network
+ description: URL of the network resource for this instance. When creating an
+ instance, if neither the network nor the subnetwork is specified, the default
+ network global/networks/default is used; if the network is not specified but
+ the subnetwork is specified, the network is inferred.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.subnetwork
+ description: The URL of the Subnetwork resource for this instance. If the network
+ resource is in legacy mode, do not provide this property. If the network is
+ in auto subnet mode, providing the subnetwork is optional. If the network
+ is in custom subnet mode, then this field should be specified. If you specify
+ this property, you can specify the subnetwork as a full or partial URL.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.networkIP
+ description: An IPv4 internal network address to assign to the instance for
+ this network interface. If not specified by the user, an unused internal IP
+ is assigned by the system.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.name
+ description: The name of the network interface, generated by the server. For
+ network devices, these are eth0, eth1, etc.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs
+ description: An array of configurations for this interface. Currently, only
+ one access config, ONE_TO_ONE_NAT, is supported. If there are no accessConfigs
+ specified, then this instance will have no external internet access.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.type
+ description: The type of configuration. The default and only option is ONE_TO_ONE_NAT.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.name
+ description: The name of this access configuration. The default and recommended
+ name is External NAT but you can use any arbitrary string you would like.
+ For example, My external IP or Network Access.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.natIP
+ description: An external IP address associated with this instance. Specify an
+ unused static external IP address available to the project or leave this field
+ undefined to use an IP from a shared ephemeral IP address pool. If you specify
+ a static external IP address, it must live in the same region as the zone
+ of the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.setPublicPtr
+ description: Specifies whether a public DNS 'PTR' record should be created to
+ map the external IP address of the instance to a DNS domain name.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.publicPtrDomainName
+ description: The DNS domain name for the public PTR record. This field can only
+ be set when the setPublicPtr field is enabled.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.networkTier
+ description: 'This signifies the networking tier used for configuring this access
+ configuration and can only take the following values: PREMIUM, STANDARD.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.kind
+ description: Type of the resource. Always compute#accessConfig for access configs.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.aliasIpRanges
+ description: An array of alias IP ranges for this network interface. Can only
+ be specified for network interfaces on subnet-mode networks.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.aliasIpRanges.ipCidrRange
+ description: The IP CIDR range represented by this alias IP range. This IP CIDR
+ range must belong to the specified subnetwork and cannot contain IP addresses
+ reserved by system or used by other network interfaces. This range may be
+ a single IP address (e.g. 0.0.0.0), a netmask (e.g. /24) or a CIDR format
+ string (e.g. 0.0.0.0/24).
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.aliasIpRanges.subnetworkRangeName
+ description: Optional subnetwork secondary range name specifying the secondary
+ range from which to allocate the IP CIDR range for this alias IP range. If
+ left unspecified, the primary range of the subnetwork will be used.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.fingerprint
+ description: Fingerprint hash of contents stored in this network interface.
+ This field will be ignored when inserting an Instance or adding a NetworkInterface.
+ An up-to-date fingerprint must be provided in order to update the NetworkInterface,
+ otherwise the request will fail with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.kind
+ description: Type of the resource. Always compute#networkInterface for network
+ interfaces.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks
+ description: Array of disks associated with this instance. Persistent disks
+ must be created before you can assign them.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.type
+ description: Specifies the type of the disk, either SCRATCH or PERSISTENT. If
+ not specified, the default is PERSISTENT.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.mode
+ description: The mode in which to attach this disk, either READ_WRITE or READ_ONLY.
+ If not specified, the default is to attach the disk in READ_WRITE mode.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.source
+ description: Specifies a valid partial or full URL to an existing Persistent
+ Disk resource. When creating a new instance, one of initializeParams.sourceImage
+ or disks.source is required except for local SSD.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.deviceName
+ description: Specifies a unique device name of your choice that is reflected
+ into the /dev/disk/by-id/google-* tree of a Linux operating system running
+ within the instance. This name can be used to reference the device for mounting,
+ resizing, and so on, from within the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.index
+ description: A zero-based index to this disk, where 0 is reserved for the boot
+ disk. If you have many disks attached to an instance, each disk would have
+ a unique index number.
+ type: number
+ - contextPath: GoogleCloudCompute.Instances.disks.boot
+ description: Indicates that this is a boot disk. The virtual machine will use
+ the first partition of the disk for its root filesystem.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams
+ description: Specifies the parameters for a new disk that will be created alongside
+ the new instance. Use initialization parameters to create boot disks or local
+ SSDs attached to the new instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.diskName
+ description: Specifies the disk name. If not specified, the default is to use
+ the name of the instance. If the disk with the instance name exists already
+ in the given zone/region, a new name will be automatically generated.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImage
+ description: The source image to create this disk. When creating a new instance,
+ one of initializeParams.sourceImage or disks.source is required except for
+ local SSD.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.diskSizeGb
+ description: Specifies the size of the disk in base-2 GB.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.diskType
+ description: Specifies the disk type to use to create the instance. If not specified,
+ the default is pd-standard, specified using the full URL.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImageEncryptionKey
+ description: The customer-supplied encryption key of the source image. Required
+ if the source image is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImageEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImageEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImageEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud
+ KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.labels
+ description: Labels to apply to this disk. These can be later modified by the
+ disks.setLabels method. This field is only applicable for persistent disks.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.labels.key
+ description: The disk label key.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.labels.value
+ description: The disk label value.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.description
+ description: An optional description. Provide this property when creating the
+ disk.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.autoDelete
+ description: Specifies whether the disk will be auto-deleted when the instance
+ is deleted (but not when the disk is detached from the instance).
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.disks.licenses
+ description: Any valid publicly visible licenses.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.interface
+ description: Specifies the disk interface to use for attaching this disk, which
+ is either SCSI or NVME. The default is SCSI. Persistent disks must always
+ use SCSI and the request will fail if you attempt to attach a persistent disk
+ in any other format than SCSI. Local SSDs can use either NVMe or SCSI. For
+ performance characteristics of SCSI over NVMe, see Local SSD performance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.guestOsFeatures
+ description: A list of features to enable on the guest operating system. Applicable
+ only for bootable images. Read Enabling guest operating system features to
+ see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.guestOsFeatures.type
+ description: The ID of a supported feature. Read Enabling guest operating system
+ features to see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.diskEncryptionKey
+ description: Encrypts or decrypts a disk using a customer-supplied encryption
+ key.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.diskEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.diskEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.diskEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.kind
+ description: Type of the resource. Always compute#attachedDisk for attached
+ disks.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata
+ description: The metadata key/value pairs assigned to this instance. This includes
+ custom metadata and predefined keys.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.fingerprint
+ description: Specifies a fingerprint for this request, which is essentially
+ a hash of the metadata's contents and is used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update metadata. You must always provide an up-to-date fingerprint
+ hash in order to update or change metadata, otherwise the request will fail
+ with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.items
+ description: Array of key/value pairs. The total size of all keys and values
+ must be less than 512 KB.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.items.key
+ description: 'Key for the metadata entry. Keys must conform to the following
+ regexp: [a-zA-Z0-9-_]+, and be less than 128 bytes in length. This is reflected
+ as part of a URL in the metadata server. Additionally, to avoid ambiguity,
+ keys must not conflict with any other metadata keys for the project.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.items.value
+ description: Value for the metadata entry. These are free-form strings, and
+ only have meaning as interpreted by the image running in the instance. The
+ only restriction placed on values is that their size must be less than or
+ equal to 262144 bytes (256 KiB).
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.kind
+ description: Type of the resource. Always compute#metadata for metadata.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.serviceAccounts
+ description: A list of service accounts, with their specified scopes, authorized
+ for this instance. Only one service account per VM instance is supported.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.serviceAccounts.email
+ description: Email address of the service account.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.serviceAccounts.scopes
+ description: The list of scopes to be made available for this service account.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.selfLink
+ description: Server-defined URL for this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling
+ description: Sets the scheduling options for this instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.onHostMaintenance
+ description: Defines the maintenance behavior for this instance. For standard
+ instances, the default behavior is MIGRATE. For preemptible instances, the
+ default and only possible behavior is TERMINATE. For more information, see
+ Setting Instance Scheduling Options.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.automaticRestart
+ description: Specifies whether the instance should be automatically restarted
+ if it is terminated by Compute Engine (not terminated by a user). You can
+ only set the automatic restart option for standard instances. Preemptible
+ instances cannot be automatically restarted.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.scheduling.preemptible
+ description: Defines whether the instance is preemptible. This can only be set
+ during instance creation; it cannot be set or changed after the instance has
+ been created.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.scheduling.nodeAffinities
+ description: A set of node affinity and anti-affinity configurations.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.nodeAffinities.key
+ description: Corresponds to the label key of Node resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.nodeAffinities.operator
+ description: Defines the operation of node selection.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.nodeAffinities.values
+ description: Corresponds to the label values of Node resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.cpuPlatform
+ description: The CPU platform used by this instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.labels
+ description: Labels to apply to this instance. These can be later modified by
+ the setLabels method.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.labels.key
+ description: The label key.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.labels.value
+ description: The label value.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.labelFingerprint
+ description: A fingerprint for this request, which is essentially a hash of
+ the labels' contents and is used for optimistic locking. The fingerprint is initially
+ generated by Compute Engine and changes after every request to modify or update
+ labels. You must always provide an up-to-date fingerprint hash in order to
+ update or change labels.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.minCpuPlatform
+ description: 'Specifies a minimum CPU platform for the VM instance. Applicable
+ values are the friendly names of CPU platforms, such as minCpuPlatform: "Intel
+ Haswell" or minCpuPlatform: "Intel Sandy Bridge".'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.guestAccelerators
+ description: A list of the type and count of accelerator cards attached to the
+ instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.guestAccelerators.acceleratorType
+ description: 'Full or partial URL of the accelerator type resource to attach
+ to this instance. For example: projects/my-project/zones/us-central1-c/acceleratorTypes/nvidia-tesla-p100
+ If you are creating an instance template, specify only the accelerator name.
+ See GPUs on Compute Engine for a full list of accelerator types.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.guestAccelerators.acceleratorCount
+ description: The number of the guest accelerator cards exposed to this instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.startRestricted
+ description: Whether a VM has been restricted for start because Compute Engine
+ has detected suspicious activity.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.deletionProtection
+ description: Whether the resource should be protected against deletion.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.hostname
+ description: The hostname of the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.kind
+ description: Type of the resource. Always compute#instance for instances.
+ type: string
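+ # Example war-room usage combining the filter and paging arguments documented
+ # above (all values are placeholders):
+ #   !gcp-compute-list-instances zone="us-central1-a" filters="name != example-instance" maxResults="100" orderBy="creationTimestamp desc"
+ # To page through results, pass the returned nextPageToken back as pageToken.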
+ - arguments:
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves an aggregated list of all of the instances in your project
+ across all regions and zones.
+ execution: false
+ name: gcp-compute-aggregated-list-instances
+ outputs:
+ - contextPath: GoogleCloudCompute.Instances.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.name
+ description: The name of the resource, provided by the client when initially
+ creating the resource. The resource name must be 1-63 characters long, and
+ comply with RFC1035. Specifically, the name must be 1-63 characters long and
+ match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first
+ character must be a lowercase letter, and all following characters must be
+ a dash, lowercase letter, or digit, except the last character, which cannot
+ be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.tags
+ description: Tags to apply to this instance. Tags are used to identify valid
+ sources or targets for network firewalls and are specified by the client during
+ instance creation. The tags can be later modified by the setTags method. Each
+ tag within the list must comply with RFC1035. Multiple tags can be specified
+ via the tags.items field.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.tags.items
+ description: An array of tags. Each tag must be 1-63 characters long, and comply
+ with RFC1035.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.tags.fingerprint
+ description: Specifies a fingerprint for this request, which is essentially
+ a hash of the tags' contents and is used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update tags. You must always provide an up-to-date fingerprint hash
+ in order to update or change tags.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.machineType
+ description: 'Full or partial URL of the machine type resource to use for this
+ instance, in the format: zones/zone/machineTypes/machine-type. This is provided
+ by the client when the instance is created.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.status
+ description: 'The status of the instance. One of the following values: PROVISIONING,
+ STAGING, RUNNING, STOPPING, STOPPED, SUSPENDING, SUSPENDED, and TERMINATED.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.statusMessage
+ description: An optional, human-readable explanation of the status.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.zone
+ description: URL of the zone where the instance resides. You must specify this
+ field as part of the HTTP request URL. It is not settable as a field in the
+ request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.canIpForward
+ description: Allows this instance to send and receive packets with non-matching
+ destination or source IPs. This is required if you plan to use this instance
+ to forward routes. For more information, see Enabling IP Forwarding.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces
+ description: An array of network configurations for this instance. These specify
+ how interfaces are configured to interact with other network services, such
+ as connecting to the internet. Multiple interfaces are supported per instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.network
+ description: URL of the network resource for this instance. When creating an
+ instance, if neither the network nor the subnetwork is specified, the default
+ network global/networks/default is used; if the network is not specified but
+ the subnetwork is specified, the network is inferred.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.subnetwork
+ description: The URL of the Subnetwork resource for this instance. If the network
+ resource is in legacy mode, do not provide this property. If the network is
+ in auto subnet mode, providing the subnetwork is optional. If the network
+ is in custom subnet mode, then this field should be specified. If you specify
+ this property, you can specify the subnetwork as a full or partial URL.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.networkIP
+ description: An IPv4 internal network address to assign to the instance for
+ this network interface. If not specified by the user, an unused internal IP
+ is assigned by the system.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.name
+ description: The name of the network interface, generated by the server. For
+ network devices, these are eth0, eth1, etc.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs
+ description: An array of configurations for this interface. Currently, only
+ one access config, ONE_TO_ONE_NAT, is supported. If there are no accessConfigs
+ specified, then this instance will have no external internet access.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.type
+ description: The type of configuration. The default and only option is ONE_TO_ONE_NAT.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.name
+ description: The name of this access configuration. The default and recommended
+ name is External NAT but you can use any arbitrary string you would like.
+ For example, My external IP or Network Access.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.natIP
+ description: An external IP address associated with this instance. Specify an
+ unused static external IP address available to the project or leave this field
+ undefined to use an IP from a shared ephemeral IP address pool. If you specify
+ a static external IP address, it must live in the same region as the zone
+ of the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.setPublicPtr
+ description: Specifies whether a public DNS 'PTR' record should be created to
+ map the external IP address of the instance to a DNS domain name.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.publicPtrDomainName
+ description: The DNS domain name for the public PTR record. This field can only
+ be set when the setPublicPtr field is enabled.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.networkTier
+ description: 'This signifies the networking tier used for configuring this access
+ configuration and can only take the following values: PREMIUM, STANDARD.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.kind
+ description: Type of the resource. Always compute#accessConfig for access configs.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.aliasIpRanges
+ description: An array of alias IP ranges for this network interface. Can only
+ be specified for network interfaces on subnet-mode networks.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.aliasIpRanges.ipCidrRange
+ description: The IP CIDR range represented by this alias IP range. This IP CIDR
+ range must belong to the specified subnetwork and cannot contain IP addresses
+ reserved by system or used by other network interfaces. This range may be
+ a single IP address (e.g. 0.0.0.0), a netmask (e.g. /24) or a CIDR format
+ string (e.g. 0.0.0.0/24).
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.aliasIpRanges.subnetworkRangeName
+ description: Optional subnetwork secondary range name specifying the secondary
+ range from which to allocate the IP CIDR range for this alias IP range. If
+ left unspecified, the primary range of the subnetwork will be used.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.fingerprint
+ description: Fingerprint hash of contents stored in this network interface.
+ This field will be ignored when inserting an Instance or adding a NetworkInterface.
+ An up-to-date fingerprint must be provided in order to update the NetworkInterface,
+ otherwise the request will fail with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.kind
+ description: Type of the resource. Always compute#networkInterface for network
+ interfaces.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks
+ description: Array of disks associated with this instance. Persistent disks
+ must be created before you can assign them.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.type
+ description: Specifies the type of the disk, either SCRATCH or PERSISTENT. If
+ not specified, the default is PERSISTENT.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.mode
+ description: The mode in which to attach this disk, either READ_WRITE or READ_ONLY.
+ If not specified, the default is to attach the disk in READ_WRITE mode.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.source
+ description: Specifies a valid partial or full URL to an existing Persistent
+ Disk resource. When creating a new instance, one of initializeParams.sourceImage
+ or disks.source is required except for local SSD.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.deviceName
+ description: Specifies a unique device name of your choice that is reflected
+ into the /dev/disk/by-id/google-* tree of a Linux operating system running
+ within the instance. This name can be used to reference the device for mounting,
+ resizing, and so on, from within the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.index
+ description: A zero-based index to this disk, where 0 is reserved for the boot
+ disk. If you have many disks attached to an instance, each disk would have
+ a unique index number.
+ type: number
+ - contextPath: GoogleCloudCompute.Instances.disks.boot
+ description: Indicates that this is a boot disk. The virtual machine will use
+ the first partition of the disk for its root filesystem.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams
+ description: Specifies the parameters for a new disk that will be created alongside
+ the new instance. Use initialization parameters to create boot disks or local
+ SSDs attached to the new instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.diskName
+ description: Specifies the disk name. If not specified, the default is to use
+ the name of the instance. If the disk with the instance name exists already
+ in the given zone/region, a new name will be automatically generated.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImage
+ description: The source image to create this disk. When creating a new instance,
+ one of initializeParams.sourceImage or disks.source is required except for
+ local SSD.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.diskSizeGb
+ description: Specifies the size of the disk in base-2 GB.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.diskType
+ description: Specifies the disk type to use to create the instance. If not specified,
+ the default is pd-standard, specified using the full URL.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImageEncryptionKey
+ description: The customer-supplied encryption key of the source image. Required
+ if the source image is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImageEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImageEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImageEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.labels
+ description: Labels to apply to this disk. These can be later modified by the
+ disks.setLabels method. This field is only applicable for persistent disks.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.labels.key
+ description: The disk label key.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.labels.value
+ description: The disk label value.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.description
+ description: An optional description. Provide this property when creating the
+ disk.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.autoDelete
+ description: Specifies whether the disk will be auto-deleted when the instance
+ is deleted (but not when the disk is detached from the instance).
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.disks.licenses
+ description: Any valid publicly visible licenses.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.interface
+ description: Specifies the disk interface to use for attaching this disk, which
+ is either SCSI or NVME. The default is SCSI. Persistent disks must always
+ use SCSI and the request will fail if you attempt to attach a persistent disk
+ in any other format than SCSI. Local SSDs can use either NVME or SCSI. For
+ performance characteristics of SCSI over NVMe, see Local SSD performance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.guestOsFeatures
+ description: A list of features to enable on the guest operating system. Applicable
+ only for bootable images. Read Enabling guest operating system features to
+ see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.guestOsFeatures.type
+ description: The ID of a supported feature. Read Enabling guest operating system
+ features to see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.diskEncryptionKey
+ description: Encrypts or decrypts a disk using a customer-supplied encryption
+ key.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.diskEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.diskEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.diskEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.kind
+ description: Type of the resource. Always compute#attachedDisk for attached
+ disks.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata
+ description: The metadata key/value pairs assigned to this instance. This includes
+ custom metadata and predefined keys.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.fingerprint
+ description: Specifies a fingerprint for this request, which is essentially
+ a hash of the metadata's contents and is used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update metadata. You must always provide an up-to-date fingerprint
+ hash in order to update or change metadata, otherwise the request will fail
+ with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.items
+ description: Array of key/value pairs. The total size of all keys and values
+ must be less than 512 KB.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.items.key
+ description: 'Key for the metadata entry. Keys must conform to the following
+ regexp: [a-zA-Z0-9-_]+, and be less than 128 bytes in length. This is reflected
+ as part of a URL in the metadata server. Additionally, to avoid ambiguity,
+ keys must not conflict with any other metadata keys for the project.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.items.value
+ description: Value for the metadata entry. These are free-form strings, and
+ only have meaning as interpreted by the image running in the instance. The
+ only restriction placed on values is that their size must be less than or
+ equal to 262144 bytes (256 KiB).
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.kind
+ description: Type of the resource. Always compute#metadata for metadata.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.serviceAccounts
+ description: A list of service accounts, with their specified scopes, authorized
+ for this instance. Only one service account per VM instance is supported.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.serviceAccounts.email
+ description: Email address of the service account.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.serviceAccounts.scopes
+ description: The list of scopes to be made available for this service account.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.selfLink
+ description: Server-defined URL for this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling
+ description: Sets the scheduling options for this instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.onHostMaintenance
+ description: Defines the maintenance behavior for this instance. For standard
+ instances, the default behavior is MIGRATE. For preemptible instances, the
+ default and only possible behavior is TERMINATE. For more information, see
+ Setting Instance Scheduling Options.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.automaticRestart
+ description: Specifies whether the instance should be automatically restarted
+ if it is terminated by Compute Engine (not terminated by a user). You can
+ only set the automatic restart option for standard instances. Preemptible
+ instances cannot be automatically restarted.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.scheduling.preemptible
+ description: Defines whether the instance is preemptible. This can only be set
+ during instance creation, it cannot be set or changed after the instance has
+ been created.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.scheduling.nodeAffinities
+ description: A set of node affinity and anti-affinity configurations.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.nodeAffinities.key
+ description: Corresponds to the label key of Node resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.nodeAffinities.operator
+ description: Defines the operation of node selection.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.nodeAffinities.values
+ description: Corresponds to the label values of Node resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.cpuPlatform
+ description: The CPU platform used by this instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.labels
+ description: Labels to apply to this instance. These can be later modified by
+ the setLabels method.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.labels.key
+ description: The label key.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.labels.value
+ description: The label value.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.labelFingerprint
+ description: A fingerprint for this request, which is essentially a hash of
+ the labels' contents and is used for optimistic locking. The fingerprint is initially
+ generated by Compute Engine and changes after every request to modify or update
+ labels. You must always provide an up-to-date fingerprint hash in order to
+ update or change labels.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.minCpuPlatform
+ description: 'Specifies a minimum CPU platform for the VM instance. Applicable
+ values are the friendly names of CPU platforms, such as minCpuPlatform: "Intel
+ Haswell" or minCpuPlatform: "Intel Sandy Bridge".'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.guestAccelerators
+ description: A list of the type and count of accelerator cards attached to the
+ instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.guestAccelerators.acceleratorType
+ description: 'Full or partial URL of the accelerator type resource to attach
+ to this instance. For example: projects/my-project/zones/us-central1-c/acceleratorTypes/nvidia-tesla-p100
+ If you are creating an instance template, specify only the accelerator name.
+ See GPUs on Compute Engine for a full list of accelerator types.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.guestAccelerators.acceleratorCount
+ description: The number of the guest accelerator cards exposed to this instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.startRestricted
+ description: Whether a VM has been restricted for start because Compute Engine
+ has detected suspicious activity.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.deletionProtection
+ description: Whether the resource should be protected against deletion.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.hostname
+ description: Hostname of the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.kind
+ description: Type of the resource. Always compute#instance for instances.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the Operations resource to return.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the specified Operations resource. Gets a list of operations
+ by making a list() request.
+ execution: false
+ name: gcp-compute-get-global-operation
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example an email address.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the Operations resource to return.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: Name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the specified zone-specific Operations resource.
+ execution: false
+ name: gcp-compute-get-zone-operation
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example an email address.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
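+ # Usage sketch for gcp-compute-get-zone-operation (name and zone are hypothetical):
+ #   !gcp-compute-get-zone-operation name="operation-example-1234" zone="us-central1-a"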
+ - arguments:
+ - default: false
+ description: Name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of Operation resources contained within the specified
+ zone.
+ execution: false
+ name: gcp-compute-list-zone-operation
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example an email address.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
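+ # Usage sketch for gcp-compute-list-zone-operation (zone, page size, and filter
+ # values are hypothetical); the filter follows the field/operator/value form
+ # described in the filters argument above:
+ #   !gcp-compute-list-zone-operation zone="us-central1-a" maxResults="50" filters="operationType != delete"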
+ - arguments:
+ - default: false
+ description: Name of the Operations resource to delete.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: Name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the specified zone-specific Operations resource.
+ execution: false
+ name: gcp-compute-delete-zone-operation
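+ # Usage sketch for gcp-compute-delete-zone-operation (values are hypothetical):
+ #   !gcp-compute-delete-zone-operation name="operation-example-1234" zone="us-central1-a"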
+ - arguments:
+ - default: false
+ description: Name of the instance resource to return.
+ isArray: false
+ name: instance
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified Instance resource. Gets a list of available
+ instances by making a list() request.
+ execution: false
+ name: gcp-compute-get-instance
+ outputs:
+ - contextPath: GoogleCloudCompute.Instances.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.name
+ description: The name of the resource, provided by the client when initially
+ creating the resource. The resource name must be 1-63 characters long, and
+ comply with RFC1035. Specifically, the name must be 1-63 characters long and
+ match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first
+ character must be a lowercase letter, and all following characters must be
+ a dash, lowercase letter, or digit, except the last character, which cannot
+ be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.tags
+ description: Tags to apply to this instance. Tags are used to identify valid
+ sources or targets for network firewalls and are specified by the client during
+ instance creation. The tags can be later modified by the setTags method. Each
+ tag within the list must comply with RFC1035. Multiple tags can be specified
+ via the tags.items field.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.tags.items
+ description: An array of tags. Each tag must be 1-63 characters long, and comply
+ with RFC1035.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.tags.fingerprint
+ description: Specifies a fingerprint for this request, which is essentially
+ a hash of the tags' contents and is used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update tags. You must always provide an up-to-date fingerprint hash
+ in order to update or change tags.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.machineType
+ description: 'Full or partial URL of the machine type resource to use for this
+ instance, in the format: zones/zone/machineTypes/machine-type. This is provided
+ by the client when the instance is created.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.status
+ description: 'The status of the instance. One of the following values: PROVISIONING,
+ STAGING, RUNNING, STOPPING, STOPPED, SUSPENDING, SUSPENDED, and TERMINATED.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.statusMessage
+ description: An optional, human-readable explanation of the status.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.zone
+ description: URL of the zone where the instance resides. You must specify this
+ field as part of the HTTP request URL. It is not settable as a field in the
+ request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.canIpForward
+ description: Allows this instance to send and receive packets with non-matching
+ destination or source IPs. This is required if you plan to use this instance
+ to forward routes. For more information, see Enabling IP Forwarding.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces
+ description: An array of network configurations for this instance. These specify
+ how interfaces are configured to interact with other network services, such
+ as connecting to the internet. Multiple interfaces are supported per instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.network
+ description: URL of the network resource for this instance. When creating an
+ instance, if neither the network nor the subnetwork is specified, the default
+ network global/networks/default is used; if the network is not specified but
+ the subnetwork is specified, the network is inferred.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.subnetwork
+ description: The URL of the Subnetwork resource for this instance. If the network
+ resource is in legacy mode, do not provide this property. If the network is
+ in auto subnet mode, providing the subnetwork is optional. If the network
+ is in custom subnet mode, then this field should be specified. If you specify
+ this property, you can specify the subnetwork as a full or partial URL
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.networkIP
+ description: An IPv4 internal network address to assign to the instance for
+ this network interface. If not specified by the user, an unused internal IP
+ is assigned by the system.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.name
+ description: 'The name of the network interface, generated by the server. For
+ network devices, these are eth0, eth1, etc.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs
+ description: An array of configurations for this interface. Currently, only
+ one access config, ONE_TO_ONE_NAT, is supported. If there are no accessConfigs
+ specified, then this instance will have no external internet access.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.type
+ description: The type of configuration. The default and only option is ONE_TO_ONE_NAT.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.name
+ description: The name of this access configuration. The default and recommended
+ name is External NAT but you can use any arbitrary string you would like.
+ For example, My external IP or Network Access.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.natIP
+ description: An external IP address associated with this instance. Specify an
+ unused static external IP address available to the project or leave this field
+ undefined to use an IP from a shared ephemeral IP address pool. If you specify
+ a static external IP address, it must live in the same region as the zone
+ of the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.setPublicPtr
+ description: Specifies whether a public DNS 'PTR' record should be created to
+ map the external IP address of the instance to a DNS domain name.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.publicPtrDomainName
+ description: The DNS domain name for the public PTR record. This field can only
+ be set when the setPublicPtr field is enabled.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.networkTier
+ description: 'This signifies the networking tier used for configuring this access
+ configuration and can only take the following values: PREMIUM, STANDARD.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.accessConfigs.kind
+ description: Type of the resource. Always compute#accessConfig for access configs.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.aliasIpRanges
+ description: An array of alias IP ranges for this network interface. Can only
+ be specified for network interfaces on subnet-mode networks.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.aliasIpRanges.ipCidrRange
+ description: The IP CIDR range represented by this alias IP range. This IP CIDR
+ range must belong to the specified subnetwork and cannot contain IP addresses
+ reserved by system or used by other network interfaces. This range may be
+ a single IP address (e.g. 0.0.0.0), a netmask (e.g. /24) or a CIDR format
+ string (e.g. 0.0.0.0/24).
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.aliasIpRanges.subnetworkRangeName
+ description: Optional subnetwork secondary range name specifying the secondary
+ range from which to allocate the IP CIDR range for this alias IP range. If
+ left unspecified, the primary range of the subnetwork will be used.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.fingerprint
+ description: Fingerprint hash of contents stored in this network interface.
+ This field will be ignored when inserting an Instance or adding a NetworkInterface.
+ An up-to-date fingerprint must be provided in order to update the NetworkInterface,
+ otherwise the request will fail with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.networkInterfaces.kind
+ description: Type of the resource. Always compute#networkInterface for network
+ interfaces.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks
+ description: Array of disks associated with this instance. Persistent disks
+ must be created before you can assign them.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.type
+ description: Specifies the type of the disk, either SCRATCH or PERSISTENT. If
+ not specified, the default is PERSISTENT.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.mode
+ description: The mode in which to attach this disk, either READ_WRITE or READ_ONLY.
+ If not specified, the default is to attach the disk in READ_WRITE mode.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.source
+ description: Specifies a valid partial or full URL to an existing Persistent
+ Disk resource. When creating a new instance, one of initializeParams.sourceImage
+ or disks.source is required except for local SSD.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.deviceName
+ description: Specifies a unique device name of your choice that is reflected
+ into the /dev/disk/by-id/google-* tree of a Linux operating system running
+ within the instance. This name can be used to reference the device for mounting,
+ resizing, and so on, from within the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.index
+ description: A zero-based index to this disk, where 0 is reserved for the boot
+ disk. If you have many disks attached to an instance, each disk would have
+ a unique index number.
+ type: number
+ - contextPath: GoogleCloudCompute.Instances.disks.boot
+ description: Indicates that this is a boot disk. The virtual machine will use
+ the first partition of the disk for its root filesystem.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams
+ description: Specifies the parameters for a new disk that will be created alongside
+ the new instance. Use initialization parameters to create boot disks or local
+ SSDs attached to the new instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.diskName
+ description: Specifies the disk name. If not specified, the default is to use
+ the name of the instance. If the disk with the instance name exists already
+ in the given zone/region, a new name will be automatically generated.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImage
+ description: The source image to create this disk. When creating a new instance,
+ one of initializeParams.sourceImage or disks.source is required except for
+ local SSD.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.diskSizeGb
+ description: Specifies the size of the disk in base-2 GB.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.diskType
+ description: Specifies the disk type to use to create the instance. If not specified,
+ the default is pd-standard, specified using the full URL.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImageEncryptionKey
+ description: The customer-supplied encryption key of the source image. Required
+ if the source image is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImageEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImageEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.sourceImageEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.labels
+ description: Labels to apply to this disk. These can be later modified by the
+ disks.setLabels method. This field is only applicable for persistent disks.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.labels.key
+ description: The disk label key.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.labels.value
+ description: The disk label value.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.initializeParams.description
+ description: An optional description. Provide this property when creating the
+ disk.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.autoDelete
+ description: Specifies whether the disk will be auto-deleted when the instance
+ is deleted (but not when the disk is detached from the instance).
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.disks.licenses
+ description: Any valid publicly visible licenses.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.interface
+ description: Specifies the disk interface to use for attaching this disk, which
+ is either SCSI or NVME. The default is SCSI. Persistent disks must always
+ use SCSI and the request will fail if you attempt to attach a persistent disk
+ in any other format than SCSI. Local SSDs can use either NVME or SCSI. For
+ performance characteristics of SCSI over NVMe, see Local SSD performance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.guestOsFeatures
+ description: A list of features to enable on the guest operating system. Applicable
+ only for bootable images. Read Enabling guest operating system features to
+ see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.guestOsFeatures.type
+ description: The ID of a supported feature. Read Enabling guest operating system
+ features to see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.diskEncryptionKey
+ description: Encrypts or decrypts a disk using a customer-supplied encryption
+ key.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.diskEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.diskEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.diskEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.disks.kind
+ description: Type of the resource. Always compute#attachedDisk for attached
+ disks.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata
+ description: The metadata key/value pairs assigned to this instance. This includes
+ custom metadata and predefined keys.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.fingerprint
+ description: Specifies a fingerprint for this request, which is essentially
+ a hash of the metadata's contents and is used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update metadata. You must always provide an up-to-date fingerprint
+ hash in order to update or change metadata, otherwise the request will fail
+ with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.items
+ description: Array of key/value pairs. The total size of all keys and values
+ must be less than 512 KB.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.items.key
+ description: 'Key for the metadata entry. Keys must conform to the following
+ regexp: [a-zA-Z0-9-_]+, and be less than 128 bytes in length. This is reflected
+ as part of a URL in the metadata server. Additionally, to avoid ambiguity,
+ keys must not conflict with any other metadata keys for the project.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.items.value
+ description: Value for the metadata entry. These are free-form strings, and
+ only have meaning as interpreted by the image running in the instance. The
+ only restriction placed on values is that their size must be less than or
+ equal to 262144 bytes (256 KiB).
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.metadata.kind
+ description: Type of the resource. Always compute#metadata for metadata.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.serviceAccounts
+ description: A list of service accounts, with their specified scopes, authorized
+ for this instance. Only one service account per VM instance is supported.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.serviceAccounts.email
+ description: Email address of the service account.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.serviceAccounts.scopes
+ description: The list of scopes to be made available for this service account.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.selfLink
+ description: Server-defined URL for this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling
+ description: Sets the scheduling options for this instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.onHostMaintenance
+ description: Defines the maintenance behavior for this instance. For standard
+ instances, the default behavior is MIGRATE. For preemptible instances, the
+ default and only possible behavior is TERMINATE. For more information, see
+ Setting Instance Scheduling Options.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.automaticRestart
+ description: Specifies whether the instance should be automatically restarted
+ if it is terminated by Compute Engine (not terminated by a user). You can
+ only set the automatic restart option for standard instances. Preemptible
+ instances cannot be automatically restarted.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.scheduling.preemptible
+ description: Defines whether the instance is preemptible. This can only be set
+ during instance creation, it cannot be set or changed after the instance has
+ been created.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.scheduling.nodeAffinities
+ description: A set of node affinity and anti-affinity configurations.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.nodeAffinities.key
+ description: Corresponds to the label key of Node resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.nodeAffinities.operator
+ description: Defines the operation of node selection.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.scheduling.nodeAffinities.values
+ description: Corresponds to the label values of Node resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.cpuPlatform
+ description: The CPU platform used by this instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.labels
+ description: Labels to apply to this instance. These can be later modified by
+ the setLabels method.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.labels.key
+ description: The label key.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.labels.value
+ description: The label value.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.labelFingerprint
+ description: A fingerprint for this request, which is essentially a hash of
+ the labels' contents and is used for optimistic locking. The fingerprint is initially
+ generated by Compute Engine and changes after every request to modify or update
+ labels. You must always provide an up-to-date fingerprint hash in order to
+ update or change labels.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.minCpuPlatform
+ description: 'Specifies a minimum CPU platform for the VM instance. Applicable
+ values are the friendly names of CPU platforms, such as minCpuPlatform: "Intel
+ Haswell" or minCpuPlatform: "Intel Sandy Bridge".'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.guestAccelerators
+ description: A list of the type and count of accelerator cards attached to the
+ instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.guestAccelerators.acceleratorType
+ description: 'Full or partial URL of the accelerator type resource to attach
+ to this instance. For example: projects/my-project/zones/us-central1-c/acceleratorTypes/nvidia-tesla-p100
+ If you are creating an instance template, specify only the accelerator name.
+ See GPUs on Compute Engine for a full list of accelerator types.'
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.guestAccelerators.acceleratorCount
+ description: The number of the guest accelerator cards exposed to this instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.startRestricted
+ description: Whether a VM has been restricted for start because Compute Engine
+ has detected suspicious activity.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.deletionProtection
+ description: Whether the resource should be protected against deletion.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Instances.hostname
+ description: Hostname of the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.Instances.kind
+ description: Type of the resource. Always compute#instance for instances.
+ type: string
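+ # Usage sketch for gcp-compute-get-instance (instance and zone are hypothetical):
+ #   !gcp-compute-get-instance instance="example-instance" zone="us-central1-a"
+ # The response populates the GoogleCloudCompute.Instances context path described above.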
+ - arguments:
+ - default: false
+ description: Name of the instance resource to delete.
+ isArray: false
+ name: instance
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the specified Instance resource. For more information, see
+ Stopping or Deleting an Instance.
+ execution: false
+ name: gcp-compute-delete-instance
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the instance resource to start.
+ isArray: false
+ name: instance
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Starts an instance that was stopped using the instances().stop method.
+ For more information, see Restart an instance.
+ execution: false
+ name: gcp-compute-start-instance
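+ # A minimal usage sketch (placeholder values; assumes the standard XSOAR
+ # bang-command syntax):
+ #   !gcp-compute-start-instance instance="my-instance" zone="us-central1-a"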
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the instance resource to stop.
+ isArray: false
+ name: instance
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Stops a running instance, shutting it down cleanly, and allows you
+ to restart the instance at a later time. Stopped instances do not incur VM usage
+ charges while they are stopped. However, resources that the VM is using, such
+ as persistent disks and static IP addresses, will continue to be charged until
+ they are deleted. For more information, see Stopping an instance.
+ execution: false
+ name: gcp-compute-stop-instance
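+ # A minimal usage sketch (placeholder values):
+ #   !gcp-compute-stop-instance instance="my-instance" zone="us-central1-a"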
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the instance scoping this request.
+ isArray: false
+ name: instance
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Performs a reset on the instance. This is a hard reset; the VM does
+ not do a graceful shutdown. For more information, see Resetting an instance.
+ execution: false
+ name: gcp-compute-reset-instance
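+ # A minimal usage sketch (placeholder values); note this performs a hard reset
+ # with no graceful shutdown:
+ #   !gcp-compute-reset-instance instance="my-instance" zone="us-central1-a"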
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the instance scoping this request.
+ isArray: false
+ name: instance
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: 'An object containing a list of "key": value pairs. Example: key=abc,value=123;key=ABC,value=321'
+ isArray: false
+ name: labels
+ required: true
+ secret: false
+ - default: false
+ description: Fingerprint of the previous set of labels for this resource, used
+ to prevent conflicts. Provide the latest fingerprint value when making a request
+ to add or change labels.
+ isArray: false
+ name: labelFingerprint
+ required: true
+ secret: false
+ deprecated: false
+ description: Sets labels on an instance. To learn more about labels, read the
+ Labeling Resources documentation.
+ execution: false
+ name: gcp-compute-set-instance-labels
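+ # A minimal usage sketch (placeholder values). The labelFingerprint would be
+ # taken from a prior instance fetch to satisfy the optimistic-locking check:
+ #   !gcp-compute-set-instance-labels instance="my-instance" zone="us-central1-a" labels="key=env,value=prod" labelFingerprint="42WmSpB8rSM="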
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the instance scoping this request.
+ isArray: false
+ name: instance
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: Specifies a fingerprint for this request, which is essentially
+ a hash of the metadata's contents and used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update metadata. You must always provide an up-to-date fingerprint
+ hash in order to update or change metadata, otherwise the request will fail
+ with error 412 conditionNotMet.
+ isArray: false
+ name: metadataFingerprint
+ required: false
+ secret: false
+ - default: false
+ description: 'Key/value pairs to set as instance metadata. For example: key=abc,value=123;key=ABC,value=321'
+ isArray: false
+ name: metadataItems
+ required: true
+ secret: false
+ deprecated: false
+ description: Sets metadata for the specified instance to the data included in
+ the request.
+ execution: false
+ name: gcp-compute-set-instance-metadata
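+ # A minimal usage sketch (placeholder values). The metadataFingerprint comes
+ # from a prior instance fetch; a stale value fails with 412 conditionNotMet:
+ #   !gcp-compute-set-instance-metadata instance="my-instance" zone="us-central1-a" metadataItems="key=env,value=prod" metadataFingerprint="42WmSpB8rSM="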
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the instance scoping this request.
+ isArray: false
+ name: instance
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: 'Full or partial URL of the machine type resource. See Machine
+ Types for a full list of machine types. For example: zones/us-central1-f/machineTypes/n1-standard-1'
+ isArray: false
+ name: machineType
+ required: true
+ secret: false
+ deprecated: false
+ description: Changes the machine type for a stopped instance to the machine type
+ specified in the request.
+ execution: false
+ name: gcp-compute-set-instance-machine-type
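+ # A minimal usage sketch (placeholder values); the instance must be stopped
+ # before its machine type can be changed:
+ #   !gcp-compute-set-instance-machine-type instance="my-instance" zone="us-central1-f" machineType="zones/us-central1-f/machineTypes/n1-standard-1"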
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the Operations resource to return.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: Name of the region for this request.
+ isArray: false
+ name: region
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the specified region-specific Operations resource.
+ execution: false
+ name: gcp-compute-get-region-operation
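+ # A minimal usage sketch (placeholder values; the operation name is the one
+ # returned by a prior regional command):
+ #   !gcp-compute-get-region-operation name="operation-1234" region="us-central1"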
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the region for this request.
+ isArray: false
+ name: region
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of Operation resources contained within the specified
+ region.
+ execution: false
+ name: gcp-compute-list-region-operation
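+ # A minimal usage sketch (placeholder values), combining the filter and
+ # ordering options described above:
+ #   !gcp-compute-list-region-operation region="us-central1" maxResults="100" filters="status = DONE" orderBy="creationTimestamp desc"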
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
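+ # Usage sketch for the command above, with hypothetical argument values:
+ # !gcp-compute-list-region-operation region="us-central1" maxResults="50" orderBy="creationTimestamp desc"
+ # Results populate the GoogleCloudCompute.Operations context paths listed above.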
+ - arguments:
+ - default: false
+ description: Name of the Operations resource to delete.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: Name of the region for this request.
+ isArray: false
+ name: region
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the specified region-specific Operations resource.
+ execution: false
+ name: gcp-compute-delete-region-operation
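+ # Usage sketch (hypothetical operation and region names); note this command defines no context outputs:
+ # !gcp-compute-delete-region-operation name="operation-1234567890-abcdef" region="us-central1"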
+ - arguments:
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of Operation resources contained within the specified
+ project.
+ execution: false
+ name: gcp-compute-list-global-operation
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following: PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example, EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
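+ # Pagination sketch, assuming a previous call returned a hypothetical nextPageToken:
+ # !gcp-compute-list-global-operation maxResults="500" pageToken="TOKEN_FROM_PREVIOUS_CALL"
+ # Repeat, passing each returned nextPageToken, until no further token is returned.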
+ - arguments:
+ - default: false
+ description: Name of the Operations resource to delete.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the specified Operations resource.
+ execution: false
+ name: gcp-compute-delete-global-operation
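+ # Usage sketch (hypothetical operation name); unlike the regional variant, no region argument is taken:
+ # !gcp-compute-delete-global-operation name="operation-1234567890-abcdef"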
+ - arguments:
+ - default: false
+ description: Name of the address resource to delete.
+ isArray: false
+ name: address
+ required: true
+ secret: false
+ - default: false
+ description: Name of the region for this request.
+ isArray: false
+ name: region
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the specified address resource.
+ execution: false
+ name: gcp-compute-delete-address
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following: PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example, EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
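+ # Usage sketch with hypothetical values; the command returns an Operation (context above)
+ # whose status can be polled until it reaches DONE:
+ # !gcp-compute-delete-address address="my-reserved-ip" region="us-east1"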
+ - arguments:
+ - default: false
+ description: Name of the address resource to return.
+ isArray: false
+ name: address
+ required: true
+ secret: false
+ - default: false
+ description: Name of the region for this request.
+ isArray: false
+ name: region
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified address resource.
+ execution: false
+ name: gcp-compute-get-address
+ outputs:
+ - contextPath: GoogleCloudCompute.Addresses.id
+ description: The unique identifier for the resource. This identifier is defined by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.name
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.address
+ description: The static IP address represented by this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.prefixLength
+ description: The prefix length if the resource represents an IP range.
+ type: number
+ - contextPath: GoogleCloudCompute.Addresses.status
+ description: The status of the address, which can be one of RESERVING, RESERVED,
+ or IN_USE. An address that is RESERVING is currently in the process of being
+ reserved. A RESERVED address is currently reserved and available to use. An
+ IN_USE address is currently being used by another resource and is not available.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.region
+ description: URL of the region where the regional address resides. This field
+ is not applicable to global addresses. You must specify this field as part
+ of the HTTP request URL. You cannot set this field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.users
+ description: The URLs of the resources that are using this address.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.networkTier
+ description: 'This signifies the networking tier used for configuring this Address
+ and can only take the following values: PREMIUM, STANDARD. Global forwarding
+ rules can only be Premium Tier. Regional forwarding rules can be either Premium
+ or Standard Tier. Standard Tier addresses applied to regional forwarding rules
+ can be used with any external load balancer. Regional forwarding rules in
+ Premium Tier can only be used with a Network load balancer.'
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.ipVersion
+ description: The IP Version that will be used by this address. Valid options
+ are IPV4 or IPV6. This can only be specified for a global address.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.addressType
+ description: The type of address to reserve, either INTERNAL or EXTERNAL. If
+ unspecified, defaults to EXTERNAL.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.purpose
+ description: The purpose of the resource, used only with the INTERNAL type.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.subnetwork
+ description: The URL of the subnetwork in which to reserve the address. If an
+ IP address is specified, it must be within the subnetwork's IP range. This
+ field can only be used with INTERNAL type with GCE_ENDPOINT/DNS_RESOLVER purposes.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.network
+ description: The URL of the network in which to reserve the address. This field
+ can only be used with INTERNAL type with VPC_PEERING purpose.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.kind
+ description: Type of the resource. Always compute#address for addresses.
+ type: string
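+ # Usage sketch (hypothetical address and region names); results populate GoogleCloudCompute.Addresses:
+ # !gcp-compute-get-address address="my-reserved-ip" region="us-east1"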
+ - arguments:
+ - default: false
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: Name of the region for this request.
+ isArray: false
+ name: region
+ required: true
+ secret: false
+ - default: false
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The static IP address represented by this resource.
+ isArray: false
+ name: address
+ required: false
+ secret: false
+ - default: false
+ description: The prefix length if the resource represents an IP range.
+ isArray: false
+ name: prefixLength
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: PREMIUM
+ description: 'This signifies the networking tier used for configuring this Address
+ and can only take the following values: PREMIUM, STANDARD. Global forwarding
+ rules can only be Premium Tier. Regional forwarding rules can be either Premium
+ or Standard Tier. Standard Tier addresses applied to regional forwarding rules
+ can be used with any external load balancer. Regional forwarding rules in
+ Premium Tier can only be used with a Network load balancer. If this field
+ is not specified, it is assumed to be PREMIUM.'
+ isArray: false
+ name: networkTier
+ predefined:
+ - PREMIUM
+ - STANDARD
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: EXTERNAL
+ description: The type of address to reserve, either INTERNAL or EXTERNAL. If
+ unspecified, defaults to EXTERNAL.
+ isArray: false
+ name: addressType
+ predefined:
+ - EXTERNAL
+ - INTERNAL
+ required: false
+ secret: false
+ - default: false
+ description: The purpose of the resource, used only with the INTERNAL type.
+ isArray: false
+ name: purpose
+ required: false
+ secret: false
+ - default: false
+ description: The URL of the subnetwork in which to reserve the address. If an
+ IP address is specified, it must be within the subnetwork's IP range. This
+ field can only be used with INTERNAL type with GCE_ENDPOINT/DNS_RESOLVER purposes.
+ isArray: false
+ name: subnetwork
+ required: false
+ secret: false
+ - default: false
+ description: The URL of the network in which to reserve the address. This field
+ can only be used with INTERNAL type with VPC_PEERING purpose.
+ isArray: false
+ name: network
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates an address resource in the specified project using the data
+ included in the request.
+ execution: false
+ name: gcp-compute-insert-address
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following: PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example, EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
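+ # Usage sketch reserving a hypothetical INTERNAL address; the subnetwork argument applies
+ # only to INTERNAL addresses with GCE_ENDPOINT/DNS_RESOLVER purposes, and the URL below is
+ # an assumed placeholder:
+ # !gcp-compute-insert-address name="my-internal-ip" region="us-east1" addressType="INTERNAL" subnetwork="regions/us-east1/subnetworks/default"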
+ - arguments:
+ - default: false
+ description: Name of the region for this request.
+ isArray: false
+ name: region
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of addresses contained within the specified region.
+ execution: false
+ name: gcp-compute-list-addresses
+ outputs:
+ - contextPath: GoogleCloudCompute.Addresses.id
+ description: The unique identifier for the resource. This identifier is defined by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.name
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.address
+ description: The static IP address represented by this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.prefixLength
+ description: The prefix length if the resource represents an IP range.
+ type: number
+ - contextPath: GoogleCloudCompute.Addresses.status
+ description: The status of the address, which can be one of RESERVING, RESERVED,
+ or IN_USE. An address that is RESERVING is currently in the process of being
+ reserved. A RESERVED address is currently reserved and available to use. An
+ IN_USE address is currently being used by another resource and is not available.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.region
+ description: URL of the region where the regional address resides. This field
+ is not applicable to global addresses. You must specify this field as part
+ of the HTTP request URL. You cannot set this field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.users
+ description: The URLs of the resources that are using this address.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.networkTier
+ description: 'This signifies the networking tier used for configuring this Address
+ and can only take the following values: PREMIUM, STANDARD. Global forwarding
+ rules can only be Premium Tier. Regional forwarding rules can be either Premium
+ or Standard Tier. Standard Tier addresses applied to regional forwarding rules
+ can be used with any external load balancer. Regional forwarding rules in
+ Premium Tier can only be used with a Network load balancer.'
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.ipVersion
+ description: The IP Version that will be used by this address. Valid options
+ are IPV4 or IPV6. This can only be specified for a global address.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.addressType
+ description: The type of address to reserve, either INTERNAL or EXTERNAL. If
+ unspecified, defaults to EXTERNAL.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.purpose
+ description: The purpose of resource, only used with INTERNAL type.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.subnetwork
+ description: The URL of the subnetwork in which to reserve the address. If an
+ IP address is specified, it must be within the subnetwork's IP range. This
+ field can only be used with INTERNAL type with GCE_ENDPOINT/DNS_RESOLVER purposes.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.network
+ description: The URL of the network in which to reserve the address. This field
+ can only be used with INTERNAL type with VPC_PEERING purpose.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.kind
+ description: Type of the resource. Always compute#address for addresses.
+ type: string
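+ # Usage sketch filtering on the documented status values (hypothetical region name):
+ # !gcp-compute-list-addresses region="us-east1" filters="status = IN_USE"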
+ - arguments:
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves an aggregated list of addresses.
+ execution: false
+ name: gcp-compute-aggregated-list-addresses
+ outputs:
+ - contextPath: GoogleCloudCompute.Addresses.id
+ description: The unique identifier for the resource. This identifier is defined by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.name
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.address
+ description: The static IP address represented by this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.prefixLength
+ description: The prefix length if the resource represents an IP range.
+ type: number
+ - contextPath: GoogleCloudCompute.Addresses.status
+ description: The status of the address, which can be one of RESERVING, RESERVED,
+ or IN_USE. An address that is RESERVING is currently in the process of being
+ reserved. A RESERVED address is currently reserved and available to use. An
+ IN_USE address is currently being used by another resource and is not available.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.region
+ description: URL of the region where the regional address resides. This field
+ is not applicable to global addresses. You must specify this field as part
+ of the HTTP request URL. You cannot set this field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.users
+ description: The URLs of the resources that are using this address.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.networkTier
+ description: 'This signifies the networking tier used for configuring this Address
+ and can only take the following values: PREMIUM, STANDARD. Global forwarding
+ rules can only be Premium Tier. Regional forwarding rules can be either Premium
+ or Standard Tier. Standard Tier addresses applied to regional forwarding rules
+ can be used with any external load balancer. Regional forwarding rules in
+ Premium Tier can only be used with a Network load balancer.'
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.ipVersion
+ description: The IP Version that will be used by this address. Valid options
+ are IPV4 or IPV6. This can only be specified for a global address.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.addressType
+ description: The type of address to reserve, either INTERNAL or EXTERNAL. If
+ unspecified, defaults to EXTERNAL.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.purpose
+ description: The purpose of the resource, used only with the INTERNAL type.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.subnetwork
+ description: The URL of the subnetwork in which to reserve the address. If an
+ IP address is specified, it must be within the subnetwork's IP range. This
+ field can only be used with INTERNAL type with GCE_ENDPOINT/DNS_RESOLVER purposes.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.network
+ description: The URL of the network in which to reserve the address. This field
+ can only be used with INTERNAL type with VPC_PEERING purpose.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.kind
+ description: Type of the resource. Always compute#address for addresses.
+ type: string
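+ # Usage sketch; the aggregated variant spans every region, so no region argument is defined
+ # (the filter value is an assumed example):
+ # !gcp-compute-aggregated-list-addresses maxResults="500" filters="addressType = EXTERNAL"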
+ - arguments:
+ - default: false
+ description: Name of the address resource to delete.
+ isArray: false
+ name: address
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the specified global address resource.
+ execution: false
+ name: gcp-compute-delete-global-address
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following: PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example, EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
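+ # Usage sketch (hypothetical name); global addresses are not region-scoped, so only the
+ # address name is passed:
+ # !gcp-compute-delete-global-address address="my-global-ip"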
+ - arguments:
+ - default: false
+ description: Name of the address resource to return.
+ isArray: false
+ name: address
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified global address resource.
+ execution: false
+ name: gcp-compute-get-global-address
+ outputs:
+ - contextPath: GoogleCloudCompute.Addresses.id
+ description: The unique identifier for the resource. This identifier is defined by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.name
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.address
+ description: The static IP address represented by this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.prefixLength
+ description: The prefix length if the resource represents an IP range.
+ type: number
+ - contextPath: GoogleCloudCompute.Addresses.status
+ description: The status of the address, which can be one of RESERVING, RESERVED,
+ or IN_USE. An address that is RESERVING is currently in the process of being
+ reserved. A RESERVED address is currently reserved and available to use. An
+ IN_USE address is currently being used by another resource and is not available.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.region
+ description: URL of the region where the regional address resides. This field
+ is not applicable to global addresses. You must specify this field as part
+ of the HTTP request URL. You cannot set this field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.users
+ description: The URLs of the resources that are using this address.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.networkTier
+ description: 'This signifies the networking tier used for configuring this Address
+ and can only take the following values: PREMIUM, STANDARD. Global forwarding
+ rules can only be Premium Tier. Regional forwarding rules can be either Premium
+ or Standard Tier. Standard Tier addresses applied to regional forwarding rules
+ can be used with any external load balancer. Regional forwarding rules in
+ Premium Tier can only be used with a Network load balancer.'
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.ipVersion
+ description: The IP Version that will be used by this address. Valid options
+ are IPV4 or IPV6. This can only be specified for a global address.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.addressType
+ description: The type of address to reserve, either INTERNAL or EXTERNAL. If
+ unspecified, defaults to EXTERNAL.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.purpose
+ description: The purpose of the resource, used only with the INTERNAL type.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.subnetwork
+ description: The URL of the subnetwork in which to reserve the address. If an
+ IP address is specified, it must be within the subnetwork's IP range. This
+ field can only be used with INTERNAL type with GCE_ENDPOINT/DNS_RESOLVER purposes.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.network
+ description: The URL of the network in which to reserve the address. This field
+ can only be used with INTERNAL type with VPC_PEERING purpose.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.kind
+ description: Type of the resource. Always compute#address for addresses.
+ type: string
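+ # Usage sketch (hypothetical name); results populate GoogleCloudCompute.Addresses:
+ # !gcp-compute-get-global-address address="my-global-ip"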
+ - arguments:
+ - default: false
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The static IP address represented by this resource.
+ isArray: false
+ name: address
+ required: false
+ secret: false
+ - default: false
+ description: The prefix length if the resource represents an IP range.
+ isArray: false
+ name: prefixLength
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: PREMIUM
+ description: 'This signifies the networking tier used for configuring this Address
+ and can only take the following values: PREMIUM, STANDARD. Global forwarding
+ rules can only be Premium Tier. Regional forwarding rules can be either Premium
+ or Standard Tier. Standard Tier addresses applied to regional forwarding rules
+ can be used with any external load balancer. Regional forwarding rules in
+ Premium Tier can only be used with a Network load balancer. If this field
+ is not specified, it is assumed to be PREMIUM.'
+ isArray: false
+ name: networkTier
+ predefined:
+ - PREMIUM
+ - STANDARD
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The IP Version that will be used by this address. Valid options
+ are IPV4 or IPV6. This can only be specified for a global address.
+ isArray: false
+ name: ipVersion
+ predefined:
+ - IPV4
+ - IPV6
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: EXTERNAL
+ description: The type of address to reserve, either INTERNAL or EXTERNAL. If
+ unspecified, defaults to EXTERNAL.
+ isArray: false
+ name: addressType
+ predefined:
+ - EXTERNAL
+ - INTERNAL
+ required: false
+ secret: false
+ - default: false
+ description: The purpose of the resource. Used only with the INTERNAL address type.
+ isArray: false
+ name: purpose
+ required: false
+ secret: false
+ - default: false
+ description: The URL of the subnetwork in which to reserve the address. If an
+ IP address is specified, it must be within the subnetwork's IP range. This
+ field can only be used with INTERNAL type with GCE_ENDPOINT/DNS_RESOLVER purposes.
+ isArray: false
+ name: subnetwork
+ required: false
+ secret: false
+ - default: false
+ description: The URL of the network in which to reserve the address. This field
+ can only be used with INTERNAL type with VPC_PEERING purpose.
+ isArray: false
+ name: network
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates an address resource in the specified project using the data
+ included in the request.
+ execution: false
+ name: gcp-compute-insert-global-address
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example, EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
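+ # Illustrative usage (a sketch; all argument values below are hypothetical):
+ # reserve a global external IPv4 address, then poll the returned
+ # GoogleCloudCompute.Operations entry until its status is DONE.
+ #   !gcp-compute-insert-global-address name="example-global-address" ipVersion=IPV4 addressType=EXTERNAL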
+ - arguments:
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For example,
+ if you are filtering Compute Engine instances, you can exclude instances named
+ example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of global addresses.
+ execution: false
+ name: gcp-compute-list-global-addresses
+ outputs:
+ - contextPath: GoogleCloudCompute.Addresses.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.name
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.address
+ description: The static IP address represented by this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.prefixLength
+ description: The prefix length if the resource represents an IP range.
+ type: number
+ - contextPath: GoogleCloudCompute.Addresses.status
+ description: The status of the address, which can be one of RESERVING, RESERVED,
+ or IN_USE. An address that is RESERVING is currently in the process of being
+ reserved. A RESERVED address is currently reserved and available to use. An
+ IN_USE address is currently being used by another resource and is not available.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.region
+ description: URL of the region where the regional address resides. This field
+ is not applicable to global addresses. You must specify this field as part
+ of the HTTP request URL. You cannot set this field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.users
+ description: The URLs of the resources that are using this address.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.networkTier
+ description: 'This signifies the networking tier used for configuring this Address
+ and can only take the following values: PREMIUM, STANDARD. Global forwarding
+ rules can only be Premium Tier. Regional forwarding rules can be either Premium
+ or Standard Tier. Standard Tier addresses applied to regional forwarding rules
+ can be used with any external load balancer. Regional forwarding rules in
+ Premium Tier can only be used with a Network load balancer.'
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.ipVersion
+ description: The IP Version that will be used by this address. Valid options
+ are IPV4 or IPV6. This can only be specified for a global address.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.addressType
+ description: The type of address to reserve, either INTERNAL or EXTERNAL. If
+ unspecified, defaults to EXTERNAL.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.purpose
+ description: The purpose of the resource. Used only with the INTERNAL address type.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.subnetwork
+ description: The URL of the subnetwork in which to reserve the address. If an
+ IP address is specified, it must be within the subnetwork's IP range. This
+ field can only be used with INTERNAL type with GCE_ENDPOINT/DNS_RESOLVER purposes.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.network
+ description: The URL of the network in which to reserve the address. This field
+ can only be used with INTERNAL type with VPC_PEERING purpose.
+ type: string
+ - contextPath: GoogleCloudCompute.Addresses.kind
+ description: Type of the resource. Always compute#address for addresses.
+ type: string
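+ # Illustrative usage (a sketch; filter and paging values are hypothetical):
+ # list global addresses newest-first, excluding one address by name.
+ #   !gcp-compute-list-global-addresses maxResults=100 orderBy="creationTimestamp desc" filters="name != example-address"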
+ - arguments:
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves an aggregated list of persistent disks.
+ execution: false
+ name: gcp-compute-aggregated-list-disks
+ outputs:
+ - contextPath: GoogleCloudCompute.Disks.id
+ description: Unique identifier for the resource; defined by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.name
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sizeGb
+ description: Size of the persistent disk, specified in GB. You can specify this
+ field when creating a persistent disk using the sourceImage or sourceSnapshot
+ parameter, or specify it alone to create an empty persistent disk.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.zone
+ description: URL of the zone where the disk resides. You must specify this field
+ as part of the HTTP request URL. It is not settable as a field in the request
+ body.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.status
+ description: The status of disk creation.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshot
+ description: The source snapshot used to create this disk. You can provide this
+ as a partial or full URL to the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotId
+ description: The unique ID of the snapshot used to create this disk. This value
+ identifies the exact snapshot that was used to create this persistent disk.
+ For example, if you created the persistent disk from a snapshot that was later
+ deleted and recreated under the same name, the source snapshot ID would identify
+ the exact version of the snapshot that was used.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.options
+ description: Internal use only.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.selfLink
+ description: Server-defined fully-qualified URL for this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImage
+ description: The source image used to create this disk. If the source image
+ is deleted, this field will not be set.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageId
+ description: The ID value of the image used to create this disk. This value
+ identifies the exact image that was used to create this persistent disk. For
+ example, if you created the persistent disk from an image that was later deleted
+ and recreated under the same name, the source image ID would identify the
+ exact version of the image that was used.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.type
+ description: 'URL of the disk type resource describing which disk type to use
+ to create the disk. Provide this when creating the disk. For example: project/zones/zone/diskTypes/pd-standard
+ or pd-ssd'
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.licenses
+ description: A list of publicly visible licenses. Reserved for Google's use.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.guestOsFeatures
+ description: A list of features to enable on the guest operating system. Applicable
+ only for bootable images. Read Enabling guest operating system features to
+ see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.guestOsFeatures.type
+ description: The ID of a supported feature. Read Enabling guest operating system
+ features to see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.lastAttachTimestamp
+ description: Last attach timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.lastDetachTimestamp
+ description: Last detach timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.users
+ description: 'Links to the users of the disk (attached instances) in the form: project/zones/zone/instances/instance'
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.diskEncryptionKey
+ description: Encrypts the disk using a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.diskEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.diskEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.diskEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageEncryptionKey
+ description: The customer-supplied encryption key of the source image. Required
+ if the source image is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotEncryptionKey
+ description: The customer-supplied encryption key of the source snapshot. Required
+ if the source snapshot is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.labels
+ description: Labels to apply to this disk. These can be later modified by the
+ setLabels method.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.labels.key
+ description: The label key.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.labels.value
+ description: The label value.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.labelFingerprint
+ description: A fingerprint for the labels being applied to this disk, which
+ is essentially a hash of the labels set used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update labels. You must always provide an up-to-date fingerprint
+ hash in order to update or change labels, otherwise the request will fail
+ with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.region
+ description: URL of the region where the disk resides. Only applicable for regional
+ resources. You must specify this field as part of the HTTP request URL. It
+ is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.replicaZones
+ description: URLs of the zones where the disk should be replicated to. Only
+ applicable for regional resources.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.licenseCodes
+ description: Integer license codes indicating which licenses are attached to
+ this disk.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.physicalBlockSizeBytes
+ description: Physical block size of the persistent disk, in bytes. If not present
+ in a request, a default value is used. Currently supported sizes are 4096
+ and 16384; other sizes may be added in the future. If an unsupported value
+ is requested, the error message will list the supported values for the caller's
+ project.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.kind
+ description: Type of the resource. Always compute#disk for disks.
+ type: string
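+ # Illustrative usage (a sketch; values are hypothetical): page through disks
+ # across all zones, feeding nextPageToken from one call into the next.
+ #   !gcp-compute-aggregated-list-disks maxResults=50
+ #   !gcp-compute-aggregated-list-disks maxResults=50 pageToken=<nextPageToken from previous call>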
+ - arguments:
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: Name of the snapshot.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: Name of the persistent disk to snapshot.
+ isArray: false
+ name: disk
+ required: true
+ secret: false
+ - default: false
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ isArray: false
+ name: snapshotEncryptionKeyRawKey
+ required: false
+ secret: false
+ - default: false
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ isArray: false
+ name: snapshotEncryptionKeyKmsKeyName
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ isArray: false
+ name: sourceDiskEncryptionKeyRawKey
+ required: false
+ secret: false
+ - default: false
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ isArray: false
+ name: sourceDiskEncryptionKeyKmsKeyName
+ required: false
+ secret: false
+ - default: false
+ description: Labels to apply to this snapshot. These can be later modified by
+ the setLabels method. Label values may be empty. For example, key=abc,value=123;key=abc,value=123
+ isArray: false
+ name: labels
+ required: false
+ secret: false
+ - default: false
+ description: A fingerprint for the labels being applied to this snapshot, which
+ is essentially a hash of the labels set used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update labels. You must always provide an up-to-date fingerprint
+ hash in order to update or change labels, otherwise the request will fail
+ with error 412 conditionNotMet.
+ isArray: false
+ name: labelFingerprint
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a snapshot of a specified persistent disk.
+ execution: false
+ name: gcp-compute-create-disk-snapshot
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example, EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
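+ # Illustrative usage (a sketch; names are hypothetical): snapshot a zonal disk
+ # before making destructive changes to it.
+ #   !gcp-compute-create-disk-snapshot zone=us-central1-a disk=example-disk name=example-snapshot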
+ - arguments:
+ - default: false
+ description: Name of the persistent disk to delete.
+ isArray: false
+ name: disk
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the specified persistent disk. Deleting a disk removes its
+ data permanently and is irreversible. However, deleting a disk does not delete
+ any snapshots previously made from the disk. You must separately delete snapshots.
+ execution: false
+ name: gcp-compute-delete-disk
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example, EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
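+ # Illustrative usage (a sketch; names are hypothetical). Deletion is permanent,
+ # so a common pattern is to run gcp-compute-create-disk-snapshot first.
+ #   !gcp-compute-delete-disk zone=us-central1-a disk=example-disk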
+ - arguments:
+ - default: false
+ description: Name of the persistent disk to return.
+ isArray: false
+ name: disk
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns a specified persistent disk. Gets a list of available persistent
+ disks by making a list() request.
+ execution: false
+ name: gcp-compute-get-disk
+ outputs:
+ - contextPath: GoogleCloudCompute.Disks.id
+ description: Unique identifier for the resource; defined by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.name
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sizeGb
+ description: Size of the persistent disk, specified in GB. You can specify this
+ field when creating a persistent disk using the sourceImage or sourceSnapshot
+ parameter, or specify it alone to create an empty persistent disk.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.zone
+ description: URL of the zone where the disk resides. You must specify this field
+ as part of the HTTP request URL. It is not settable as a field in the request
+ body.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.status
+ description: The status of disk creation.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshot
+ description: The source snapshot used to create this disk. You can provide this
+ as a partial or full URL to the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotId
+ description: The unique ID of the snapshot used to create this disk. This value
+ identifies the exact snapshot that was used to create this persistent disk.
+ For example, if you created the persistent disk from a snapshot that was later
+ deleted and recreated under the same name, the source snapshot ID would identify
+ the exact version of the snapshot that was used.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.options
+ description: Internal use only.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.selfLink
+ description: Server-defined fully-qualified URL for this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImage
+ description: The source image used to create this disk. If the source image
+ is deleted, this field will not be set.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageId
+ description: The ID value of the image used to create this disk. This value
+ identifies the exact image that was used to create this persistent disk. For
+ example, if you created the persistent disk from an image that was later deleted
+ and recreated under the same name, the source image ID would identify the
+ exact version of the image that was used.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.type
+ description: 'URL of the disk type resource describing which disk type to use
+ to create the disk. Provide this when creating the disk. For example: project/zones/zone/diskTypes/pd-standard
+ or pd-ssd'
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.licenses
+ description: A list of publicly visible licenses. Reserved for Google's use.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.guestOsFeatures
+ description: A list of features to enable on the guest operating system. Applicable
+ only for bootable images. Read Enabling guest operating system features to
+ see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.guestOsFeatures.type
+ description: The ID of a supported feature. Read Enabling guest operating system
+ features to see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.lastAttachTimestamp
+ description: Last attach timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.lastDetachTimestamp
+ description: Last detach timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.users
+ description: 'Links to the users of the disk (attached instances) in the form: project/zones/zone/instances/instance'
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.diskEncryptionKey
+ description: Encrypts the disk using a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.diskEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.diskEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.diskEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageEncryptionKey
+ description: The customer-supplied encryption key of the source image. Required
+ if the source image is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotEncryptionKey
+ description: The customer-supplied encryption key of the source snapshot. Required
+ if the source snapshot is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.labels
+ description: Labels to apply to this disk. These can be later modified by the
+ setLabels method.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.labels.key
+ description: The label key.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.labels.value
+ description: The label value.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.labelFingerprint
+ description: A fingerprint for the labels being applied to this disk, which
+ is essentially a hash of the labels set used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update labels. You must always provide an up-to-date fingerprint
+ hash in order to update or change labels, otherwise the request will fail
+ with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.region
+ description: URL of the region where the disk resides. Only applicable for regional
+ resources. You must specify this field as part of the HTTP request URL. It
+ is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.replicaZones
+ description: URLs of the zones where the disk should be replicated to. Only
+ applicable for regional resources.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.licenseCodes
+ description: Integer license codes indicating which licenses are attached to
+ this disk.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.physicalBlockSizeBytes
+ description: Physical block size of the persistent disk, in bytes. If not present
+ in a request, a default value is used. Currently supported sizes are 4096
+ and 16384; other sizes may be added in the future. If an unsupported value
+ is requested, the error message will list the supported values for the caller's
+ project.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.kind
+ description: Type of the resource. Always compute#disk for disks.
+ type: string
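+ # Illustrative usage (a sketch; names are hypothetical): fetch a single disk
+ # into the GoogleCloudCompute.Disks context.
+ #   !gcp-compute-get-disk zone=us-central1-a disk=example-disk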
+ - arguments:
+ - default: false
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: 'URL of the disk type resource describing which disk type to use
+ to create the disk. Provide this when creating the disk. For example: project/zones/zone/diskTypes/pd-standard
+ or pd-ssd'
+ isArray: false
+ name: disktype
+ required: true
+ secret: false
+ - default: false
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: Size of the persistent disk, specified in GB. You can specify this
+ field when creating a persistent disk using the sourceImage or sourceSnapshot
+ parameter, or specify it alone to create an empty persistent disk. If you
+ specify this field along with sourceImage or sourceSnapshot, the value of
+ sizeGb must not be less than the size of the sourceImage or the size of the
+ snapshot. Acceptable values are 1 to 65536, inclusive.
+ isArray: false
+ name: sizeGb
+ required: true
+ secret: false
+ - default: false
+ description: The source snapshot used to create this disk. You can provide this
+ as a partial or full URL to the resource.
+ isArray: false
+ name: sourceSnapshot
+ required: false
+ secret: false
+ - default: false
+ description: The source image used to create this disk. If the source image
+ is deleted, this field will not be set.
+ isArray: false
+ name: sourceImage
+ required: false
+ secret: false
+ - default: false
+ description: A list of publicly visible licenses. Reserved for Google's use.
+ isArray: false
+ name: licenses
+ required: false
+ secret: false
+ - default: false
+ description: A list of features to enable on the guest operating system. Applicable
+ only for bootable images. Read Enabling guest operating system features to
+ see a list of available options. Comma-separated.
+ isArray: false
+ name: guestOsFeatures
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ isArray: false
+ name: diskEncryptionKeyRawKey
+ required: false
+ secret: false
+ - default: false
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ isArray: false
+ name: diskEncryptionKeyKmsKeyName
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ isArray: false
+ name: sourceImageEncryptionKeyRawKey
+ required: false
+ secret: false
+ - default: false
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ isArray: false
+ name: sourceImageEncryptionKeyKmsKeyName
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ isArray: false
+ name: sourceSnapshotEncryptionKeyRawKey
+ required: false
+ secret: false
+ - default: false
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ isArray: false
+ name: sourceSnapshotEncryptionKeyKmsKeyName
+ required: false
+ secret: false
+ - default: false
+ description: 'Labels to apply to this disk. These can be later modified by the
+ setLabels method. For example: key=abc,value=123;key=abc,value=123'
+ isArray: false
+ name: labels
+ required: false
+ secret: false
+ - default: false
+ description: A fingerprint for the labels being applied to this disk, which
+ is essentially a hash of the labels set used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update labels. You must always provide an up-to-date fingerprint
+ hash in order to update or change labels, otherwise the request will fail
+ with error 412 conditionNotMet.
+ isArray: false
+ name: labelFingerprint
+ required: false
+ secret: false
+ - default: false
+ description: URLs of the zones where the disk should be replicated to. Only
+ applicable for regional resources.
+ isArray: false
+ name: replicaZones
+ required: false
+ secret: false
+ - default: false
+ description: Integer license codes indicating which licenses are attached to
+ this disk.
+ isArray: false
+ name: licenseCodes
+ required: false
+ secret: false
+ - default: false
+ description: Physical block size of the persistent disk, in bytes. If not present
+ in a request, a default value is used. Currently supported sizes are 4096
+ and 16384; other sizes may be added in the future. If an unsupported value
+ is requested, the error message will list the supported values for the caller's
+ project.
+ isArray: false
+ name: physicalBlockSizeBytes
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a persistent disk in the specified project using the data
+ in the request. You can create a disk with a sourceImage, a sourceSnapshot,
+ or create an empty 500 GB data disk by omitting all properties. You can also
+ create a disk that is larger than the default size by specifying the sizeGb
+ property.
+ execution: false
+ name: gcp-compute-insert-disk
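+ # Illustrative usage (a sketch; name, zone, and size are hypothetical): create
+ # an empty 100 GB pd-standard disk.
+ #   !gcp-compute-insert-disk name=example-disk zone=us-central1-a disktype=pd-standard sizeGb=100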
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example, EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
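+ # Illustrative war-room invocation of gcp-compute-insert-disk (hypothetical values;
+ # name, zone, and sizeGb are assumed to be arguments defined earlier in this command):
+ #   !gcp-compute-insert-disk name=example-disk zone=us-central1-a sizeGb=100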
+ - arguments:
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of persistent disks contained within the specified
+ zone.
+ execution: false
+ name: gcp-compute-list-disks
+ outputs:
+ - contextPath: GoogleCloudCompute.Disks.id
+ description: Unique identifier for the resource; defined by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.name
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sizeGb
+ description: Size of the persistent disk, specified in GB. You can specify this
+ field when creating a persistent disk using the sourceImage or sourceSnapshot
+ parameter, or specify it alone to create an empty persistent disk.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.zone
+ description: URL of the zone where the disk resides. You must specify this field
+ as part of the HTTP request URL. It is not settable as a field in the request
+ body.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.status
+ description: The status of disk creation.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshot
+ description: The source snapshot used to create this disk. You can provide this
+ as a partial or full URL to the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotId
+ description: The unique ID of the snapshot used to create this disk. This value
+ identifies the exact snapshot that was used to create this persistent disk.
+ For example, if you created the persistent disk from a snapshot that was later
+ deleted and recreated under the same name, the source snapshot ID would identify
+ the exact version of the snapshot that was used.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.options
+ description: Internal use only.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.selfLink
+ description: Server-defined fully-qualified URL for this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImage
+ description: The source image used to create this disk. If the source image
+ is deleted, this field will not be set.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageId
+ description: The ID value of the image used to create this disk. This value
+ identifies the exact image that was used to create this persistent disk. For
+ example, if you created the persistent disk from an image that was later deleted
+ and recreated under the same name, the source image ID would identify the
+ exact version of the image that was used.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.type
+ description: 'URL of the disk type resource describing which disk type to use
+ to create the disk. Provide this when creating the disk. For example: project/zones/zone/diskTypes/pd-standard
+ or pd-ssd'
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.licenses
+ description: A list of publicly visible licenses. Reserved for Google's use.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.guestOsFeatures
+ description: A list of features to enable on the guest operating system. Applicable
+ only for bootable images. Read Enabling guest operating system features to
+ see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.guestOsFeatures.type
+ description: The ID of a supported feature. Read Enabling guest operating system
+ features to see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.lastAttachTimestamp
+ description: Last attach timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.lastDetachTimestamp
+ description: Last detach timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.users
+ description: 'Links to the users of the disk (attached instances) in the form: project/zones/zone/instances/instance'
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.diskEncryptionKey
+ description: Encrypts the disk using a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.diskEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.diskEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.diskEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageEncryptionKey
+ description: The customer-supplied encryption key of the source image. Required
+ if the source image is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceImageEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotEncryptionKey
+ description: The customer-supplied encryption key of the source snapshot. Required
+ if the source snapshot is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.sourceSnapshotEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.labels
+ description: Labels to apply to this disk. These can be later modified by the
+ setLabels method.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.labels.key
+ description: The label key.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.labels.value
+ description: The label value.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.labelFingerprint
+ description: A fingerprint for the labels being applied to this disk, which
+ is essentially a hash of the labels set used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update labels. You must always provide an up-to-date fingerprint
+ hash in order to update or change labels; otherwise, the request will fail
+ with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.region
+ description: URL of the region where the disk resides. Only applicable for regional
+ resources. You must specify this field as part of the HTTP request URL. It
+ is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.replicaZones
+ description: URLs of the zones where the disk should be replicated to. Only
+ applicable for regional resources.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.licenseCodes
+ description: Integer license codes indicating which licenses are attached to
+ this disk.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.physicalBlockSizeBytes
+ description: Physical block size of the persistent disk, in bytes. If not present
+ in a request, a default value is used. Currently supported sizes are 4096
+ and 16384; other sizes may be added in the future. If an unsupported value
+ is requested, the error message will list the supported values for the caller's
+ project.
+ type: string
+ - contextPath: GoogleCloudCompute.Disks.kind
+ description: Type of the resource. Always compute#disk for disks.
+ type: string
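+ # Illustrative war-room invocation of gcp-compute-list-disks (hypothetical zone; the
+ # filter mirrors the example given in the filters argument description):
+ #   !gcp-compute-list-disks zone=us-central1-a maxResults=50 filters="name != example-instance"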
+ - arguments:
+ - default: false
+ description: The name of the persistent disk.
+ isArray: false
+ name: disk
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: The new size of the persistent disk, which is specified in GB.
+ isArray: false
+ name: sizeGb
+ required: true
+ secret: false
+ deprecated: false
+ description: Resizes the specified persistent disk. You can only increase the
+ size of the disk.
+ execution: false
+ name: gcp-compute-resize-disk
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example, EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
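+ # Illustrative war-room invocation of gcp-compute-resize-disk (hypothetical values;
+ # per the command description, the size can only be increased):
+ #   !gcp-compute-resize-disk disk=example-disk zone=us-central1-a sizeGb=200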
+ - arguments:
+ - default: false
+ description: Name or ID of the resource for this request.
+ isArray: false
+ name: disk
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: 'The labels to set for this resource. An object containing a list
+ of "key": value pairs. Example: key=abc,value=123;key=abc,value=123'
+ isArray: false
+ name: labels
+ required: true
+ secret: false
+ - default: false
+ description: The fingerprint of the previous set of labels for this resource,
+ used to detect conflicts. The fingerprint is initially generated by Compute
+ Engine and changes after every request to modify or update labels. You must
+ always provide an up-to-date fingerprint hash in order to update or change
+ labels. Make a get() request to the resource to get the latest fingerprint.
+ isArray: false
+ name: labelFingerprint
+ required: false
+ secret: false
+ deprecated: false
+ description: Sets the labels on a disk.
+ execution: false
+ name: gcp-compute-set-disk-labels
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example, EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
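+ # Illustrative war-room invocation of gcp-compute-set-disk-labels (hypothetical values;
+ # the labels format follows the argument description, and the fingerprint placeholder
+ # stands for the value returned by a prior get() on the disk):
+ #   !gcp-compute-set-disk-labels disk=example-disk zone=us-central1-a labels=key=abc,value=123 labelFingerprint=<fingerprint>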
+ - arguments:
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves an aggregated list of disk types.
+ execution: false
+ name: gcp-compute-aggregated-list-disk-types
+ outputs:
+ - contextPath: GoogleCloudCompute.DiskTypes.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.description
+ description: An optional description of this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.validDiskSize
+ description: An optional textual description of the valid disk size, such as
+ "10GB-10TB".
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated
+ description: The deprecation status associated with this disk type.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.state
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations which communicate the end-of-life date for
+ an image can use ACTIVE. Operations which create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations which use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.replacement
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.deprecated
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DEPRECATED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.obsolete
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to OBSOLETE. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.deleted
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DELETED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.zone
+ description: URL of the zone where the disk type resides. You must specify this
+ field as part of the HTTP request URL. It is not settable as a field in the
+ request body.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.defaultDiskSizeGb
+ description: Server-defined default disk size in GB.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.region
+ description: URL of the region where the disk type resides. Only applicable
+ for regional resources. You must specify this field as part of the HTTP request
+ URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.kind
+ description: Type of the resource. Always compute#diskType for disk types.
+ type: string
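+ # Illustrative war-room invocation of gcp-compute-aggregated-list-disk-types
+ # (hypothetical paging value):
+ #   !gcp-compute-aggregated-list-disk-types maxResults=100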
+ - arguments:
+ - default: false
+ description: Name of the disk type to return.
+ isArray: false
+ name: disktype
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified disk type. Gets a list of available disk types
+ by making a list() request.
+ execution: false
+ name: gcp-compute-get-disk-type
+ outputs:
+ - contextPath: GoogleCloudCompute.DiskTypes.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.description
+ description: An optional description of this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.validDiskSize
+ description: An optional textual description of the valid disk size, such as
+ "10GB-10TB".
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated
+ description: The deprecation status associated with this disk type.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.state
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations which communicate the end-of-life date for
+ an image can use ACTIVE. Operations which create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations which use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.replacement
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.deprecated
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DEPRECATED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.obsolete
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to OBSOLETE. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.deleted
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DELETED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.zone
+ description: URL of the zone where the disk type resides. You must specify this
+ field as part of the HTTP request URL. It is not settable as a field in the
+ request body.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.defaultDiskSizeGb
+ description: Server-defined default disk size in GB.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.region
+ description: URL of the region where the disk type resides. Only applicable
+ for regional resources. You must specify this field as part of the HTTP request
+ URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.kind
+ description: Type of the resource. Always compute#diskType for disk types.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.id
+ description: The unique identifier for the resource.
+ type: string
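+ # Illustrative war-room invocation of gcp-compute-get-disk-type (pd-standard is taken
+ # from the disk type example earlier in this file; the zone value is hypothetical):
+ #   !gcp-compute-get-disk-type disktype=pd-standard zone=us-central1-a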
+ - arguments:
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of disk types available to the specified project.
+ execution: false
+ name: gcp-compute-list-disk-types
+ outputs:
+ - contextPath: GoogleCloudCompute.DiskTypes.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.description
+ description: An optional description of this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.validDiskSize
+ description: An optional textual description of the valid disk size, such as
+ "10GB-10TB".
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated
+ description: The deprecation status associated with this disk type.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.state
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations which communicate the end-of-life date for
+ an image can use ACTIVE. Operations which create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations which use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.replacement
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.deprecated
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DEPRECATED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.obsolete
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to OBSOLETE. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.deprecated.deleted
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DELETED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.zone
+ description: URL of the zone where the disk type resides. You must specify this
+ field as part of the HTTP request URL. It is not settable as a field in the
+ request body.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.defaultDiskSizeGb
+ description: Server-defined default disk size in GB.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.region
+ description: URL of the region where the disk type resides. Only applicable
+ for regional resources. You must specify this field as part of the HTTP request
+ URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.DiskTypes.kind
+ description: Type of the resource. Always compute#diskType for disk types.
+ type: string
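+ # Illustrative war-room invocation of gcp-compute-list-disk-types (hypothetical zone;
+ # the orderBy value mirrors the example in the argument description):
+ #   !gcp-compute-list-disk-types zone=us-central1-a orderBy="creationTimestamp desc"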
+ - arguments:
+ - default: false
+ description: Project ID for this request. If left empty, the configured project
+ will be used.
+ isArray: false
+ name: project
+ required: false
+ secret: false
+ - default: false
+ description: Name of the image resource to return.
+ isArray: false
+ name: image
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified image. Gets a list of available images by making
+ a list() request.
+ execution: false
+ name: gcp-compute-get-image
+ outputs:
+ - contextPath: GoogleCloudCompute.Images.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.name
+ description: Name of the resource; provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceType
+ description: The type of the image used to create this disk. The default and
+ only value is RAW.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.rawDisk
+ description: The parameters of the raw disk image.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.rawDisk.source
+ description: The full Google Cloud Storage URL where the disk image is stored.
+ You must provide either this property or the sourceDisk property but not both.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.rawDisk.sha1Checksum
+ description: An optional SHA1 checksum of the disk image before unpackaging;
+ provided by the client when the disk image is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.rawDisk.containerType
+ description: The format used to encode and transmit the block device, which
+ should be TAR. This is just a container and transmission format and not a
+ runtime format. Provided by the client when the disk image is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated
+ description: The deprecation status associated with this image.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.state
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations which communicate the end-of-life date for
+ an image can use ACTIVE. Operations which create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations which use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.replacement
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.deprecated
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DEPRECATED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.obsolete
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to OBSOLETE. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.deleted
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DELETED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.status
+ description: The status of the image. An image can be used to create other resources,
+ such as instances, only after the image has been successfully created and
+ the status is set to READY. Possible values are FAILED, PENDING, or READY.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.archiveSizeBytes
+ description: Size of the image tar.gz archive stored in Google Cloud Storage
+ (in bytes).
+ type: string
+ - contextPath: GoogleCloudCompute.Images.diskSizeGb
+ description: Size of the image when restored onto a persistent disk (in GB).
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDisk
+ description: 'URL of the source disk used to create this image. This can be
+ a full or valid partial URL. You must provide either this property or the
+ rawDisk.source property but not both to create an image. For example, the
+ following are valid values: https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk
+ , projects/project/zones/zone/disks/disk , zones/zone/disks/disk'
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskId
+ description: The ID value of the disk used to create this image. This value
+ may be used to determine whether the image was taken from the current or a
+ previous instance of a given disk name.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.licenses
+ description: Any applicable license URI.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.family
+ description: The name of the image family to which this image belongs. You can
+ create disks by specifying an image family instead of a specific image name.
+ The image family always returns its latest image that is not deprecated. The
+ name of the image family must comply with RFC1035.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.imageEncryptionKey
+ description: Encrypts the image using a customer-supplied encryption key. After
+ you encrypt an image with a customer-supplied key, you must provide the same
+ key if you use the image later (e.g. to create a disk from the image). Customer-supplied
+ encryption keys do not protect access to metadata of the disk. If you do not
+ provide an encryption key when creating the image, then the disk will be encrypted
+ using an automatically generated key and you do not need to provide a key
+ to use the image later.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.imageEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.imageEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.imageEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskEncryptionKey
+ description: The customer-supplied encryption key of the source disk. Required
+ if the source disk is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.labels
+ description: Labels to apply to this image. These can be later modified by the
+ setLabels method.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.labelFingerprint
+ description: A fingerprint for the labels being applied to this image, which
+ is essentially a hash of the labels used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update labels. You must always provide an up-to-date fingerprint
+ hash in order to update or change labels; otherwise, the request will fail
+ with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.guestOsFeatures
+ description: A list of features to enable on the guest operating system. Applicable
+ only for bootable images. Read Enabling guest operating system features to
+ see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.guestOsFeatures.type
+ description: The ID of a supported feature. Read Enabling guest operating system
+ features to see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.licenseCodes
+ description: Integer license codes indicating which licenses are attached to
+ this image.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImage
+ description: URL of the source image used to create this image. This can be
+ a full or valid partial URL.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageId
+ description: The ID value of the image used to create this image. This value
+ may be used to determine whether the image was taken from the current or a
+ previous instance of a given image name.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageEncryptionKey
+ description: The customer-supplied encryption key of the source image. Required
+ if the source image is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshot
+ description: URL of the source snapshot used to create this image. This can
+ be a full or valid partial URL.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotId
+ description: The ID value of the snapshot used to create this image. This
+ value may be used to determine whether the snapshot was taken from the current
+ or a previous instance of a given snapshot name.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotEncryptionKey
+ description: The customer-supplied encryption key of the source snapshot. Required
+ if the source snapshot is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.kind
+ description: Type of the resource. Always compute#image for images.
+ type: string
+ - arguments:
+ - default: false
+ description: Project ID for this request. If left empty, the configured project
+ will be used.
+ isArray: false
+ name: project
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves the list of custom images available to the specified project.
+ Custom images are images you create that belong to your project. This method
+ does not get any images that belong to other projects, including publicly-available
+ images, like Debian 8. If you want to get a list of publicly-available images,
+ use this method to make a request to the respective image project, such as debian-cloud
+ or windows-cloud.
+ execution: false
+ name: gcp-compute-list-images
+ outputs:
+ - contextPath: GoogleCloudCompute.Images.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.name
+ description: Name of the resource; provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceType
+ description: The type of the image used to create this disk. The default and
+ only value is RAW.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.rawDisk
+ description: The parameters of the raw disk image.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.rawDisk.source
+ description: The full Google Cloud Storage URL where the disk image is stored.
+ You must provide either this property or the sourceDisk property but not both.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.rawDisk.sha1Checksum
+ description: An optional SHA1 checksum of the disk image before unpackaging,
+ provided by the client when the disk image is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.rawDisk.containerType
+ description: The format used to encode and transmit the block device, which
+ should be TAR. This is just a container and transmission format and not a
+ runtime format. Provided by the client when the disk image is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated
+ description: The deprecation status associated with this image.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.state
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations which communicate the end of life date for
+ an image can use ACTIVE. Operations which create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations which use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.replacement
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.deprecated
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DEPRECATED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.obsolete
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to OBSOLETE. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.deprecated.deleted
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DELETED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.status
+ description: The status of the image. An image can be used to create other resources,
+ such as instances, only after the image has been successfully created and
+ the status is set to READY. Possible values are FAILED, PENDING, or READY.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.archiveSizeBytes
+ description: Size of the image tar.gz archive stored in Google Cloud Storage
+ (in bytes).
+ type: string
+ - contextPath: GoogleCloudCompute.Images.diskSizeGb
+ description: Size of the image when restored onto a persistent disk (in GB).
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDisk
+ description: 'URL of the source disk used to create this image. This can be
+ a full or valid partial URL. You must provide either this property or the
+ rawDisk.source property but not both to create an image. For example, the
+ following are valid values: https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk
+ , projects/project/zones/zone/disks/disk , zones/zone/disks/disk'
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskId
+ description: The ID value of the disk used to create this image. This value
+ may be used to determine whether the image was taken from the current or a
+ previous instance of a given disk name.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.licenses
+ description: Any applicable license URI.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.family
+ description: The name of the image family to which this image belongs. You can
+ create disks by specifying an image family instead of a specific image name.
+ The image family always returns its latest image that is not deprecated. The
+ name of the image family must comply with RFC1035.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.imageEncryptionKey
+ description: Encrypts the image using a customer-supplied encryption key. After
+ you encrypt an image with a customer-supplied key, you must provide the same
+ key if you use the image later (e.g. to create a disk from the image). Customer-supplied
+ encryption keys do not protect access to metadata of the disk. If you do not
+ provide an encryption key when creating the image, then the disk will be encrypted
+ using an automatically generated key and you do not need to provide a key
+ to use the image later.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.imageEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.imageEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.imageEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskEncryptionKey
+ description: The customer-supplied encryption key of the source disk. Required
+ if the source disk is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceDiskEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.labels
+ description: Labels to apply to this image. These can be later modified by the
+ setLabels method.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.labelFingerprint
+ description: A fingerprint for the labels being applied to this image, which
+ is essentially a hash of the labels used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update labels. You must always provide an up-to-date fingerprint
+ hash in order to update or change labels; otherwise, the request will fail
+ with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.guestOsFeatures
+ description: A list of features to enable on the guest operating system. Applicable
+ only for bootable images. Read Enabling guest operating system features to
+ see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.guestOsFeatures.type
+ description: The ID of a supported feature. Read Enabling guest operating system
+ features to see a list of available options.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.licenseCodes
+ description: Integer license codes indicating which licenses are attached to
+ this image.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImage
+ description: URL of the source image used to create this image. This can be
+ a full or valid partial URL.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageId
+ description: The ID value of the image used to create this image. This value
+ may be used to determine whether the image was taken from the current or a
+ previous instance of a given image name.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageEncryptionKey
+ description: The customer-supplied encryption key of the source image. Required
+ if the source image is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceImageEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshot
+ description: URL of the source snapshot used to create this image. This can
+ be a full or valid partial URL.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotId
+ description: The ID value of the snapshot used to create this image. This
+ value may be used to determine whether the snapshot was taken from the current
+ or a previous instance of a given snapshot name.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotEncryptionKey
+ description: The customer-supplied encryption key of the source snapshot. Required
+ if the source snapshot is protected by a customer-supplied encryption key.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.sourceSnapshotEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Images.kind
+ description: Type of the resource. Always compute#image for images.
+ type: string
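The maxResults/pageToken pair in the arguments above follows the standard Compute Engine pagination contract, with filters and orderBy passed through to the API. A minimal, hypothetical sketch of driving that loop with google-api-python-client (placeholder project and filter values; not this integration's actual code):

```python
# Hypothetical pagination sketch for an images.list-style call.
from googleapiclient.discovery import build

compute = build('compute', 'v1')  # assumes application-default credentials

def list_images(project, filters=None, order_by=None, max_results=500):
    params = {'project': project, 'maxResults': max_results}
    if filters:
        params['filter'] = filters    # e.g. 'name != example-image'
    if order_by:
        params['orderBy'] = order_by  # e.g. 'creationTimestamp desc'
    images = []
    request = compute.images().list(**params)
    while request is not None:
        response = request.execute()
        images.extend(response.get('items', []))
        # list_next feeds nextPageToken back in until the pages run out
        request = compute.images().list_next(request, response)
    return images
```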
+ - arguments:
+ - default: false
+ description: Name of the image resource to delete.
+ isArray: false
+ name: image
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the specified image.
+ execution: false
+ name: gcp-compute-delete-image
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
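Deletion-style commands return an Operation resource rather than the deleted object, which is why the outputs above are GoogleCloudCompute.Operations fields. A minimal hypothetical sketch (placeholder names, assumes application-default credentials):

```python
# Hypothetical sketch: images.delete returns a global Operation.
from googleapiclient.discovery import build

compute = build('compute', 'v1')
op = compute.images().delete(project='my-project', image='my-image').execute()
print(op['name'], op['status'])  # status moves PENDING -> RUNNING -> DONE
```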
+ - arguments:
+ - default: false
+ description: Name or ID of the resource for this request.
+ isArray: false
+ name: image
+ required: true
+ secret: false
+ - default: false
+ description: 'An object containing a list of "key": value pairs. Example: key=abc,value=123;key=def,value=456'
+ isArray: false
+ name: labels
+ required: true
+ secret: false
+ - default: false
+ description: The fingerprint of the previous set of labels for this resource,
+ used to detect conflicts. The fingerprint is initially generated by Compute
+ Engine and changes after every request to modify or update labels. You must
+ always provide an up-to-date fingerprint hash when updating or changing labels;
+ otherwise, the request will fail with error 412 conditionNotMet. Make a get()
+ request to the resource to get the latest fingerprint.
+ isArray: false
+ name: labelFingerprint
+ required: true
+ secret: false
+ deprecated: false
+ description: Sets the labels on an image.
+ execution: false
+ name: gcp-compute-set-image-labels
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
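The labelFingerprint argument implements optimistic locking: read the image, reuse its current fingerprint, then write. A hypothetical sketch of that read-modify-write cycle; parse_labels and its handling of the documented 'key=abc,value=123;key=def,value=456' format are assumptions, not the integration's actual parser:

```python
# Hypothetical optimistic-locking sketch for an images.setLabels-style call.
from googleapiclient.discovery import build

def parse_labels(arg):
    """Parse 'key=abc,value=123;key=def,value=456' into {'abc': '123', 'def': '456'}."""
    labels = {}
    for pair in arg.split(';'):
        fields = dict(item.split('=', 1) for item in pair.split(','))
        labels[fields['key']] = fields['value']
    return labels

compute = build('compute', 'v1')
image = compute.images().get(project='my-project', image='my-image').execute()
compute.images().setLabels(
    project='my-project',
    resource='my-image',
    body={
        'labels': parse_labels('key=env,value=prod'),
        # a stale fingerprint fails with 412 conditionNotMet
        'labelFingerprint': image['labelFingerprint'],
    },
).execute()
```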
+ - arguments:
+ - default: false
+ description: Name of the resource; provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The full Google Cloud Storage URL where the disk image is stored.
+ You must provide either this property or the sourceDisk property but not both.
+ isArray: false
+ name: rawDiskSource
+ required: false
+ secret: false
+ - default: false
+ description: An optional SHA1 checksum of the disk image before unpackaging,
+ provided by the client when the disk image is created.
+ isArray: false
+ name: rawDiskSha1Checksum
+ required: false
+ secret: false
+ - default: false
+ description: The format used to encode and transmit the block device, which
+ should be TAR. This is just a container and transmission format and not a
+ runtime format. Provided by the client when the disk image is created.
+ isArray: false
+ name: rawDiskContainerType
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations which communicate the end of life date for
+ an image can use ACTIVE. Operations which create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations which use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ isArray: false
+ name: deprecatedState
+ predefined:
+ - ACTIVE
+ - DEPRECATED
+ - OBSOLETE
+ - DELETED
+ required: false
+ secret: false
+ - default: false
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ isArray: false
+ name: deprecatedReplacement
+ required: false
+ secret: false
+ - default: false
+ description: Size of the image tar.gz archive stored in Google Cloud Storage
+ (in bytes).
+ isArray: false
+ name: archiveSizeBytes
+ required: false
+ secret: false
+ - default: false
+ description: Size of the image when restored onto a persistent disk (in GB).
+ isArray: false
+ name: diskSizeGb
+ required: false
+ secret: false
+ - default: false
+ description: 'URL of the source disk used to create this image. This can be
+ a full or valid partial URL. You must provide either this property or the
+ rawDisk.source property but not both to create an image. For example, the
+ following are valid values: https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk
+ , projects/project/zones/zone/disks/disk , zones/zone/disks/disk. Authorization
+ requires the following Google IAM permission on the specified resource sourceDisk: compute.disks.useReadOnly'
+ isArray: false
+ name: sourceDisk
+ required: false
+ secret: false
+ - default: false
+ description: 'Any applicable license URI. Authorization requires the following
+ Google IAM permission on the specified resource licenses: compute.licenseCodes.use'
+ isArray: false
+ name: licenses
+ required: false
+ secret: false
+ - default: false
+ description: The name of the image family to which this image belongs. You can
+ create disks by specifying an image family instead of a specific image name.
+ The image family always returns its latest image that is not deprecated. The
+ name of the image family must comply with RFC1035.
+ isArray: false
+ name: family
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ isArray: false
+ name: imageEncryptionKeyRawKey
+ required: false
+ secret: false
+ - default: false
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ isArray: false
+ name: imageEncryptionKeyKmsKeyName
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ isArray: false
+ name: sourceDiskEncryptionKeyRawKey
+ required: false
+ secret: false
+ - default: false
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ isArray: false
+ name: sourceDiskEncryptionKeyKmsKeyName
+ required: false
+ secret: false
+ - default: false
+ description: 'Labels to apply to this image. These can be later modified by
+ the setLabels method. An object containing a list of "key": value pairs.
+ Example: key=abc,value=123;key=def,value=456'
+ isArray: false
+ name: labels
+ required: false
+ secret: false
+ - default: false
+ description: A fingerprint for the labels being applied to this image, which
+ is essentially a hash of the labels used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update labels. You must always provide an up-to-date fingerprint
+ hash in order to update or change labels; otherwise, the request will fail
+ with error 412 conditionNotMet. To see the latest fingerprint, make a get()
+ request to retrieve an image.
+ isArray: false
+ name: labelFingerprint
+ required: false
+ secret: false
+ - default: false
+ description: A list of features to enable on the guest operating system. Applicable
+ only for bootable images. Read Enabling guest operating system features to
+ see a list of available options.
+ isArray: false
+ name: guestOsFeatures
+ required: false
+ secret: false
+ - default: false
+ description: 'Integer license codes indicating which licenses are attached to
+ this image. Authorization requires the following Google IAM permission on
+ the specified resource licenseCodes: compute.licenseCodes.use'
+ isArray: false
+ name: licenseCodes
+ required: false
+ secret: false
+ - default: false
+ description: 'URL of the source image used to create this image. This can be
+ a full or valid partial URL. You must provide exactly one of: this property,
+ or the rawDisk.source property, or the sourceDisk property in order to create
+ an image.'
+ isArray: false
+ name: sourceImage
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ isArray: false
+ name: sourceImageEncryptionKeyRawKey
+ required: false
+ secret: false
+ - default: false
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ isArray: false
+ name: sourceImageEncryptionKeyKmsKeyName
+ required: false
+ secret: false
+ - default: false
+ description: 'URL of the source snapshot used to create this image. This can
+ be a full or valid partial URL. You must provide exactly one of: this property,
+ or the sourceImage property, or the rawDisk.source property, or the sourceDisk
+ property in order to create an image.'
+ isArray: false
+ name: sourceSnapshot
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ isArray: false
+ name: sourceSnapshotEncryptionKeyRawKey
+ required: false
+ secret: false
+ - default: false
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ isArray: false
+ name: sourceSnapshotEncryptionKeyKmsKeyName
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Force image creation if true.
+ isArray: false
+ name: forceCreate
+ predefined:
+ - 'true'
+ - 'false'
+ required: true
+ secret: false
+ deprecated: false
+ description: Creates an image in the specified project using the data included
+ in the request.
+ execution: false
+ name: gcp-compute-insert-image
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
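The flat arguments above (rawDiskSource, imageEncryptionKeyRawKey, and so on) presumably map onto the nested Compute API request body, with forceCreate passed as a query parameter. A hypothetical sketch with placeholder values:

```python
# Hypothetical sketch of assembling an images.insert request body.
from googleapiclient.discovery import build

compute = build('compute', 'v1')
body = {
    'name': 'my-image',
    'family': 'my-family',
    'sourceDisk': 'zones/us-central1-a/disks/my-disk',
    # flat *EncryptionKey* arguments become nested objects; placeholder key
    'imageEncryptionKey': {'rawKey': 'SGVsbG8gZnJvbSBHb29nbGUgQ2xvdWQgUGxhdGZvcm0='},
}
op = compute.images().insert(
    project='my-project',
    body=body,
    forceCreate=True,  # the forceCreate argument above
).execute()
print(op['status'])
```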
+ - arguments:
+ - default: false
+ description: The name of the instance group where you are adding instances.
+ isArray: false
+ name: instanceGroup
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone where the instance group is located.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: A comma-separated list of instances to add to the instance group.
+ isArray: false
+ name: instances
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds a list of instances to the specified instance group. All of
+ the instances in the instance group must be in the same network/subnetwork.
+ Read Adding instances for more information.
+ execution: false
+ name: gcp-compute-instance-groups-add-instances
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
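Because the Compute API references instances by URL, the comma-separated instances argument has to be expanded into a list of instance references. A hypothetical sketch with placeholder names:

```python
# Hypothetical sketch of an instanceGroups.addInstances-style call.
from googleapiclient.discovery import build

compute = build('compute', 'v1')
project, zone = 'my-project', 'us-central1-a'
names = 'instance-1,instance-2'.split(',')  # the comma-separated argument
body = {
    'instances': [
        {'instance': f'projects/{project}/zones/{zone}/instances/{name}'}
        for name in names
    ]
}
op = compute.instanceGroups().addInstances(
    project=project, zone=zone, instanceGroup='my-group', body=body,
).execute()
```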
+ - arguments:
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves the list of instance groups and sorts them by zone.
+ execution: false
+ name: gcp-compute-aggregated-list-instance-groups
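aggregatedList responses are keyed by scope (for example, zones/us-central1-a), which is what "sorts them by zone" refers to. A hypothetical sketch of walking such a response:

```python
# Hypothetical sketch: aggregated results are grouped per zone scope.
from googleapiclient.discovery import build

compute = build('compute', 'v1')
response = compute.instanceGroups().aggregatedList(project='my-project').execute()
for scope, data in response.get('items', {}).items():
    for group in data.get('instanceGroups', []):  # absent in empty scopes
        print(scope, group['name'])
```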
+ - arguments:
+ - default: false
+ description: The name of the instance group to delete.
+ isArray: false
+ name: instanceGroup
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone where the instance group is located.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the specified instance group. The instances in the group
+ are not deleted. Note that the instance group must not belong to a backend service.
+ Read Deleting an instance group for more information.
+ execution: false
+ name: gcp-compute-delete-instance-group
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: The name of the instance group.
+ isArray: false
+ name: instanceGroup
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone where the instance group is located.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified instance group. To retrieve a list of available
+ instance groups, make a list() request.
+ execution: false
+ name: gcp-compute-get-instance-group
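+ # Illustrative war-room invocation (zone and group names are hypothetical):
+ # !gcp-compute-get-instance-group instanceGroup=example-group zone=us-central1-a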
+ outputs:
+ - contextPath: GoogleCloudCompute.InstanceGroups.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.creationTimestamp
+ description: The creation timestamp for this instance group in RFC3339 text
+ format.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.name
+ description: The name of the instance group. The name must be 1-63 characters
+ long, and comply with RFC1035.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.namedPorts
+ description: Assigns a name to a port number.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.namedPorts.name
+ description: The name for this named port. The name must be 1-63 characters
+ long, and comply with RFC1035.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.namedPorts.port
+ description: The port number, which can be a value between 1 and 65535.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.network
+ description: The URL of the network to which all instances in the instance group
+ belong.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.fingerprint
+ description: The fingerprint of the named ports. The system uses this fingerprint
+ to detect conflicts when multiple users change the named ports concurrently.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.zone
+ description: The URL of the zone where the instance group is located (for zonal
+ resources).
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.selfLink
+ description: The URL for this instance group. The server generates this URL.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.size
+ description: The total number of instances in the instance group.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.region
+ description: The URL of the region where the instance group is located (for
+ regional resources).
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.subnetwork
+ description: The URL of the subnetwork to which all instances in the instance
+ group belong.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.kind
+ description: The resource type, which is always compute#instanceGroup for
+ instance groups.
+ type: string
+ - arguments:
+ - default: false
+ description: The name of the instance group. The name must be 1-63 characters
+ long, and comply with RFC1035.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone where you want to create the instance group.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: 'Assigns a name to a port number. For example: name=abc,port=123;name=def,port=456'
+ isArray: false
+ name: namedPorts
+ required: false
+ secret: false
+ - default: false
+ description: The URL of the network to which all instances in the instance group
+ belong.
+ isArray: false
+ name: network
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates an instance group in the specified project using the parameters
+ that are included in the request.
+ execution: false
+ name: gcp-compute-insert-instance-group
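+ # Illustrative war-room invocation (values are hypothetical; namedPorts follows the name=...,port=... format described above):
+ # !gcp-compute-insert-instance-group name=example-group zone=us-central1-a namedPorts="name=http,port=80"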
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: The user who requested the operation, for example an email address.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: The name of the zone where the instance group is located.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results in a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves the list of instance groups that are located in the specified
+ project and zone.
+ execution: false
+ name: gcp-compute-list-instance-groups
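+ # Illustrative war-room invocation (zone and filter values are hypothetical):
+ # !gcp-compute-list-instance-groups zone=us-central1-a maxResults=100 filters="name != example-instance"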
+ outputs:
+ - contextPath: GoogleCloudCompute.InstanceGroups.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.creationTimestamp
+ description: The creation timestamp for this instance group in RFC3339 text
+ format.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.name
+ description: The name of the instance group. The name must be 1-63 characters
+ long, and comply with RFC1035.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.namedPorts
+ description: Assigns a name to a port number.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.namedPorts.name
+ description: The name for this named port. The name must be 1-63 characters
+ long, and comply with RFC1035.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.namedPorts.port
+ description: The port number, which can be a value between 1 and 65535.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.network
+ description: The URL of the network to which all instances in the instance group
+ belong.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.fingerprint
+ description: The fingerprint of the named ports. The system uses this fingerprint
+ to detect conflicts when multiple users change the named ports concurrently.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.zone
+ description: The URL of the zone where the instance group is located (for zonal
+ resources).
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.selfLink
+ description: The URL for this instance group. The server generates this URL.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.size
+ description: The total number of instances in the instance group.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.region
+ description: The URL of the region where the instance group is located (for
+ regional resources).
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.subnetwork
+ description: The URL of the subnetwork to which all instances in the instance
+ group belong.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroups.kind
+ description: The resource type, which is always compute#instanceGroup for
+ instance groups.
+ type: string
+ - arguments:
+ - default: false
+ description: The name of the zone where the instance group is located.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: The name of the instance group from which you want to generate
+ a list of included instances.
+ isArray: false
+ name: instanceGroup
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results in a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: A filter for the state of the instances in the instance group.
+ Valid options are ALL or RUNNING. If you do not specify this parameter, the
+ list includes all instances regardless of their state.
+ isArray: false
+ name: instanceState
+ predefined:
+ - ALL
+ - RUNNING
+ required: false
+ secret: false
+ deprecated: false
+ description: Lists the instances in the specified instance group.
+ execution: false
+ name: gcp-compute-list-instance-group-instances
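+ # Illustrative war-room invocation (values are hypothetical):
+ # !gcp-compute-list-instance-group-instances instanceGroup=example-group zone=us-central1-a instanceState=RUNNING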
+ outputs:
+ - contextPath: GoogleCloudCompute.InstanceGroupsInstances.Instances.instance
+ description: The URL of the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroupsInstances.Instances.status
+ description: The status of the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroupsInstances.Instances.namedPorts
+ description: The named ports that belong to this instance group.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroupsInstances.Instances.namedPorts.name
+ description: The name for this named port. The name must be 1-63 characters
+ long, and comply with RFC1035.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroupsInstances.Instances.namedPorts.port
+ description: The port number, which can be a value between 1 and 65535.
+ type: string
+ - contextPath: GoogleCloudCompute.InstanceGroupsInstances.Group
+ description: The group listed.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The name of the instance group from which the specified instances
+ will be removed.
+ isArray: false
+ name: instanceGroup
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone where the instance group is located.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: A comma-separated list of instances to remove from the instance
+ group.
+ isArray: false
+ name: instances
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes one or more instances from the specified instance group,
+ but does not delete those instances. If the group is part of a backend service
+ that has enabled connection draining, it can take up to 60 seconds after the
+ connection draining duration before the VM instance is removed or deleted.
+ execution: false
+ name: gcp-compute-instance-groups-remove-instances
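+ # Illustrative war-room invocation (instance names are hypothetical; instances is comma separated):
+ # !gcp-compute-instance-groups-remove-instances instanceGroup=example-group zone=us-central1-a instances=instance-1,instance-2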
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: The user who requested the operation, for example an email address.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: The name of the instance group where the named ports are updated.
+ isArray: false
+ name: instanceGroup
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone where the instance group is located.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: 'The list of named ports to set for this instance group. For example:
+ name=abc,port=123;name=def,port=456'
+ isArray: false
+ name: namedPorts
+ required: true
+ secret: false
+ - default: false
+ description: The fingerprint of the named ports information for this instance
+ group. Use this optional property to prevent conflicts when multiple users
+ change the named ports settings concurrently. Obtain the fingerprint with
+ the instanceGroups.get method. Then, include the fingerprint in your request
+ to ensure that you do not overwrite changes that were applied from another
+ concurrent request. A request with an incorrect fingerprint will fail with
+ error 412 conditionNotMet.
+ isArray: false
+ name: fingerprint
+ required: false
+ secret: false
+ deprecated: false
+ description: Sets the named ports for the specified instance group.
+ execution: false
+ name: gcp-compute-set-group-instance-named-ports
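+ # Illustrative war-room invocation (values are hypothetical; the fingerprint would come from a prior gcp-compute-get-instance-group call):
+ # !gcp-compute-set-group-instance-named-ports instanceGroup=example-group zone=us-central1-a namedPorts="name=http,port=80;name=https,port=443"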
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: The user who requested the operation, for example an email address.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the region resource to return.
+ isArray: false
+ name: region
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified Region resource. To retrieve a list of available
+ regions, make a list() request.
+ execution: false
+ name: gcp-compute-get-region
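+ # Illustrative war-room invocation (region name is hypothetical):
+ # !gcp-compute-get-region region=us-central1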
+ outputs:
+ - contextPath: GoogleCloudCompute.Regions.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.description
+ description: Textual description of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.status
+ description: Status of the region, either UP or DOWN.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.zones
+ description: A list of zones available in this region, in the form of resource
+ URLs.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.quotas
+ description: Quotas assigned to this region.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.quotas.metric
+ description: Name of the quota metric.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.quotas.limit
+ description: Quota limit for this metric.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.quotas.usage
+ description: Current usage of this metric.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.quotas.owner
+ description: Owning resource. This is the resource on which this quota is applied.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.deprecated
+ description: The deprecation status associated with this region.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.deprecated.state
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations that communicate the end-of-life date for
+ an image can use ACTIVE. Operations that create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations that use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.deprecated.replacement
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.deprecated.deprecated
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DEPRECATED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.deprecated.obsolete
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to OBSOLETE. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.deprecated.deleted
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DELETED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.kind
+ description: Type of the resource. Always compute#region for regions.
+ type: string
+ - arguments:
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results in a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves the list of region resources available to the specified
+ project.
+ execution: false
+ name: gcp-compute-list-regions
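+ # Illustrative war-room invocation (paging and ordering values are hypothetical):
+ # !gcp-compute-list-regions maxResults=50 orderBy="creationTimestamp desc"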
+ outputs:
+ - contextPath: GoogleCloudCompute.Regions.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.description
+ description: Textual description of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.status
+ description: Status of the region, either UP or DOWN.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.zones
+ description: A list of zones available in this region, in the form of resource
+ URLs.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.quotas
+ description: Quotas assigned to this region.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.quotas.metric
+ description: Name of the quota metric.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.quotas.limit
+ description: Quota limit for this metric.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.quotas.usage
+ description: Current usage of this metric.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.quotas.owner
+ description: Owning resource. This is the resource on which this quota is applied.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.deprecated
+ description: The deprecation status associated with this region.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.deprecated.state
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations that communicate the end-of-life date for
+ an image can use ACTIVE. Operations that create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations that use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.deprecated.replacement
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.deprecated.deprecated
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DEPRECATED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.deprecated.obsolete
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to OBSOLETE. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.deprecated.deleted
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DELETED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Regions.kind
+ description: Type of the resource. Always compute#region for regions.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the zone resource to return.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified Zone resource. To retrieve a list of available
+ zones, make a list() request.
+ execution: false
+ name: gcp-compute-get-zone
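+ # Illustrative war-room invocation (zone name is hypothetical):
+ # !gcp-compute-get-zone zone=us-central1-a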
+ outputs:
+ - contextPath: GoogleCloudCompute.Zones.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.description
+ description: Textual description of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.status
+ description: Status of the zone, either UP or DOWN.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.deprecated
+ description: The deprecation status associated with this zone.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.deprecated.state
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations that communicate the end-of-life date for
+ an image can use ACTIVE. Operations that create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations that use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.deprecated.replacement
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.deprecated.deprecated
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DEPRECATED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.deprecated.obsolete
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to OBSOLETE. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.deprecated.deleted
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DELETED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.region
+ description: Full URL reference to the region which hosts the zone.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.availableCpuPlatforms
+ description: Available CPU platforms for the zone.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.kind
+ description: Type of the resource. Always compute#zone for zones.
+ type: string
+ - arguments:
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results in a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves the list of Zone resources available to the specified project.
+ execution: false
+ name: gcp-compute-list-zones
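+ # Illustrative war-room invocation (filter value is hypothetical):
+ # !gcp-compute-list-zones filters="name != us-east1-b" maxResults=50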
+ outputs:
+ - contextPath: GoogleCloudCompute.Zones.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.description
+ description: Textual description of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.status
+ description: Status of the zone, either UP or DOWN.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.deprecated
+ description: The deprecation status associated with this zone.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.deprecated.state
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations that communicate the end-of-life date for
+ an image can use ACTIVE. Operations that create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations that use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.deprecated.replacement
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.deprecated.deprecated
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DEPRECATED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.deprecated.obsolete
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to OBSOLETE. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.deprecated.deleted
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DELETED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.region
+ description: Full URL reference to the region which hosts the zone.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.availableCpuPlatforms
+ description: Available CPU platforms for the zone.
+ type: string
+ - contextPath: GoogleCloudCompute.Zones.kind
+ description: Type of the resource. Always compute#zone for zones.
+ type: string
+ - arguments:
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results in a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves an aggregated list of machine types.
+ execution: false
+ name: gcp-compute-aggregated-list-machine-types
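+ # Illustrative war-room invocation (paging value is hypothetical; pass the previous response's nextPageToken as pageToken to continue):
+ # !gcp-compute-aggregated-list-machine-types maxResults=100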
+ outputs:
+ - contextPath: GoogleCloudCompute.MachineTypes.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.description
+ description: An optional textual description of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.guestCpus
+ description: The number of virtual CPUs that are available to the instance.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.memoryMb
+ description: The amount of physical memory available to the instance, defined
+ in MB.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.imageSpaceGb
+ description: This property is deprecated and will never be populated with any
+ relevant values.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.scratchDisks
+ description: A list of extended scratch disks assigned to the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.scratchDisks.diskGb
+ description: Size of the scratch disk, defined in GB.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.maximumPersistentDisks
+ description: Maximum persistent disks allowed.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.maximumPersistentDisksSizeGb
+ description: Maximum total persistent disks size (GB) allowed.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated
+ description: The deprecation status associated with this machine type.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.state
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations which communicate the end of life date for
+ an image can use ACTIVE. Operations which create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations which use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.replacement
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.deprecated
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DEPRECATED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.obsolete
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to OBSOLETE. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.deleted
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DELETED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.zone
+ description: The name of the zone where the machine type resides, such as us-central1-a.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.isSharedCpu
+ description: Whether this machine type has a shared CPU. See Shared-core machine
+ types for more information.
+ type: boolean
+ - contextPath: GoogleCloudCompute.MachineTypes.kind
+ description: The type of the resource. Always compute#machineType for machine
+ types.
+ type: string
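+ # Usage sketch (comment only), assuming the standard war-room syntax; the filter
+ # and page-size values below are illustrative, not taken from this file:
+ #   !gcp-compute-aggregated-list-machine-types maxResults=100 filters="guestCpus > 2"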
+ - arguments:
+ - default: false
+ description: Name of the machine type to return.
+ isArray: false
+ name: machineType
+ required: true
+ secret: false
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified machine type. Gets a list of available machine
+ types by making a list() request.
+ execution: false
+ name: gcp-compute-get-machine-type
+ outputs:
+ - contextPath: GoogleCloudCompute.MachineTypes.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.description
+ description: An optional textual description of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.guestCpus
+ description: The number of virtual CPUs that are available to the instance.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.memoryMb
+ description: The amount of physical memory available to the instance, defined
+ in MB.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.imageSpaceGb
+ description: This property is deprecated and will never be populated with any
+ relevant values.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.scratchDisks
+ description: A list of extended scratch disks assigned to the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.scratchDisks.diskGb
+ description: Size of the scratch disk, defined in GB.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.maximumPersistentDisks
+ description: Maximum persistent disks allowed.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.maximumPersistentDisksSizeGb
+ description: Maximum total persistent disks size (GB) allowed.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated
+ description: The deprecation status associated with this machine type.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.state
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations which communicate the end of life date for
+ an image can use ACTIVE. Operations which create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations which use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.replacement
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.deprecated
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DEPRECATED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.obsolete
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to OBSOLETE. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.deleted
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DELETED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.zone
+ description: The name of the zone where the machine type resides, such as us-central1-a.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.isSharedCpu
+ description: Whether this machine type has a shared CPU. See Shared-core machine
+ types for more information.
+ type: boolean
+ - contextPath: GoogleCloudCompute.MachineTypes.kind
+ description: The type of the resource. Always compute#machineType for machine
+ types.
+ type: string
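+ # Usage sketch (comment only): both arguments are required; the machine type and
+ # zone values are illustrative assumptions:
+ #   !gcp-compute-get-machine-type machineType=n1-standard-1 zone=us-central1-a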
+ - arguments:
+ - default: false
+ description: The name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of machine types available to the specified project.
+ execution: false
+ name: gcp-compute-list-machine-types
+ outputs:
+ - contextPath: GoogleCloudCompute.MachineTypes.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.description
+ description: An optional textual description of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.guestCpus
+ description: The number of virtual CPUs that are available to the instance.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.memoryMb
+ description: The amount of physical memory available to the instance, defined
+ in MB.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.imageSpaceGb
+ description: This property is deprecated and will never be populated with any
+ relevant values.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.scratchDisks
+ description: A list of extended scratch disks assigned to the instance.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.scratchDisks.diskGb
+ description: Size of the scratch disk, defined in GB.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.maximumPersistentDisks
+ description: Maximum persistent disks allowed.
+ type: number
+ - contextPath: GoogleCloudCompute.MachineTypes.maximumPersistentDisksSizeGb
+ description: Maximum total persistent disks size (GB) allowed.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated
+ description: The deprecation status associated with this machine type.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.state
+ description: The deprecation state of this resource. This can be ACTIVE, DEPRECATED,
+ OBSOLETE, or DELETED. Operations which communicate the end of life date for
+ an image can use ACTIVE. Operations which create a new resource using a DEPRECATED
+ resource will return successfully, but with a warning indicating the deprecated
+ resource and recommending its replacement. Operations which use OBSOLETE or
+ DELETED resources will be rejected and result in an error.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.replacement
+ description: The URL of the suggested replacement for a deprecated resource.
+ The suggested replacement resource must be the same kind of resource as the
+ deprecated resource.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.deprecated
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DEPRECATED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.obsolete
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to OBSOLETE. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.deprecated.deleted
+ description: An optional RFC3339 timestamp on or after which the state of this
+ resource is intended to change to DELETED. This is only informational and
+ the status will not change unless the client explicitly changes it.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.zone
+ description: The name of the zone where the machine type resides, such as us-central1-a.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.MachineTypes.isSharedCpu
+ description: Whether this machine type has a shared CPU. See Shared-core machine
+ types for more information.
+ type: boolean
+ - contextPath: GoogleCloudCompute.MachineTypes.kind
+ description: The type of the resource. Always compute#machineType for machine
+ types.
+ type: string
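+ # Usage sketch (comment only): paging follows the maxResults/pageToken contract
+ # described above; the token is a placeholder, not a real value:
+ #   !gcp-compute-list-machine-types zone=us-central1-a maxResults=50
+ #   !gcp-compute-list-machine-types zone=us-central1-a maxResults=50 pageToken=<nextPageToken from the previous page>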
+ - arguments:
+ - default: false
+ description: Name of the network resource to add peering to.
+ isArray: false
+ name: network
+ required: true
+ secret: false
+ - default: false
+ description: Name of the peering, which should conform to RFC1035.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ description: URL of the peer network. It can be either a full or partial URL.
+ The peer network may belong to a different project. If the partial URL does
+ not contain the project, it is assumed that the peer network is in the same project
+ as the current network.
+ isArray: false
+ name: peerNetwork
+ required: false
+ secret: false
+ - default: false
+ description: Name of this peering. Provided by the client when the peering is
+ created. The name must comply with RFC1035. Specifically, the name must be
+ 1-63 characters long and match regular expression [a-z]([-a-z0-9]*[a-z0-9])?
+ which means the first character must be a lowercase letter, and all the following
+ characters must be a dash, lowercase letter, or digit, except the last character,
+ which cannot be a dash.
+ isArray: false
+ name: networkPeeringName
+ required: false
+ secret: false
+ - default: false
+ description: The URL of the peer network. It can be either a full or partial
+ URL. The peer network may belong to a different project. If the partial URL
+ does not contain the project, it is assumed that the peer network is in the same
+ project as the current network.
+ isArray: false
+ name: networkPeeringNetwork
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether full mesh connectivity is created and managed automatically.
+ When it is set to true, Google Compute Engine will automatically create and
+ manage the routes between two networks when the peering state is ACTIVE. Otherwise,
+ the user needs to create routes manually to route packets to the peer network.
+ isArray: false
+ name: networkPeeringExchangeSubnetRoutes
+ predefined:
+ - 'True'
+ - 'False'
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds a peering to the specified network.
+ execution: false
+ name: gcp-compute-networks-add-peering
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
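+ # Usage sketch (comment only): the network and peering names and the peer URL are
+ # illustrative; a partial peer URL is resolved against the current project, as
+ # noted above:
+ #   !gcp-compute-networks-add-peering network=my-network networkPeeringName=my-peering networkPeeringNetwork=global/networks/other-network networkPeeringExchangeSubnetRoutes=True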
+ - arguments:
+ - default: false
+ description: Name of the network to delete.
+ isArray: false
+ name: network
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the specified network.
+ execution: false
+ name: gcp-compute-delete-network
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
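+ # Usage sketch (comment only): the network name is an illustrative assumption:
+ #   !gcp-compute-delete-network network=my-network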
+ - arguments:
+ - default: false
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: When set to true, the VPC network is created in "auto" mode. When
+ set to false, the VPC network is created in "custom" mode. An auto mode VPC
+ network starts with one subnet per region. Each subnet has a predetermined
+ range as described in Auto mode VPC network IP ranges.
+ isArray: false
+ name: autoCreateSubnetworks
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The network-wide routing mode to use. If set to REGIONAL, this
+ network's cloud routers will only advertise routes with subnets of this network
+ in the same region as the router. If set to GLOBAL, this network's cloud routers
+ will advertise routes with all subnets of this network, across regions.
+ isArray: false
+ name: routingConfigRoutingMode
+ predefined:
+ - REGIONAL
+ - GLOBAL
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a network in the specified project using the data included
+ in the request.
+ execution: false
+ name: gcp-compute-insert-network
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
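+ # Usage sketch (comment only): creates a custom-mode VPC with regional routing;
+ # the name is an illustrative assumption and must comply with RFC1035:
+ #   !gcp-compute-insert-network name=my-network autoCreateSubnetworks=false routingConfigRoutingMode=REGIONAL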
+ - arguments:
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves the list of networks available to the specified project.
+ execution: false
+ name: gcp-compute-list-networks
+ outputs:
+ - contextPath: GoogleCloudCompute.Networks.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.name
+ description: Name of the resource. Provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.gatewayIPv4
+ description: The gateway address for default routing out of the network. This
+ value is read only and is selected by GCP.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.autoCreateSubnetworks
+ description: When set to true, the VPC network is created in "auto" mode. When
+ set to false, the VPC network is created in "custom" mode.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Networks.subnetworks
+ description: Server-defined fully-qualified URLs for all subnetworks in this
+ VPC network.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.peerings
+ description: A list of network peerings for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.peerings.name
+ description: Name of this peering. Provided by the client when the peering is
+ created. The name must comply with RFC1035. Specifically, the name must be
+ 1-63 characters long and match regular expression [a-z]([-a-z0-9]*[a-z0-9])?
+ which means the first character must be a lowercase letter, and all the following
+ characters must be a dash, lowercase letter, or digit, except the last character,
+ which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.peerings.network
+ description: The URL of the peer network. It can be either a full or partial
+ URL. The peer network may belong to a different project. If the partial URL
+ does not contain the project, it is assumed that the peer network is in the same
+ project as the current network.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.peerings.state
+ description: State for the peering.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.peerings.stateDetails
+ description: Details about the current state of the peering.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.peerings.autoCreateRoutes
+ description: This field will be deprecated soon. Prefer using exchangeSubnetRoutes
+ instead. Indicates whether full mesh connectivity is created and managed automatically.
+ When it is set to true, Google Compute Engine will automatically create and
+ manage the routes between two networks when the state is ACTIVE. Otherwise,
+ the user needs to create routes manually to route packets to the peer network.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Networks.peerings.exchangeSubnetRoutes
+ description: Whether full mesh connectivity is created and managed automatically.
+ When it is set to true, Google Compute Engine will automatically create and
+ manage the routes between two networks when the peering state is ACTIVE. Otherwise,
+ the user needs to create routes manually to route packets to the peer network.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Networks.routingConfig
+ description: The network-level routing configuration for this network. Used
+ by Cloud Router to determine what type of network-wide routing behavior to
+ enforce.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.routingConfig.routingMode
+ description: The network-wide routing mode to use. If set to REGIONAL, this
+ network's cloud routers will only advertise routes with subnets of this network
+ in the same region as the router. If set to GLOBAL, this network's cloud routers
+ will advertise routes with all subnets of this network, across regions.
+ type: string
+ - contextPath: GoogleCloudCompute.Networks.kind
+ description: Type of the resource. Always compute#network for networks.
+ type: string
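+ # Usage sketch (comment only): the filter and ordering expressions follow the
+ # syntax described above and are illustrative:
+ #   !gcp-compute-list-networks filters="name != default" orderBy="creationTimestamp desc"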
+ - arguments:
+ - default: false
+ description: Name of the network resource to remove peering from.
+ isArray: false
+ name: network
+ required: true
+ secret: false
+ - default: false
+ description: Name of the peering, which should conform to RFC1035.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes a peering from the specified network.
+ execution: false
+ name: gcp-compute-networks-remove-peering
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
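+ # Usage sketch (comment only): both names are illustrative assumptions:
+ #   !gcp-compute-networks-remove-peering network=my-network name=my-peering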
+ - arguments:
+ - default: false
+ description: Name of the zone for this request.
+ isArray: false
+ name: zone
+ required: true
+ secret: false
+ - default: false
+ description: Name of the Operations resource.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Waits for a zone operation to complete. Note that this command occupies
+ a system resource until the operation completes.
+ execution: false
+ name: gcp-compute-wait-for-zone-operation
+ - arguments:
+ - default: false
+ description: Name of the region for this request.
+ isArray: false
+ name: region
+ required: true
+ secret: false
+ - default: false
+ description: Name of the Operations resource.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Waits for a region operation to complete. Note that this command occupies
+ a system resource until the operation completes.
+ execution: false
+ name: gcp-compute-wait-for-region-operation
+ - arguments:
+ - default: false
+ description: Name of the Operations resource.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Waits for a global operation to complete. Note that this command occupies
+ a system resource until the operation completes.
+ execution: false
+ name: gcp-compute-wait-for-global-operation
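+ # Usage sketch (comment only): the same pattern applies to the zone, region, and
+ # global variants; the operation name is an illustrative placeholder:
+ #   !gcp-compute-wait-for-zone-operation zone=us-central1-a name=operation-1234
+ #   !gcp-compute-wait-for-region-operation region=us-central1 name=operation-1234
+ #   !gcp-compute-wait-for-global-operation name=operation-1234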
+ - arguments:
+ - default: false
+ description: Name of the resource; provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: URL of the network resource for this firewall rule. If not specified
+ when creating a firewall rule, the default network is used.
+ isArray: false
+ name: network
+ required: false
+ secret: false
+ - default: false
+ description: Priority for this rule. This is an integer between 0 and 65535,
+ both inclusive. When not specified, the value assumed is 1000. Relative priorities
+ determine precedence of conflicting rules. Lower value of priority implies
+ higher precedence (e.g., a rule with priority 0 has higher precedence than a
+ rule with priority 1). DENY rules take precedence over ALLOW rules having
+ equal priority.
+ isArray: false
+ name: priority
+ required: false
+ secret: false
+ - default: false
+ description: 'If source ranges are specified, the firewall will apply only to
+ traffic that has a source IP address in these ranges. These ranges must be expressed
+ in CIDR format. One or both of sourceRanges and sourceTags may be set. If
+ both properties are set, the firewall will apply to traffic that has a source
+ IP address within sourceRanges OR a source IP that belongs to a tag listed
+ in the sourceTags property. The connection does not need to match both properties
+ for the firewall to apply. Only IPv4 is supported. Comma-separated.'
+ isArray: false
+ name: sourceRanges
+ required: false
+ secret: false
+ - default: false
+ description: If destination ranges are specified, the firewall will apply only
+ to traffic that has destination IP address in these ranges. These ranges must
+ be expressed in CIDR format. Only IPv4 is supported. Comma-separated.
+ isArray: false
+ name: destinationRanges
+ required: false
+ secret: false
+ - default: false
+ description: If source tags are specified, the firewall rule applies only to
+ traffic with source IPs that match the primary network interfaces of VM instances
+ that have the tag and are in the same VPC network. Source tags cannot be used
+ to control traffic to an instance's external IP address; they apply only to
+ traffic between instances in the same virtual network, because tags are associated
+ with instances, not IP addresses. One or both of sourceRanges and sourceTags
+ may be set. If both properties are set, the firewall will apply to traffic
+ that has source IP address within sourceRanges OR the source IP that belongs
+ to a tag listed in the sourceTags property. The connection does not need to
+ match both properties for the firewall to apply. Comma-separated.
+ isArray: false
+ name: sourceTags
+ required: false
+ secret: false
+ - default: false
+ description: A list of tags that controls which instances the firewall rule
+ applies to. If targetTags are specified, then the firewall rule applies only
+ to instances in the VPC network that have one of those tags. If no targetTags
+ are specified, the firewall rule applies to all instances on the specified
+ network. Comma-separated.
+ isArray: false
+ name: targetTags
+ required: false
+ secret: false
+ - default: false
+ description: If source service accounts are specified, the firewall will apply
+ only to traffic originating from an instance with a service account in this
+ list. Source service accounts cannot be used to control traffic to an instance's
+ external IP address because service accounts are associated with an instance,
+ not an IP address. sourceRanges can be set at the same time as sourceServiceAccounts.
+ If both are set, the firewall will apply to traffic that has source IP address
+ within sourceRanges OR the source IP belongs to an instance with service account
+ listed in sourceServiceAccount. The connection does not need to match both
+ properties for the firewall to apply. sourceServiceAccounts cannot be used
+ at the same time as sourceTags or targetTags. Comma-separated.
+ isArray: false
+ name: sourceServiceAccounts
+ required: false
+ secret: false
+ - default: false
+ description: A list of service accounts indicating sets of instances located
+ in the network that may make network connections as specified in allowed[].
+ targetServiceAccounts cannot be used at the same time as targetTags or sourceTags.
+ If neither targetServiceAccounts nor targetTags are specified, the firewall
+ rule applies to all instances on the specified network. Comma-separated.
+ isArray: false
+ name: targetServiceAccounts
+ required: false
+ secret: false
+ - default: false
+ description: 'The list of ALLOW rules specified by this firewall. Each rule
+ specifies a protocol and port-range tuple that describes a permitted connection.
+ Ex: ipprotocol=tcp,ports=22,443;ipprotocol=tcp,ports=8080,80'
+ isArray: false
+ name: allowed
+ required: false
+ secret: false
+ - default: false
+ description: 'The list of DENY rules specified by this firewall. Each rule specifies
+ a protocol and port-range tuple that describes a denied connection. Ex: ipprotocol=tcp,ports=22,443;ipprotocol=tcp,ports=8080,80'
+ isArray: false
+ name: denied
+ required: false
+ secret: false
+ - default: false
+ description: 'Direction of traffic to which this firewall applies; default is
+ INGRESS. Note: For INGRESS traffic, it is NOT supported to specify destinationRanges;
+ For EGRESS traffic, it is NOT supported to specify sourceRanges OR sourceTags.'
+ isArray: false
+ name: direction
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: This field denotes whether to enable logging for a particular firewall
+ rule.
+ isArray: false
+ name: logConfigEnable
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Denotes whether the firewall rule is disabled, i.e., not applied
+ to the network it is associated with. When set to true, the firewall rule
+ is not enforced and the network behaves as if it did not exist. If this is
+ unspecified, the firewall rule will be enabled.
+ isArray: false
+ name: disabled
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a firewall rule in the specified project using the data included
+ in the request.
+ execution: false
+ name: gcp-compute-insert-firewall
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the resource; provided by the client when the resource
+ is created. The name must be 1-63 characters long, comply with RFC1035, and
+ match the regular expression [a-z]([-a-z0-9]*[a-z0-9])?, which means the first
+ character must be a lowercase letter, and all following characters must be
+ a dash, lowercase letter, or digit, except the last character, which cannot
+ be a dash.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: URL of the network resource for this firewall rule. If not specified
+ when creating a firewall rule, the default network is used.
+ isArray: false
+ name: network
+ required: false
+ secret: false
+ - default: false
+ description: Priority for this rule. This is an integer between 0 and 65535,
+ both inclusive. When not specified, the value assumed is 1000. Relative priorities
+ determine precedence of conflicting rules. Lower value of priority implies
+ higher precedence (e.g., a rule with priority 0 has higher precedence than a
+ rule with priority 1). DENY rules take precedence over ALLOW rules having
+ equal priority.
+ isArray: false
+ name: priority
+ required: false
+ secret: false
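+ # Worked example (illustrative): with a DENY rule at priority 100 and an ALLOW
+ # rule at priority 200 matching the same traffic, the priority-100 DENY wins
+ # (lower value = higher precedence); at equal priority, DENY beats ALLOW.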
+ - default: false
+ description: 'If source ranges are specified, the firewall will apply only to
+ traffic that has source IP address in these ranges. These ranges must be expressed
+ in CIDR format. One or both of sourceRanges and sourceTags may be set. If
+ both properties are set, the firewall will apply to traffic that has source
+ IP address within sourceRanges OR the source IP that belongs to a tag listed
+ in the sourceTags property. The connection does not need to match both properties
+ for the firewall to apply. Only IPv4 is supported. Comma-separated list.'
+ isArray: false
+ name: sourceRanges
+ required: false
+ secret: false
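+ # Illustrative value (hypothetical ranges): one or more IPv4 CIDR blocks,
+ # comma-separated, e.g. sourceRanges="10.0.0.0/8,203.0.113.0/24"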
+ - default: false
+ description: If destination ranges are specified, the firewall will apply only
+ to traffic that has destination IP address in these ranges. These ranges must
+ be expressed in CIDR format. Only IPv4 is supported. Comma-separated list.
+ isArray: false
+ name: destinationRanges
+ required: false
+ secret: false
+ - default: false
+ description: If source tags are specified, the firewall rule applies only to
+ traffic with source IPs that match the primary network interfaces of VM instances
+ that have the tag and are in the same VPC network. Source tags cannot be used
+ to control traffic to an instance's external IP address; they apply only to
+ traffic between instances in the same virtual network, because tags are associated
+ with instances, not IP addresses. One or both of sourceRanges and sourceTags
+ may be set. If both properties are set, the firewall will apply to traffic
+ that has source IP address within sourceRanges OR the source IP that belongs
+ to a tag listed in the sourceTags property. The connection does not need to
+ match both properties for the firewall to apply. Comma-separated list.
+ isArray: false
+ name: sourceTags
+ required: false
+ secret: false
+ - default: false
+ description: A list of tags that controls which instances the firewall rule
+ applies to. If targetTags are specified, then the firewall rule applies only
+ to instances in the VPC network that have one of those tags. If no targetTags
+ are specified, the firewall rule applies to all instances on the specified
+ network. Comma-separated list.
+ isArray: false
+ name: targetTags
+ required: false
+ secret: false
+ - default: false
+ description: If source service accounts are specified, the firewall will apply
+ only to traffic originating from an instance with a service account in this
+ list. Source service accounts cannot be used to control traffic to an instance's
+ external IP address because service accounts are associated with an instance,
+ not an IP address. sourceRanges can be set at the same time as sourceServiceAccounts.
+ If both are set, the firewall will apply to traffic that has source IP address
+ within sourceRanges OR the source IP belongs to an instance with service account
+ listed in sourceServiceAccounts. The connection does not need to match both
+ properties for the firewall to apply. sourceServiceAccounts cannot be used
+ at the same time as sourceTags or targetTags. Comma-separated list.
+ isArray: false
+ name: sourceServiceAccounts
+ required: false
+ secret: false
+ - default: false
+ description: A list of service accounts indicating sets of instances located
+ in the network that may make network connections as specified in allowed[].
+ targetServiceAccounts cannot be used at the same time as targetTags or sourceTags.
+ If neither targetServiceAccounts nor targetTags are specified, the firewall
+ rule applies to all instances on the specified network. Comma-separated list.
+ isArray: false
+ name: targetServiceAccounts
+ required: false
+ secret: false
+ - default: false
+ description: 'The list of ALLOW rules specified by this firewall. Each rule
+ specifies a protocol and port-range tuple that describes a permitted connection.
+ Ex: ipprotocol=tcp,ports=22,443;ipprotocol=tcp,ports=8080,80'
+ isArray: false
+ name: allowed
+ required: false
+ secret: false
+ - default: false
+ description: 'The list of DENY rules specified by this firewall. Each rule specifies
+ a protocol and port-range tuple that describes a denied connection. Ex: ipprotocol=tcp,ports=22,443;ipprotocol=tcp,ports=8080,80'
+ isArray: false
+ name: denied
+ required: false
+ secret: false
+ - default: false
+ description: 'Direction of traffic to which this firewall applies; default is
+ INGRESS. Note: For INGRESS traffic, it is NOT supported to specify destinationRanges;
+ For EGRESS traffic, it is NOT supported to specify sourceRanges OR sourceTags.'
+ isArray: false
+ name: direction
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: This field denotes whether to enable logging for a particular firewall
+ rule.
+ isArray: false
+ name: logConfigEnable
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Denotes whether the firewall rule is disabled, i.e., not applied
+ to the network it is associated with. When set to true, the firewall rule
+ is not enforced and the network behaves as if it did not exist. If this is
+ unspecified, the firewall rule will be enabled.
+ isArray: false
+ name: disabled
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Updates the specified firewall rule with the data included in the
+ request.
+ execution: false
+ name: gcp-compute-patch-firewall
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
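+ # Illustrative filter strings (hypothetical values) following the
+ # <field> <operator> <value> syntax above, e.g. filters="name != example-instance"
+ # or filters="disabled = true"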
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
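+ # Pagination sketch (illustrative): request a page, then feed the returned
+ # nextPageToken back in as pageToken to fetch the next page, e.g.
+ # !gcp-compute-list-firewall maxResults=100
+ # !gcp-compute-list-firewall maxResults=100 pageToken=<nextPageToken from previous call>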
+ deprecated: false
+ description: Retrieves the list of firewall rules available to the specified project.
+ execution: false
+ name: gcp-compute-list-firewall
+ outputs:
+ - contextPath: GoogleCloudCompute.Firewalls.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.name
+ description: Name of the resource; provided by the client when the resource
+ is created. The name must be 1-63 characters long, comply with RFC1035, and
+ match the regular expression [a-z]([-a-z0-9]*[a-z0-9])?, which means the first
+ character must be a lowercase letter, and all following characters must be
+ a dash, lowercase letter, or digit, except the last character, which cannot
+ be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.network
+ description: URL of the network resource for this firewall rule.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.priority
+ description: Priority for this rule. This is an integer between 0 and 65535,
+ both inclusive. When not specified, the value assumed is 1000. Relative priorities
+ determine precedence of conflicting rules. Lower value of priority implies
+ higher precedence (e.g., a rule with priority 0 has higher precedence than a
+ rule with priority 1). DENY rules take precedence over ALLOW rules having
+ equal priority.
+ type: number
+ - contextPath: GoogleCloudCompute.Firewalls.sourceRanges
+ description: If source ranges are specified, the firewall will apply only to
+ traffic that has source IP address in these ranges. These ranges must be expressed
+ in CIDR format. One or both of sourceRanges and sourceTags may be set. If
+ both properties are set, the firewall will apply to traffic that has source
+ IP address within sourceRanges OR the source IP that belongs to a tag listed
+ in the sourceTags property. The connection does not need to match both properties
+ for the firewall to apply. Only IPv4 is supported.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.destinationRanges
+ description: If destination ranges are specified, the firewall will apply only
+ to traffic that has destination IP address in these ranges. These ranges must
+ be expressed in CIDR format. Only IPv4 is supported.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.sourceTags
+ description: If source tags are specified, the firewall rule applies only to
+ traffic with source IPs that match the primary network interfaces of VM instances
+ that have the tag and are in the same VPC network. Source tags cannot be used
+ to control traffic to an instance's external IP address; they apply only to
+ traffic between instances in the same virtual network, because tags are associated
+ with instances, not IP addresses. One or both of sourceRanges and sourceTags
+ may be set. If both properties are set, the firewall will apply to traffic
+ that has source IP address within sourceRanges OR the source IP that belongs
+ to a tag listed in the sourceTags property. The connection does not need to
+ match both properties for the firewall to apply.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.targetTags
+ description: A list of tags that controls which instances the firewall rule
+ applies to. If targetTags are specified, then the firewall rule applies only
+ to instances in the VPC network that have one of those tags. If no targetTags
+ are specified, the firewall rule applies to all instances on the specified
+ network.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.sourceServiceAccounts
+ description: If source service accounts are specified, the firewall will apply
+ only to traffic originating from an instance with a service account in this
+ list. Source service accounts cannot be used to control traffic to an instance's
+ external IP address because service accounts are associated with an instance,
+ not an IP address. sourceRanges can be set at the same time as sourceServiceAccounts.
+ If both are set, the firewall will apply to traffic that has source IP address
+ within sourceRanges OR the source IP belongs to an instance with service account
+ listed in sourceServiceAccounts. The connection does not need to match both
+ properties for the firewall to apply. sourceServiceAccounts cannot be used
+ at the same time as sourceTags or targetTags.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.targetServiceAccounts
+ description: A list of service accounts indicating sets of instances located
+ in the network that may make network connections as specified in allowed[].
+ targetServiceAccounts cannot be used at the same time as targetTags or sourceTags.
+ If neither targetServiceAccounts nor targetTags are specified, the firewall
+ rule applies to all instances on the specified network.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.allowed
+ description: The list of ALLOW rules specified by this firewall. Each rule specifies
+ a protocol and port-range tuple that describes a permitted connection.
+ type: Unknown
+ - contextPath: GoogleCloudCompute.Firewalls.allowed.IPProtocol
+ description: The IP protocol to which this rule applies. The protocol type is
+ required when creating a firewall rule. This value can either be one of the
+ following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp),
+ or the IP protocol number.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.allowed.ports
+ description: An optional list of ports to which this rule applies. This field
+ is only applicable for UDP or TCP protocol. Each entry must be either an integer
+ or a range. If not specified, this rule applies to connections through any
+ port.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.denied
+ description: The list of DENY rules specified by this firewall. Each rule specifies
+ a protocol and port-range tuple that describes a denied connection.
+ type: Unknown
+ - contextPath: GoogleCloudCompute.Firewalls.denied.IPProtocol
+ description: The IP protocol to which this rule applies. The protocol type is
+ required when creating a firewall rule. This value can either be one of the
+ following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp),
+ or the IP protocol number.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.denied.ports
+ description: An optional list of ports to which this rule applies. This field
+ is only applicable for UDP or TCP protocol. Each entry must be either an integer
+ or a range. If not specified, this rule applies to connections through any
+ port.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.direction
+ description: 'Direction of traffic to which this firewall applies; default is
+ INGRESS. Note: For INGRESS traffic, it is NOT supported to specify destinationRanges;
+ For EGRESS traffic, it is NOT supported to specify sourceRanges OR sourceTags.'
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.logConfig
+ description: This field denotes the logging options for a particular firewall
+ rule. If logging is enabled, logs will be exported to Stackdriver.
+ type: Unknown
+ - contextPath: GoogleCloudCompute.Firewalls.logConfig.enable
+ description: This field denotes whether to enable logging for a particular firewall
+ rule.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Firewalls.disabled
+ description: Denotes whether the firewall rule is disabled, i.e., not applied
+ to the network it is associated with. When set to true, the firewall rule
+ is not enforced and the network behaves as if it did not exist. If this is
+ unspecified, the firewall rule will be enabled.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Firewalls.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.kind
+ description: 'Type of the resource. Always compute#firewall for firewall rules.'
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the firewall rule to return.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified firewall.
+ execution: false
+ name: gcp-compute-get-firewall
+ outputs:
+ - contextPath: GoogleCloudCompute.Firewalls.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.name
+ description: Name of the resource; provided by the client when the resource
+ is created. The name must be 1-63 characters long, comply with RFC1035, and
+ match the regular expression [a-z]([-a-z0-9]*[a-z0-9])?, which means the first
+ character must be a lowercase letter, and all following characters must be
+ a dash, lowercase letter, or digit, except the last character, which cannot
+ be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.network
+ description: URL of the network resource for this firewall rule.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.priority
+ description: Priority for this rule. This is an integer between 0 and 65535,
+ both inclusive. When not specified, the value assumed is 1000. Relative priorities
+ determine precedence of conflicting rules. Lower value of priority implies
+ higher precedence (e.g., a rule with priority 0 has higher precedence than a
+ rule with priority 1). DENY rules take precedence over ALLOW rules having
+ equal priority.
+ type: number
+ - contextPath: GoogleCloudCompute.Firewalls.sourceRanges
+ description: If source ranges are specified, the firewall will apply only to
+ traffic that has source IP address in these ranges. These ranges must be expressed
+ in CIDR format. One or both of sourceRanges and sourceTags may be set. If
+ both properties are set, the firewall will apply to traffic that has source
+ IP address within sourceRanges OR the source IP that belongs to a tag listed
+ in the sourceTags property. The connection does not need to match both properties
+ for the firewall to apply. Only IPv4 is supported.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.destinationRanges
+ description: If destination ranges are specified, the firewall will apply only
+ to traffic that has destination IP address in these ranges. These ranges must
+ be expressed in CIDR format. Only IPv4 is supported.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.sourceTags
+ description: If source tags are specified, the firewall rule applies only to
+ traffic with source IPs that match the primary network interfaces of VM instances
+ that have the tag and are in the same VPC network. Source tags cannot be used
+ to control traffic to an instance's external IP address; they apply only to
+ traffic between instances in the same virtual network, because tags are associated
+ with instances, not IP addresses. One or both of sourceRanges and sourceTags
+ may be set. If both properties are set, the firewall will apply to traffic
+ that has source IP address within sourceRanges OR the source IP that belongs
+ to a tag listed in the sourceTags property. The connection does not need to
+ match both properties for the firewall to apply.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.targetTags
+ description: A list of tags that controls which instances the firewall rule
+ applies to. If targetTags are specified, then the firewall rule applies only
+ to instances in the VPC network that have one of those tags. If no targetTags
+ are specified, the firewall rule applies to all instances on the specified
+ network.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.sourceServiceAccounts
+ description: If source service accounts are specified, the firewall will apply
+ only to traffic originating from an instance with a service account in this
+ list. Source service accounts cannot be used to control traffic to an instance's
+ external IP address because service accounts are associated with an instance,
+ not an IP address. sourceRanges can be set at the same time as sourceServiceAccounts.
+ If both are set, the firewall will apply to traffic that has source IP address
+ within sourceRanges OR the source IP belongs to an instance with service account
+ listed in sourceServiceAccounts. The connection does not need to match both
+ properties for the firewall to apply. sourceServiceAccounts cannot be used
+ at the same time as sourceTags or targetTags.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.targetServiceAccounts
+ description: A list of service accounts indicating sets of instances located
+ in the network that may make network connections as specified in allowed[].
+ targetServiceAccounts cannot be used at the same time as targetTags or sourceTags.
+ If neither targetServiceAccounts nor targetTags are specified, the firewall
+ rule applies to all instances on the specified network.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.allowed
+ description: The list of ALLOW rules specified by this firewall. Each rule specifies
+ a protocol and port-range tuple that describes a permitted connection.
+ type: Unknown
+ - contextPath: GoogleCloudCompute.Firewalls.allowed.IPProtocol
+ description: The IP protocol to which this rule applies. The protocol type is
+ required when creating a firewall rule. This value can either be one of the
+ following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp),
+ or the IP protocol number.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.allowed.ports
+ description: An optional list of ports to which this rule applies. This field
+ is only applicable for UDP or TCP protocol. Each entry must be either an integer
+ or a range. If not specified, this rule applies to connections through any
+ port.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.denied
+ description: The list of DENY rules specified by this firewall. Each rule specifies
+ a protocol and port-range tuple that describes a denied connection.
+ type: Unknown
+ - contextPath: GoogleCloudCompute.Firewalls.denied.IPProtocol
+ description: The IP protocol to which this rule applies. The protocol type is
+ required when creating a firewall rule. This value can either be one of the
+ following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp),
+ or the IP protocol number.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.denied.ports
+ description: An optional list of ports to which this rule applies. This field
+ is only applicable for UDP or TCP protocol. Each entry must be either an integer
+ or a range. If not specified, this rule applies to connections through any
+ port.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.direction
+ description: 'Direction of traffic to which this firewall applies; default is
+ INGRESS. Note: For INGRESS traffic, it is NOT supported to specify destinationRanges;
+ For EGRESS traffic, it is NOT supported to specify sourceRanges OR sourceTags.'
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.logConfig
+ description: This field denotes the logging options for a particular firewall
+ rule. If logging is enabled, logs will be exported to Stackdriver.
+ type: Unknown
+ - contextPath: GoogleCloudCompute.Firewalls.logConfig.enable
+ description: This field denotes whether to enable logging for a particular firewall
+ rule.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Firewalls.disabled
+ description: Denotes whether the firewall rule is disabled, i.e., not applied
+ to the network it is associated with. When set to true, the firewall rule
+ is not enforced and the network behaves as if it did not exist. If this is
+ unspecified, the firewall rule will be enabled.
+ type: boolean
+ - contextPath: GoogleCloudCompute.Firewalls.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Firewalls.kind
+ description: 'Type of the resource. Always compute#firewall for firewall rules.'
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the firewall rule to delete.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the specified firewall.
+ execution: false
+ name: gcp-compute-delete-firewall
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name or id of the resource for this request.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: A list of labels to apply to this resource. Each label key and
+ value must comply with RFC1035; specifically, it must be 1-63 characters long
+ and match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? which means the
+ first character must be a lowercase letter, and all following characters must
+ be a dash, lowercase letter, or digit, except the last character, which cannot
+ be a dash. For example, key=abc,value=123;key=abc,value=123
+ isArray: false
+ name: labels
+ required: true
+ secret: false
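+ # Illustrative value (hypothetical labels) in the key=...,value=...;... format
+ # described above: labels="key=env,value=prod;key=team,value=sec"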
+ - default: false
+ description: The fingerprint of the previous set of labels for this resource,
+ used to detect conflicts. The fingerprint is initially generated by Compute
+ Engine and changes after every request to modify or update labels. You must
+ always provide an up-to-date fingerprint hash when updating or changing labels,
+ otherwise the request will fail with error 412 conditionNotMet. Make a get()
+ request to the resource to get the latest fingerprint.
+ isArray: false
+ name: labelFingerprint
+ required: true
+ secret: false
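+ # Optimistic-locking sketch (illustrative; fingerprint value is hypothetical):
+ # read the snapshot's current labelFingerprint first, then pass it back, e.g.
+ # !gcp-compute-set-snapshot-labels name=snap-1 labels="key=env,value=prod" labelFingerprint=42WmSpB8rSM=
+ # A stale fingerprint fails with 412 conditionNotMet; re-read and retry.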
+ deprecated: false
+ description: Sets the labels on a snapshot.
+ execution: false
+ name: gcp-compute-set-snapshot-labels
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete, and so
+ on.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: User who requested the operation, for example EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ - arguments:
+ - default: false
+ defaultValue: '500'
+ description: 'The maximum number of results per page that should be returned.
+ If the number of available results is larger than maxResults, Compute Engine
+ returns a nextPageToken that can be used to get the next page of results in
+ subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default:
+ 500)'
+ isArray: false
+ name: maxResults
+ required: false
+ secret: false
+ - default: false
+ description: A filter expression that filters resources listed in the response.
+ The expression must specify the field name, a comparison operator, and the
+ value that you want to use for filtering. The value must be a string, a number,
+ or a boolean. The comparison operator must be either =, !=, >, or <. For
+ example, if you are filtering Compute Engine instances, you can exclude instances
+ named example-instance by specifying name != example-instance.
+ isArray: false
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: Sorts list results by a certain order. By default, results are
+ returned in alphanumerical order based on the resource name. You can also
+ sort results in descending order based on the creation timestamp using orderBy="creationTimestamp
+ desc". This sorts results based on the creationTimestamp field in reverse
+ chronological order (newest result first). Use this to sort resources like
+ operations so that the newest operation is returned first.
+ isArray: false
+ name: orderBy
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a page token to use. Set pageToken to the nextPageToken
+ returned by a previous list request to get the next page of results.
+ isArray: false
+ name: pageToken
+ required: false
+ secret: false
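+ # Illustrative invocation (hypothetical values): newest snapshots first,
+ # 50 results per page:
+ # !gcp-compute-list-snapshots maxResults=50 orderBy="creationTimestamp desc"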
+ deprecated: false
+ description: Retrieves the list of Snapshot resources contained within the specified
+ project.
+ execution: false
+ name: gcp-compute-list-snapshots
+ outputs:
+ - contextPath: GoogleCloudCompute.Snapshots.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.name
+ description: Name of the resource; provided by the client when the resource
+ is created. The name must be 1-63 characters long, comply with RFC1035, and
+ match the regular expression [a-z]([-a-z0-9]*[a-z0-9])?, which means the first
+ character must be a lowercase letter, and all following characters must be
+ a dash, lowercase letter, or digit, except the last character, which cannot
+ be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.status
+ description: The status of the snapshot. This can be CREATING, DELETING, FAILED,
+ READY, or UPLOADING.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.sourceDisk
+ description: The source disk used to create this snapshot.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.sourceDiskId
+ description: The ID value of the disk used to create this snapshot. This value
+ may be used to determine whether the snapshot was taken from the current or
+ a previous instance of a given disk name.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.diskSizeGb
+ description: Size of the snapshot, specified in GB.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.storageBytes
+ description: The size of the storage used by the snapshot. As snapshots share
+ storage, this number is expected to change with snapshot creation/deletion.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.storageBytesStatus
+ description: An indicator whether storageBytes is in a stable state or it is
+ being adjusted as a result of shared storage reallocation. This status can
+ either be UPDATING, meaning the size of the snapshot is being updated, or
+ UP_TO_DATE, meaning the size of the snapshot is up-to-date.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.licenses
+ description: A list of publicly visible licenses that apply to this snapshot.
+ This can be because the original image had licenses attached (such as a Windows
+ image).
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.snapshotEncryptionKey
+ description: Encrypts the snapshot using a customer-supplied encryption key. After
+ you encrypt a snapshot using a customer-supplied key, you must provide the
+ same key if you use the snapshot later. For example, you must provide the encryption
+ key when you create a disk from the encrypted snapshot in a future request. Customer-supplied
+ encryption keys do not protect access to metadata of the snapshot. If you do
+ not provide an encryption key when creating the snapshot, then the snapshot
+ will be encrypted using an automatically generated key and you do not need
+ to provide a key to use the snapshot later.
+ type: Unknown
+ - contextPath: GoogleCloudCompute.Snapshots.snapshotEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.snapshotEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.snapshotEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.sourceDiskEncryptionKey
+ description: The customer-supplied encryption key of the source disk. Required
+ if the source disk is protected by a customer-supplied encryption key.
+ type: Unknown
+ - contextPath: GoogleCloudCompute.Snapshots.sourceDiskEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.sourceDiskEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.sourceDiskEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.labels
+ description: 'Labels to apply to this snapshot. These can be later modified
+ by the setLabels method. Label values may be empty. An object containing
+ a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg",
+ "count": "3" }.'
+ type: Unknown
+ - contextPath: GoogleCloudCompute.Snapshots.labels.key
+ description: The label key.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.labels.value
+ description: The label value.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.labelFingerprint
+ description: A fingerprint for the labels being applied to this snapshot, which
+ is essentially a hash of the labels set used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update labels. You must always provide an up-to-date fingerprint
+ hash in order to update or change labels; otherwise, the request will fail
+ with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.licenseCodes
+ description: Integer license codes indicating which licenses are attached
+ to this snapshot.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.kind
+ description: Type of the resource. Always compute#snapshot for Snapshot resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the Snapshot resource to return.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified Snapshot resource.
+ execution: false
+ name: gcp-compute-get-snapshot
+ outputs:
+ - contextPath: GoogleCloudCompute.Snapshots.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.creationTimestamp
+ description: Creation timestamp in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.name
+ description: Name of the resource; provided by the client when the resource
+ is created. The name must be 1-63 characters long, and comply with RFC1035.
+ Specifically, the name must be 1-63 characters long and match the regular
+ expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must
+ be a lowercase letter, and all following characters must be a dash, lowercase
+ letter, or digit, except the last character, which cannot be a dash.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.description
+ description: An optional description of this resource. Provide this property
+ when you create the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.status
+ description: The status of the snapshot. This can be CREATING, DELETING, FAILED,
+ READY, or UPLOADING.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.sourceDisk
+ description: The source disk used to create this snapshot.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.sourceDiskId
+ description: The ID value of the disk used to create this snapshot. This value
+ may be used to determine whether the snapshot was taken from the current or
+ a previous instance of a given disk name.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.diskSizeGb
+ description: Size of the snapshot, specified in GB.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.storageBytes
+ description: The size of the storage used by the snapshot. As snapshots share
+ storage, this number is expected to change with snapshot creation/deletion.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.storageBytesStatus
+ description: An indicator of whether storageBytes is in a stable state or is
+ being adjusted as a result of shared storage reallocation. This status can
+ either be UPDATING, meaning the size of the snapshot is being updated, or
+ UP_TO_DATE, meaning the size of the snapshot is up-to-date.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.licenses
+ description: A list of publicly visible licenses that apply to this snapshot.
+ This can occur because the original image had licenses attached (such as a Windows
+ image).
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.snapshotEncryptionKey
+ description: Encrypts the snapshot using a customer-supplied encryption key. After
+ you encrypt a snapshot using a customer-supplied key, you must provide the
+ same key if you use the snapshot later. For example, you must provide the encryption
+ key when you create a disk from the encrypted snapshot in a future request. Customer-supplied
+ encryption keys do not protect access to metadata of the snapshot. If you do
+ not provide an encryption key when creating the snapshot, then the snapshot
+ will be encrypted using an automatically generated key and you do not need
+ to provide a key to use the snapshot later.
+ type: Unknown
+ - contextPath: GoogleCloudCompute.Snapshots.snapshotEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.snapshotEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.snapshotEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.sourceDiskEncryptionKey
+ description: The customer-supplied encryption key of the source disk. Required
+ if the source disk is protected by a customer-supplied encryption key.
+ type: Unknown
+ - contextPath: GoogleCloudCompute.Snapshots.sourceDiskEncryptionKey.sha256
+ description: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
+ encryption key that protects this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.sourceDiskEncryptionKey.rawKey
+ description: Specifies a 256-bit customer-supplied encryption key, encoded in
+ RFC 4648 base64 to either encrypt or decrypt this resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.sourceDiskEncryptionKey.kmsKeyName
+ description: The name of the encryption key that is stored in Google Cloud KMS.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.labels
+ description: 'Labels to apply to this snapshot. These can be later modified
+ by the setLabels method. Label values may be empty. An object containing
+ a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg",
+ "count": "3" }.'
+ type: Unknown
+ - contextPath: GoogleCloudCompute.Snapshots.labels.key
+ description: The label key.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.labels.value
+ description: The label value.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.labelFingerprint
+ description: A fingerprint for the labels being applied to this snapshot, which
+ is essentially a hash of the labels set used for optimistic locking. The fingerprint
+ is initially generated by Compute Engine and changes after every request to
+ modify or update labels. You must always provide an up-to-date fingerprint
+ hash in order to update or change labels; otherwise, the request will fail
+ with error 412 conditionNotMet.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.licenseCodes
+ description: Integer license codes indicating which licenses are attached
+ to this snapshot.
+ type: string
+ - contextPath: GoogleCloudCompute.Snapshots.kind
+ description: Type of the resource. Always compute#snapshot for Snapshot resources.
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the Snapshot resource to delete.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes the specified Snapshot resource. Keep in mind that deleting
+ a single snapshot might not delete all of the data on that snapshot.
+ If any data on the snapshot that is marked for deletion is needed for subsequent
+ snapshots, the data will be moved to the next corresponding snapshot.
+ execution: false
+ name: gcp-compute-delete-snapshot
+ outputs:
+ - contextPath: GoogleCloudCompute.Operations.id
+ description: The unique identifier for the resource. This identifier is defined
+ by the server.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.name
+ description: Name of the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.zone
+ description: The URL of the zone where the operation resides. Only available
+ when performing per-zone operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.clientOperationId
+ description: The value of requestId if you provided it in the request. Not present
+ otherwise.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.operationType
+ description: The type of operation, such as insert, update, or delete.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetLink
+ description: The URL of the resource that the operation modifies. For operations
+ related to creating a snapshot, this points to the persistent disk that the
+ snapshot was created from.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.targetId
+ description: The unique target ID, which identifies a specific incarnation of
+ the target resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.status
+ description: 'The status of the operation, which can be one of the following:
+ PENDING, RUNNING, or DONE.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.statusMessage
+ description: An optional textual description of the current status of the operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.user
+ description: The user who requested the operation, for example, EMAILADDRESS.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.progress
+ description: An optional progress indicator that ranges from 0 to 100. There
+ is no requirement that this be linear or support any granularity of operations.
+ This should not be used to guess when the operation will be complete. This
+ number should monotonically increase as the operation progresses.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.insertTime
+ description: The time that this operation was requested. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.startTime
+ description: The time that this operation was started by the server. This value
+ is in RFC3339 text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.endTime
+ description: The time that this operation was completed. This value is in RFC3339
+ text format.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error
+ description: If errors are generated during processing of the operation, this
+ field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors
+ description: The array of errors encountered while processing this operation.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.code
+ description: The error type identifier for this error.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.location
+ description: Indicates the field in the request that caused the error. This
+ property is optional.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.error.errors.message
+ description: An optional, human-readable error message.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings
+ description: If warning messages are generated during processing of the operation,
+ this field will be populated.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.code
+ description: A warning code, if applicable. For example, Compute Engine returns
+ NO_RESULTS_ON_PAGE if there are no results in the response.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.message
+ description: A human-readable description of the warning code.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data
+ description: 'Metadata about this warning in key: value format.'
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.key
+ description: A key that provides more detail on the warning being returned.
+ For example, for warnings where there are no results in a list request for
+ a particular zone, this key might be scope and the key value might be the
+ zone name. Other examples might be a key indicating a deprecated resource
+ and a suggested replacement, or a warning about invalid network settings (for
+ example, if an instance attempts to perform IP forwarding but is not enabled
+ for IP forwarding).
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.warnings.data.value
+ description: A warning data value corresponding to the key.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.httpErrorStatusCode
+ description: If the operation fails, this field contains the HTTP error status
+ code that was returned. For example, a 404 means the resource was not found.
+ type: number
+ - contextPath: GoogleCloudCompute.Operations.httpErrorMessage
+ description: If the operation fails, this field contains the HTTP error message
+ that was returned, such as NOT FOUND.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.selfLink
+ description: Server-defined URL for the resource.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.region
+ description: The URL of the region where the operation resides. Only available
+ when performing regional operations. You must specify this field as part of
+ the HTTP request URL. It is not settable as a field in the request body.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.description
+ description: A textual description of the operation, which is set when the operation
+ is created.
+ type: string
+ - contextPath: GoogleCloudCompute.Operations.kind
+ description: Type of the resource. Always compute#operation for Operation resources.
+ type: string
+ dockerimage: demisto/google-api:1.0.0.235 # disable-secrets-detection
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- No tests
diff --git a/Integrations/GoogleCloudCompute/GoogleCloudCompute_Image.png b/Integrations/GoogleCloudCompute/GoogleCloudCompute_Image.png
new file mode 100644
index 000000000000..965ed729399b
Binary files /dev/null and b/Integrations/GoogleCloudCompute/GoogleCloudCompute_Image.png differ
diff --git a/Integrations/GoogleCloudCompute/GoogleCloudCompute_description.md b/Integrations/GoogleCloudCompute/GoogleCloudCompute_description.md
new file mode 100644
index 000000000000..ce3f9e16e21a
--- /dev/null
+++ b/Integrations/GoogleCloudCompute/GoogleCloudCompute_description.md
@@ -0,0 +1,28 @@
+Create a Service Account
+1. Go to the [Google documentation](https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount) and follow the procedure in the Creating a Service Account section. After you create a service account, a Service Account Private Key file is downloaded. You will need this file when configuring an instance of the integration.
+2. Grant the Compute Admin role to the Service Account so that it can execute the required Google Cloud API commands.
+3. In Demisto, configure an instance of the Google Cloud Compute integration. For the Service Account Private Key parameter, add the Service Account Private Key file contents (JSON).
+
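+You can optionally sanity-check the key before configuring the instance. The following is a minimal sketch (the file name `sa-key.json` is a hypothetical path to the downloaded key file; the packages are the pinned `google-api-python-client` and `oauth2client`):
+
+```python
+import json
+
+from googleapiclient import discovery
+from oauth2client.service_account import ServiceAccountCredentials
+
+KEY_FILE = 'sa-key.json'  # hypothetical path to the downloaded key file
+
+# Load the Service Account Private Key and request the Compute scope.
+credentials = ServiceAccountCredentials.from_json_keyfile_name(
+    KEY_FILE, scopes=['https://www.googleapis.com/auth/compute'])
+
+# The key file itself carries the project ID.
+with open(KEY_FILE) as key_file:
+    project = json.load(key_file)['project_id']
+
+# Listing zones succeeds only if the key is valid and the granted role
+# includes the required Compute permissions.
+compute = discovery.build('compute', 'v1', credentials=credentials)
+print(compute.zones().list(project=project).execute().get('items', [])[:3])
+```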
diff --git a/Integrations/GoogleCloudCompute/Pipfile b/Integrations/GoogleCloudCompute/Pipfile
new file mode 100644
index 000000000000..d412c275d4b6
--- /dev/null
+++ b/Integrations/GoogleCloudCompute/Pipfile
@@ -0,0 +1,15 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+
+[packages]
+google-cloud-storage = "*"
+google-api-python-client = "*"
+oauth2client = "*"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/GoogleCloudCompute/Pipfile.lock b/Integrations/GoogleCloudCompute/Pipfile.lock
new file mode 100644
index 000000000000..49bcc8efd449
--- /dev/null
+++ b/Integrations/GoogleCloudCompute/Pipfile.lock
@@ -0,0 +1,327 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "03f173b5fe17f44cd35ee358c365fe58b6623bed142c212c22bc1ab5b1c47f9c"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "cachetools": {
+ "hashes": [
+ "sha256:219b7dc6024195b6f2bc3d3f884d1fef458745cd323b04165378622dcc823852",
+ "sha256:9efcc9fab3b49ab833475702b55edd5ae07af1af7a4c627678980b45e459c460"
+ ],
+ "version": "==3.1.0"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5",
+ "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"
+ ],
+ "version": "==2019.3.9"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265",
+ "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.0"
+ },
+ "google-api-core": {
+ "hashes": [
+ "sha256:5dcf8895690b4b95c1d96f77a314fcc5674a5e2db925343b3f67df3f0882967e",
+ "sha256:fc1fea74bd863fb71486066e0c6b3a4dad26fb70ec61a0edcada8637feb77c68"
+ ],
+ "version": "==1.9.0"
+ },
+ "google-api-python-client": {
+ "hashes": [
+ "sha256:06907006ed5ce831018f03af3852d739c0b2489cdacfda6971bcc2075c762858",
+ "sha256:937eabdc3940977f712fa648a096a5142766b6d0a0f58bc603e2ac0687397ef0"
+ ],
+ "index": "pypi",
+ "version": "==1.7.8"
+ },
+ "google-auth": {
+ "hashes": [
+ "sha256:0f7c6a64927d34c1a474da92cfc59e552a5d3b940d3266606c6a28b72888b9e4",
+ "sha256:20705f6803fd2c4d1cc2dcb0df09d4dfcb9a7d51fd59e94a3a28231fd93119ed"
+ ],
+ "version": "==1.6.3"
+ },
+ "google-auth-httplib2": {
+ "hashes": [
+ "sha256:098fade613c25b4527b2c08fa42d11f3c2037dda8995d86de0745228e965d445",
+ "sha256:f1c437842155680cf9918df9bc51c1182fda41feef88c34004bd1978c8157e08"
+ ],
+ "version": "==0.0.3"
+ },
+ "google-cloud-core": {
+ "hashes": [
+ "sha256:9bee63e0991be9801a4baf0b7841cf54f86c6e7fec922f45ea74cd4032ed4ee4",
+ "sha256:d85b1aaaf3bad9415ad1d8ee5eadce96d7007a82f13ce0a0629a003a11e83f29"
+ ],
+ "version": "==0.29.1"
+ },
+ "google-cloud-storage": {
+ "hashes": [
+ "sha256:a3115c22a71e2f172fade72c7b7b797a071f3ac9b66043191fc84c214ba0c671",
+ "sha256:aef243b533144c11c9ff750565c43dffe5445debb143697002edb6205f64a437"
+ ],
+ "index": "pypi",
+ "version": "==1.14.0"
+ },
+ "google-resumable-media": {
+ "hashes": [
+ "sha256:2dae98ee716efe799db3578a7b902fbf5592fc5c77d3c0906fc4ef9b1b930861",
+ "sha256:3e38923493ca0d7de0ad91c31acfefc393c78586db89364e91cb4f11990e51ba"
+ ],
+ "version": "==0.3.2"
+ },
+ "googleapis-common-protos": {
+ "hashes": [
+ "sha256:627ec53fab43d06c1b5c950e217fa9819e169daf753111a7f244e94bf8fb3384"
+ ],
+ "version": "==1.5.9"
+ },
+ "httplib2": {
+ "hashes": [
+ "sha256:4ba6b8fd77d0038769bf3c33c9a96a6f752bc4cdf739701fdcaf210121f399d4"
+ ],
+ "version": "==0.12.1"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "oauth2client": {
+ "hashes": [
+ "sha256:b8a81cc5d60e2d364f0b1b98f958dbd472887acaf1a5b05e21c28c31a2d6d3ac",
+ "sha256:d486741e451287f69568a4d26d70d9acd73a2bbfa275746c535b4209891cccc6"
+ ],
+ "index": "pypi",
+ "version": "==4.1.3"
+ },
+ "protobuf": {
+ "hashes": [
+ "sha256:21e395d7959551e759d604940a115c51c6347d90a475c9baf471a1a86b5604a9",
+ "sha256:57e05e16955aee9e6a0389fcbd58d8289dd2420e47df1a1096b3a232c26eb2dd",
+ "sha256:67819e8e48a74c68d87f25cad9f40edfe2faf278cdba5ca73173211b9213b8c9",
+ "sha256:75da7d43a2c8a13b0bc7238ab3c8ae217cbfd5979d33b01e98e1f78defb2d060",
+ "sha256:78e08371e236f193ce947712c072542ff19d0043ab5318c2ea46bbc2aaebdca6",
+ "sha256:7ee5b595db5abb0096e8c4755e69c20dfad38b2d0bcc9bc7bafc652d2496b471",
+ "sha256:86260ecfe7a66c0e9d82d2c61f86a14aa974d340d159b829b26f35f710f615db",
+ "sha256:92c77db4bd33ea4ee5f15152a835273f2338a5246b2cbb84bab5d0d7f6e9ba94",
+ "sha256:9c7b90943e0e188394b4f068926a759e3b4f63738190d1ab3d500d53b9ce7614",
+ "sha256:a77f217ea50b2542bae5b318f7acee50d9fc8c95dd6d3656eaeff646f7cab5ee",
+ "sha256:ad589ed1d1f83db22df867b10e01fe445516a5a4d7cfa37fe3590a5f6cfc508b",
+ "sha256:b06a794901bf573f4b2af87e6139e5cd36ac7c91ac85d7ae3fe5b5f6fc317513",
+ "sha256:bd8592cc5f8b4371d0bad92543370d4658dc41a5ccaaf105597eb5524c616291",
+ "sha256:be48e5a6248a928ec43adf2bea037073e5da692c0b3c10b34f9904793bd63138",
+ "sha256:cc5eb13f5ccc4b1b642cc147c2cdd121a34278b341c7a4d79e91182fff425836",
+ "sha256:cd3b0e0ad69b74ee55e7c321f52a98effed2b4f4cc9a10f3683d869de00590d5",
+ "sha256:d6e88c4920660aa75c0c2c4b53407aef5efd9a6e0ca7d2fc84d79aba2ccbda3a",
+ "sha256:ec3c49b6d247152e19110c3a53d9bb4cf917747882017f70796460728b02722e"
+ ],
+ "version": "==3.7.1"
+ },
+ "pyasn1": {
+ "hashes": [
+ "sha256:da2420fe13a9452d8ae97a0e478adde1dee153b11ba832a95b223a2ba01c10f7",
+ "sha256:da6b43a8c9ae93bc80e2739efb38cc776ba74a886e3e9318d65fe81a8b8a2c6e"
+ ],
+ "version": "==0.4.5"
+ },
+ "pyasn1-modules": {
+ "hashes": [
+ "sha256:79580acf813e3b7d6e69783884e6e83ac94bf4617b36a135b85c599d8a818a7b",
+ "sha256:a52090e8c5841ebbf08ae455146792d9ef3e8445b21055d3a3b7ed9c712b7c7c"
+ ],
+ "version": "==0.2.4"
+ },
+ "pytz": {
+ "hashes": [
+ "sha256:303879e36b721603cc54604edcac9d20401bdbe31e1e4fdee5b9f98d5d31dfda",
+ "sha256:d747dd3d23d77ef44c6a3526e274af6efeb0a6f1afd5a69ba4d5be4098c8e141"
+ ],
+ "version": "==2019.1"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
+ "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
+ ],
+ "version": "==2.21.0"
+ },
+ "rsa": {
+ "hashes": [
+ "sha256:14ba45700ff1ec9eeb206a2ce76b32814958a98e372006c8fb76ba820211be66",
+ "sha256:1a836406405730121ae9823e19c6e806c62bbad73f890574fff50efa4122c487"
+ ],
+ "version": "==4.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "uritemplate": {
+ "hashes": [
+ "sha256:01c69f4fe8ed503b2951bef85d996a9d22434d2431584b5b107b2981ff416fbd",
+ "sha256:1b9c467a940ce9fb9f50df819e8ddd14696f89b9a8cc87ac77952ba416e0a8fd",
+ "sha256:c02643cebe23fc8adb5e6becffe201185bf06c40bda5c0b4028a93f1527d011d"
+ ],
+ "version": "==3.0.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
+ "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
+ ],
+ "version": "==1.24.1"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.5"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==3.7.4"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265",
+ "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.0"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:01cb7e1ca5e6c5b3f235f0385057f70558b70d2f00320208825fa62887292f43",
+ "sha256:268067462aed7eb2a1e237fcb287852f22077de3fb07964e87e00f829eea2d1a"
+ ],
+ "version": "==4.3.17"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33",
+ "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39",
+ "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019",
+ "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088",
+ "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b",
+ "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e",
+ "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6",
+ "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b",
+ "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5",
+ "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff",
+ "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd",
+ "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7",
+ "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff",
+ "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d",
+ "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2",
+ "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35",
+ "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4",
+ "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514",
+ "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252",
+ "sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109",
+ "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f",
+ "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c",
+ "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92",
+ "sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577",
+ "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d",
+ "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d",
+ "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f",
+ "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a",
+ "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"
+ ],
+ "version": "==1.3.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:02c2b6d268695a8b64ad61847f92e611e6afcff33fd26c3a2125370c4662905d",
+ "sha256:ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93"
+ ],
+ "index": "pypi",
+ "version": "==1.9.4"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/GoogleCloudStorage/CHANGELOG.md b/Integrations/GoogleCloudStorage/CHANGELOG.md
new file mode 100644
index 000000000000..5a6731c4a7e6
--- /dev/null
+++ b/Integrations/GoogleCloudStorage/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+#### New Integration
+Google Cloud Storage is a RESTful online file storage web service for storing and accessing data on Google Cloud Platform infrastructure.
\ No newline at end of file
diff --git a/Integrations/GoogleCloudStorage/GoogleCloudStorage.py b/Integrations/GoogleCloudStorage/GoogleCloudStorage.py
new file mode 100644
index 000000000000..3dc78b77284d
--- /dev/null
+++ b/Integrations/GoogleCloudStorage/GoogleCloudStorage.py
@@ -0,0 +1,564 @@
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+from google.cloud import storage
+from typing import Any, Dict
+import requests
+import traceback
+import urllib3
+
+
+''' GLOBALS/PARAMS '''
+
+RFC3339_DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
+DEMISTO_DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
+
+SERVICE_ACCOUNT_JSON = demisto.params().get('service_account_json', '')
+INSECURE = demisto.params().get('insecure', False)
+
+client: storage.Client
+
+
+''' HELPER FUNCTIONS '''
+
+
+def initialize_module():
+ global client
+
+ # Allow an un-initialized client for the sake of unit tests
+ if SERVICE_ACCOUNT_JSON:
+ client = init_storage_client()
+
+ if INSECURE:
+ disable_tls_verification()
+
+ # Remove proxy if not set to true in params
+ handle_proxy()
+
+
+def init_storage_client():
+ """Creates the Python API client for Google Cloud Storage."""
+ cur_directory_path = os.getcwd()
+ credentials_file_name = demisto.uniqueFile() + '.json'
+ credentials_file_path = os.path.join(cur_directory_path, credentials_file_name)
+
+ with open(credentials_file_path, 'w') as creds_file:
+ json_object = json.loads(SERVICE_ACCOUNT_JSON)
+ json.dump(json_object, creds_file)
+
+ return storage.Client.from_service_account_json(credentials_file_path)
+
+
+def disable_tls_verification():
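+ """Patches requests.Session so that every request skips TLS verification.
+ The google-cloud client creates its own sessions internally, so the patch
+ is applied at the class level rather than to a single session object."""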
+
+ original_method = requests.Session.merge_environment_settings
+
+ def merge_environment_settings(self, url, proxies, stream, verify, cert):
+ settings = original_method(self, url, proxies, stream, verify, cert)
+ settings['verify'] = False
+ return settings
+
+ # noinspection PyTypeHints
+ requests.Session.merge_environment_settings = merge_environment_settings # type: ignore
+
+ urllib3.disable_warnings(category=urllib3.exceptions.InsecureRequestWarning)
+
+
+def ec_key(path, *merge_by):
+ """Returns the context key and merge logic for the given context path and ID field name(s)."""
+
+ if len(merge_by) == 0:
+ return path
+
+ js_condition = ''
+ for key in merge_by:
+ if js_condition:
+ js_condition += ' && '
+ js_condition += 'val.{0} && val.{0} === obj.{0}'.format(key)
+
+ return '{}({})'.format(path, js_condition)
+
+
+def reformat_datetime_str(dt_str):
+ """Reformats a date/time string from Google's RFC 3339 format to our format."""
+ dt = None if not dt_str else datetime.strptime(dt_str, RFC3339_DATETIME_FORMAT)
+ return datetime2str(dt)
+
+
+def datetime2str(dt):
+ """Converts a datetime object to string."""
+ return '' if not dt else dt.strftime(DEMISTO_DATETIME_FORMAT)
+
+
+def human_readable_table(title, contents):
+ """Creates a human-readable table for the given contents, preserving header order and adding spaces to headers."""
+
+ def header_transform(header):
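+ # e.g. 'TimeCreated' -> 'Time Created'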
+ return re.sub(r'([a-z])([A-Z])', '\\1 \\2', header)
+
+ first_dict: Dict[str, Any] = {}
+ if isinstance(contents, list) and contents:
+ first_dict = contents[0]
+ elif isinstance(contents, dict):
+ first_dict = contents
+
+ ordered_headers = None if not first_dict else list(first_dict.keys())
+
+ return tableToMarkdown(title, contents, ordered_headers, header_transform)
+
+
+def format_error(ex):
+ """Creates a human-readable error message for the given raised error."""
+ msg = 'Error occurred in the Google Cloud Storage Integration'
+
+ if hasattr(ex, '__class__'):
+ class_name = ex.__class__.__name__
+ details = str(ex)
+ if isinstance(ex, BaseException) and details:
+ msg = '{}: {}'.format(class_name, details)
+ else:
+ msg += ' ({})'.format(details if details else class_name)
+
+ return msg
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def module_test():
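+ # Fetching a single page of buckets verifies the credentials and API
+ # connectivity without pulling the entire bucket list.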
+ next(client.list_buckets().pages)
+
+
+''' Bucket management '''
+
+
+def bucket2dict(bucket):
+ """Converts a google.cloud.storage.Bucket object to context format (GCS.Bucket)."""
+ return {
+ 'Name': bucket.name,
+ 'TimeCreated': reformat_datetime_str(bucket._properties.get('timeCreated', '')),
+ 'TimeUpdated': reformat_datetime_str(bucket._properties.get('updated', '')),
+ 'OwnerID': '' if not bucket.owner else bucket.owner.get('entityId', '')
+ }
+
+
+def gcs_list_buckets():
+ buckets = client.list_buckets()
+ result = [bucket2dict(bucket) for bucket in buckets]
+
+ return_outputs(
+ readable_output=human_readable_table('Buckets in project ' + client.project, result),
+ outputs={ec_key('GCS.Bucket', 'Name'): result},
+ raw_response=result,
+ )
+
+
+def gcs_get_bucket():
+ bucket_name = demisto.args()['bucket_name']
+
+ bucket = client.get_bucket(bucket_name)
+ result = bucket2dict(bucket)
+
+ return_outputs(
+ readable_output=human_readable_table('Bucket ' + bucket_name, result),
+ outputs={ec_key('GCS.Bucket', 'Name'): result},
+ raw_response=result,
+ )
+
+
+def gcs_create_bucket():
+ bucket_name = demisto.args()['bucket_name']
+ bucket_acl = demisto.args().get('bucket_acl', '')
+ default_object_acl = demisto.args().get('default_object_acl', '')
+
+ bucket = client.create_bucket(bucket_name)
+ if bucket_acl:
+ bucket.acl.save_predefined(bucket_acl)
+ if default_object_acl:
+ bucket.default_object_acl.save_predefined(default_object_acl)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': f'Bucket {bucket_name} was created successfully.'
+ })
+
+
+def gcs_delete_bucket():
+ bucket_name = demisto.args()['bucket_name']
+ force = demisto.args().get('force', '') == 'true'
+
+ bucket = client.get_bucket(bucket_name)
+ bucket.delete(force)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': f'Bucket {bucket_name} was deleted successfully.'
+ })
+
+
+''' Object operations '''
+
+
+def blob2dict(blob):
+ """Converts a google.cloud.storage.Blob (which represents a storage object) to context format (GCS.BucketObject)."""
+ return {
+ 'Name': blob.name,
+ 'Bucket': blob.bucket.name,
+ 'ContentType': blob.content_type,
+ 'TimeCreated': datetime2str(blob.time_created),
+ 'TimeUpdated': datetime2str(blob.updated),
+ 'TimeDeleted': datetime2str(blob.time_deleted),
+ 'Size': blob.size,
+ 'MD5': blob.md5_hash,
+ 'OwnerID': '' if not blob.owner else blob.owner.get('entityId', ''),
+ 'CRC32c': blob.crc32c,
+ 'EncryptionAlgorithm': blob._properties.get('customerEncryption', {}).get('encryptionAlgorithm', ''),
+ 'EncryptionKeySHA256': blob._properties.get('customerEncryption', {}).get('keySha256', ''),
+ }
+
+
+def download_blob(blob, file_name=''):
+ cur_directory_path = os.getcwd()
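+ # Default to the last path segment of the object name; fall back to a
+ # unique file name if that segment turns out to be empty.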
+ file_name = file_name or blob.name.replace('\\', '/').split('/')[-1] or demisto.uniqueFile()
+ file_path = os.path.join(cur_directory_path, file_name)
+
+ with open(file_path, 'wb') as file:
+ client.download_blob_to_file(blob, file)
+
+ return file_name
+
+
+def upload_blob(file_path, bucket_name, object_name):
+ bucket = client.get_bucket(bucket_name)
+ blob = bucket.blob(object_name)
+
+ blob.upload_from_filename(file_path)
+
+ return blob
+
+
+def gcs_list_bucket_objects():
+ bucket_name = demisto.args()['bucket_name']
+
+ blobs = client.list_blobs(bucket_name)
+ result = [blob2dict(blob) for blob in blobs]
+
+ return_outputs(
+ readable_output=human_readable_table('Objects in bucket ' + bucket_name, result),
+ outputs={ec_key('GCS.BucketObject', 'Name', 'Bucket'): result},
+ raw_response=result,
+ )
+
+
+def gcs_download_file():
+ bucket_name = demisto.args()['bucket_name']
+ blob_name = demisto.args()['object_name']
+ saved_file_name = demisto.args().get('saved_file_name', '')
+
+ bucket = client.get_bucket(bucket_name)
+ blob = storage.Blob(blob_name, bucket)
+ saved_file_name = download_blob(blob, saved_file_name)
+
+ demisto.results(file_result_existing_file(saved_file_name))
+
+
+def gcs_upload_file():
+ entry_id = demisto.args()['entry_id']
+ bucket_name = demisto.args()['bucket_name']
+ object_name = demisto.args()['object_name']
+ object_acl = demisto.args().get('object_acl', '')
+
+ context_file = demisto.getFilePath(entry_id)
+ file_path = context_file['path']
+ file_name = context_file['name']
+ blob = upload_blob(file_path, bucket_name, object_name)
+ if object_acl:
+ blob.acl.save_predefined(object_acl)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': f'File {file_name} was successfully uploaded to bucket {bucket_name} as {object_name}'
+ })
+
+
+''' Bucket policy (ACL) '''
+
+
+def acl2dict(acl_entry, include_object_name=False):
+ """Converts an ACL entry from its raw JSON form to context format (GCS.BucketPolicy or GCS.BucketObjectPolicy)."""
+ result = {
+ 'Bucket': acl_entry.get('bucket', ''),
+ 'Object': acl_entry.get('object', ''),
+ 'Entity': acl_entry.get('entity', ''),
+ 'Email': acl_entry.get('email', ''),
+ 'Role': acl_entry.get('role', ''),
+ 'Team': acl_entry.get('projectTeam', {}).get('team', '')
+ }
+
+ # Check if we need to adapt from GCS.BucketObjectPolicy to GCS.BucketPolicy
+ if not include_object_name:
+ del result['Object']
+
+ return result
+
+
+def get_acl_entries(acl):
+ """Retrieves the entries of the given ACL (access control list) in their raw dictionary form."""
+ path = acl.reload_path
+ parsed_json = client._connection.api_request(method='GET', path=path)
+ return parsed_json.get('items', ())
+
+
+def set_acl_entry(acl, entity, role):
+ acl_entry = acl.entity_from_dict({'entity': entity, 'role': role.upper()})
+ acl.add_entity(acl_entry)
+ acl.save()
+
+
+def delete_acl_entry(acl, entity):
+ del acl.entities[str(entity)]
+ acl.save()
+
+
+def gcs_list_bucket_policy():
+ bucket_name = demisto.args()['bucket_name']
+
+ acl = client.get_bucket(bucket_name).acl
+
+ acl_entries = get_acl_entries(acl)
+ result = [acl2dict(entry) for entry in acl_entries]
+
+ return_outputs(
+ readable_output=human_readable_table('ACL policy for bucket ' + bucket_name, result),
+ outputs={ec_key('GCS.BucketPolicy', 'Bucket', 'Entity'): result},
+ raw_response=result,
+ )
+
+
+def gcs_create_bucket_policy():
+ bucket_name = demisto.args()['bucket_name']
+ entity = demisto.args()['entity']
+ role = demisto.args()['role']
+
+ acl = client.get_bucket(bucket_name).acl
+ if acl.has_entity(entity):
+ raise ValueError(f'Entity {entity} already exists in the ACL of bucket {bucket_name}'
+ ' (use gcs-put-bucket-policy to update it)')
+
+ set_acl_entry(acl, entity, role)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': f'Added entity {entity} to ACL of bucket {bucket_name} with role {role}'
+ })
+
+
+def gcs_put_bucket_policy():
+ bucket_name = demisto.args()['bucket_name']
+ entity = demisto.args()['entity']
+ role = demisto.args()['role']
+
+ acl = client.get_bucket(bucket_name).acl
+ if not acl.has_entity(entity):
+ raise ValueError(f'Entity {entity} does not exist in the ACL of bucket {bucket_name}'
+ ' (use gcs-create-bucket-policy to create it)')
+
+ set_acl_entry(acl, entity, role)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': f'Updated ACL entity {entity} in bucket {bucket_name} to role {role}'
+ })
+
+
+def gcs_delete_bucket_policy():
+ bucket_name = demisto.args()['bucket_name']
+ entity = demisto.args()['entity']
+
+ acl = client.get_bucket(bucket_name).acl
+ if not acl.has_entity(entity):
+ raise ValueError(f'Entity {entity} does not exist in the ACL of bucket {bucket_name}')
+
+ delete_acl_entry(acl, entity)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': f'Removed entity {entity} from ACL of bucket {bucket_name}'
+ })
+
+
+''' Object policy (ACL) '''
+
+
+def get_blob_acl(bucket_name, blob_name):
+ bucket = client.get_bucket(bucket_name)
+ blob = storage.Blob(blob_name, bucket)
+ return blob.acl
+
+
+def gcs_list_bucket_object_policy():
+ bucket_name = demisto.args()['bucket_name']
+ blob_name = demisto.args()['object_name']
+
+ acl = get_blob_acl(bucket_name, blob_name)
+ acl_entries = get_acl_entries(acl)
+ result = [acl2dict(entry, include_object_name=True) for entry in acl_entries]
+
+ return_outputs(
+ readable_output=human_readable_table('ACL policy for object ' + blob_name, result),
+ outputs={ec_key('GCS.BucketObjectPolicy', 'Bucket', 'Object', 'Entity'): result},
+ raw_response=result,
+ )
+
+
+def gcs_create_bucket_object_policy():
+ bucket_name = demisto.args()['bucket_name']
+ blob_name = demisto.args()['object_name']
+ entity = demisto.args()['entity']
+ role = demisto.args()['role']
+
+ acl = get_blob_acl(bucket_name, blob_name)
+ if acl.has_entity(entity):
+ raise ValueError(f'Entity {entity} already exists in the ACL of object {blob_name}'
+ ' (use gcs-put-bucket-object-policy to update it)')
+
+ set_acl_entry(acl, entity, role)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': f'Added entity {entity} to ACL of object {blob_name} with role {role}'
+ })
+
+
+def gcs_put_bucket_object_policy():
+ bucket_name = demisto.args()['bucket_name']
+ blob_name = demisto.args()['object_name']
+ entity = demisto.args()['entity']
+ role = demisto.args()['role']
+
+ acl = get_blob_acl(bucket_name, blob_name)
+ if not acl.has_entity(entity):
+ raise ValueError(f'Entity {entity} does not exist in the ACL of object {blob_name}'
+ ' (use gcs-create-bucket-object-policy to create it)')
+
+ set_acl_entry(acl, entity, role)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': f'Updated ACL entity {entity} in object {blob_name} to role {role}'
+ })
+
+
+def gcs_delete_bucket_object_policy():
+ bucket_name = demisto.args()['bucket_name']
+ blob_name = demisto.args()['object_name']
+ entity = demisto.args()['entity']
+
+ acl = get_blob_acl(bucket_name, blob_name)
+ if not acl.has_entity(entity):
+ raise ValueError(f'Entity {entity} does not exist in the ACL of object {blob_name}')
+
+ delete_acl_entry(acl, entity)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': f'Removed entity {entity} from ACL of object {blob_name}'
+ })
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('Command being called is ' + demisto.command())
+
+try:
+ initialize_module()
+
+ if demisto.command() == 'test-module':
+ module_test()
+ demisto.results('ok')
+
+ #
+ # Bucket management
+ #
+ elif demisto.command() == 'gcs-list-buckets':
+ gcs_list_buckets()
+
+ elif demisto.command() == 'gcs-get-bucket':
+ gcs_get_bucket()
+
+ elif demisto.command() == 'gcs-create-bucket':
+ gcs_create_bucket()
+
+ elif demisto.command() == 'gcs-delete-bucket':
+ gcs_delete_bucket()
+
+ #
+ # Object operations
+ #
+ elif demisto.command() == 'gcs-list-bucket-objects':
+ gcs_list_bucket_objects()
+
+ elif demisto.command() == 'gcs-download-file':
+ gcs_download_file()
+
+ elif demisto.command() == 'gcs-upload-file':
+ gcs_upload_file()
+
+ #
+ # Bucket policy (ACL)
+ #
+ elif demisto.command() == 'gcs-list-bucket-policy':
+ gcs_list_bucket_policy()
+
+ elif demisto.command() == 'gcs-create-bucket-policy':
+ gcs_create_bucket_policy()
+
+ elif demisto.command() == 'gcs-put-bucket-policy':
+ gcs_put_bucket_policy()
+
+ elif demisto.command() == 'gcs-delete-bucket-policy':
+ gcs_delete_bucket_policy()
+
+ #
+ # Object policy (ACL)
+ #
+ elif demisto.command() == 'gcs-list-bucket-object-policy':
+ gcs_list_bucket_object_policy()
+
+ elif demisto.command() == 'gcs-create-bucket-object-policy':
+ gcs_create_bucket_object_policy()
+
+ elif demisto.command() == 'gcs-put-bucket-object-policy':
+ gcs_put_bucket_object_policy()
+
+ elif demisto.command() == 'gcs-delete-bucket-object-policy':
+ gcs_delete_bucket_object_policy()
+
+except Exception as e:
+ LOG(traceback.format_exc())
+ return_error(format_error(e))
diff --git a/Integrations/GoogleCloudStorage/GoogleCloudStorage.yml b/Integrations/GoogleCloudStorage/GoogleCloudStorage.yml
new file mode 100644
index 000000000000..3cc9f10a99a1
--- /dev/null
+++ b/Integrations/GoogleCloudStorage/GoogleCloudStorage.yml
@@ -0,0 +1,519 @@
+category: IT Services
+commonfields:
+ id: Google Cloud Storage
+ version: -1
+configuration:
+- display: Service Account Private Key file contents (JSON)
+ name: service_account_json
+ required: true
+ type: 4
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (unsecure)
+ name: insecure
+ required: false
+ type: 8
+description: Google Cloud Storage is a RESTful online file storage web service for
+ storing and accessing data on Google Cloud Platform infrastructure.
+display: Google Cloud Storage
+name: Google Cloud Storage
+script:
+ commands:
+ - deprecated: false
+ description: Retrieves the list of buckets.
+ execution: false
+ name: gcs-list-buckets
+ outputs:
+ - contextPath: GCS.Bucket.Name
+ description: Bucket name (also ID).
+ type: String
+ - contextPath: GCS.Bucket.TimeCreated
+ description: Bucket creation time.
+ type: Date
+ - contextPath: GCS.Bucket.TimeUpdated
+ description: Last time bucket was modified.
+ type: Date
+ - contextPath: GCS.Bucket.OwnerID
+ description: Bucket owner ID.
+ type: String
+ - arguments:
+ - default: true
+ description: Name of the bucket to retrieve.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves bucket information.
+ execution: false
+ name: gcs-get-bucket
+ outputs:
+ - contextPath: GCS.Bucket.Name
+ description: Bucket name (also ID).
+ type: String
+ - contextPath: GCS.Bucket.TimeCreated
+ description: Bucket creation time.
+ type: Date
+ - contextPath: GCS.Bucket.TimeUpdated
+ description: Last time bucket was modified.
+ type: Date
+ - contextPath: GCS.Bucket.OwnerID
+ description: Bucket owner ID.
+ type: String
+ - arguments:
+ - default: true
+ description: Name of the bucket to create.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Access Control List for the bucket.
+ isArray: false
+ name: bucket_acl
+ predefined:
+ - authenticatedRead
+ - private
+ - projectPrivate
+ - publicRead
+ - publicReadWrite
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Default Access Control List for the object.
+ isArray: false
+ name: default_object_acl
+ predefined:
+ - authenticatedRead
+ - bucketOwnerFullControl
+ - bucketOwnerRead
+ - private
+ - projectPrivate
+ - publicRead
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new bucket.
+ execution: false
+ name: gcs-create-bucket
+ - arguments:
+ - default: true
+ description: Name of the bucket to delete.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Whether to force deletion of the bucket even if it is not empty.
+ isArray: false
+ name: force
+ predefined:
+ - 'true'
+ - 'false'
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes a bucket.
+ execution: false
+ name: gcs-delete-bucket
+ - arguments:
+ - default: true
+ description: Name of the bucket in which to list objects.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the list of objects in a bucket.
+ execution: false
+ name: gcs-list-bucket-objects
+ outputs:
+ - contextPath: GCS.BucketObject.Name
+ description: Object name.
+ type: String
+ - contextPath: GCS.BucketObject.Bucket
+ description: Name of the bucket containing the object.
+ type: String
+ - contextPath: GCS.BucketObject.ContentType
+ description: Content-Type of the object data.
+ type: String
+ - contextPath: GCS.BucketObject.TimeCreated
+ description: Object creation time.
+ type: Date
+ - contextPath: GCS.BucketObject.TimeUpdated
+ description: Last time object was modified.
+ type: Date
+ - contextPath: GCS.BucketObject.TimeDeleted
+ description: Object deletion time (available if the object is archived).
+ type: Date
+ - contextPath: GCS.BucketObject.Size
+ description: Object size in bytes.
+ type: Number
+ - contextPath: GCS.BucketObject.MD5
+ description: MD5 hash of the data in Base64.
+ type: String
+ - contextPath: GCS.BucketObject.OwnerID
+ description: Object owner ID.
+ type: String
+ - contextPath: GCS.BucketObject.CRC32c
+ description: CRC32c checksum (as described in RFC 4960, Appendix B https://tools.ietf.org/html/rfc4960#appendix-B),
+ encoded using Base64 in big-endian byte order.
+ type: String
+ - contextPath: GCS.BucketObject.EncryptionAlgorithm
+ description: The encryption algorithm.
+ type: String
+ - contextPath: GCS.BucketObject.EncryptionKeySHA256
+ description: SHA256 hash value of the encryption key.
+ type: String
+ - arguments:
+ - default: false
+ description: Name of the bucket in which the object resides.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ - default: true
+ description: Name of the object to download.
+ isArray: false
+ name: object_name
+ required: true
+ secret: false
+ - default: false
+ description: Name of the file to which the object is downloaded (if not specified,
+ the name is derived from the object name, but this may fail if the object
+ name contains invalid filename characters).
+ isArray: false
+ name: saved_file_name
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves object data into a file.
+ execution: false
+ name: gcs-download-file
+ - arguments:
+ - default: true
+ description: ID of a context entry containing the file to upload.
+ isArray: false
+ name: entry_id
+ required: true
+ secret: false
+ - default: false
+ description: Name of the bucket in which to upload the object.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ - default: false
+ description: Name of the uploaded object within the bucket.
+ isArray: false
+ name: object_name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Access Control List for the uploaded object.
+ isArray: false
+ name: object_acl
+ predefined:
+ - authenticatedRead
+ - bucketOwnerFullControl
+ - bucketOwnerRead
+ - private
+ - projectPrivate
+ - publicRead
+ required: false
+ secret: false
+ deprecated: false
+ description: Uploads a file (object) into a bucket.
+ execution: false
+ name: gcs-upload-file
+ - arguments:
+ - default: true
+ description: Name of the bucket for the Access Control List.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the Access Control List of a bucket.
+ execution: false
+ name: gcs-list-bucket-policy
+ outputs:
+ - contextPath: GCS.BucketPolicy.Bucket
+ description: Name of the bucket holding the Access Control List.
+ type: String
+ - contextPath: GCS.BucketPolicy.Entity
+ description: The entity holding the permission.
+ type: String
+ - contextPath: GCS.BucketPolicy.Email
+ description: Email address associated with the entity (if any).
+ type: String
+ - contextPath: GCS.BucketPolicy.Role
+ description: The access permission for the entity.
+ type: String
+ - contextPath: GCS.BucketPolicy.Team
+ description: Project team associated with the entity (if any).
+ type: String
+ - arguments:
+ - default: false
+ description: Name of the bucket in which to modify the Access Control List.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ - default: true
+ description: |-
+ Entity to add into the Access Control List.
+ Common entity formats are:
+ * user-
+ * group-
+ * allUsers
+ * allAuthenticatedUsers
+ For more options and details, see: https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls#resource
+ isArray: false
+ name: entity
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The access permission for the entity.
+ isArray: false
+ name: role
+ predefined:
+ - Reader
+ - Writer
+ - Owner
+ required: true
+ secret: false
+ deprecated: false
+ description: |-
+ Adds a new entity to a bucket's Access Control List.
+ Note: use the gcs-put-bucket-policy command to update an existing entry.
+ execution: false
+ name: gcs-create-bucket-policy
+ - arguments:
+ - default: false
+ description: Name of the bucket in which to modify the Access Control List.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ - default: true
+ description: |-
+ The entity to update in the Access Control List.
+ Common entity formats are:
+ * user-
+ * group-
+ * allUsers
+ * allAuthenticatedUsers
+ For more options and details, see: https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls#resource
+ isArray: false
+ name: entity
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The access permission for the entity.
+ isArray: false
+ name: role
+ predefined:
+ - Reader
+ - Writer
+ - Owner
+ required: true
+ secret: false
+ deprecated: false
+ description: |-
+ Updates an existing entity in a bucket's Access Control List.
+ Note: use the gcs-create-bucket-policy command to create a new entry.
+ execution: false
+ name: gcs-put-bucket-policy
+ - arguments:
+ - default: false
+ description: Name of the bucket in which to modify the Access Control List.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ - default: true
+ description: |-
+ Entity to remove from the Access Control List.
+ Common entity formats are:
+ * user-
+ * group-
+ * allUsers
+ * allAuthenticatedUsers
+ For more options and details, see: https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls#resource
+ isArray: false
+ name: entity
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes an entity from a bucket's Access Control List.
+ execution: false
+ name: gcs-delete-bucket-policy
+ - arguments:
+ - default: false
+ description: Name of the bucket in which the object resides.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ - default: true
+      description: Name of the object whose access controls to list.
+ isArray: false
+ name: object_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the Access Control List of an object.
+ execution: false
+ name: gcs-list-bucket-object-policy
+ outputs:
+ - contextPath: GCS.BucketObjectPolicy.Bucket
+ description: Name of the bucket in which the object resides.
+ type: String
+ - contextPath: GCS.BucketObjectPolicy.Object
+ description: Name of the object holding the Access Control List.
+ type: String
+ - contextPath: GCS.BucketObjectPolicy.Entity
+ description: The entity holding the permission.
+ type: String
+ - contextPath: GCS.BucketObjectPolicy.Email
+ description: Email address associated with the entity (if any).
+ type: String
+ - contextPath: GCS.BucketObjectPolicy.Role
+ description: The access permission for the entity.
+ type: String
+ - contextPath: GCS.BucketObjectPolicy.Team
+ description: Project team associated with the entity (if any).
+ type: String
+ - arguments:
+ - default: false
+ description: Name of the bucket in which the object resides.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ - default: false
+      description: Name of the object in which to modify the Access Control List.
+ isArray: false
+ name: object_name
+ required: true
+ secret: false
+ - default: true
+ description: |-
+ Entity to add into the Access Control List.
+ Common entity formats are:
+ * user-
+ * group-
+ * allUsers
+ * allAuthenticatedUsers
+ For more options and details, see: https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls#resource
+ isArray: false
+ name: entity
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The access permission for the entity.
+ isArray: false
+ name: role
+ predefined:
+ - Reader
+ - Owner
+ required: true
+ secret: false
+ deprecated: false
+ description: |-
+ Adds a new entity to an object's Access Control List.
+ Note: use the gcs-put-bucket-object-policy command to update an existing entry.
+ execution: false
+ name: gcs-create-bucket-object-policy
+ - arguments:
+ - default: false
+ description: Name of the bucket in which the object resides.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ - default: false
+ description: Name of the object in which to modify access controls.
+ isArray: false
+ name: object_name
+ required: true
+ secret: false
+ - default: true
+ description: |-
+ The entity to update in the Access Control List.
+ Common entity formats are:
+ * user-
+ * group-
+ * allUsers
+ * allAuthenticatedUsers
+ For more options and details, see: https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls#resource
+ isArray: false
+ name: entity
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The access permissions for the entity.
+ isArray: false
+ name: role
+ predefined:
+ - Reader
+ - Owner
+ required: true
+ secret: false
+ deprecated: false
+ description: |-
+ Updates an existing entity in an object's Access Control List.
+        Note: use the gcs-create-bucket-object-policy command to create a new entry.
+ execution: false
+ name: gcs-put-bucket-object-policy
+ - arguments:
+ - default: false
+ description: Name of the bucket in which the object resides.
+ isArray: false
+ name: bucket_name
+ required: true
+ secret: false
+ - default: false
+ description: Name of the object in which to modify access controls.
+ isArray: false
+ name: object_name
+ required: true
+ secret: false
+ - default: true
+ description: |-
+ Entity to remove from the Access Control List.
+ Common entity formats are:
+ * user-
+ * group-
+ * allUsers
+ * allAuthenticatedUsers
+ For more options and details, see: https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls#resource
+ isArray: false
+ name: entity
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes an entity from an object's Access Control List.
+ execution: false
+ name: gcs-delete-bucket-object-policy
+ dockerimage: demisto/google-cloud-storage:1.0.0.1734
+ isfetch: false
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- GCS - Test
diff --git a/Integrations/GoogleCloudStorage/GoogleCloudStorage_CHANGELOG.md b/Integrations/GoogleCloudStorage/GoogleCloudStorage_CHANGELOG.md
new file mode 100644
index 000000000000..5172a2e79dee
--- /dev/null
+++ b/Integrations/GoogleCloudStorage/GoogleCloudStorage_CHANGELOG.md
@@ -0,0 +1,3 @@
+## [Unreleased]
+#### New integration
+Google Cloud Storage is a RESTful online file storage web service for storing and accessing data on Google Cloud Platform infrastructure.
diff --git a/Integrations/GoogleCloudStorage/GoogleCloudStorage_description.md b/Integrations/GoogleCloudStorage/GoogleCloudStorage_description.md
new file mode 100644
index 000000000000..acb5951df79a
--- /dev/null
+++ b/Integrations/GoogleCloudStorage/GoogleCloudStorage_description.md
@@ -0,0 +1,4 @@
+Create a Service Account
+1. Go to the [Google documentation](https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount) and follow the procedure in the Creating a Service Account section. After you create a service account, a Service Account Private Key file is downloaded. You will need this file in step 3.
+2. Grant the Storage Admin permission to the Service Account to enable the Service Account to perform all Google Storage API commands.
+3. In Demisto, configure an instance of the Google Cloud Storage integration. For the Service Account Private Key parameter, copy the JSON contents of the file you downloaded in step 1.
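+
+For reference, a minimal sketch of how the Service Account JSON is typically consumed, assuming the google-cloud-storage Python client (the file path and bucket listing below are illustrative, not part of the integration):
+
+```python
+import json
+
+from google.cloud import storage
+from google.oauth2 import service_account
+
+# Illustrative: read the downloaded key file (the same JSON pasted into the instance parameter).
+sa_info = json.load(open('service-account.json'))
+creds = service_account.Credentials.from_service_account_info(sa_info)
+client = storage.Client(project=creds.project_id, credentials=creds)
+
+# With the Storage Admin role granted, the client can run all Google Storage API commands.
+print([bucket.name for bucket in client.list_buckets()])
+```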
diff --git a/Integrations/GoogleCloudStorage/GoogleCloudStorage_image.png b/Integrations/GoogleCloudStorage/GoogleCloudStorage_image.png
new file mode 100644
index 000000000000..965ed729399b
Binary files /dev/null and b/Integrations/GoogleCloudStorage/GoogleCloudStorage_image.png differ
diff --git a/Integrations/GoogleCloudStorage/GoogleCloudStorage_test.py b/Integrations/GoogleCloudStorage/GoogleCloudStorage_test.py
new file mode 100644
index 000000000000..6b43e4a09289
--- /dev/null
+++ b/Integrations/GoogleCloudStorage/GoogleCloudStorage_test.py
@@ -0,0 +1,52 @@
+from GoogleCloudStorage import *
+from datetime import datetime
+
+
+def test_ec_key():
+
+ # No entry merging
+ assert ec_key('Foo.Bar.Baz') == 'Foo.Bar.Baz'
+
+ # Merge entries by single ID field
+ assert ec_key('Foo.Bar.Baz', 'ID') == 'Foo.Bar.Baz(val.ID && val.ID === obj.ID)'
+
+ # Merge entries by multiple ID fields
+ assert ec_key('Foo.Bar.Baz', 'ID1', 'ID2') ==\
+ 'Foo.Bar.Baz(val.ID1 && val.ID1 === obj.ID1' \
+ ' && val.ID2 && val.ID2 === obj.ID2)'
+
+ assert ec_key('Foo.Bar.Baz', 'ID1', 'ID2', 'ID3') ==\
+ 'Foo.Bar.Baz(val.ID1 && val.ID1 === obj.ID1' \
+ ' && val.ID2 && val.ID2 === obj.ID2' \
+ ' && val.ID3 && val.ID3 === obj.ID3)'
+
+
+def test_reformat_datetime_str():
+ assert reformat_datetime_str('2019-08-28T11:28:47.165Z') == '2019-08-28T11:28:47'
+ assert reformat_datetime_str('2001-04-14T23:32:15.999Z') == '2001-04-14T23:32:15'
+ assert reformat_datetime_str('2030-11-07T02:00:00.000Z') == '2030-11-07T02:00:00'
+
+
+def test_datetime2str():
+ assert datetime2str(datetime(year=2019, month=8, day=28, hour=11, minute=28, second=47, microsecond=165123))\
+ == '2019-08-28T11:28:47'
+ assert datetime2str(datetime(year=2001, month=4, day=14, hour=23, minute=32, second=15, microsecond=999999))\
+ == '2001-04-14T23:32:15'
+ assert datetime2str(datetime(year=2030, month=11, day=7, hour=14, minute=7, second=0, microsecond=0))\
+ == '2030-11-07T14:07:00'
+
+
+def test_human_readable_table():
+ # Verify that 1. header order is preserved, 2. spaces are added between (capitalized) header words
+ assert human_readable_table(
+ 'My Table', {'HeaderOne': 'value one', 'HeaderTwo': 'value two', 'HeaderThree': 'value three'})\
+ == '### My Table\n' \
+ '|Header One|Header Two|Header Three|\n' \
+ '|---|---|---|\n' \
+ '| value one | value two | value three |\n'
+
+
+def test_format_error():
+ assert format_error(ValueError('Somebody set up us the bomb.')) == 'ValueError: Somebody set up us the bomb.'
+ assert format_error(ValueError()) == 'Error occurred in the Google Cloud Storage Integration (ValueError)'
+ assert format_error(7) == 'Error occurred in the Google Cloud Storage Integration (7)'
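+
+
+# For reference, a minimal ec_key consistent with the assertions above could look like the sketch
+# below (illustrative only; the actual implementation lives in GoogleCloudStorage.py):
+#
+#     def ec_key(path, *merge_by):
+#         if not merge_by:
+#             return path
+#         condition = ' && '.join('val.{0} && val.{0} === obj.{0}'.format(key) for key in merge_by)
+#         return '{}({})'.format(path, condition)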
diff --git a/Integrations/GoogleDocs/CHANGELOG.md b/Integrations/GoogleDocs/CHANGELOG.md
new file mode 100644
index 000000000000..7324389412d1
--- /dev/null
+++ b/Integrations/GoogleDocs/CHANGELOG.md
@@ -0,0 +1,4 @@
+## [Unreleased]
+
+#### New Integration
+Google Docs
\ No newline at end of file
diff --git a/Integrations/GoogleDocs/GoogleDocs.py b/Integrations/GoogleDocs/GoogleDocs.py
new file mode 100644
index 000000000000..56f4d1532336
--- /dev/null
+++ b/Integrations/GoogleDocs/GoogleDocs.py
@@ -0,0 +1,396 @@
+from CommonServerPython import *
+
+''' IMPORTS '''
+
+import requests
+import httplib2
+import urllib.parse
+from oauth2client import service_account
+from googleapiclient import discovery
+
+import typing
+from collections import defaultdict
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+SCOPES = ['https://www.googleapis.com/auth/documents'] # Permissions the application needs to use google docs
+
+''' HELPER FUNCTIONS '''
+
+
+def get_function_by_action_name(action):
+ action_to_function = {
+ 'createNamedRange': create_named_range,
+ 'createParagraphBullets': create_paragraph_bullets,
+ 'deleteContentRange': delete_content_range,
+ 'deleteNamedRangeByName': delete_named_range_by_name,
+ 'deleteNamedRangeById': delete_named_range_by_id,
+ 'deleteParagraphBullets': delete_paragraph_bullets,
+ 'deletePositionedObject': delete_positioned_object,
+ 'deleteTableColumn': delete_table_column,
+ 'deleteTableRow': delete_table_row,
+ 'insertInlineImage': insert_inline_image,
+ 'insertPageBreak': insert_page_break,
+ 'insertTable': insert_table,
+ 'insertTableColumn': insert_table_column,
+ 'insertTableRow': insert_table_row,
+ 'insertText': insert_text,
+ 'replaceAllText': replace_all_text
+ }
+ return action_to_function[action]
+
+
+def parse_actions(actions: str):
+    """Parses action1{param1,param2,...};action2{param1,param2,...}... into a dictionary where keys are action
+    types and values are the function params"""
+ parsed_actions = dict()
+ actions = actions.split(';')
+ for action in actions:
+ action_type, params = action.split('{')
+ params = params[:-1]
+ params = params.split(',')
+ parsed_actions[action_type] = params
+ return parsed_actions
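+
+# For illustration (hypothetical input): parse_actions('insertText{1,hello};insertTable{1,2,3}')
+# returns {'insertText': ['1', 'hello'], 'insertTable': ['1', '2', '3']}. Note that the parameters
+# stay strings; they are passed through to the request builders as-is.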
+
+
+def get_http_client_with_proxy(disable_ssl):
+ proxies = handle_proxy()
+ if not proxies.get('https', True):
+ raise Exception('https proxy value is empty. Check Demisto server configuration')
+ https_proxy = proxies['https']
+ if not https_proxy.startswith('https') and not https_proxy.startswith('http'):
+ https_proxy = 'https://' + https_proxy
+ parsed_proxy = urllib.parse.urlparse(https_proxy)
+ proxy_info = httplib2.ProxyInfo(
+ proxy_type=httplib2.socks.PROXY_TYPE_HTTP, # disable-secrets-detection
+ proxy_host=parsed_proxy.hostname,
+ proxy_port=parsed_proxy.port,
+ proxy_user=parsed_proxy.username,
+ proxy_pass=parsed_proxy.password)
+ return httplib2.Http(proxy_info=proxy_info, disable_ssl_certificate_validation=disable_ssl)
+
+
+def get_credentials(credentials, scopes):
+ credentials = service_account.ServiceAccountCredentials.from_json_keyfile_dict(credentials, scopes=scopes)
+ return credentials
+
+
+def get_client(credentials, scopes, proxy, disable_ssl):
+ credentials = get_credentials(credentials, scopes)
+
+ if proxy or disable_ssl:
+ http_client = credentials.authorize(get_http_client_with_proxy(disable_ssl))
+ return discovery.build('docs', 'v1', http=http_client)
+ return discovery.build('docs', 'v1', credentials=credentials)
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def get_range_object(start_index, end_index, segment_id=None):
+ range_obj = {
+ 'range': {
+ 'startIndex': start_index,
+ 'endIndex': end_index,
+ }
+ }
+
+ if segment_id:
+ range_obj['range']['segmentId'] = segment_id
+
+ return range_obj
+
+
+def get_location_object(index, segment_id=None):
+ location_obj = {
+ "index": index,
+ }
+ if segment_id:
+ location_obj["segmentId"] = segment_id
+ return location_obj
+
+
+def replace_all_text(action_name, source, target, match_case):
+ return {
+ action_name: {
+ "replaceText": target,
+ 'containsText': {
+ "text": source,
+ "matchCase": match_case
+ }
+ }
+ }
+
+
+def insert_text(action_name, index, text, segment_id=None):
+ return {
+ action_name: {
+ "location": get_location_object(index, segment_id),
+ 'text': text
+ }
+ }
+
+
+def create_paragraph_bullets(action_name, start_index, end_index, bullet_type, segment_id=None):
+ return {
+ action_name: {
+ **get_range_object(start_index, end_index, segment_id),
+ 'bulletPreset': bullet_type,
+ }
+ }
+
+
+def delete_paragraph_bullets(action_name, start_index, end_index, segment_id=None):
+ return {
+ action_name: {
+ **get_range_object(start_index, end_index, segment_id),
+ }
+ }
+
+
+def create_named_range(action_name, start_index, end_index, name, segment_id=None):
+ return {
+ action_name: {
+ "name": name,
+ **get_range_object(start_index, end_index, segment_id),
+ }
+ }
+
+
+def delete_named_range_by_id(action_name, named_range_id):
+ return {
+ action_name: {
+ "namedRangeId": named_range_id
+ }
+ }
+
+
+def delete_named_range_by_name(action_name, name):
+ return {
+ action_name: {
+ "name": name
+ }
+ }
+
+
+def delete_content_range(action_name, start_index, end_index, segment_id=None):
+    return {action_name: get_range_object(start_index, end_index, segment_id)}
+
+
+def insert_inline_image(action_name, index, uri, width, height, segment_id=None):
+ return {
+ action_name: {
+ "uri": uri,
+ "objectSize": {
+ "height": {
+ "magnitude": height,
+ "unit": 'PT'
+ },
+ "width": {
+ "magnitude": width,
+ "unit": 'PT'
+ }
+ },
+ "location": get_location_object(index, segment_id)
+ }
+ }
+
+
+def insert_table(action_name, index, rows, columns, segment_id=None):
+ return {
+ action_name: {
+ "rows": rows,
+ "columns": columns,
+ "location": get_location_object(index, segment_id)
+ }
+ }
+
+
+def insert_table_row(action_name, index, row_index, column_index, insert_below, segment_id=None):
+ return {
+ action_name: {
+ "tableCellLocation": {
+ "tableStartLocation": get_location_object(index, segment_id),
+ "rowIndex": row_index,
+ "columnIndex": column_index
+ },
+ "insertBelow": insert_below
+ }
+ }
+
+
+def insert_table_column(action_name, index, row_index, column_index, insert_right, segment_id=None):
+    return {
+        action_name: {
+            "tableCellLocation": {
+                "tableStartLocation": get_location_object(index, segment_id),
+                "rowIndex": row_index,
+                "columnIndex": column_index
+            },
+            "insertRight": insert_right
+        }
+    }
+
+
+def delete_table_row(action_name, index, row_index, column_index, segment_id=None):
+ return {
+ action_name: {
+ "tableCellLocation": {
+ "tableStartLocation": get_location_object(index, segment_id),
+ "rowIndex": row_index,
+ "columnIndex": column_index
+ },
+ }
+ }
+
+
+def delete_table_column(action_name, index, row_index, column_index, segment_id=None):
+ return {
+ action_name: {
+ "tableCellLocation": {
+ "tableStartLocation": get_location_object(index, segment_id),
+ "rowIndex": row_index,
+ "columnIndex": column_index
+ },
+ }
+ }
+
+
+def insert_page_break(action_name, index, segment_id=None):
+ return {
+ action_name: {
+ "location": get_location_object(index, segment_id)
+ }
+ }
+
+
+def delete_positioned_object(action_name, object_id):
+ return {
+ action_name: {
+ "objectId": object_id
+ }
+ }
+
+
+def batch_update_document_command(service):
+ args = demisto.args()
+ document_id = args.get('document_id')
+ actions = parse_actions(args.get('actions'))
+ required_revision_id = args.get("required_revision_id", None)
+ target_revision_id = args.get("target_revision_id", None)
+ document = batch_update_document(service, document_id, actions, required_revision_id, target_revision_id)
+    human_readable_text = "The document with the title {title} and actions {actions} was updated. The results are:".\
+        format(title=document['title'], actions=args.get('actions'))
+ return document, human_readable_text
+
+
+def batch_update_document(service, document_id, actions, required_revision_id=None, target_revision_id=None):
+ payload: dict = {
+ "requests": []
+ }
+
+ write_control: typing.DefaultDict = defaultdict(dict)
+ if required_revision_id and target_revision_id:
+ raise Exception("Enter required_revision_id or target_revision_id but not both")
+ elif required_revision_id:
+ write_control['writeControl']["requiredRevisionId"] = required_revision_id
+ elif target_revision_id:
+ write_control['writeControl']["targetRevisionId"] = target_revision_id
+
+ payload = {**payload, **write_control}
+
+ # Return a function based on the action name and execute it
+ for action_type, params in actions.items():
+ request = get_function_by_action_name(action_type)(action_type, *params)
+ payload["requests"].append(request)
+
+ service.documents().batchUpdate(documentId=document_id, body=payload).execute()
+ document = get_document(service, document_id)
+ return document
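+
+# For illustration (hypothetical document): with actions parsed from 'insertText{1,hello}', the body
+# sent to documents().batchUpdate() is:
+#     {'requests': [{'insertText': {'location': {'index': '1'}, 'text': 'hello'}}]}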
+
+
+def create_document_command(service):
+ args = demisto.args()
+ title = args.get('title')
+ document = create_document(service, title)
+ human_readable_text = "The document with the title {title} was created. The results are:".format(title=title)
+ return document, human_readable_text
+
+
+def create_document(service, title):
+ payload = {
+ "title": title,
+ }
+
+ document = service.documents().create(body=payload).execute()
+ return document
+
+
+def get_document_command(service):
+ args = demisto.args()
+ document_id = args.get('document_id')
+ document = get_document(service, document_id)
+ human_readable_text = "The document with the title {title} was returned. The results are:".\
+ format(title=document['title'])
+ return document, human_readable_text
+
+
+def get_document(service, document_id):
+ document = service.documents().get(documentId=document_id).execute()
+ return document
+
+
+def main():
+ demisto.debug('Command being called is %s' % (demisto.command()))
+ proxy = demisto.params().get('proxy')
+ disable_ssl = demisto.params().get('insecure', False)
+ service_account_credentials = json.loads(demisto.params().get('service_account_credentials'))
+ if demisto.command() == 'test-module':
+ try:
+ get_client(service_account_credentials, SCOPES, proxy, disable_ssl)
+            demisto.results('ok')
+            return
+ except Exception as e:
+ return_error("Failed to execute test. Error: {}".format(str(e)), e)
+
+ try:
+ service = get_client(service_account_credentials, SCOPES, proxy, disable_ssl)
+ if demisto.command() == 'google-docs-update-document':
+ document, human_readable_text = batch_update_document_command(service)
+ elif demisto.command() == 'google-docs-create-document':
+ document, human_readable_text = create_document_command(service)
+ elif demisto.command() == 'google-docs-get-document':
+ document, human_readable_text = get_document_command(service)
+ else:
+ return_error("Command {} does not exist".format(demisto.command()))
+ return
+
+ res = {
+ 'RevisionId': document['revisionId'],
+ 'DocumentId': document['documentId'],
+ 'Title': document['title']
+ }
+ ec = {
+ 'GoogleDocs(val.DocumentId && val.DocumentId == obj.DocumentId)': res
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': ec,
+ 'HumanReadable': tableToMarkdown(human_readable_text, res),
+ 'EntryContext': ec
+ })
+
+ # Log exceptions
+ except Exception as e:
+ LOG(str(e))
+ LOG.print_log()
+ return_error("Failed to execute {} command. Error: {}".format(demisto.command(), str(e)), e)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/GoogleDocs/GoogleDocs.yml b/Integrations/GoogleDocs/GoogleDocs.yml
new file mode 100644
index 000000000000..37c4ec514ac3
--- /dev/null
+++ b/Integrations/GoogleDocs/GoogleDocs.yml
@@ -0,0 +1,117 @@
+category: Authentication
+commonfields:
+ id: GoogleDocs
+ version: -1
+configuration:
+- display: Service Account Private Key file contents (JSON)
+ name: service_account_credentials
+ required: true
+ type: 4
+- defaultvalue: ''
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: ''
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Use the Google Docs integration to create and modify Google Docs documents.
+display: Google Docs
+name: GoogleDocs
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The document ID of the document to fetch.
+ isArray: false
+ name: document_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the document that matches the specified document ID.
+ execution: false
+ name: google-docs-get-document
+ outputs:
+ - contextPath: GoogleDocs.Title
+ description: The title of the document.
+ type: String
+ - contextPath: GoogleDocs.RevisionId
+ description: The revision ID of the updated document.
+ type: String
+ - contextPath: GoogleDocs.DocumentId
+ description: The document ID of the updated document.
+ type: String
+ - arguments:
+ - default: false
+ description: The title of the document to create.
+ isArray: false
+ name: title
+ required: true
+ secret: false
+ deprecated: false
+ description: Creates a document.
+ execution: false
+ name: google-docs-create-document
+ outputs:
+ - contextPath: GoogleDocs.Title
+ description: The title of the new document.
+ type: String
+ - contextPath: GoogleDocs.RevisionId
+ description: The revision ID of the new document.
+ type: String
+ - contextPath: GoogleDocs.DocumentId
+ description: The document ID of the new document.
+ type: String
+ - arguments:
+ - default: false
+ description: The document ID of the document to update.
+ isArray: false
+ name: document_id
+ required: true
+ secret: false
+ - default: false
+ description: 'Updates to the document in the format: action1{param1,param2,...};action2{param1,param2,...}.'
+ isArray: false
+ name: actions
+ required: true
+ secret: false
+ - default: false
+      description: The revision ID that the document is required to be at for the write request
+        to be applied. If the document is at a newer revision, the request is not applied and you
+        will receive an error. If you specify the target_revision_id argument, you cannot use this
+        argument.
+ isArray: false
+ name: required_revision_id
+ required: false
+ secret: false
+ - default: false
+      description: The target revision ID of the document to which the write request will
+        be applied. If collaborator changes occurred after this revision, the changes in this
+        write request are transformed against them before being applied. If you specify the
+        required_revision_id argument, you cannot use this argument.
+ isArray: false
+ name: target_revision_id
+ required: false
+ secret: false
+ deprecated: false
+ description: Updates the document with the specified document ID.
+ execution: false
+ name: google-docs-update-document
+ outputs:
+ - contextPath: GoogleDocs.Title
+      description: The title of the updated document.
+ type: String
+ - contextPath: GoogleDocs.RevisionId
+ description: The revision ID of the updated document.
+ type: String
+ - contextPath: GoogleDocs.DocumentId
+ description: The document ID of the updated document.
+      type: String
+ dockerimage: demisto/googleapi-python3:1.0.0.2205
+ isfetch: false
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- GoogleDocs-test
diff --git a/Integrations/GoogleDocs/GoogleDocs_description.md b/Integrations/GoogleDocs/GoogleDocs_description.md
new file mode 100644
index 000000000000..1148a624b47a
--- /dev/null
+++ b/Integrations/GoogleDocs/GoogleDocs_description.md
@@ -0,0 +1,33 @@
+## Creating a Service Account
+1. Go to the [Google documentation](https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount) and follow the procedure in the Creating a Service Account section. After you create a service account, a Service Account Private Key file is downloaded. You will need this file when configuring an instance of the integration.
+2. Grant the required permissions to the Service Account to enable it to perform Google Docs API commands.
+3. In Demisto, configure an instance of the Google Docs integration. For the Service Account Private Key parameter, add the Service Account Private Key file contents (JSON).
+## Create document command
+Creates a blank document. It is not possible to pass body arguments;
+this is a limitation of the Google API. To insert content, use the update document command.
+## Update document command
+Updates a document by its ID. Actions to perform are passed in the following format:
+* action1{param1,param2,...};action2{param1,param2,...}...
+Each action name is mapped to the function of the same name, which is called with param1, param2, and so on as its parameters.
+For example:
+* insertText{5,hello}
+* insertTable{5,7,7};insertText{5,hello}
+### List of allowed actions
+- createNamedRange(start_index, end_index, name, segment_id(optional))
+- createParagraphBullets(start_index, end_index, bullet_type, segment_id(optional))
+- deleteContentRange(start_index, end_index, segment_id(optional))
+- deleteNamedRangeByName(name)
+- deleteNamedRangeById(named_range_id)
+- deleteParagraphBullets(start_index, end_index, segment_id(optional))
+- deletePositionedObject(object_id)
+- deleteTableColumn(index, row_index, column_index, segment_id(optional))
+- deleteTableRow(index, row_index, column_index, segment_id(optional))
+- insertInlineImage(index, uri, width, height, segment_id(optional))
+- insertPageBreak(index, segment_id(optional))
+- insertTable(index, rows, columns, segment_id(optional))
+- insertTableColumn(index, row_index, column_index, insert_right, segment_id(optional))
+- insertTableRow(index, row_index, column_index, insert_below, segment_id(optional))
+- insertText(index, text, segment_id(optional))
+- replaceAllText(source, target, match_case)
diff --git a/Integrations/GoogleDocs/GoogleDocs_image.png b/Integrations/GoogleDocs/GoogleDocs_image.png
new file mode 100644
index 000000000000..11376da82ea7
Binary files /dev/null and b/Integrations/GoogleDocs/GoogleDocs_image.png differ
diff --git a/Integrations/GoogleVault/GoogleVault.py b/Integrations/GoogleVault/GoogleVault.py
new file mode 100644
index 000000000000..1a40f73685bf
--- /dev/null
+++ b/Integrations/GoogleVault/GoogleVault.py
@@ -0,0 +1,1513 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+from googleapiclient.discovery import build
+from httplib2 import Http
+import json
+from oauth2client import service_account
+from google.oauth2 import service_account as google_service_account
+import googleapiclient.http
+from googleapiclient._auth import authorized_http
+import dateparser
+import io
+import os
+
+# @@@@@@@@ GLOBALS @@@@@@@@
+
+# If modifying these scopes, delete the file token.json.
+SCOPES = ['https://www.googleapis.com/auth/ediscovery', 'https://www.googleapis.com/auth/devstorage.full_control']
+DEMISTO_MATTER = 'test_search_phishing'
+
+ADMIN_EMAIL = demisto.params()['gsuite_credentials']['identifier'].encode('utf-8')
+PRIVATE_KEY_CONTENT = demisto.params()['auth_json'].encode('utf-8')
+USE_SSL = not demisto.params().get('insecure', False)
+
+
+# @@@@@@@@ HELPER FUNCS @@@@@@@@
+def validate_input_values(arguments_values_to_verify, available_values):
+ for value in arguments_values_to_verify:
+ if value not in available_values:
+ return_error(
+ 'Argument: \'{}\' is not one of the possible values: {}'.format(value, ', '.join(available_values)))
+
+
+def get_credentials(additional_scopes=None, delegated_user=ADMIN_EMAIL):
+ """Gets valid user credentials from storage.
+ If nothing has been stored, or if the stored credentials are invalid,
+ the OAuth2 flow is completed to obtain the new credentials.
+ Returns:
+ Credentials, the obtained credential.
+ """
+ if delegated_user == 'me':
+ delegated_user = ADMIN_EMAIL
+ scopes = SCOPES
+ if additional_scopes is not None:
+ scopes += additional_scopes
+ try:
+ json_keyfile = json.loads(PRIVATE_KEY_CONTENT)
+ if not isinstance(json_keyfile, dict):
+ json_keyfile = json.loads(json_keyfile)
+ cred = service_account.ServiceAccountCredentials.from_json_keyfile_dict(json_keyfile,
+ scopes=scopes)
+ delegated_creds = cred.create_delegated(delegated_user)
+ except Exception as e:
+ LOG('An error occurred in the \'get_credentials\' function.')
+ err_msg = 'An error occurred while trying to construct an OAuth2 ' \
+ 'ServiceAccountCredentials object - {}'.format(str(e))
+ return_error(err_msg)
+ return delegated_creds
+
+
+def connect():
+ creds = get_credentials()
+ try:
+ service = build('vault', 'v1', http=creds.authorize(Http(disable_ssl_certificate_validation=(not USE_SSL))))
+ except Exception as e:
+ LOG('There was an error creating the Vault service in the \'connect\' function.')
+ err_msg = 'There was an error creating the Vault service - {}'.format(str(e))
+ return_error(err_msg)
+ return service
+
+
+def is_matter_exist(service, matter_name): # Not needed at the moment
+ """
+ Searches for existence of a matter by its name
+ Note - this is case-sensitive
+ :param service: Vault service object
+ :param matter_name: name of the matter to be searched
+ :return: True if exists, False otherwise.
+ """
+    existing_matters = get_open_matters(service).get('matters', [])
+    if any(matter_name == matter['name'] for matter in existing_matters):
+ return True
+ return False
+
+
+def get_open_matters(service):
+ """ Gets first 10 matters """
+ open_matters = service.matters().list(state='OPEN').execute()
+ return open_matters
+
+
+def get_matter_by_id(service, matter_id):
+ matter = service.matters().get(matterId=matter_id).execute()
+ return matter
+
+
+def get_matters_by_state(service, state):
+ state = state.upper()
+ matter_state = state if state in ('OPEN', 'CLOSED', 'DELETED') else 'STATE_UNSPECIFIED'
+ matter_list = service.matters().list(state=matter_state).execute()
+ return matter_list
+
+
+def delete_matter(service, matter_id):
+ _ = service.matters().delete(matterId=matter_id).execute()
+    return get_matter_by_id(service, matter_id)  # Note - this is different from the other state updates
+
+
+def close_matter(service, matter_id):
+ close_response = service.matters().close(matterId=matter_id, body={}).execute()
+ return close_response['matter']
+
+
+def reopen_matter(service, matter_id):
+ reopen_response = service.matters().reopen(matterId=matter_id, body={}).execute()
+ return reopen_response['matter']
+
+
+def undelete_matter(service, matter_id):
+ undeleted_matter = service.matters().undelete(matterId=matter_id, body={}).execute()
+ return undeleted_matter
+
+
+def add_held_account(service, matter_id, hold_id, account_id):
+ held_account = {'accountId': account_id}
+ return service.matters().holds().accounts().create(matterId=matter_id, holdId=hold_id, body=held_account).execute()
+
+
+def remove_held_account(service, matter_id, hold_id, account_id):
+ return service.matters().holds().accounts().delete(matterId=matter_id, holdId=hold_id,
+ accountId=account_id).execute()
+
+
+def remove_hold(service, matter_id, hold_id):
+ return service.matters().holds().delete(matterId=matter_id, holdId=hold_id).execute()
+
+
+def list_holds(service, matter_id):
+ """
+ Return a list of existing holds
+ """
+ done_paginating = False
+ response = service.matters().holds().list(matterId=matter_id).execute()
+ # append first page:
+ the_holds = response['holds']
+ # Keep paginating and appending:
+ while not done_paginating:
+ if 'nextPageToken' in response:
+            response = service.matters().holds().list(matterId=matter_id, pageSize=10, pageToken=response['nextPageToken']).execute()
+ the_holds.extend(response['holds'])
+ else:
+ done_paginating = True
+ return the_holds
+
+
+def timeframe_to_utc_zulu_range(timeframe_str):
+ """
+ Converts a time-frame to UTC Zulu format that can be used for startTime and endTime in various Google Vault requests.
+ """
+ try:
+ parsed_str = dateparser.parse(timeframe_str)
+ end_time = datetime.utcnow().isoformat() + 'Z' # Current time
+ start_time = parsed_str.isoformat() + 'Z'
+ return (start_time, end_time)
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to parse date correctly: {}'.format(err_msg))
+ else:
+ raise ex
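+
+# For illustration (assuming dateparser resolves the relative phrase): timeframe_to_utc_zulu_range('1 day')
+# returns a (start_time, end_time) pair such as ('2019-10-01T12:00:00Z', '2019-10-02T12:00:00Z'),
+# where end_time is always the current UTC time.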
+
+
+def create_hold_query(hold_name, corpus, accounts, terms, time_frame="", start_time="", end_time=""):
+ """
+ Creates the query that will be used to request the creation of a new hold. Returns the ready-to-be-sent request.
+ """
+ # --- Sanitizing Input ---
+ corpus = corpus.upper()
+ if time_frame:
+ start_time, end_time = timeframe_to_utc_zulu_range(time_frame) # Making it UTC Zulu format
+ elif start_time:
+ if not end_time:
+ end_time = datetime.utcnow().isoformat() + 'Z' # End time will be now, if no end time was given
+    if isinstance(accounts, (str, unicode)):
+ accounts = accounts.split(',')
+
+ # --- Building Request ---
+ request = {}
+ mail_query = {} # type: Dict[Any, Any]
+ accounts_for_query = []
+ if not terms:
+ if start_time and end_time:
+ mail_query = {'startTime': start_time, 'endTime': end_time}
+ else:
+ if start_time and end_time:
+ mail_query = {'startTime': start_time, 'endTime': end_time, 'terms': terms}
+
+ # --- Building all small parts into big request object ---
+ request['name'] = hold_name
+ request['corpus'] = corpus
+ if mail_query:
+ request['query'] = {'mailQuery': mail_query} # Adding the ready mail query
+ for acc_id in accounts:
+ accounts_for_query.append({'accountId': acc_id})
+ request['accounts'] = accounts_for_query
+ return request
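+
+# For illustration (hypothetical values):
+#     create_hold_query('hold1', 'mail', ['user@example.com'], 'phishing', time_frame='10 days')
+# returns a request body along the lines of:
+#     {'name': 'hold1', 'corpus': 'MAIL',
+#      'query': {'mailQuery': {'startTime': '...Z', 'endTime': '...Z', 'terms': 'phishing'}},
+#      'accounts': [{'accountId': 'user@example.com'}]}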
+
+
+def create_hold_mail_accounts(service, matter_id, request_body):
+ """
+ Creates a hold in Google Vault
+ """
+ return service.matters().holds().create(matterId=matter_id, body=request_body).execute()
+
+
+def create_export(service, matter, request_body):
+ """
+ Creates an export in the given matter, with the given request_body (which is the actual JSON for the request).
+ """
+ return service.matters().exports().create(matterId=matter, body=request_body).execute()
+
+
+def create_mail_export_query(export_name, emails, time_frame, start_time, end_time, terms, org_unit="",
+ export_pst='True', export_mbox='False', search_method='All Accounts',
+ include_drafts='True', data_scope='All Data'):
+ """
+ Creates the query that will be used in the request to create a mail export
+ """
+ org_unit_id = org_unit
+ # --- Sanitizing Input ---
+ exclude_drafts = 'false'
+ if time_frame:
+ start_time, end_time = timeframe_to_utc_zulu_range(time_frame) # Making it UTC Zulu format
+ elif start_time:
+ if not end_time:
+ end_time = datetime.utcnow().isoformat() + 'Z' # End time will be now, if no end time was given
+ if isinstance(emails, (str, unicode)):
+ if ',' in emails:
+ emails = emails.split(',')
+ else:
+ emails = [emails]
+ if str(include_drafts).upper() == 'FALSE':
+ exclude_drafts = 'true'
+ if data_scope.upper() == 'HELD DATA':
+ data_scope = 'HELD_DATA'
+ if data_scope.upper() == 'ALL DATA':
+ data_scope = 'ALL_DATA'
+ if data_scope.upper() == 'UNPROCESSED DATA':
+ data_scope = 'UNPROCESSED_DATA'
+ if search_method.upper() == 'ORGANIZATIONAL UNIT(REQUIRES OU ARGUMENT)':
+ search_method = 'ORG_UNIT'
+ if search_method.upper() == 'ALL ACCOUNTS':
+ search_method = 'ENTIRE_ORG'
+ if search_method.upper() == 'SPECIFIC ACCOUNTS(REQUIRES EMAILS ARGUMENT)':
+ search_method = 'ACCOUNT'
+
+ # --- Building Request ---
+ request = {}
+ query = {}
+ emails_for_query = []
+ account_info = {'emails': []} # type: Dict[Any, Any]
+ org_unit_info = {'orgUnitId': org_unit_id}
+ corpus = 'MAIL'
+ export_format = 'PST' # Default
+ if export_mbox.upper() == 'TRUE':
+ export_format = 'MBOX'
+ mail_options = {
+ 'exportFormat': export_format
+ }
+
+ # --- Building all small parts into big request object ---
+ query['dataScope'] = data_scope
+ query['searchMethod'] = search_method
+ query['corpus'] = corpus
+ query['mailOptions'] = {'excludeDrafts': exclude_drafts}
+ if start_time and end_time:
+ query['startTime'] = start_time
+ query['endTime'] = end_time
+ if terms:
+ query['terms'] = terms
+ if emails: # If user specified emails
+ for email in emails: # Go over all of them
+ emails_for_query.append(email) # Add them to the list
+ account_info['emails'] = emails_for_query # Add the list to the account_info dictionary
+ query['accountInfo'] = account_info # Add the account_info dictionary into the query object
+ if search_method == 'ORG_UNIT':
+ query['orgUnitInfo'] = org_unit_info
+ request['query'] = query # Adding query AFTER IT'S COMPLETED
+ request['exportOptions'] = {'mailOptions': mail_options}
+ request['name'] = export_name
+ return request
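+
+# For illustration (hypothetical values): an 'All Accounts' PST export over the last 10 days would set
+# searchMethod to 'ENTIRE_ORG', corpus to 'MAIL', exportFormat to 'PST', and startTime/endTime to the
+# computed UTC Zulu range.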
+
+
+def create_drive_export_query(export_name, emails, team_drives, time_frame, start_time, end_time, terms, org_unit="",
+ search_method='Specific Accounts(requires emails argument)', include_teamdrives='True',
+ data_scope='All Data'):
+ """
+ Creates the query that will be used in the request to create a groups export
+ """
+ org_unit_id = org_unit
+ # --- Sanitizing Input ---
+ if time_frame:
+ start_time, end_time = timeframe_to_utc_zulu_range(time_frame) # Making it UTC Zulu format
+ elif start_time:
+ if not end_time:
+ end_time = datetime.utcnow().isoformat() + 'Z' # End time will be now, if no end time was given
+ if isinstance(emails, (str, unicode)): # If emails were specified, making it a list:
+ if ',' in emails:
+ emails = emails.split(',')
+ else:
+ emails = [emails]
+ if isinstance(team_drives, (str, unicode)): # If team_drives were specified, making it a list:
+ if ',' in team_drives:
+ team_drives = team_drives.split(',')
+ else:
+ team_drives = [team_drives]
+ if str(include_teamdrives).upper() == 'FALSE':
+ include_teamdrives = 'false'
+ if data_scope.upper() == 'HELD DATA':
+ data_scope = 'HELD_DATA'
+ if data_scope.upper() == 'ALL DATA':
+ data_scope = 'ALL_DATA'
+ if data_scope.upper() == 'UNPROCESSED DATA':
+ data_scope = 'UNPROCESSED_DATA'
+ if search_method.upper() == 'ORGANIZATIONAL UNIT(REQUIRES OU ARGUMENT)':
+ search_method = 'ORG_UNIT'
+ if search_method.upper() == 'SPECIFIC ACCOUNTS(REQUIRES EMAILS ARGUMENT)':
+ search_method = 'ACCOUNT'
+ if search_method.upper() == 'TEAM DRIVE':
+ search_method = 'TEAM_DRIVE'
+
+ # --- Building Request ---
+ request = {}
+ query = {}
+ emails_for_query = []
+ teamdrives_for_query = []
+ account_info = {'emails': []} # type: Dict[Any, Any]
+ teamdrive_info = {'teamDriveIds': []} # type: Dict[Any, Any]
+ org_unit_info = {'orgUnitId': org_unit_id}
+ corpus = 'DRIVE'
+
+ # --- Building all small parts into big request object ---
+ query['dataScope'] = data_scope
+ query['searchMethod'] = search_method
+ query['corpus'] = corpus
+ query['driveOptions'] = {'includeTeamDrives': include_teamdrives}
+ if start_time and end_time:
+ query['startTime'] = start_time
+ query['endTime'] = end_time
+ if terms:
+ query['terms'] = terms
+ if emails: # If user specified emails
+ for email in emails: # Go over all of them
+ emails_for_query.append(email) # Add them to the list
+ account_info['emails'] = emails_for_query # Add the list to the account_info dictionary
+ if team_drives and include_teamdrives.upper() == 'TRUE': # If user specified team_drives and not emails
+ for teamdrive_id in team_drives:
+ teamdrives_for_query.append(teamdrive_id)
+ teamdrive_info['teamDriveIds'] = teamdrives_for_query
+ if search_method == 'ORG_UNIT':
+ query['orgUnitInfo'] = org_unit_info
+ if search_method == 'TEAM_DRIVE':
+ query['teamDriveInfo'] = teamdrive_info
+ if search_method == 'ACCOUNT':
+ # Add the account_info dictionary into the query object.
+ # This line SHOULD NOT exist if the user wants to use team_drives.
+ query['accountInfo'] = account_info
+ request['query'] = query # Adding query AFTER IT'S COMPLETED
+ request['name'] = export_name
+ return request
+
+
+def create_groups_export_query(export_name, emails, time_frame, start_time, end_time, terms, search_method,
+ export_pst='True', export_mbox='False', data_scope='All Data'):
+ """
+ Creates the query that will be used in the request to create a groups export
+ """
+ # --- Sanitizing Input ---
+ if time_frame:
+ start_time, end_time = timeframe_to_utc_zulu_range(time_frame) # Making it UTC Zulu format
+ elif start_time:
+ if not end_time:
+ end_time = datetime.utcnow().isoformat() + 'Z' # End time will be now, if no end time was given
+ if isinstance(emails, (str, unicode)):
+ if ',' in emails:
+ emails = emails.split(',')
+ else:
+ emails = [emails]
+ if data_scope.upper() == 'HELD DATA':
+ data_scope = 'HELD_DATA'
+ if data_scope.upper() == 'ALL DATA':
+ data_scope = 'ALL_DATA'
+ if data_scope.upper() == 'UNPROCESSED DATA':
+ data_scope = 'UNPROCESSED_DATA'
+
+ # --- Building Request ---
+ request = {}
+ query = {}
+ emails_for_query = []
+ account_info = {'emails': []} # type: Dict[Any, Any]
+ corpus = 'GROUPS'
+ export_format = 'PST' # Default
+ if export_mbox.upper() == 'TRUE':
+ export_format = 'MBOX'
+ groups_options = {
+ 'exportFormat': export_format
+ }
+
+ # --- Building all small parts into big request object ---
+ query['dataScope'] = data_scope
+ query['searchMethod'] = search_method
+ query['corpus'] = corpus
+ if start_time and end_time:
+ query['startTime'] = start_time
+ query['endTime'] = end_time
+ if terms:
+ query['terms'] = terms
+ if emails: # If user specified emails
+ for email in emails: # Go over all of them
+ emails_for_query.append(email) # Add them to the list
+ account_info['emails'] = emails_for_query # Add the list to the account_info dictionary
+ query['accountInfo'] = account_info # Add the account_info dictionary into the query object
+ request['query'] = query # Adding query AFTER IT'S COMPLETED
+ request['exportOptions'] = {'groupsOptions': groups_options}
+ request['name'] = export_name
+ return request
+
+
+def get_export_by_id(service, matter_id, export_id):
+ return service.matters().exports().get(matterId=matter_id, exportId=export_id).execute()
+
+
+def list_held_accounts(service, matter_id, hold_id):
+ return service.matters().holds().accounts().list(matterId=matter_id, holdId=hold_id).execute()['accounts']
+
+
+def remove_held_accounts(service, matter_id, hold_id):
+ pass
+
+
+def download_storage_object(object_ID, bucket_name):
+ service = connect_to_storage()
+ req = service.objects().get_media(bucket=bucket_name, object=object_ID) # pylint: disable=no-member
+ out_file = io.BytesIO()
+ downloader = googleapiclient.http.MediaIoBaseDownload(out_file, req)
+ done = False
+ while not done:
+ done = downloader.next_chunk()[1]
+ return out_file
+
+
+def get_storage_credentials():
+ try:
+ privateKeyJson = json.loads(PRIVATE_KEY_CONTENT)
+ if not isinstance(privateKeyJson, dict):
+ privateKeyJson = json.loads(privateKeyJson)
+        creds = google_service_account.Credentials.from_service_account_info(privateKeyJson, scopes=SCOPES,
+                                                                             subject=ADMIN_EMAIL)
+ except Exception as e:
+ LOG('An error occurred in the \'get_storage_credentials\' function.')
+ err_msg = 'An error occurred while trying to construct an OAuth2 ' \
+ 'Storage Credentials object - {}'.format(str(e))
+ return_error(err_msg)
+    return creds
+
+
+def connect_to_storage():
+ try:
+ creds = get_storage_credentials()
+ ptth = authorized_http(creds)
+ ptth.disable_ssl_certificate_validation = (not USE_SSL)
+ service = build('storage', 'v1', http=ptth)
+ except Exception as e:
+ LOG('There was an error creating the Storage service in the \'connect_to_storage\' function.')
+ err_msg = 'There was an error creating the Storage service - {}'.format(str(e))
+ return_error(err_msg)
+ return service
+
+
+def get_object_mame_by_type(objectsArr, extension):
+ for file in objectsArr:
+ objName = str(file.get('objectName'))
+ if (objName.endswith(extension)):
+ return objName
+
+
+def build_key_val_pair(tagDict):
+    demisto.info('this is key: ')
+    demisto.info(tagDict['@TagName'])
+    demisto.info('this is value: ')
+    demisto.info(tagDict['@TagValue'])
+
+ key = filter(str.isalnum, str(tagDict['@TagName']))
+ value = tagDict['@TagValue'].encode('utf-8')
+ keyValPair = {key: value}
+ return keyValPair
+
+
+def build_document_dict(document):
+ file_info = document['Files']['File']['ExternalFile']
+ newDocumentDict = {
+ 'DocType': os.path.splitext(file_info['@FileName'])[1][1:].strip().lower(),
+ 'MD5': file_info['@Hash']
+ }
+ tags = document['Tags']['Tag']
+ for currentTagDict in tags:
+ newDocumentDict.update(build_key_val_pair(currentTagDict))
+ return newDocumentDict
+
+
+def build_dict_list(documentsArr):
+ documentsDictList = []
+ for document in documentsArr:
+ currentDocumentDict = build_document_dict(document)
+ documentsDictList.append(currentDocumentDict)
+
+ return documentsDictList
+
+
+def get_current_matter_from_context(matter_id):
+ context_matter = demisto.dt(demisto.context(), 'GoogleVault.Matter(val.MatterID === "{0}")'.format(matter_id))
+
+ context_matter = context_matter[0] if type(context_matter) is list else context_matter
+
+ if not context_matter:
+ context_matter = {
+ 'MatterID': matter_id,
+ 'Export': []
+ }
+ return context_matter
+
+
+def populate_matter_with_export(current_matter, current_export):
+ # add new export to matter
+
+ exports = current_matter.get('Export', [])
+ if type(exports) is dict:
+ exports = [exports]
+
+ # remove duplicate export after new updated exports were entered
+ filtered_export = list(filter(lambda export:
+ export['ExportID'] != current_export['ExportID'],
+ exports))
+ filtered_export.append(current_export)
+ current_matter['Export'] = filtered_export
+
+ return current_matter
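+
+# For illustration: if the matter context already holds an export with ExportID 'e1' and the new
+# export is {'ExportID': 'e1', 'Status': 'COMPLETED'}, the stale 'e1' entry is filtered out first,
+# so each ExportID appears at most once in the matter's Export list.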
+
+
+# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
+# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ACTUAL FUNCS @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
+# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
+
+
+def list_matters_command():
+ """
+ Lists all matters in the project, with their corresponding state.
+ """
+ try:
+ service = connect()
+ state = demisto.args().get('state', 'STATE_UNSPECIFIED')
+ validate_input_values([state], ['All', 'Open', 'Closed', 'Deleted', 'STATE_UNSPECIFIED', ''])
+ matters = (get_matters_by_state(service, state))['matters']
+
+ if not matters:
+ demisto.results('No matters found.')
+ else:
+ output = []
+ context_output = []
+ for matter in matters:
+ output.append({
+ 'Matter Name': matter.get('name'),
+ 'Matter ID': matter.get('matterId'),
+ 'Matter State': matter.get('state')
+ })
+ context_output.append({
+ 'Name': matter.get('name'),
+ 'MatterID': matter.get('matterId'),
+ 'State': matter.get('state') # Getting new state
+ })
+ markdown = '' # Use this to add extra line
+ title = ""
+ if state == 'All' or not state:
+ title = 'Here are all your matters'
+ else:
+ title = 'Here are your {} matters'.format(state.lower())
+ markdown += tableToMarkdown(title, output, ['Matter Name', 'Matter ID', 'Matter State'])
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': matters,
+ 'HumanReadable': markdown,
+ 'EntryContext': {
+ 'GoogleVault.Matter(val.MatterID === obj.MatterID)': context_output
+ }
+ })
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to list matters. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+
+def create_matter_command():
+ try:
+ service = connect()
+ matter_name = demisto.getArg('name')
+ matter_description = demisto.getArg('description')
+ matter_content = {
+ 'name': matter_name,
+ 'description': matter_description,
+ }
+ matter = service.matters().create(body=matter_content).execute() # pylint: disable=no-member
+ markdown = ""
+ if matter_description:
+ markdown = 'Matter: {} was created successfully with description: {}.\nID: {}.'.format(matter_name,
+ matter_description,
+ matter.get(
+ 'matterId'))
+ else:
+ markdown = 'Matter: {} was created successfully without a description.\nID: {}.'.format(matter_name,
+ matter.get(
+ 'matterId'))
+ title = 'Matter creation successful.'
+ markdown_matter = []
+ markdown_matter.append({
+ 'Matter Name': matter.get('name'),
+ 'Matter ID': matter.get('matterId'),
+ 'Matter State': matter.get('state')
+ })
+ markdown += tableToMarkdown(title, markdown_matter, ['Matter Name', 'Matter ID',
+ 'Matter State']) # Why is the title displayed in a weird way?
+
+ output_context = []
+ output_context.append({
+ 'Name': matter.get('name'),
+ 'MatterID': matter.get('matterId'),
+ 'State': matter.get('state')
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': matter,
+ 'HumanReadable': markdown,
+ 'EntryContext': {
+ 'GoogleVault.Matter(val.MatterID === obj.MatterID)': output_context
+ }
+ })
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to create matter. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+
+def update_matter_state_command():
+ """
+ * Note: This updates context only if a change in the current state was successful
+ """
+ try:
+ service = connect()
+ matter_id = demisto.getArg('matterID')
+ wanted_state = demisto.getArg('state')
+
+ validate_input_values([wanted_state], ['CLOSE', 'DELETE', 'REOPEN', 'UNDELETE'])
+ matter_found = get_matter_by_id(service, matter_id)
+ current_state = matter_found.get('state')
+
+ if current_state: # if a matter was found with that ID:
+ context_output = []
+ result_of_update = ""
+ # Dealing with CLOSE:
+ if wanted_state == 'CLOSE':
+ if current_state == 'DELETED':
+ result_of_update = 'Matter is deleted and so it cannot be closed. It is possible to re-open it ' \
+ 'and then close.'
+ elif current_state == 'CLOSED':
+ demisto.results('Matter is already closed.')
+ elif current_state == 'OPEN':
+ try:
+ close_response = close_matter(service, matter_id)
+ result_of_update = 'Matter was successfully closed.'
+ except Exception as ex:
+ if 'Matters have users on hold' in str(ex):
+ demisto.log('{}'.format(ex))
+ return_error('The matter has holds that prevent it from being closed.')
+ elif 'Quota exceeded for quota metric' in str(ex):
+ return_error('Quota for Google Vault API exceeded')
+ else:
+ raise ex
+
+ # Dealing with DELETE:
+ elif wanted_state == 'DELETE':
+ if current_state == 'OPEN':
+ try:
+ # Todo: check if contains holds. If it does, return error to user
+ close_response = close_matter(service, matter_id) # noqa: F841
+ _ = delete_matter(service, matter_id)
+ result_of_update = 'Matter was {} and is now DELETED.'.format(current_state)
+ except Exception as ex:
+ if 'Matters have users on hold' in str(ex):
+ demisto.log('{}'.format(ex))
+ return_error('The matter has holds that prevent it from being deleted.')
+ elif 'Quota exceeded for quota metric' in str(ex):
+ return_error('Quota for Google Vault API exceeded')
+ else:
+ raise ex
+
+ elif current_state == 'CLOSED':
+ try:
+ _ = delete_matter(service, matter_id)
+                        result_of_update = 'Matter was {} and is now DELETED.'.format(current_state)
+ except Exception as ex:
+ if 'Matters have users on hold' in str(ex):
+ demisto.log('{}'.format(ex))
+ return_error('The matter has holds that prevent it from being deleted.')
+ elif 'Quota exceeded for quota metric' in str(ex):
+ return_error('Quota for Google Vault API exceeded')
+ else:
+ raise ex
+
+ elif current_state == 'DELETED':
+ demisto.results('Matter is already deleted.')
+
+ # Dealing with REOPEN:
+ elif wanted_state == 'REOPEN':
+ if current_state == 'OPEN':
+ demisto.results('Matter is already open.')
+ elif current_state == 'CLOSED':
+ _ = reopen_matter(service, matter_id)
+ result_of_update = 'Matter was {} and is now OPEN.'.format(current_state)
+ elif current_state == 'DELETED':
+ _ = undelete_matter(service, matter_id)
+ _ = reopen_matter(service, matter_id)
+ result_of_update = 'Matter was {} and is now OPEN.'.format(current_state)
+
+ # Dealing with UNDELETE:
+ elif wanted_state == 'UNDELETE':
+ if current_state == 'OPEN':
+ demisto.results('Matter is already open.')
+ elif current_state == 'CLOSED':
+ demisto.results('Matter is closed at the moment.')
+ elif current_state == 'DELETED':
+ _ = undelete_matter(service, matter_id)
+ result_of_update = 'Matter was {} and is now CLOSED.'.format(current_state)
+
+ if result_of_update: # If an update was done then update context:
+ context_output.append({
+ 'Name': matter_found.get('name'),
+ 'MatterID': matter_found.get('matterId'),
+ 'State': get_matter_by_id(service, matter_id).get('state') # Getting new state
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': result_of_update,
+ 'EntryContext': {
+ 'GoogleVault.Matter(val.MatterID === obj.MatterID)': context_output
+ }
+ })
+ else:
+ demisto.results('No matter was found with that ID.') # Todo: never gets here. Gotta catch the exception
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to update matter. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+
+def add_account_to_hold_command(): # Todo: Not sure if context is good (It works, but maybe not according to conventions)
+ try:
+ service = connect()
+ matter_id = demisto.getArg('matterID')
+ hold_id = demisto.getArg('holdID')
+ account_id = demisto.getArg('accountID')
+ _ = add_held_account(service, matter_id, hold_id, account_id)
+
+ msg_to_usr = 'Account {} was successfully added to hold {} in matter {}'.format(account_id, hold_id, matter_id)
+ context_output = []
+ context_output.append({
+ 'ID': hold_id,
+ 'matterID': matter_id,
+ 'HeldAccount': {
+ 'accountID': account_id,
+ 'IsHeld': True
+ }
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': msg_to_usr,
+ 'EntryContext': {
+ 'GoogleVault.Hold(val.ID === obj.ID)': context_output
+ }
+ })
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to add account to hold. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+
+def search_matter_command():
+ """
+ * This can be highly optimized. What it currently does is search ALL matters and then filter by name / ID
+ * If a matter with an ID is found, there's no need to keep on searching. This can be optimized too.
+ * Note - this is case INSENSITIVE. Searching for 'MatTER1' will find 'matter1' too.
+ """
+ try:
+ service = connect()
+ wanted_name = demisto.getArg('matterName')
+ wanted_id = demisto.getArg('matterID')
+ if wanted_name or wanted_id:
+ if wanted_name:
+ wanted_name = wanted_name.lower()
+ if wanted_id:
+ wanted_id = wanted_id.lower()
+ else:
+ demisto.results('No name or ID were specified. Please specify at least one of them.')
+ sys.exit(0)
+ matters = get_matters_by_state(service, state='STATE_UNSPECIFIED')['matters']
+ output = []
+ markdown_matters = []
+ found_anything = False
+ for matter in matters:
+ if matter.get('name').lower() == wanted_name or matter.get('matterId').lower() == wanted_id:
+ found_anything = True
+ markdown_matters.append({
+ 'Matter Name': matter.get('name'),
+ 'Matter ID': matter.get('matterId'),
+ 'Matter State': matter.get('state')
+ })
+ output.append({
+ 'Name': matter.get('name'),
+ 'MatterID': matter.get('matterId'),
+ 'State': matter.get('state')
+ })
+ if not found_anything: # If finished for loop through matters and no matter was found
+ demisto.results('No matters found.')
+ else:
+            if wanted_name:
+                title = 'Here are the matters named {}'.format(wanted_name)
+            else:
+                title = 'Here is the matter with ID {}'.format(wanted_id)
+            markdown = tableToMarkdown(title, markdown_matters, ['Matter Name', 'Matter ID', 'Matter State'])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': markdown_matters,
+ 'HumanReadable': markdown,
+ 'EntryContext': {
+ 'GoogleVault.Matter(val.MatterID === obj.MatterID)': output
+ }
+ })
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to search matter. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+
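+# The docstring above notes that the search could stop as soon as an ID matches.
+# A minimal illustrative sketch of that optimization (an assumption -- it is not
+# wired into search_matter_command; it reuses this file's get_matters_by_state
+# helper):
+def _find_matter_by_id_sketch(service, wanted_id):
+    wanted_id = wanted_id.lower()
+    for matter in get_matters_by_state(service, state='STATE_UNSPECIFIED').get('matters', []):
+        if (matter.get('matterId') or '').lower() == wanted_id:
+            return matter  # matter IDs are unique, so the first match is the only match
+    return None
+
+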
+def remove_account_from_hold_command():
+ try:
+ service = connect()
+ matter_id = demisto.getArg('matterID')
+ hold_id = demisto.getArg('holdID')
+ account_id = demisto.getArg('accountID')
+ _ = remove_held_account(service, matter_id, hold_id, account_id)
+
+ msg_to_usr = 'Account {} was successfully removed from hold {} in matter {}'.format(account_id, hold_id,
+ matter_id)
+ context_output = []
+ context_output.append({
+ 'matterID': matter_id,
+ 'ID': hold_id,
+            'HeldAccount': {  # TODO: confirm whether a hold's context should track more than one held account
+ 'ID': account_id,
+ 'IsHeld': False
+ },
+ })
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': msg_to_usr,
+ 'EntryContext': {
+ 'GoogleVault.Hold(val.ID === obj.ID)': context_output
+ }
+ })
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to remove account from hold. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+
+def delete_hold_command():
+ try:
+ service = connect()
+ matter_id = demisto.getArg('matterID')
+ hold_id = demisto.getArg('holdID')
+ _ = remove_hold(service, matter_id, hold_id)
+ msg_to_usr = 'Hold {} was successfully deleted from matter {}'.format(hold_id, matter_id)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': msg_to_usr,
+ })
+
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to delete hold. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+
+def list_holds_command():
+ try:
+ service = connect()
+ matter_id = demisto.getArg('matterID')
+ holds = list_holds(service, matter_id)
+ if not holds:
+ demisto.results('No holds found.')
+ else:
+ output = []
+ context_output = []
+ for hold in holds:
+ output.append({
+ 'Matter ID': matter_id,
+ 'Hold Name': hold.get('name'),
+ 'Hold ID': hold.get('holdId')
+ })
+ context_output.append({
+ 'name': hold.get('name'),
+ 'ID': hold.get('holdId'),
+ 'MatterID': matter_id
+ })
+            title = 'Here are all the holds under matter {}.'.format(matter_id)
+            markdown = tableToMarkdown(title, output, ['Hold Name', 'Hold ID', 'Matter ID'])
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': holds,
+ 'HumanReadable': markdown,
+ 'EntryContext': {
+ 'GoogleVault.Hold(val.ID === obj.ID)': context_output
+ }
+ })
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to list holds. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+
+def create_hold_command():
+ service = connect()
+ matter_id = demisto.getArg('matterID')
+ hold_name = demisto.getArg('holdName')
+ corpus = demisto.getArg('corpus')
+ accounts = demisto.getArg('accountID')
+ time_frame = demisto.getArg('timeFrame')
+ start_time = demisto.getArg('startTime')
+ end_time = demisto.getArg('endTime')
+ terms = demisto.getArg('terms')
+
+ validate_input_values([corpus], ['Mail', 'Drive', 'Groups'])
+ query = create_hold_query(hold_name, corpus, accounts, time_frame, start_time, end_time, terms)
+ try:
+ response = create_hold_mail_accounts(service, matter_id, query)
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to create hold. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+ hold_id = response['holdId']
+ output = []
+ context_output = []
+ output.append({
+ 'Hold Name': hold_name,
+ 'Hold ID': hold_id
+ })
+ context_output.append({
+ 'name': hold_name,
+ 'ID': hold_id,
+ 'matterID': matter_id
+ })
+    title = 'Here are the details of your newly created hold:'
+    markdown = tableToMarkdown(title, output, ['Hold Name', 'Hold ID'])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': {'Hold Name': hold_name, 'Hold ID': hold_id},
+ 'HumanReadable': markdown,
+ 'EntryContext': {
+ 'GoogleVault.Hold(val.ID === obj.ID)': context_output
+ }
+ })
+
+
+def create_mail_export_command():
+ """
+ Creates a mail export in Google Vault
+ """
+ service = connect()
+ matter_id = demisto.getArg('matterID')
+ export_name = demisto.getArg('exportName')
+ data_scope = demisto.getArg('dataScope')
+ search_method = demisto.getArg('searchMethod')
+ emails = demisto.getArg('emails')
+ include_drafts = demisto.getArg('includeDrafts')
+ start_time = demisto.getArg('startTime')
+ end_time = demisto.getArg('endTime')
+ time_frame = demisto.getArg('timeFrame')
+ terms = demisto.getArg('terms')
+ export_pst = demisto.getArg('exportPST')
+ export_mbox = demisto.getArg('exportMBOX')
+ org_unit = demisto.getArg('ou')
+
+ validate_input_values([include_drafts, export_pst, export_mbox], ['true', 'false', ''])
+ validate_input_values([data_scope], ['All Data', 'Held Data', 'Unprocessed Data'])
+ validate_input_values([search_method], ['All Accounts', 'Specific Accounts(requires emails argument)',
+ 'Organizational Unit(requires ou argument)'])
+
+ query = create_mail_export_query(export_name, emails, time_frame, start_time, end_time, terms, org_unit, export_pst,
+ export_mbox, search_method, include_drafts, data_scope)
+ try:
+ response = create_export(service, matter_id, query)
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to create export. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+ create_time = response.get('createTime')
+ export_id = response.get('id')
+
+ title = 'A new export has been created successfully:\n'
+    output_for_markdown = {  # Keys match the table headers passed to tableToMarkdown
+ 'Matter ID': matter_id,
+ 'Export ID': export_id,
+ 'Export Name': export_name,
+ 'Created Time': create_time
+ }
+ markdown = tableToMarkdown(title, output_for_markdown, ['Matter ID', 'Export ID', 'Export Name', 'Created Time'])
+
+ new_export = {
+ 'MatterID': matter_id,
+ 'ExportID': export_id,
+ 'Name': export_name,
+ 'CreateTime': create_time
+ }
+
+ context_matter = get_current_matter_from_context(matter_id)
+ new_matter = populate_matter_with_export(context_matter, new_export)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': response,
+ 'HumanReadable': markdown,
+ 'EntryContext': {
+ 'GoogleVault.Matter(val.MatterID === "{0}")'.format(matter_id): new_matter
+ }
+ })
+
+
+def create_drive_export_command():
+ service = connect()
+ matter_id = demisto.getArg('matterID')
+ export_name = demisto.getArg('exportName')
+ data_scope = demisto.getArg('dataScope')
+ search_method = demisto.getArg('searchMethod')
+ emails = demisto.getArg('emails')
+ org_unit = demisto.getArg('ou')
+ team_drives = demisto.getArg('teamDrive')
+ include_teamdrives = demisto.getArg('includeTeamDrives')
+ time_frame = demisto.getArg('timeFrame')
+ start_time = demisto.getArg('startTime')
+ end_time = demisto.getArg('endTime')
+ terms = demisto.getArg('terms')
+
+ validate_input_values([include_teamdrives], ['true', 'false', ''])
+ validate_input_values([data_scope], ['All Data', 'Held Data', 'Unprocessed Data'])
+ validate_input_values([search_method], ['Team Drive', 'Specific Accounts(requires emails argument)',
+ 'Organizational Unit(requires ou argument)'])
+
+ query = create_drive_export_query(export_name, emails, team_drives, time_frame, start_time, end_time, terms,
+ org_unit, search_method, include_teamdrives, data_scope)
+ try:
+ response = create_export(service, matter_id, query)
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to create export. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+ create_time = response.get('createTime')
+ export_id = response.get('id')
+
+ new_export = {
+ 'MatterID': matter_id,
+ 'ExportID': export_id,
+ 'Name': export_name,
+ 'CreateTime': create_time
+ }
+
+ context_matter = get_current_matter_from_context(matter_id)
+ new_matter = populate_matter_with_export(context_matter, new_export)
+
+ title = 'A new export has been created successfully:\n'
+    output_for_markdown = {  # Keys match the table headers passed to tableToMarkdown
+ 'Matter ID': matter_id,
+ 'Export ID': export_id,
+ 'Export Name': export_name,
+ 'Created Time': create_time
+ }
+ markdown = tableToMarkdown(title, output_for_markdown, ['Matter ID', 'Export ID', 'Export Name', 'Created Time'])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': response,
+ 'HumanReadable': markdown,
+ 'EntryContext': {
+ 'GoogleVault.Matter(val.MatterID === "{0}")'.format(matter_id): new_matter
+ }
+ })
+
+
+def create_groups_export_command():
+ service = connect()
+ matter_id = demisto.getArg('matterID')
+ export_name = demisto.getArg('exportName')
+ data_scope = demisto.getArg('dataScope')
+ search_method = 'ACCOUNT' # Hard-coded only for groups export
+ emails = demisto.getArg('groups')
+ start_time = demisto.getArg('startTime')
+ end_time = demisto.getArg('endTime')
+ time_frame = demisto.getArg('timeFrame')
+ terms = demisto.getArg('terms')
+ export_pst = demisto.getArg('exportPST')
+ export_mbox = demisto.getArg('exportMBOX')
+
+ validate_input_values([export_pst, export_mbox], ['true', 'false', ''])
+ validate_input_values([data_scope], ['All Data', 'Held Data', 'Unprocessed Data'])
+
+ query = create_groups_export_query(export_name, emails, time_frame, start_time, end_time, terms, search_method,
+ export_pst, export_mbox, data_scope)
+ try:
+ response = create_export(service, matter_id, query)
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to create export. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+ create_time = response.get('createTime')
+ export_id = response.get('id')
+
+ new_export = {
+ 'MatterID': matter_id,
+ 'ExportID': export_id,
+ 'Name': export_name,
+ 'CreateTime': create_time
+ }
+
+ context_matter = get_current_matter_from_context(matter_id)
+ new_matter = populate_matter_with_export(context_matter, new_export)
+
+ title = 'A new export has been created successfully:\n'
+    output_for_markdown = {  # Keys match the table headers passed to tableToMarkdown
+ 'Matter ID': matter_id,
+ 'Export ID': export_id,
+ 'Export Name': export_name,
+ 'Created Time': create_time
+ }
+ markdown = tableToMarkdown(title, output_for_markdown, ['Matter ID', 'Export ID', 'Export Name', 'Created Time'])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': response,
+ 'HumanReadable': markdown,
+ 'EntryContext': {
+ 'GoogleVault.Matter(val.MatterID === "{0}")'.format(matter_id): new_matter
+ }
+ })
+
+
+def get_multiple_exports_command():
+ export_IDs = argToList(demisto.getArg('exportIDS'))
+ matter_id = demisto.getArg('matterId')
+    id_concatenation = demisto.getArg('queryIDS')  # expected form: <matterID>#<exportID>
+    if id_concatenation:
+        if '#' not in id_concatenation:
+            return_error(
+                'queryIDS must be a concatenation of MatterID and ExportID separated by a "#" delimiter, e.g. <matterID>#<exportID>')
+
+        matter_id, export_id = id_concatenation.split('#')
+        export_IDs = [export_id]
+
+ if not (matter_id and export_IDs):
+        return_error('Missing parameter MatterID or ExportID')
+
+ current_matter = get_current_matter_from_context(matter_id)
+
+ for export_id in export_IDs:
+ new_export = get_export_command(export_id, matter_id)
+ current_matter = populate_matter_with_export(current_matter, new_export)
+
+ demisto.results({
+ 'ContentsFormat': formats['text'],
+ 'Contents': '',
+ 'Type': entryTypes['note'],
+ 'EntryContext': {
+ 'GoogleVault.Matter(val.MatterID === "{0}")'.format(matter_id): current_matter
+ }
+ })
+
+
+def get_export_command(export_id, matter_id):
+ service = connect()
+
+ try:
+ response = get_export_by_id(service, matter_id, export_id)
+ export_name = response.get('name')
+ export_status = response.get('status')
+ create_time = response.get('createTime')
+ bucket_name = response.get('cloudStorageSink').get('files')[0].get(
+ 'bucketName') if export_status == 'COMPLETED' else ''
+ zip_object_name = get_object_mame_by_type(response.get('cloudStorageSink').get('files'),
+ '.zip') if export_status == 'COMPLETED' else ''
+ xml_object_name = get_object_mame_by_type(response.get('cloudStorageSink').get('files'),
+ '.xml') if export_status == 'COMPLETED' else ''
+
+        title = 'Your export details:\n'
+        output_for_markdown = {  # Keys match the table headers passed to tableToMarkdown
+ 'Matter ID': matter_id,
+ 'Export ID': export_id,
+ 'Export Name': export_name,
+ 'Status': export_status,
+ 'Created Time': create_time,
+ 'Bucket Name(for download)': bucket_name,
+ 'Download ID': zip_object_name,
+ 'View ID': xml_object_name
+ }
+        if export_status == 'COMPLETED':
+ headers = ['Matter ID', 'Export ID', 'Export Name', 'Status', 'Created Time', 'Bucket Name(for download)',
+ 'Download ID', 'View ID']
+ else:
+ headers = ['Matter ID', 'Export ID', 'Export Name', 'Status', 'Created Time']
+ markdown = tableToMarkdown(title, output_for_markdown, headers)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': response,
+ 'HumanReadable': markdown,
+ })
+
+        export_details = {
+            'MatterID': matter_id,
+            'ExportID': export_id,
+            'ExportName': export_name,
+            'Status': export_status,
+            'BucketName': bucket_name,
+            'DownloadID': zip_object_name,
+            'ViewID': xml_object_name
+        }
+
+        return export_details
+
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to get export. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+
+def download_export_command():
+ try:
+ bucket_name = demisto.getArg('bucketName')
+ download_ID = demisto.getArg('downloadID')
+ out_file = download_storage_object(download_ID, bucket_name)
+ demisto.results(fileResult(demisto.uniqueFile() + '.zip', out_file.getvalue()))
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to download export. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+
+def download_and_sanitize_export_results(object_ID, bucket_name, max_results):
+ out_file = download_storage_object(object_ID, bucket_name)
+ out_file_json = json.loads(xml2json(out_file.getvalue()))
+
+    if not out_file_json['Root']['Batch'].get('Documents'):
+        demisto.results('The given export contains 0 documents')
+        sys.exit(0)
+    documents = out_file_json['Root']['Batch']['Documents']['Document']
+
+    # A single document is returned as a dict -- normalize it to a list
+    if isinstance(documents, dict):
+        documents = [documents]
+
+    dictList = build_dict_list(documents)
+
+    # Truncate to the requested maximum number of results
+    return dictList[:max_results]
+
+
+def get_drive_results_command():
+ try:
+ max_results = int(demisto.getArg('maxResult'))
+ view_ID = demisto.getArg('viewID')
+ bucket_name = demisto.getArg('bucketName')
+ output = download_and_sanitize_export_results(view_ID, bucket_name, max_results)
+
+ if not (output[0].get('Author') or output[0].get('Collaborators') or output[0].get('Title')):
+            return_error(
+                'Error displaying results: the supplied ViewID does not match the corpus of the invoked command')
+
+        markdown_output = [{
+            'Title': document.get('Title'),
+            'Author': document.get('Author'),
+            'Collaborators': document.get('Collaborators'),
+            'Others': document.get('Others'),
+            'Labels': document.get('Labels'),
+            'Viewers': document.get('Viewers'),
+            'DateCreated': document.get('DateCreated'),
+            'DateModified': document.get('DateModified'),
+            'DocType': document.get('DocType'),
+            'MD5': document.get('MD5'),
+        } for document in output]
+
+        title = 'Your DRIVE inquiry details\n'
+        headers = ['Title', 'Author', 'Collaborators', 'Others', 'Labels', 'Viewers', 'DateCreated', 'DateModified',
+                   'DocType', 'MD5']
+        markdown = tableToMarkdown(title, markdown_output, headers)
+
+        exportID = str(view_ID).split('/')[1]
+        contextOutput = {'ExportID': exportID, 'Results': markdown_output}
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contextOutput,
+ 'HumanReadable': markdown,
+ 'EntryContext': {
+ 'GoogleVault.Matter.Export(val.ExportID === obj.ExportID)': contextOutput
+ }
+ })
+
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to display export result. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+
+def get_mail_and_groups_results_command(inquiryType):
+ try:
+ max_results = int(demisto.getArg('maxResult'))
+ view_ID = demisto.getArg('viewID')
+ bucket_name = demisto.getArg('bucketName')
+ output = download_and_sanitize_export_results(view_ID, bucket_name, max_results)
+
+ if not (output[0].get('From') or output[0].get('To') or output[0].get('Subject')):
+            return_error(
+                'Error displaying results: the supplied ViewID does not match the corpus of the invoked command')
+
+        markdown_output = [{
+            'From': document.get('From'),
+            'To': document.get('To'),
+            'CC': document.get('CC'),
+            'BCC': document.get('BCC'),
+            'Subject': document.get('Subject'),
+            'DateSent': document.get('DateSent'),
+            'DateReceived': document.get('DateReceived'),
+        } for document in output]
+
+        title = 'Your {} inquiry details\n'.format(inquiryType)
+        headers = ['Subject', 'From', 'To', 'CC', 'BCC', 'DateSent', 'DateReceived']
+        markdown = tableToMarkdown(title, markdown_output, headers)
+
+        exportID = str(view_ID).split('/')[1]
+        contextOutput = {'ExportID': exportID, 'Results': markdown_output}
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contextOutput,
+ 'HumanReadable': markdown,
+ 'EntryContext': {
+ 'GoogleVault.Matter.Export(val.ExportID === obj.ExportID)': contextOutput
+ }
+ })
+
+ except Exception as ex:
+ err_msg = str(ex)
+ if 'Quota exceeded for quota metric' in err_msg:
+ err_msg = 'Quota for Google Vault API exceeded'
+ return_error('Unable to display export result. Error: {}'.format(err_msg))
+ else:
+ raise ex
+
+
+def test_module():
+ """
+ This is the call made when pressing the integration test button.
+ """
+ try:
+ service = connect()
+ get_matters_by_state(service, 'STATE_UNSPECIFIED')
+ demisto.results('ok')
+ sys.exit(0)
+ except Exception as ex:
+ if 'Quota exceeded for quota metric' in str(ex):
+ return_error('Quota for Google Vault API exceeded')
+ else:
+ return_error(str(ex))
+
+
+def main():
+ """Main Execution Block"""
+
+ try:
+ handle_proxy()
+
+ # @@@@@@@@ DEMISTO COMMANDS @@@@@@@@
+
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+ elif demisto.command() == 'gvault-list-matters':
+ list_matters_command()
+ elif demisto.command() == 'gvault-create-matter':
+ create_matter_command()
+ elif demisto.command() == 'gvault-matter-update-state':
+ update_matter_state_command()
+ elif demisto.command() == 'gvault-add-heldAccount':
+ add_account_to_hold_command()
+ elif demisto.command() == 'gvault-get-matter':
+ search_matter_command()
+ elif demisto.command() == 'gvault-remove-heldAccount':
+ remove_account_from_hold_command()
+ elif demisto.command() == 'gvault-delete-hold':
+ delete_hold_command()
+ elif demisto.command() == 'gvault-list-holds':
+ list_holds_command()
+ elif demisto.command() == 'gvault-create-hold':
+ create_hold_command()
+ elif demisto.command() == 'gvault-create-export-mail':
+ create_mail_export_command()
+ elif demisto.command() == 'gvault-create-export-drive':
+ create_drive_export_command()
+ elif demisto.command() == 'gvault-create-export-groups':
+ create_groups_export_command()
+ elif demisto.command() == 'gvault-export-status':
+ get_multiple_exports_command()
+ elif demisto.command() == 'gvault-download-results':
+ download_export_command()
+ elif demisto.command() == 'gvault-get-drive-results':
+ get_drive_results_command()
+ elif demisto.command() == 'gvault-get-mail-results':
+ get_mail_and_groups_results_command('MAIL')
+ elif demisto.command() == 'gvault-get-groups-results':
+ get_mail_and_groups_results_command('GROUPS')
+ except Exception as e:
+ return_error(str(e))
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == '__builtin__' or __name__ == 'builtins':
+ main()
diff --git a/Integrations/GoogleVault/GoogleVault.yml b/Integrations/GoogleVault/GoogleVault.yml
new file mode 100644
index 000000000000..76057671125d
--- /dev/null
+++ b/Integrations/GoogleVault/GoogleVault.yml
@@ -0,0 +1,860 @@
+category: IT Services
+commonfields:
+ id: google-vault
+ version: -1
+configuration:
+- display: Username
+ name: gsuite_credentials
+ required: true
+ type: 9
+- display: Authentication file contents
+ name: auth_json
+ required: true
+ type: 4
+- defaultvalue: 'true'
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: 'false'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Archiving and eDiscovery for G Suite.
+display: Google Vault
+name: google-vault
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The matter ID
+ isArray: false
+ name: matterID
+ required: true
+ secret: false
+ - default: false
+ description: Export Name
+ isArray: false
+ name: exportName
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: All Data
+ description: Search scope, default is "All Data"; "All Data", "Held Data", "Unprocessed
+ Data"
+ isArray: false
+ name: dataScope
+ predefined:
+ - All Data
+ - Held Data
+ - Unprocessed Data
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: All Accounts
+    description: Search method, default is "All Accounts"; "All Accounts", "Specific
+      Accounts" (requires the "emails" argument), "Organizational Unit" (requires
+      the "ou" argument)
+ isArray: false
+ name: searchMethod
+ predefined:
+ - All Accounts
+ - Specific Accounts(requires emails argument)
+ - Organizational Unit(requires ou argument)
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of emails to search in. Use this argument if the "searchMethod"
+ argument is set to "Specific Accounts"
+ isArray: false
+ name: emails
+ required: false
+ secret: false
+ - default: false
+ description: The organizational unit. Only use this argument if the "searchMethod"
+ argument is set to "Organizational Unit"
+ isArray: false
+ name: ou
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: Whether to include drafts in the search, default is "true"; "true"
+ or "false"
+ isArray: false
+ name: includeDrafts
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+    description: Search time frame, e.g., "1 min ago", "2 weeks ago", "3 months ago"
+ isArray: false
+ name: timeFrame
+ required: false
+ secret: false
+ - default: false
+ description: Search start time in UTC (2018-10-16T12:59:02.584000Z)
+ isArray: false
+ name: startTime
+ required: false
+ secret: false
+ - default: false
+ description: Search end time in UTC (2018-10-16T12:59:02.584000Z)
+ isArray: false
+ name: endTime
+ required: false
+ secret: false
+ - default: false
+ description: Apply specific terms to the search, e.g., (subject:example)
+ isArray: false
+ name: terms
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: Export in PST format, default is "true"; "true" or "false"
+ isArray: false
+ name: exportPST
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Export in MBOX format, default is "false"; "true" or "false"
+ isArray: false
+ name: exportMBOX
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a Google Vault export in order to perform search actions
+ on emails
+ execution: false
+ name: gvault-create-export-mail
+ outputs:
+ - contextPath: GoogleVault.Matter.Export.MatterID
+ description: Matter ID
+ type: string
+ - contextPath: GoogleVault.Matter.Export.ExportID
+ description: Export ID
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Name
+ description: Export's name
+ type: string
+ - contextPath: GoogleVault.Matter.Export.CreateTime
+ description: Export's creation time
+ type: string
+ - arguments:
+ - default: false
+ description: A name for the new matter
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: A description for the matter
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new matter with the specified name and description. The
+ initial state is open, and the owner is the method caller. First, checks if
+ a matter with the same name already exists.
+ execution: false
+ name: gvault-create-matter
+ outputs:
+ - contextPath: GoogleVault.Matter.Name
+ description: Matter name
+ type: string
+ - contextPath: GoogleVault.Matter.ID
+ description: Matter ID
+ type: string
+ - contextPath: GoogleVault.Matter.State
+ description: Matter's state
+ type: string
+ - arguments:
+ - default: false
+ description: Matter ID
+ isArray: false
+ name: matterID
+ required: true
+ secret: false
+ - default: false
+ description: Export Name
+ isArray: false
+ name: exportName
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: All Data
+ description: Search scope, default is "All Data"; "All Data", "Held Data", "Unprocessed
+ Data"
+ isArray: false
+ name: dataScope
+ predefined:
+ - All Data
+ - Held Data
+ - Unprocessed Data
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: Specific Accounts(requires emails argument)
+    description: Search method, default is "Specific Accounts"; "Specific Accounts"
+      (requires the "emails" argument), "Organizational Unit" (requires the "ou"
+      argument), "Team Drive"
+ isArray: false
+ name: searchMethod
+ predefined:
+ - Specific Accounts(requires emails argument)
+ - Organizational Unit(requires ou argument)
+ - Team Drive
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of emails to search in. Use this argument if the "searchMethod"
+ argument is set to "Specific Accounts"
+ isArray: false
+ name: emails
+ required: false
+ secret: false
+ - default: false
+ description: The organizational unit. Only use this argument if the "searchMethod"
+ argument is set to "Organizational Unit"
+ isArray: false
+ name: ou
+ required: false
+ secret: false
+ - default: false
+    description: CSV list of team drives to search in. Only use this argument if
+      the "searchMethod" argument is set to "Team Drive"
+ isArray: false
+ name: teamDrive
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: Whether to include team drives, default is "true"; "true" or "false"
+ isArray: false
+ name: includeTeamDrives
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Search time frame
+ isArray: false
+ name: timeFrame
+ required: false
+ secret: false
+ - default: false
+ description: Search start time in UTC (2018-10-16T12:59:02.584000Z)
+ isArray: false
+ name: startTime
+ required: false
+ secret: false
+ - default: false
+ description: Search end time in UTC (2018-10-16T12:59:02.584000Z)
+ isArray: false
+ name: endTime
+ required: false
+ secret: false
+ - default: false
+ description: Apply specific terms to the search
+ isArray: false
+ name: terms
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a Google Vault export in order to perform search actions
+ on drives.
+ execution: false
+ name: gvault-create-export-drive
+ outputs:
+ - contextPath: GoogleVault.Matter.Export.MatterID
+ description: Matter ID
+ type: string
+ - contextPath: GoogleVault.Matter.Export.ExportID
+ description: Export ID
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Name
+ description: Export's name
+ type: string
+ - contextPath: GoogleVault.Matter.Export.CreateTime
+ description: Export's creation time
+ type: string
+ - arguments:
+ - default: false
+ description: The matter ID
+ isArray: false
+ name: matterID
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: New matter state; "CLOSE", "DELETE", "REOPEN", "UNDELETE"
+ isArray: false
+ name: state
+ predefined:
+ - CLOSE
+ - DELETE
+ - REOPEN
+ - UNDELETE
+ required: true
+ secret: false
+ deprecated: false
+  description: Updates the state of the specified matter
+ execution: false
+ name: gvault-matter-update-state
+ outputs:
+ - contextPath: GoogleVault.Matter.Name
+ description: Matter name
+ type: string
+ - contextPath: GoogleVault.Matter.MatterID
+ description: Matter ID
+ type: string
+ - contextPath: GoogleVault.Matter.State
+ description: Matter's state
+ type: string
+ - arguments:
+ - default: false
+ description: The matter ID
+ isArray: false
+ name: matterID
+ required: true
+ secret: false
+ - default: false
+ description: Export name
+ isArray: false
+ name: exportName
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: All Data
+ description: Search scope, default is "All Data"; "All Data", "Held Data", "Unprocessed
+ Data"
+ isArray: false
+ name: dataScope
+ predefined:
+ - All Data
+ - Held Data
+ - Unprocessed Data
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of groups, maximum of 50 groups
+ isArray: false
+ name: groups
+ required: true
+ secret: false
+ - default: false
+ description: Search time frame
+ isArray: false
+ name: timeFrame
+ required: false
+ secret: false
+ - default: false
+ description: Search start time in UTC (2018-10-16T12:59:02.584000Z)
+ isArray: false
+ name: startTime
+ required: false
+ secret: false
+ - default: false
+ description: Search end time in UTC (2018-10-16T12:59:02.584000Z)
+ isArray: false
+ name: endTime
+ required: false
+ secret: false
+ - default: false
+ description: Apply specific terms to the search
+ isArray: false
+ name: terms
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: true
+ defaultValue: 'true'
+ description: Export in PST format, default is "true"; "true" or "false"
+ isArray: false
+ name: exportPST
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Export in MBOX format, default is "false"; "true" or "false"
+ isArray: false
+ name: exportMBOX
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a Google Vault Export in order to perform search actions
+ on Google groups.
+ execution: false
+ name: gvault-create-export-groups
+ outputs:
+ - contextPath: GoogleVault.Matter.Export.MatterID
+ description: Matter ID
+ type: string
+ - contextPath: GoogleVault.Matter.Export.ExportID
+ description: Export ID
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Name
+ description: Export's name
+ type: string
+ - contextPath: GoogleVault.Matter.Export.CreateTime
+ description: Export's creation time
+ type: string
+ - arguments:
+ - default: false
+ description: The matter ID
+ isArray: false
+ name: matterID
+ required: true
+ secret: false
+ - default: false
+ description: Name of the new hold
+ isArray: false
+ name: holdName
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: New hold's corpus type; "Mail", "Drive", "Groups"
+ isArray: false
+ name: corpus
+ predefined:
+ - Mail
+ - Drive
+ - Groups
+ required: true
+ secret: false
+ - default: false
+ description: CSV list of accounts/group IDs to place in the hold, requires at
+ least one account/group ID
+ isArray: true
+ name: accountID
+ required: true
+ secret: false
+ - default: false
+ description: Search timeframe
+ isArray: false
+ name: timeFrame
+ required: false
+ secret: false
+ - default: false
+    description: Search start time in UTC (1994-11-05T13:15:30Z)
+ isArray: false
+ name: startTime
+ required: false
+ secret: false
+ - default: false
+    description: Search end time in UTC (1994-11-05T13:15:30Z)
+ isArray: false
+ name: endTime
+ required: false
+ secret: false
+ - default: false
+ description: The terms that must be matched for a message to be covered by this
+ hold.
+ isArray: false
+ name: terms
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a hold in the given matter. Holds are used to preserve data
+ in the organization.
+ execution: false
+ name: gvault-create-hold
+ outputs:
+  - contextPath: GoogleVault.Hold.Name
+ description: Hold name
+ type: string
+ - contextPath: GoogleVault.Hold.ID
+ description: Hold ID
+ type: string
+ - contextPath: GoogleVault.Hold.Account.ID
+ description: Held account ID
+ type: string
+ - arguments:
+ - default: false
+ description: The matter ID
+ isArray: false
+ name: matterID
+ required: true
+ secret: false
+ - default: false
+ description: The hold ID
+ isArray: false
+ name: holdID
+ required: true
+ secret: false
+ - default: false
+ description: The account/group ID to place in hold
+ isArray: false
+ name: accountID
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds a Held Account to a hold. This structure is immutable.
+ execution: false
+ name: gvault-add-heldAccount
+ outputs:
+ - contextPath: GoogleVault.Matter.Hold.Account.ID
+ description: The held account ID
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The matter ID
+ isArray: false
+ name: matterID
+ required: true
+ secret: false
+ - default: false
+ description: The hold ID
+ isArray: false
+ name: holdID
+ required: true
+ secret: false
+ - default: false
+ description: Account ID to remove from hold
+ isArray: false
+ name: accountID
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes a Held Account from a hold
+ execution: false
+ name: gvault-remove-heldAccount
+ - arguments:
+ - default: false
+ description: The matter ID
+ isArray: false
+ name: matterID
+ required: true
+ secret: false
+ - default: false
+ description: The hold ID
+ isArray: false
+ name: holdID
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes a hold by ID. This will release any Held Accounts on this
+ Hold.
+ execution: false
+ name: gvault-delete-hold
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: All
+    description: If set, lists only matters with the specified state, default is
+      "All"; "All", "Open", "Closed", "Deleted"
+ isArray: false
+ name: state
+ predefined:
+ - All
+ - Open
+ - Closed
+ - Deleted
+ required: false
+ secret: false
+ deprecated: false
+ description: Lists matters the user has access to.
+ execution: false
+ name: gvault-list-matters
+ outputs:
+ - contextPath: GoogleVault.Matter.Name
+ description: The matter name
+ type: string
+ - contextPath: GoogleVault.Matter.MatterID
+ description: Matter ID
+ type: string
+ - contextPath: GoogleVault.Matter.State
+ description: Matter's state
+ type: string
+ - arguments:
+ - default: false
+ description: Search by matter name (multiple matters with the same name can
+ exist)
+ isArray: false
+ name: matterName
+ required: false
+ secret: false
+ - default: false
+ description: Search by matter ID
+ isArray: false
+ name: matterID
+ required: false
+ secret: false
+ deprecated: false
+ description: Search for a matter by name or by ID
+ execution: false
+ name: gvault-get-matter
+ outputs:
+ - contextPath: GoogleVault.Matter.Name
+ description: Matter name
+ type: Unknown
+ - contextPath: GoogleVault.Matter.ID
+ description: Matter ID
+ type: Unknown
+ - contextPath: GoogleVault.Matter.State
+ description: Matter's state
+ type: Unknown
+ - arguments:
+ - default: false
+ description: Matter ID
+ isArray: false
+ name: matterID
+      required: true
+ secret: false
+ deprecated: false
+ description: Lists all holds for a specified matter
+ execution: false
+ name: gvault-list-holds
+ outputs:
+ - contextPath: GoogleVault.Matter.ID
+ description: Matter ID
+ type: string
+ - contextPath: GoogleVault.Matter.Hold.name
+ description: Hold name
+ type: string
+ - contextPath: GoogleVault.Matter.Hold.ID
+ description: Hold ID
+ type: string
+ - arguments:
+ - default: false
+ description: The matter ID
+ isArray: false
+ name: matterId
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of one or more export IDs
+ isArray: true
+ name: exportIDS
+ required: false
+ secret: false
+ - default: false
+    description: 'MatterID and ExportID concatenation separated by a ''#'' delimiter
+      (used only for playbooks), e.g. <matterID>#<exportID>'
+ isArray: false
+ name: queryIDS
+ required: false
+ secret: false
+ deprecated: false
+ description: Gets the status of one or more specified exports
+ execution: false
+ name: gvault-export-status
+ outputs:
+ - contextPath: GoogleVault.Matter.Export.MatterID
+ description: Matter ID
+ type: string
+ - contextPath: GoogleVault.Matter.Export.ExportID
+ description: Export ID
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Name
+ description: Export name
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Status
+ description: Export status
+ type: string
+ - contextPath: GoogleVault.Matter.Export.BucketName
+    description: Name of the bucket that holds this export
+ type: string
+ - contextPath: GoogleVault.Matter.Export.DownloadID
+    description: ID to use with the "gvault-download-results" command
+ type: string
+ - contextPath: GoogleVault.Matter.Export.ViewID
+    description: ID to use with the "gvault-get-drive-results", "gvault-get-mail-results", or "gvault-get-groups-results" commands
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the bucket that holds the export
+ isArray: false
+ name: bucketName
+ required: true
+ secret: false
+ - default: false
+ description: The export download ID
+ isArray: false
+ name: downloadID
+ required: true
+ secret: false
+ deprecated: false
+ description: Downloads an export by bucket name and download ID
+ execution: false
+ name: gvault-download-results
+ - arguments:
+ - default: false
+ description: Name of the bucket that holds the export
+ isArray: false
+ name: bucketName
+ required: true
+ secret: false
+ - default: false
+ description: The export view ID
+ isArray: false
+ name: viewID
+ required: true
+ secret: false
+ - default: true
+ defaultValue: '30'
+ description: Maximum number of results to return (a high threshold can slow
+ down your browser), default is "30"
+ isArray: false
+ name: maxResult
+ required: false
+ secret: false
+ deprecated: false
+ description: Get the results of a specified drive export
+ execution: false
+ name: gvault-get-drive-results
+ outputs:
+ - contextPath: GoogleVault.Matter.Export.Results.Title
+ description: Title of the file
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.Author
+ description: Author of the file
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.Others
+ description: Other users related to the file
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.Viewers
+ description: Viewers of the file
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.DateModified
+ description: The date the file was last modified
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.DateCreated
+ description: The date the file was created
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.DocType
+ description: File type (extension)
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.MD5
+    description: The MD5 hash of the file
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the bucket that holds the export
+ isArray: false
+ name: bucketName
+ required: true
+ secret: false
+ - default: false
+ description: The export view ID
+ isArray: false
+ name: viewID
+ required: true
+ secret: false
+ - default: true
+ defaultValue: '30'
+ description: Maximum number of results to return (a high threshold can slow
+ down your browser), default is "30"
+ isArray: false
+ name: maxResult
+ required: false
+ secret: false
+ deprecated: false
+ description: Get the results of a specified mail export
+ execution: false
+ name: gvault-get-mail-results
+ outputs:
+ - contextPath: GoogleVault.Matter.Export.Results.CC
+ description: Email addresses CCed on the mail
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.BCC
+ description: Email addresses BCCed on the mail
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.DateSent
+ description: The date the email was sent
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.From
+ description: The sender of the email
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.Subject
+ description: The subject of the email
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.DateReceived
+ description: The date the email was received
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.To
+ description: The address the email was sent to
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the bucket that holds the export
+ isArray: false
+ name: bucketName
+ required: true
+ secret: false
+ - default: false
+ description: The export view ID
+ isArray: false
+ name: viewID
+ required: true
+ secret: false
+ - default: true
+ defaultValue: '30'
+ description: Maximum number of results to return (a high threshold can slow
+ down your browser), default is "30"
+ isArray: false
+ name: maxResult
+ required: false
+ secret: false
+ deprecated: false
+ description: Get the results of a specified group export
+ execution: false
+ name: gvault-get-groups-results
+ outputs:
+ - contextPath: GoogleVault.Matter.Export.Results.CC
+ description: Email addresses CCed on the message
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.BCC
+ description: Email addresses BCCed on the message
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.DateSent
+ description: The date the message was sent
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.From
+ description: The sender of the message
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.Subject
+ description: The subject of the message
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.DateReceived
+ description: The date the message was received
+ type: string
+ - contextPath: GoogleVault.Matter.Export.Results.To
+ description: The address the message was sent to
+ type: string
+ dockerimage: demisto/gvault
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- Google-Vault-Generic-Test
diff --git a/Integrations/GoogleVault/GoogleVault_description.md b/Integrations/GoogleVault/GoogleVault_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/GoogleVault/GoogleVault_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/GoogleVault/GoogleVault_image.png b/Integrations/GoogleVault/GoogleVault_image.png
new file mode 100644
index 000000000000..adea931bc0c1
Binary files /dev/null and b/Integrations/GoogleVault/GoogleVault_image.png differ
diff --git a/Integrations/ImageOCR/ImageOCR.py b/Integrations/ImageOCR/ImageOCR.py
new file mode 100644
index 000000000000..cd5e9aa6f5b0
--- /dev/null
+++ b/Integrations/ImageOCR/ImageOCR.py
@@ -0,0 +1,92 @@
+import demistomock as demisto
+from CommonServerPython import *
+import subprocess
+import traceback
+from typing import List
+
+TESSERACT_EXE = 'tesseract'
+
+
+def list_languages() -> List[str]:
+ lang_out = subprocess.check_output([TESSERACT_EXE, '--list-langs'], text=True)
+ if not lang_out: # something went wrong
+ raise ValueError('No output from --list-langs')
+ lines = lang_out.splitlines()
+ if len(lines) <= 1:
+ raise ValueError('No output from --list-langs')
+    return sorted(lines[1:])  # skip the header line that tesseract prints first
+
+
+def extract_text(image_path: str, languages: List[str] = None) -> str:
+ exe_params = [TESSERACT_EXE, image_path, 'stdout']
+ if languages:
+ exe_params.extend(['-l', '+'.join(languages)])
+ res = subprocess.run(exe_params, capture_output=True, check=True, text=True)
+ if res.stderr:
+ demisto.debug('tesseract returned ok but stderr contains warnings: {}'.format(res.stderr))
+ return res.stdout
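+
+
+# Usage note (illustrative): extract_text('page.png', ['eng', 'fra']) invokes
+# `tesseract page.png stdout -l eng+fra` -- tesseract expects multiple OCR
+# languages to be joined with '+'.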
+
+
+def list_languages_command() -> dict:
+ langs = list_languages()
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': langs,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': "## Image OCR Supported Languages\n\n" + "\n".join(['* ' + s for s in langs]),
+ }
+
+
+def extract_text_command() -> dict:
+ langs = argToList(demisto.getArg('langs')) or argToList(demisto.getParam('langs'))
+ demisto.debug("Using langs settings: {}".format(langs))
+ entry_id = demisto.args()['entryid']
+ file_path = demisto.getFilePath(entry_id)
+ if not file_path:
+ return_error("Couldn't find entry id: {}".format(entry_id))
+ demisto.debug('Extracting text from file: {}'.format(file_path))
+ res = extract_text(file_path['path'], langs)
+ file_entry = {'EntryID': entry_id, 'Text': res}
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['text'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': "## Image OCR Extracted Text\n\n" + res,
+ "EntryContext": {"File(val.EntryID == obj.EntryID)": file_entry},
+ }
+
+
+def test_module() -> None:
+    try:
+        supported_langs = list_languages()
+        conf_langs = argToList(demisto.getParam('langs'))
+        for lang in conf_langs:
+            if lang not in supported_langs:
+                demisto.results('Unsupported language configured: {}'.format(lang))
+                return  # fail the test instead of also reporting 'ok'
+        demisto.results('ok')
+    except Exception as ex:
+        demisto.results('Failed testing {}: {}'.format(TESSERACT_EXE, str(ex)))
+
+
+def main():
+ try:
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() == 'image-ocr-list-languages':
+ demisto.results(list_languages_command())
+ elif demisto.command() == 'image-ocr-extract-text':
+ demisto.results(extract_text_command())
+ else:
+ return_error('Unknown command: {}'.format(demisto.command()))
+ except subprocess.CalledProcessError as cpe:
+ return_error("Failed {} execution. Return status: {}.\nError:\n{}".format(cpe.cmd, cpe.returncode, cpe.stderr))
+ except Exception as ex:
+ return_error("Failed with error: {}\n\nTrace:\n{}".format(str(ex), traceback.format_exc()))
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/ImageOCR/ImageOCR.yml b/Integrations/ImageOCR/ImageOCR.yml
new file mode 100644
index 000000000000..0bcafa61e32d
--- /dev/null
+++ b/Integrations/ImageOCR/ImageOCR.yml
@@ -0,0 +1,47 @@
+category: Utilities
+commonfields:
+ id: Image OCR
+ version: -1
+configuration:
+- display: A CSV list of language codes to use for OCR (leave empty to use the defaults).
+ name: langs
+ required: false
+ type: 0
+description: Extracts text from images.
+display: Image OCR
+name: Image OCR
+defaultEnabled: true
+fromversion: 4.0.0
+script:
+ commands:
+ - deprecated: false
+ description: Lists supported languages for which the integration can extract text.
+ execution: false
+ name: image-ocr-list-languages
+ - arguments:
+ - default: false
+ description: Entry ID of the image file to process.
+ isArray: false
+ name: entryid
+ required: true
+ secret: false
+ - default: false
+    description: A CSV list of language codes to use for OCR. Overrides the configured default language list.
+ isArray: true
+ name: langs
+ required: false
+ secret: false
+ deprecated: false
+ description: Extracts text from an image.
+ execution: false
+ name: image-ocr-extract-text
+ outputs:
+ - contextPath: File.Text
+ description: Extracted text from the passed image file.
+ type: String
+ dockerimage: demisto/tesseract:1.0.0.274
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
diff --git a/Integrations/ImageOCR/ImageOCR_description.md b/Integrations/ImageOCR/ImageOCR_description.md
new file mode 100644
index 000000000000..6a0b94bd69ea
--- /dev/null
+++ b/Integrations/ImageOCR/ImageOCR_description.md
@@ -0,0 +1,8 @@
+## Image OCR
+Use the Image OCR integration to extract text from images. The integration utilizes the open-source [**tesseract**](https://github.com/tesseract-ocr/tesseract/) OCR engine.
+
+The default language used for OCR is English. To configure additional languages, specify a CSV list of language codes in the **Languages** parameter. For example, to set the integration for English and French, set this value: *eng,fra*. To see all supported language codes, run the following command:
+```
+!image-ocr-list-languages
+```
+
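+You can override the configured languages for a single extraction by passing the *langs* argument to the extract command, for example (the entry ID here is hypothetical):
+```
+!image-ocr-extract-text entryid=23@5 langs=eng,fra
+```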
diff --git a/Integrations/ImageOCR/ImageOCR_image.png b/Integrations/ImageOCR/ImageOCR_image.png
new file mode 100644
index 000000000000..24cd5581957b
Binary files /dev/null and b/Integrations/ImageOCR/ImageOCR_image.png differ
diff --git a/Integrations/ImageOCR/ImageOCR_test.py b/Integrations/ImageOCR/ImageOCR_test.py
new file mode 100644
index 000000000000..ed318c22f333
--- /dev/null
+++ b/Integrations/ImageOCR/ImageOCR_test.py
@@ -0,0 +1,59 @@
+from ImageOCR import list_languages, extract_text, main
+import pytest
+import demistomock as demisto
+from CommonServerPython import entryTypes
+
+
+RETURN_ERROR_TARGET = 'ImageOCR.return_error'
+
+
+def test_list_languages():
+ res = list_languages()
+ assert len(res) >= 16
+ assert "eng" in res
+
+
+@pytest.mark.parametrize('image,expected_text,langs', [
+ ('irs.png', 'Internal Revenue Service', None),
+ ('bomb.jpg', 'You must transfer bitcoins', None),
+ ('noisy1.jpg', 'Tesseract OCR', None),
+ ('noisy.png', 'Tesseract Will', None),
+ ('cnbc.gif', 'Goldman Sachs', None),
+ ('hebrew.tiff', 'ביטקוין', ['eng', 'heb'])
+ ]) # noqa: E124
+def test_extract_text(image, expected_text, langs):
+ res = extract_text('test_data/' + image, langs)
+ assert expected_text in res
+
+
+def test_extract_text_command(mocker):
+ mocker.patch.object(demisto, 'args', return_value={'entryid': 'test'})
+ mocker.patch.object(demisto, 'getFilePath', return_value={"path": "test_data/irs.png"})
+ mocker.patch.object(demisto, 'command', return_value='image-ocr-extract-text')
+ mocker.patch.object(demisto, 'results')
+ # validate our mocks are good
+ assert demisto.args()['entryid'] == 'test'
+ main()
+ assert demisto.results.call_count == 1
+ # call_args is tuple (args list, kwargs). we only need the first one
+ results = demisto.results.call_args[0]
+ assert len(results) == 1
+ assert results[0]['Type'] == entryTypes['note']
+ assert 'Internal Revenue Service' in results[0]['HumanReadable']
+ assert 'Internal Revenue Service' in results[0]["EntryContext"]["File(val.EntryID == obj.EntryID)"]['Text']
+
+
+# test with bad langs params
+def test_extract_text_command_bad(mocker):
+ mocker.patch.object(demisto, 'args', return_value={'entryid': 'test', 'langs': 'thisis,bad'})
+ mocker.patch.object(demisto, 'getFilePath', return_value={"path": "test_data/irs.png"})
+ mocker.patch.object(demisto, 'command', return_value='image-ocr-extract-text')
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET)
+ # validate our mocks are good
+ assert demisto.args()['entryid'] == 'test'
+ main()
+ assert return_error_mock.call_count == 1
+ # call_args last call with a tuple of args list and kwargs
+ err_msg = return_error_mock.call_args[0][0]
+ assert 'Error:' in err_msg
+ assert 'bad' in err_msg
diff --git a/Integrations/ImageOCR/Pipfile b/Integrations/ImageOCR/Pipfile
new file mode 100644
index 000000000000..f23c4eb48360
--- /dev/null
+++ b/Integrations/ImageOCR/Pipfile
@@ -0,0 +1,15 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+flake8 = "*"
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/ImageOCR/Pipfile.lock b/Integrations/ImageOCR/Pipfile.lock
new file mode 100644
index 000000000000..b5fccc99f686
--- /dev/null
+++ b/Integrations/ImageOCR/Pipfile.lock
@@ -0,0 +1,223 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "2197a28c3a643c22111a754c6a2a77781af6e24cc896194424a14c61711c72e7"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661",
+ "sha256:a796a115208f5c03b18f332f7c11729812c8c3ded6c46319c59b53efd3819da8"
+ ],
+ "index": "pypi",
+ "version": "==3.7.7"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7",
+ "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db"
+ ],
+ "version": "==0.18"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:c40744b6bc5162bbb39c1257fe298b7a393861d50978b565f3ccd9cb9de0182a",
+ "sha256:f57abacd059dc3bd666258d1efb0377510a89777fda3e3274e3c01f7c03ae22d"
+ ],
+ "version": "==4.3.20"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:2112d2ca570bb7c3e53ea1a35cd5df42bb0fd10c45f0fb97178679c3c03d64c7",
+ "sha256:c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a"
+ ],
+ "markers": "python_version > '2.7'",
+ "version": "==7.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
+ "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
+ ],
+ "version": "==19.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a",
+ "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03"
+ ],
+ "version": "==2.4.0"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:4a784f1d4f2ef198fe9b7aef793e9fa1a3b2f84e822d9b3a64a181293a572d45",
+ "sha256:926855726d8ae8371803f7b2e6ec0a69953d9c6311fa7c3b6c1b929ff92d27da"
+ ],
+ "index": "pypi",
+ "version": "==4.6.3"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
+ "sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e",
+ "sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0",
+ "sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c",
+ "sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631",
+ "sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4",
+ "sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34",
+ "sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b",
+ "sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a",
+ "sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233",
+ "sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1",
+ "sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36",
+ "sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d",
+ "sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a",
+ "sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.4.0"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:8c1019c6aad13642199fbe458275ad6a84907634cc9f0989877ccc4a2840139d",
+ "sha256:ca943a7e809cc12257001ccfb99e3563da9af99d52f261725e96dfe0f9275bc3"
+ ],
+ "version": "==0.5.1"
+ }
+ }
+}
diff --git a/Integrations/ImageOCR/test_data/bomb.jpg b/Integrations/ImageOCR/test_data/bomb.jpg
new file mode 100644
index 000000000000..c9e20deaed35
Binary files /dev/null and b/Integrations/ImageOCR/test_data/bomb.jpg differ
diff --git a/Integrations/ImageOCR/test_data/cnbc.gif b/Integrations/ImageOCR/test_data/cnbc.gif
new file mode 100644
index 000000000000..b53db09168d0
Binary files /dev/null and b/Integrations/ImageOCR/test_data/cnbc.gif differ
diff --git a/Integrations/ImageOCR/test_data/cnbc.tiff b/Integrations/ImageOCR/test_data/cnbc.tiff
new file mode 100644
index 000000000000..85a64d5c9f3d
Binary files /dev/null and b/Integrations/ImageOCR/test_data/cnbc.tiff differ
diff --git a/Integrations/ImageOCR/test_data/file.tiff b/Integrations/ImageOCR/test_data/file.tiff
new file mode 100644
index 000000000000..40eb05b94b18
Binary files /dev/null and b/Integrations/ImageOCR/test_data/file.tiff differ
diff --git a/Integrations/ImageOCR/test_data/hebrew.tiff b/Integrations/ImageOCR/test_data/hebrew.tiff
new file mode 100644
index 000000000000..b36df8415ecc
Binary files /dev/null and b/Integrations/ImageOCR/test_data/hebrew.tiff differ
diff --git a/Integrations/ImageOCR/test_data/irs.png b/Integrations/ImageOCR/test_data/irs.png
new file mode 100644
index 000000000000..d4531687c17b
Binary files /dev/null and b/Integrations/ImageOCR/test_data/irs.png differ
diff --git a/Integrations/ImageOCR/test_data/noisy.png b/Integrations/ImageOCR/test_data/noisy.png
new file mode 100644
index 000000000000..3e81d204b44a
Binary files /dev/null and b/Integrations/ImageOCR/test_data/noisy.png differ
diff --git a/Integrations/ImageOCR/test_data/noisy1.jpg b/Integrations/ImageOCR/test_data/noisy1.jpg
new file mode 100644
index 000000000000..5b2f349ac922
Binary files /dev/null and b/Integrations/ImageOCR/test_data/noisy1.jpg differ
diff --git a/Integrations/ImageOCR/test_data/tesseract b/Integrations/ImageOCR/test_data/tesseract
new file mode 100755
index 000000000000..e562d3d7b01e
--- /dev/null
+++ b/Integrations/ImageOCR/test_data/tesseract
@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+
+# This script runs tesseract via the docker image configured for the integration.
+# It can be used to run the unit tests from your favorite editor.
+#
+# Configure your editor so that this script is on the PATH.
+# For example run pycharm from the command line like this:
+# PATH=$PATH:`pwd`/test_data charm . &
+# Or vscode like this:
+# PATH=$PATH:`pwd`/test_data code . &
+#
+
+SCRIPT_DIR=$(dirname "${BASH_SOURCE[0]}")
+
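+# Read the configured docker image name from the integration's YAML definition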
+DOCKER=$(grep dockerimage "$SCRIPT_DIR/../ImageOCR.yml" | awk '{print $2}')
+
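+# Run tesseract inside that image, mounting the current working directory as /work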
+docker run --rm -i -v "$(pwd)":/work "$DOCKER" sh -c "cd /work; tesseract $*"
diff --git a/Integrations/IntSight/CHANGELOG.md b/Integrations/IntSight/CHANGELOG.md
new file mode 100644
index 000000000000..4cf00615e9ec
--- /dev/null
+++ b/Integrations/IntSight/CHANGELOG.md
@@ -0,0 +1,13 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+- Fixed an issue where indicators were not extracted correctly in the ***intsights-get-iocs*** command.
+- Improved the implementation of the following commands:
+ - ***intsights-get-alert-image***
+ - ***intsights-get-alert-takedown-status***
+
+
+## [19.9.1] - 2019-09-18
+Improved the error message shown when the configured URL is incorrect.
+
diff --git a/Integrations/IntSight/IntSight.py b/Integrations/IntSight/IntSight.py
new file mode 100644
index 000000000000..1a65bd459fcd
--- /dev/null
+++ b/Integrations/IntSight/IntSight.py
@@ -0,0 +1,953 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import requests
+import json
+import base64
+import sys
+from datetime import datetime
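+# Python 2 only: reload(sys) re-exposes setdefaultencoding so UTF-8 can be forced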
+reload(sys)
+sys.setdefaultencoding('utf-8') # pylint: disable=E1101
+
+requests.packages.urllib3.disable_warnings()
+
+URL = demisto.getParam('server')
+if URL[-1] != '/':
+ URL += '/'
+
+if not demisto.getParam('proxy'):
+    # Remove proxy settings inherited from the environment; pop() with a default
+    # avoids a KeyError when a variable is not set.
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+VALIDATE_CERT = not demisto.params().get('insecure', True)
+
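+# Build HTTP Basic authentication from the account ID (identifier) and API key (password)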
+id_and_api_key = demisto.getParam('credentials')['identifier'] + ':' + demisto.getParam('credentials')['password']
+encoded_auth_key = base64.b64encode(id_and_api_key.encode("utf-8"))
+mssp_account_id = demisto.getParam('mssp_sub_account_id')
+
+HEADERS = {'Authorization': 'Basic {}'.format(encoded_auth_key.decode()), 'Content-Type': 'application/json',
+ 'Account-Id': demisto.getParam('credentials')['identifier']}
+
+# Change the Account-Id to the sub account id, so all actions will be on the sub account.
+if mssp_account_id:
+ HEADERS['Account-Id'] = mssp_account_id
+
+IOC_TYPE_TO_DBOT_TYPE = {
+ 'IpAddresses': 'ip',
+ 'Urls': 'url',
+ 'Domains': 'domain',
+ 'Hashes': 'hash'
+}
+
+DEFAULT_TIME_RANGE = '1 day'
+SEVERITY_LEVEL = {
+ 'All': 0,
+ 'Low': 1,
+ 'Medium': 2,
+ 'High': 3
+}
+
+
+def req(method, path, json_data=None, params=None, json_response=False):
+ """
+ Send the request to IntSights and return the JSON response
+ """
+ r = requests.request(method, URL + path, headers=HEADERS, json=json_data, params=params, verify=VALIDATE_CERT)
+
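+    # Treat any non-2xx status as an error, except a few known "no change" response texts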
+ if r.status_code < 200 or r.status_code > 299:
+ if not (r.text == 'SeverityNotChanged' or r.text == 'TagExist' or r.text == 'IocBlocklistStatusNotChanged'):
+ return_error('Error in API call to IntSights service %s - [%d] %s' % (path, r.status_code, r.text))
+
+ if r.status_code == 204:
+ return [] # type: ignore
+
+ if json_response:
+ try:
+ return r.json()
+ except ValueError:
+ return_error('Error in API call to IntSights service - check your configured URL address')
+
+ return r
+
+
+def convert_iso_string_to_python_date(date_in_iso_format):
+ iso_format = "%Y-%m-%dT%H:%M:%S"
+ date_in_python_format = datetime.strptime(date_in_iso_format, iso_format)
+ return date_in_python_format
+
+
+def convert_python_date_to_unix_millisecond(python_date_object):
+ timestamp_in_unix_millisecond = date_to_timestamp(python_date_object, 'datetime.datetime')
+ return timestamp_in_unix_millisecond
+
+
+def increase_iso_by_x_days(date_in_iso_format, num_of_days):
+ date_in_python_format = convert_iso_string_to_python_date(date_in_iso_format)
+ new_date_in_python_format = date_in_python_format + timedelta(days=int(num_of_days))
+ new_date_in_iso_format = new_date_in_python_format.isoformat()
+ return new_date_in_iso_format
+
+
+def remove_milliseconds_from_iso(date_in_iso_format):
+ date_parts_arr = date_in_iso_format.split('.')
+ date_in_iso_without_milliseconds = date_parts_arr[0]
+ return date_in_iso_without_milliseconds
+
+
+def increase_timestamp_by_x_days(date_in_unix_ms_timestamp, num_of_days):
+ date_in_iso = timestamp_to_datestring(date_in_unix_ms_timestamp)
+ date_in_iso_without_ms = remove_milliseconds_from_iso(date_in_iso)
+ date_in_iso_plus_x_days = increase_iso_by_x_days(date_in_iso_without_ms, num_of_days)
+ timestamp_in_unix_ms_plus_x_days = date_to_timestamp(date_in_iso_plus_x_days)
+ return timestamp_in_unix_ms_plus_x_days
+
+
+def update_params_with_end_and_start_date(params, oldest_day_to_search_in_unix_timestamp, now_date_in_unix_timestamp):
+ params['foundDateFrom'] = oldest_day_to_search_in_unix_timestamp
+ params['foundDateTo'] = now_date_in_unix_timestamp
+ params['sourceDateFrom'] = oldest_day_to_search_in_unix_timestamp
+ params['sourceDateTo'] = now_date_in_unix_timestamp
+
+
+def update_params_with_delta_arg(params, time_delta_in_days_int):
+ now_date_in_iso = datetime.utcnow().isoformat()
+ now_date_in_iso_without_ms = remove_milliseconds_from_iso(now_date_in_iso)
+ now_date_in_unix_timestamp = date_to_timestamp(now_date_in_iso_without_ms)
+ oldest_day_to_search_in_unix_timestamp = increase_timestamp_by_x_days(now_date_in_unix_timestamp,
+ -1 * time_delta_in_days_int)
+ update_params_with_end_and_start_date(params, oldest_day_to_search_in_unix_timestamp, now_date_in_unix_timestamp)
+ del params['time-delta']
+
+
+def update_params_dict_according_to_delta_arg(params, time_delta_in_days_int):
+ if 'foundDateFrom' in params or 'foundDateTo' in params:
+ demisto.debug(
+ "ERROR in get_alerts() - can't use found-date-to or found-date-from arguments with time-delta argument")
+ return_error("Error: can't assign delta when assigned both found-date-to or found-date-from")
+ else:
+ update_params_with_delta_arg(params, time_delta_in_days_int)
+ return params
+
+
+def handle_filters(foundDateFrom=None):
+ """
+ Apply filters to alert list
+ """
+ argsConversion = {
+ 'alert-type': 'alertType',
+ 'source-type': 'sourceType',
+ 'network-type': 'networkType',
+ 'source-date-from': 'sourceDateFrom',
+ 'source-date-to': 'sourceDateTo',
+ 'found-date-from': 'foundDateFrom',
+ 'found-date-to': 'foundDateTo',
+ 'is-flagged': 'isFlagged',
+ 'is-closed': 'isClosed',
+ 'source-ID': 'sourceId',
+ 'first-seen-from': 'firstSeenFrom',
+ 'first-seen-to': 'firstSeenTo',
+ 'last-seen-from': 'lastSeenFrom',
+ 'last-seen-to': 'lastSeenTo',
+ 'value': 'iocValue',
+ }
+ params = {}
+ for k in demisto.args():
+ if demisto.getArg(k):
+ params[argsConversion.get(k) or k] = demisto.getArg(k)
+ if demisto.getArg('time-delta'):
+ time_delta_in_days = demisto.getArg('time-delta')
+ update_params_dict_according_to_delta_arg(params, int(time_delta_in_days))
+ elif foundDateFrom:
+ params['foundDateFrom'] = foundDateFrom
+ return params
+
+
+def get_alerts_helper(params):
+ demisto.info("Executing get_alerts with params: {}".format(params))
+
+ resp = req('GET', 'public/v1/data/alerts/alerts-list', params=params, json_response=True)
+
+ alerts_HR = []
+ alerts_ctx = []
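+    # The alerts-list endpoint returns only alert IDs; fetch the complete details for each one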
+ for alert_id in resp:
+ alert_informationHR, alert_informationCtx = get_alert_by_id_helper(alert_id)
+ alerts_HR.append(alert_informationHR)
+ alerts_ctx.append(alert_informationCtx)
+ return alerts_HR, alerts_ctx
+
+
+def extract_mail(replies):
+ mails = []
+ for reply in replies:
+ mails.append(reply.get('Email'))
+
+ return '\n'.join(mails)
+
+
+def extract_remediation(remediations):
+    remedies = []
+    string_format = "{0} - Status: {1}"
+    for remedy in remediations:
+        remedies.append(string_format.format(remedy.get('Value'), remedy.get('Status')))
+
+    return '\n'.join(remedies)
+
+
+def hash_identifier(hash_val):
+ if md5Regex.match(hash_val):
+ return 'MD5'
+ elif sha1Regex.match(hash_val):
+ return 'SHA1'
+ elif sha256Regex.match(hash_val):
+ return 'SHA256'
+ return 'Unknown'
+
+
+def extract_tags(tags):
+ pretty_tags = []
+ string_format = "ID: {0} - Name: {1}"
+ for tag in tags:
+ pretty_tags.append(string_format.format(tag.get('_id'), tag.get('Name')))
+ return pretty_tags
+
+
+def get_alerts():
+ """
+ Gets all alerts and returns as a list.
+ """
+ alerts_HR, alerts_ctx = get_alerts_helper(handle_filters())
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': alerts_ctx},
+ 'Contents': alerts_ctx,
+ 'HumanReadable': tableToMarkdown('IntSights Alerts', alerts_HR,
+ ['ID', 'Severity', 'Type', 'FoundDate', 'SourceType', 'SourceURL',
+ 'SourceEmail', 'SourceNetworkType', 'IsClosed', 'IsFlagged', 'Images', 'Tags',
+ 'Description', 'Title', 'TakedownStatus', 'SubType'], removeNull=False),
+ 'ContentsFormat': formats['json']
+ })
+
+
+def alert_to_readable(r, parse_tags):
+ """
+ Convert alert to readable format
+ """
+
+ readable = {
+ 'ID': demisto.get(r, '_id'),
+ 'Severity': demisto.get(r, 'Details.Severity'),
+ 'Type': demisto.get(r, 'Details.Type'),
+ 'FoundDate': demisto.get(r, 'FoundDate'),
+ 'SourceType': demisto.get(r, 'Details.Source.Type'),
+ 'SourceURL': demisto.get(r, 'Details.Source.URL'),
+ 'SourceEmail': demisto.get(r, 'Details.Source.Email'),
+ 'SourceNetworkType': demisto.get(r, 'Details.Source.NetworkType'),
+ 'IsClosed': demisto.get(r, 'IsClosed'),
+ 'IsFlagged': demisto.get(r, 'IsFlagged'),
+ 'Assets': demisto.get(r, 'Assets'),
+ 'Images': demisto.get(r, 'Details.Images'),
+ 'Description': demisto.get(r, 'Details.Description'),
+ 'Title': demisto.get(r, 'Details.Title'),
+ 'TakedownStatus': demisto.get(r, 'TakedownStatus'),
+ 'SubType': demisto.get(r, 'Details.SubType')
+ }
+
+ tags = demisto.get(r, 'Details.Tags')
+ if parse_tags:
+ readable['Tags'] = extract_tags(tags)
+ else:
+ readable['Tag'] = []
+ for tag in tags:
+ readable['Tag'].append({'ID': tag.get('_id'), 'Name': tag.get('Name')})
+
+ return readable
+
+
+def get_alert_by_id_helper(alert_id):
+ """
+ Helper for getting details by ID
+ """
+ r = req('GET', 'public/v1/data/alerts/get-complete-alert/' + alert_id, json_response=True)
+ return alert_to_readable(r, True), alert_to_readable(r, False)
+
+
+def get_alert_by_id():
+ """
+ Get alert details by id
+ """
+ alert_id = demisto.getArg('alert-id')
+ activity_hr, activity_ctx = get_alert_by_id_helper(alert_id)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': activity_ctx},
+ 'Contents': activity_hr,
+ 'HumanReadable': tableToMarkdown('IntSights Alert Details', [activity_hr],
+ ['ID', 'Severity', 'Type', 'FoundDate', 'SourceType', 'SourceURL',
+ 'SourceEmail', 'SourceNetworkType', 'IsClosed', 'IsFlagged', 'Images', 'Tags',
+ 'Description', 'Title', 'TakedownStatus', 'SubType']),
+ 'ContentsFormat': formats['json']
+ })
+
+
+def get_alert_image():
+ """
+ Retrieves the alert image by image_id
+ """
+ image_id = demisto.getArg('image-id')
+ r = req('GET', 'public/v1/data/alerts/alert-image/' + image_id)
+ demisto.results(fileResult(image_id + '-image.jpeg', r.content))
+
+
+def ask_analyst():
+ """
+ Send question to an analyst about the requested alert
+ """
+ alert_id = demisto.getArg('alert-id')
+ question = demisto.getArg('question')
+ req('POST', 'public/v1/data/alerts/ask-the-analyst/' + alert_id, json_data={'Question': question})
+ question_details = {'ID': alert_id, 'Question': question}
+ demisto.results(
+ {
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': question_details},
+ 'Contents': question_details,
+ 'HumanReadable': tableToMarkdown(
+ 'IntSights Ask the Analyst: Your question has been successfully sent to an analyst about the requested alert',
+ [question_details], ['ID', 'Question']),
+ 'ContentsFormat': formats['json']
+ }
+ )
+
+
+def get_alert_activity():
+ """
+ Retrieves the alert activity by alert-id
+ """
+ alert_id = demisto.getArg('alert-id')
+ r = req('GET', 'public/v1/data/alerts/activity-log/' + alert_id, json_response=True)
+
+ human_readables = []
+ alert = {'ID': alert_id, 'Activities': []}
+ for k in r:
+ alert['Activities'].append({
+ 'ID': demisto.get(k, '_id'),
+ 'Type': demisto.get(k, 'Type'),
+ 'Initiator': demisto.get(k, 'Initiator'),
+ 'CreatedDate': demisto.get(k, 'CreatedDate'),
+ 'UpdateDate': demisto.get(k, 'UpdateDate'),
+ 'RemediationBlocklistUpdate': demisto.get(k, 'AdditionalInformation.RemediationBlocklistUpdate'),
+ 'AskTheAnalyst': {'Replies': demisto.get(k, 'AdditionalInformation.AskTheAnalyst.Replies')},
+ 'Mail': {'Replies': demisto.get(k, 'AdditionalInformation.Mail.Replies')},
+ 'ReadBy': demisto.get(k, 'ReadBy')
+ })
+ human_readables.append({
+ 'ID': demisto.get(k, '_id'),
+ 'Type': demisto.get(k, 'Type'),
+ 'Initiator': demisto.get(k, 'Initiator'),
+ 'CreatedDate': demisto.get(k, 'CreatedDate'),
+ 'UpdateDate': demisto.get(k, 'UpdateDate'),
+ 'RemediationBlocklistUpdate': extract_remediation(
+ demisto.get(k, 'AdditionalInformation.RemediationBlocklistUpdate')),
+ 'AskTheAnalyst': {'Replies': demisto.get(k, 'AdditionalInformation.AskTheAnalyst.Replies')},
+ 'Mail': extract_mail(demisto.get(k, 'AdditionalInformation.Mail.Replies')),
+ 'ReadBy': demisto.get(k, 'ReadBy')
+ })
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': alert},
+ 'Contents': r,
+ 'HumanReadable': tableToMarkdown('IntSights Alert Activity Log', human_readables,
+ ['ID', 'Type', 'Initiator', 'CreatedDate', 'UpdateDate',
+ 'RemediationBlocklistUpdate', 'AskTheAnalyst', 'Mail', 'ReadBy']),
+ 'ContentsFormat': formats['json']
+ })
+
+
+def change_severity():
+ """
+ Change severity of an alert
+ """
+ alert_id = demisto.getArg('alert-id')
+ severity = demisto.getArg('severity')
+ req('PATCH', 'public/v1/data/alerts/change-severity/' + alert_id, json_data={'Severity': severity})
+ severity_details = {'ID': alert_id, 'Severity': severity}
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': severity_details},
+ 'Contents': severity_details,
+ 'HumanReadable': tableToMarkdown(
+ 'IntSights Update Alert Severity: The Alert severity has been successfully updated.', [severity_details],
+ ['ID', 'Severity']),
+ 'ContentsFormat': formats['json']
+ })
+
+
+def get_assignee_id(assignee_email):
+ r = req('GET', 'public/v1/account/users-details', json_response=True)
+ for user in r:
+ if assignee_email == user.get('Email', ''):
+ return user.get('_id')
+
+ raise Exception('user not found')
+
+
+def assign_alert():
+ """
+ Assign alert to an Assignee ID
+ """
+ alert_id = demisto.getArg('alert-id')
+ assignee_email = demisto.getArg('assignee-email')
+ is_mssp = demisto.getArg('is-mssp-optional')
+ assignee_id = get_assignee_id(assignee_email)
+ assign_details = {'ID': alert_id, 'Assignees.AssigneeID': assignee_id}
+
+ url = 'public/v1/data/alerts/assign-alert/' + alert_id
+ if is_mssp:
+ url += '?IsMssp=' + is_mssp
+ req('PATCH', url, json_data={'AssigneeID': assignee_id})
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': assign_details},
+ 'Contents': assign_details,
+ 'HumanReadable': tableToMarkdown(
+ 'IntSights Assign Alert: The Alert has been successfully assigned to assigneeID', [assign_details],
+ ['ID', 'Assignees.AssigneeID']),
+ 'ContentsFormat': formats['json']
+ })
+
+
+def unassign_alert():
+ """
+ Unassign an alert
+ """
+ alert_id = demisto.getArg('alert-id')
+ req('PATCH', 'public/v1/data/alerts/unassign-alert/' + alert_id)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': {'ID': alert_id}},
+ 'Contents': {'ID': alert_id},
+ 'HumanReadable': 'Alert id: ' + alert_id + ' successfully unassigned',
+ 'ContentsFormat': formats['json']
+ })
+
+
+def close_alert():
+ """
+ Close an alert
+ """
+ alert_id = demisto.getArg('alert-id')
+ reason = demisto.getArg('reason')
+ free_text = demisto.getArg('free-text')
+ is_hidden = demisto.getArg('is-hidden')
+ rate = demisto.getArg('rate')
+ close_details = {'ID': alert_id, 'Close Reason': reason, 'Closed FreeText': free_text, 'Closed Rate': rate,
+ 'IsHidden': is_hidden}
+ close_details_context = {'ID': alert_id, 'Closed': {'Reason': reason, 'FreeText': free_text, 'Rate': rate},
+ 'IsHidden': is_hidden}
+ url = 'public/v1/data/alerts/close-alert/' + alert_id
+ json_data = {'Reason': reason}
+
+    if free_text:
+        json_data['FreeText'] = free_text
+    if is_hidden:
+        json_data['IsHidden'] = is_hidden
+    if rate:
+        json_data['Rate'] = rate
+
+ req('PATCH', url, json_data)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': close_details},
+ 'Contents': close_details_context,
+ 'HumanReadable': tableToMarkdown('IntSights Close Alert: The Alert has successfully been closed.',
+ [close_details],
+ ['ID', 'Close Reason', 'Closed FreeText', 'Closed Rate', 'IsHidden']),
+ 'ContentsFormat': formats['json']
+ })
+
+
+def send_mail():
+ """
+ Send email with the alert details and a question
+ """
+ alert_id = demisto.getArg('alert-id')
+ emails = argToList(demisto.getArg('emails'))
+ content = demisto.getArg('content')
+ req('POST', 'public/v1/data/alerts/send-mail/' + alert_id, {'Emails': emails, 'Content': content})
+ ec = {
+ 'ID': alert_id,
+ 'EmailID': emails,
+ 'Question': content
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': ec},
+ 'Contents': ec,
+        'HumanReadable': 'Email with content (' + content + ') sent to the specified email addresses',
+ 'ContentsFormat': formats['json']
+ })
+
+
+def get_tag_id(alert_id, tag_name):
+ res = req('GET', 'public/v1/data/alerts/get-complete-alert/' + alert_id, json_response=True)
+
+ details = res.get('Details', {})
+ tags = details.get('Tags', [])
+ for tag in tags:
+ if tag.get('Name', '') == tag_name:
+ return tag.get('_id', '')
+
+ return 'Not found'
+
+
+def add_tag():
+ """
+ Adds a tag to the alert
+ """
+ alert_id = demisto.getArg('alert-id')
+ tag_name = demisto.getArg('tag-name')
+ req('PATCH', 'public/v1/data/alerts/add-tag/' + alert_id, json_data={'TagName': tag_name})
+ tag_info = {
+ 'TagName': tag_name,
+ 'ID': get_tag_id(alert_id, tag_name)
+ }
+ ec = {
+ 'ID': alert_id,
+ 'Tags': tag_info
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': ec},
+ 'Contents': ec,
+ 'HumanReadable': 'Tag (' + tag_name + ') added to alert id: ' + alert_id,
+ 'ContentsFormat': formats['json']
+ })
+
+
+def remove_tag():
+ """
+ Removes a tag from an alert
+ """
+ alert_id = demisto.getArg('alert-id')
+ tag_id = demisto.getArg('tag-id')
+ req('PATCH', 'public/v1/data/alerts/remove-tag/' + alert_id, json_data={'TagID': tag_id})
+ ec = {
+ 'ID': alert_id,
+ 'Tags': {'ID': tag_id}
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': ec},
+ 'Contents': ec,
+ 'HumanReadable': 'Tag id: ' + tag_id + ' removed from alert id: ' + alert_id,
+ 'ContentsFormat': formats['json']
+ })
+
+
+def add_comment():
+ """
+ Adds a comment to an alert
+ """
+ alert_id = demisto.getArg('alert-id')
+ comment = demisto.getArg('comment')
+ req('PATCH', 'public/v1/data/alerts/add-comment/' + alert_id, json_data={'Comment': comment})
+ ec = {
+ 'ID': alert_id,
+ 'Comment': comment
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': ec},
+ 'Contents': ec,
+        'HumanReadable': 'Successfully added comment "' + comment + '" to alert id: ' + alert_id,
+ 'ContentsFormat': formats['json']
+ })
+
+
+def IOC_to_readable(r):
+ """
+ Convert IOC to readable format
+ """
+ ioc_context = {
+ 'ID': demisto.get(r, '_id'),
+ 'SourceID': demisto.get(r, 'SourceID'),
+ 'AccountID': demisto.get(r, 'AccountID'),
+ 'Type': demisto.get(r, 'Type'),
+ 'Value': demisto.get(r, 'Value'),
+ 'FirstSeen': demisto.get(r, 'FirstSeen'),
+ 'LastSeen': demisto.get(r, 'LastSeen'),
+ 'Domain': demisto.get(r, 'Domain'),
+ 'Status': demisto.get(r, 'Status'),
+ 'Severity': demisto.get(r, 'Severity'),
+ 'SourceName': demisto.get(r, 'Source.Name'),
+ 'SourceConfidence': demisto.get(r, 'Source.Confidence'),
+ 'Flags': {'IsInAlexa': demisto.get(r, 'Flags.IsInAlexa')},
+ 'Enrichment': {
+ 'Status': demisto.get(r, 'Enrichment.Status'),
+ 'Data': demisto.get(r, 'Enrichment.Data'),
+            'Date': demisto.get(r, 'Enrichment.Data')  # 'Date' kept for backwards compatibility
+ }
+ }
+ ioc_readable = {
+ 'ID': demisto.get(r, '_id'),
+ 'SourceID': demisto.get(r, 'SourceID'),
+ 'AccountID': demisto.get(r, 'AccountID'),
+ 'Type': demisto.get(r, 'Type'),
+ 'Value': demisto.get(r, 'Value'),
+ 'FirstSeen': demisto.get(r, 'FirstSeen'),
+ 'LastSeen': demisto.get(r, 'LastSeen'),
+ 'Domain': demisto.get(r, 'Domain'),
+ 'Status': demisto.get(r, 'Status'),
+ 'Severity': demisto.get(r, 'Severity').get('Value'),
+ 'SourceName': demisto.get(r, 'Source.Name'),
+ 'SourceConfidence': demisto.get(r, 'Source.Confidence'),
+ 'IsInAlexa': demisto.get(r, 'Flags.IsInAlexa'),
+ 'Enrichment Status': demisto.get(r, 'Enrichment.Status'),
+ 'Enrichment Data': demisto.get(r, 'Enrichment.Data')
+ }
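+    # Derive a DBot reputation entry from the IOC value, type, and severity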
+ dbot_score = {
+ 'Indicator': ioc_context['Value'],
+ 'Type': IOC_TYPE_TO_DBOT_TYPE[ioc_context['Type']],
+ 'Vendor': 'IntSights',
+ 'Score': translate_severity(ioc_readable['Severity'])
+ }
+ malicious_dict = {
+ 'Vendor': 'IntSights',
+ 'Description': 'IntSights severity level is High'
+ }
+ domain = {}
+ if ioc_context['Domain']:
+ domain['Name'] = ioc_context['Domain']
+ if translate_severity(ioc_readable['Severity']) == 3:
+ domain['Malicious'] = malicious_dict
+
+ ip_info = {}
+ if ioc_context['Type'] == 'IpAddresses':
+ ip_info['Address'] = ioc_context['Value']
+ if translate_severity(ioc_readable['Severity']) == 3:
+ ip_info['Malicious'] = malicious_dict
+
+ url_info = {}
+ if ioc_context['Type'] == 'Urls':
+ url_info['Data'] = ioc_context['Value']
+ if translate_severity(ioc_readable['Severity']) == 3:
+ url_info['Malicious'] = malicious_dict
+
+ hash_info = {}
+ if ioc_context['Type'] == 'Hashes':
+ hash_info['Name'] = ioc_context['Value']
+ hash_info[hash_identifier(ioc_context['Value'])] = ioc_context['Value']
+ if translate_severity(ioc_readable['Severity']) == 3:
+ hash_info['Malicious'] = malicious_dict
+
+ return ioc_context, ioc_readable, dbot_score, domain, ip_info, url_info, hash_info
+
+
+def search_for_IOC():
+ """
+ Search for IOC by value
+ """
+ r = req('GET', 'public/v1/iocs/ioc-by-value', params=handle_filters(), json_response=True)
+
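+    # req() returns an empty list for an HTTP 204 (no content) response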
+ if r:
+ ioc_context, ioc_readable, dbot_score, domain, ip_info, url_info, hash_info = IOC_to_readable(r)
+
+ demisto.results(
+ {
+ 'Type': entryTypes['note'],
+ 'EntryContext': {
+ 'IntSights.Iocs(val.ID === obj.ID)': ioc_context,
+ 'DBotScore': dbot_score,
+ 'Domain': domain,
+ 'IP': ip_info,
+ 'URL': url_info,
+ 'File': hash_info
+ },
+ 'Contents': r,
+ 'HumanReadable': tableToMarkdown('IOC Information', [ioc_readable],
+ ['ID', 'SourceID', 'AccountID', 'Type', 'Value', 'FirstSeen',
+ 'LastSeen', 'Domain', 'Status', 'Severity', 'SourceName',
+ 'SourceConfidence', 'IsInAlexa', 'Enrichment Status',
+ 'Enrichment Data']),
+ 'ContentsFormat': formats['json']
+ }
+ )
+ else:
+ results_for_no_content('IOC Information')
+
+
+def results_for_no_content(cmd_name):
+ demisto.results(
+ {
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights': {}},
+ 'Contents': {},
+ 'HumanReadable': '### {} \n\n Could not get any results.'.format(cmd_name),
+ 'ContentsFormat': formats['json']
+ }
+ )
+
+
+def translate_severity(sev):
+ """
+ Translate alert severity to demisto
+ """
+ if sev == 'High' or sev == 'Medium':
+ return 3
+ elif sev == 'Low':
+ return 2
+ return 0
+
+
+def fetch_incidents():
+ """
+ Fetch incidents for Demisto
+ """
+ now = int((datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds() * 1000)
+ lastRunObject = demisto.getLastRun()
+    if not (lastRunObject and lastRunObject.get('time')):
+ fetch_delta, _ = parse_date_range(demisto.params().get('fetch_delta', DEFAULT_TIME_RANGE), to_timestamp=True)
+ else:
+ fetch_delta = lastRunObject.get('time')
+
+ alert_type = demisto.getParam('type')
+ min_severity_level = demisto.params().get('severity_level', 'All')
+ if min_severity_level not in SEVERITY_LEVEL:
+ raise Exception("Minimum Alert severity level to fetch incidents incidents from, allowed values are: ''All'',"
+ " ''Low'', ''Medium'',''High''(Setting to All will fetch all incidents)")
+
+ alerts_HR, alerts_ctx = get_alerts_helper(handle_filters(fetch_delta))
+ incidents = []
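+    # Keep only alerts at or above the configured minimum severity that match the configured alert type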
+ for alert in alerts_ctx:
+ if SEVERITY_LEVEL[min_severity_level] <= SEVERITY_LEVEL[alert.get('Severity', 'Low')]:
+ if not alert_type or alert_type.lower() == alert.get('Type', '').lower():
+ incidents.append({
+ 'name': '{type} - {id}'.format(type=alert.get('Type', 'Type not found'), id=alert.get('ID')),
+ 'occurred': alert.get('FoundDate'),
+ 'severity': translate_severity(alert.get('Severity')),
+ 'rawJSON': json.dumps(alert)
+ })
+ demisto.incidents(incidents)
+ demisto.setLastRun({'time': now})
+
+
+def get_iocs():
+ """
+ Gets all IOCs with the given filters
+ """
+ r = req('GET', 'public/v1/iocs/complete-iocs-list', params=handle_filters(), json_response=True)
+ domains = []
+ ip_infos = []
+ url_infos = []
+ hash_infos = []
+ dbot_scores = []
+ iocs_context = []
+ iocs_readable = []
+ for k in r:
+ ioc_context, ioc_readable, dbot_score, domain, ip_info, url_info, hash_info = IOC_to_readable(k)
+ iocs_context.append(ioc_context)
+ iocs_readable.append(ioc_readable)
+ dbot_scores.append(dbot_score)
+ domains.append(domain)
+ ip_infos.append(ip_info)
+ url_infos.append(url_info)
+ hash_infos.append(hash_info)
+ demisto.results(
+ {
+ 'Type': entryTypes['note'],
+ 'EntryContext': {
+ 'IntSights.Iocs': iocs_context,
+ 'DBotScore': dbot_scores,
+ 'Domain': domains,
+ 'IP': ip_infos,
+ 'URL': url_infos,
+ 'File': hash_infos
+ },
+ 'Contents': r,
+ 'HumanReadable': tableToMarkdown('IOC Information', iocs_readable,
+ ['ID', 'SourceID', 'AccountID', 'Type', 'Value', 'FirstSeen', 'LastSeen',
+ 'Domain', 'Status', 'Severity', 'SourceName', 'SourceConfidence',
+ 'IsInAlexa', 'Enrichment Status', 'Enrichment Data']),
+ 'ContentsFormat': formats['json']
+ }
+ )
+
+
+def takedown_request():
+ """
+ Request alert takedown
+ """
+ alert_id = demisto.getArg('alert-id')
+ req('PATCH', 'public/v1/data/alerts/takedown-request/' + alert_id)
+ ec = {
+ 'ID': alert_id,
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': ec},
+ 'Contents': ec,
+ 'HumanReadable': '### IntSights Alert Takedown\n' + 'The Alert Takedown request has been sent successfully for ' + str(
+ alert_id),
+ 'ContentsFormat': formats['json']
+ })
+
+
+def get_alert_takedown_status():
+ """
+ Get an alert's takedown status
+ """
+ alert_id = demisto.getArg('alert-id')
+ r = req('GET', 'public/v1/data/alerts/takedown-status/' + alert_id)
+ ec = {
+ 'ID': alert_id,
+ 'TakedownStatus': r.text
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': ec},
+ 'Contents': ec,
+ 'HumanReadable': tableToMarkdown('IntSights Alert Takedown Status', [ec], ['ID', 'TakedownStatus']),
+ 'ContentsFormat': formats['json']
+ })
+
+
+def update_ioc_blocklist_status():
+ alert_id = demisto.getArg('alert-id')
+ types = argToList(demisto.getArg('type'))
+ values = argToList(demisto.getArg('value'))
+ statuses = argToList(demisto.getArg('blocklist-status'))
+ if len(types) != len(values) or len(types) != len(statuses):
+ return_error('The lists must be of equal length. For each IOC, provide an entry in each list.')
+ data = []
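+    # Zip the parallel type/value/status lists into the IOC objects expected by the API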
+ for i in range(len(types)):
+ data.append({
+ 'Type': types[i],
+ 'Value': values[i],
+ 'BlocklistStatus': statuses[i]
+ })
+ req('PATCH', 'public/v1/data/alerts/change-iocs-blocklist-status/' + alert_id, json_data={'Iocs': data})
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.Alerts(val.ID === obj.ID)': {'ID': alert_id, 'Status': statuses}},
+ 'Contents': {'ID': alert_id, 'Status': statuses},
+ 'HumanReadable': tableToMarkdown('IntSights Update IOC BlockList Status for ' + alert_id, data,
+ ['BlocklistStatus']),
+ 'ContentsFormat': formats['json']
+ })
+
+
+def get_ioc_blocklist_status():
+ alert_id = demisto.getArg('alert-id')
+ r = req('GET', 'public/v1/data/alerts/blocklist-status/' + alert_id, json_response=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {
+ 'IntSights.Alerts(val.ID === obj.ID)': {'ID': alert_id, 'Status': [s.get('Status') for s in r]}},
+ 'Contents': r,
+ 'HumanReadable': tableToMarkdown('IntSights Blocklist Status for ' + alert_id, r, ['Status']),
+ 'ContentsFormat': formats['json']
+ })
+
+
+def get_mssp_sub_accounts():
+ account_id = demisto.getParam('credentials')['identifier']
+ accounts = req('GET', 'public/v1/mssp/customers', json_response=True)
+ if not accounts:
+ return_error("intsights-mssp-get-sub-accounts failed to return data.")
+
+ # Fix accounts _id keys
+ for account in accounts:
+ account["ID"] = account["_id"]
+ del account["_id"]
+
+ if len(accounts) < 1:
+ return_error('Current MSSP Account has no sub accounts.')
+
+ account_ids = [i["ID"] for i in accounts]
+ if mssp_account_id not in account_ids:
+ demisto.log("[DEBUG] - MSSP sub accounts:" + str(accounts))
+ return_error('Entered sub account id ({}) is not part of this mssp account'.format(mssp_account_id))
+
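+    # Query used assets per sub account by temporarily swapping the Account-Id header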
+ for i, account in enumerate(account_ids):
+ # Call account
+ HEADERS['Account-Id'] = account
+ account_ua = req('GET', 'public/v1/account/used-assets', json_response=True)
+
+ if not account_ua:
+ continue
+
+ accounts[i].update(account_ua)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'IntSights.MsspAccount(val.ID === obj.ID)': accounts},
+        'HumanReadable': tableToMarkdown('IntSights MSSP accounts used assets ' + account_id, accounts,
+ ["ID", 'CompanyName', "Status", "AssetsLimit", "AssetsCount"]),
+ 'Contents': accounts,
+ 'ContentsFormat': formats['json']
+ })
+
+ # Restore the header
+ HEADERS['Account-Id'] = mssp_account_id
+
+
+def test_module():
+ req('GET', 'public/v1/api/version')
+ if demisto.params().get('isFetch'):
+ min_severity_level = demisto.params().get('severity_level', 'All')
+ if min_severity_level not in SEVERITY_LEVEL:
+ return_error("Minimum Alert severity level to fetch incidents incidents from, allowed values are: "
+ "''All'', ''Low'', ''Medium'',''High''(Setting to All will fetch all incidents)")
+
+ demisto.results('ok')
+
+
+if demisto.command() == 'test-module':
+ test_module()
+elif demisto.command() == 'intsights-mssp-get-sub-accounts':
+ get_mssp_sub_accounts()
+elif demisto.command() == 'intsights-get-alerts':
+ get_alerts()
+elif demisto.command() == 'intsights-get-alert-image':
+ get_alert_image()
+elif demisto.command() == 'intsights-get-alert-activities':
+ get_alert_activity()
+elif demisto.command() == 'intsights-assign-alert':
+ assign_alert()
+elif demisto.command() == 'intsights-unassign-alert':
+ unassign_alert()
+elif demisto.command() == 'intsights-send-mail':
+ send_mail()
+elif demisto.command() == 'intsights-ask-the-analyst':
+ ask_analyst()
+elif demisto.command() == 'intsights-add-tag-to-alert':
+ add_tag()
+elif demisto.command() == 'intsights-remove-tag-from-alert':
+ remove_tag()
+elif demisto.command() == 'intsights-add-comment-to-alert':
+ add_comment()
+elif demisto.command() == 'intsights-update-alert-severity':
+ change_severity()
+elif demisto.command() == 'intsights-get-alert-by-id':
+ get_alert_by_id()
+elif demisto.command() == 'intsights-get-ioc-by-value':
+ search_for_IOC()
+elif demisto.command() == 'intsights-get-iocs':
+ get_iocs()
+elif demisto.command() == 'intsights-alert-takedown-request':
+ takedown_request()
+elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+elif demisto.command() == 'intsights-get-alert-takedown-status':
+ get_alert_takedown_status()
+elif demisto.command() == 'intsights-get-ioc-blocklist-status':
+ get_ioc_blocklist_status()
+elif demisto.command() == 'intsights-update-ioc-blocklist-status':
+ update_ioc_blocklist_status()
+elif demisto.command() == 'intsights-close-alert':
+ close_alert()
+else:
+ return_error('Unrecognized command: ' + demisto.command())
diff --git a/Integrations/IntSight/IntSight.yml b/Integrations/IntSight/IntSight.yml
new file mode 100644
index 000000000000..5408e8d5482d
--- /dev/null
+++ b/Integrations/IntSight/IntSight.yml
@@ -0,0 +1,993 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: IntSights
+ version: -1
+configuration:
+- defaultvalue: https://api.intsights.com/
+ display: Server URL (e.g. https://192.168.0.1)
+ name: server
+ required: true
+ type: 0
+- display: Credentials
+ name: credentials
+ required: true
+ type: 9
+- display: 'Alert type to fetch as incidents, allowed: "AttackIndication", "DataLeakage",
+ "Phishing", "BrandSecurity", "ExploitableData", "VIP"'
+ name: type
+ required: false
+ type: 0
+- defaultvalue: All
+  display: 'Minimum alert severity level to fetch incidents from. Allowed values
+    are: ''All'', ''Low'', ''Medium'', ''High'' (setting to All will fetch all
+    incidents)'
+ name: severity_level
+ required: false
+ type: 0
+- defaultvalue: 'false'
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: 'false'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- defaultvalue: 1 day
+  display: How far back to fetch incidents on the first run. e.g., "1 min ago", "2 weeks ago", "3 months ago"
+ name: fetch_delta
+ required: false
+ type: 0
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- display: '(MSSP accounts only) Sub account ID:'
+ name: mssp_sub_account_id
+ required: false
+ type: 0
+description: Use IntSights to manage and mitigate threats.
+display: IntSights
+name: IntSights
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: The ID of the image to return.
+ isArray: false
+ name: image-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns an image of an alert by ID.
+ execution: false
+ name: intsights-get-alert-image
+ - arguments:
+ - default: true
+ description: The ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns alert activities.
+ execution: false
+ name: intsights-get-alert-activities
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Activities.Type
+ description: The type of the activity.
+ type: string
+ - contextPath: IntSights.Alerts.Activities.Initiator
+ description: The initiator of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Activities.CreatedDate
+ description: The date the alert was created.
+ type: date
+ - contextPath: IntSights.Alerts.Activities.UpdateDate
+ description: The date the alert was updated.
+ type: date
+ - contextPath: IntSights.Alerts.Activities.RemediationBlocklistUpdate
+ description: The remediation blocked list update.
+ type: string
+ - contextPath: IntSights.Alerts.Activities.AskTheAnalyst.Replies
+ description: The replies to questions of the analyst.
+ type: string
+ - contextPath: IntSights.Alerts.Activities.Mail.Replies
+ description: The replies to an email.
+ type: string
+ - contextPath: IntSights.Alerts.Activities.ReadBy
+ description: The alert that was read by.
+ type: string
+ - arguments:
+ - default: true
+ description: The unique ID of the Alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ - default: false
+ description: The user email of the assignee.
+ isArray: false
+ name: assignee-email
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Whether the assigned user is an MSSP user.
+ isArray: false
+ name: is-mssp-optional
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Assigns an alert.
+ execution: false
+ name: intsights-assign-alert
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Assignees.AssigneeID
+ description: The ID of the assignee.
+ type: string
+ - arguments:
+ - default: true
+ description: The unique ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Unassigns an alert from a user.
+ execution: false
+ name: intsights-unassign-alert
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - arguments:
+ - default: true
+ description: The unique ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ - default: false
+ description: The destination email addresses array (comma-separated).
+ isArray: false
+ name: emails
+ required: true
+ secret: false
+ - default: false
+ description: The content added to the alert details.
+ isArray: false
+ name: content
+ required: true
+ secret: false
+ deprecated: false
+ description: Sends an email containing a question and details of the alert.
+ execution: false
+ name: intsights-send-mail
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the Alert.
+ type: string
+ - contextPath: IntSights.Alerts.Mail.EmailID
+ description: The ID of the email.
+ type: string
+ - contextPath: IntSights.Alerts.Question
+ description: Details of the question.
+ type: string
+ - arguments:
+ - default: true
+ description: The unique ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ - default: false
+    description: The question to ask the IntSights analyst about the requested alert.
+ isArray: false
+ name: question
+ required: true
+ secret: false
+ deprecated: false
+ description: Sends a question to the IntSights analyst about the requested alert.
+ execution: false
+ name: intsights-ask-the-analyst
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the Alert.
+ type: string
+ - contextPath: IntSights.Alerts.Question
+ description: Details of the question.
+ type: string
+ - arguments:
+ - default: true
+    description: The unique ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ - default: false
+ description: The new tag string.
+ isArray: false
+ name: tag-name
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds a tag to the alert.
+ execution: false
+ name: intsights-add-tag-to-alert
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Tags.TagName
+ description: The name of the tag.
+ type: string
+ - contextPath: IntSights.Alerts.Tags.ID
+ description: The ID of the Tag.
+ type: string
+ - arguments:
+ - default: true
+ description: The unique ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ - default: false
+ description: The unique ID of the tag to remove.
+ isArray: false
+ name: tag-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes a tag from the specified alert.
+ execution: false
+ name: intsights-remove-tag-from-alert
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Tags.ID
+ description: The ID of the tag.
+ type: string
+ - arguments:
+ - default: true
+ description: The unique ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ - default: false
+ description: The comment to add to the alert.
+ isArray: false
+ name: comment
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds a comment to a specified alert.
+ execution: false
+ name: intsights-add-comment-to-alert
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Comment
+ description: The comment in the alert.
+ type: string
+ - arguments:
+ - default: true
+ description: The unique ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The severity of the alert. Can be: "High", "Medium", or "Low".'
+ isArray: false
+ name: severity
+ predefined:
+ - High
+ - Medium
+ - Low
+ required: true
+ secret: false
+ deprecated: false
+ description: Changes the severity of a specified alert.
+ execution: false
+ name: intsights-update-alert-severity
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Severity
+ description: The severity of the alert.
+ type: string
+ - arguments:
+ - default: true
+ description: The unique ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the alert object by alert ID.
+ execution: false
+ name: intsights-get-alert-by-id
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Severity
+ description: The severity of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Type
+ description: The type of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.FoundDate
+ description: The date that the alert was found.
+ type: date
+ - contextPath: IntSights.Alerts.SourceType
+ description: The source type of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.SourceURL
+ description: The source URL of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.SourceEmail
+ description: The source email of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.SourceNetworkType
+ description: The network type of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.IsClosed
+ description: Whether or not the alert is closed.
+ type: boolean
+ - contextPath: IntSights.Alerts.IsFlagged
+ description: Whether or not the alert is flagged.
+ type: boolean
+ - contextPath: IntSights.Alerts.Tags.CreatedBy
+ description: Name of the service for which the tag was created.
+ type: string
+ - contextPath: IntSights.Alerts.Tag.Name
+ description: Name of the tag.
+ type: string
+ - contextPath: IntSights.Alerts.Tag.ID
+ description: The ID of the tag.
+ type: string
+ - contextPath: IntSights.Alerts.Images
+ description: The ID of the images.
+ type: string
+ - contextPath: IntSights.Alerts.Description
+ description: The description of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Title
+ description: The title of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.TakedownStatus
+ description: The TakedownStatus of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.SubType
+ description: The sub type of the alert.
+ type: string
+ - arguments:
+ - default: true
+ description: The IOC value for which to search.
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ deprecated: false
+ description: Searches for an exact IOC value.
+ execution: false
+ name: intsights-get-ioc-by-value
+ outputs:
+ - contextPath: IntSights.Iocs.ID
+ description: The ID of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.Value
+ description: The value of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.Type
+ description: The type of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.FirstSeen
+ description: The date the IOC was first seen.
+ type: date
+ - contextPath: IntSights.Iocs.LastSeen
+ description: The date the IOC was last seen.
+ type: date
+ - contextPath: IntSights.Iocs.SourceID
+ description: The ID source of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.SourceName
+ description: The source name of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.SourceConfidenceLevel
+ description: The source confidence level of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.Severity
+ description: The severity of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.AccountID
+ description: The account ID of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.Domain
+ description: The domain of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.Status
+ description: The status of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.Flags.IsInAlexa
+ description: Whether or not the IOC is in Alexa.
+ type: boolean
+ - contextPath: IntSights.Iocs.Enrichment.Status
+ description: The enrichment status of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.Enrichment.Data
+ description: The enrichment data of the IOC.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The type of the indicator.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: String
+ - contextPath: File.Name
+ description: The full file name (including file extension).
+ type: String
+ - contextPath: File.Malicious.Vendor
+ description: The vendor that reported the file as malicious.
+ type: String
+ - contextPath: File.Malicious.Description
+ description: A description explaining why the file was determined to be malicious.
+ type: String
+ - contextPath: File.MD5
+ description: The MD5 hash of the file.
+ type: String
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: String
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - contextPath: URL.Data
+ description: The URL.
+ type: String
+ - contextPath: URL.Malicious.Vendor
+ description: The vendor reporting the URL as malicious.
+ type: String
+ - contextPath: URL.Malicious.Description
+ description: A description of the malicious URL.
+ type: String
+ - contextPath: IP.Malicious.Vendor
+ description: The vendor reporting the IP address as malicious.
+ type: String
+ - contextPath: IP.Malicious.Description
+ description: A description explaining why the IP address was reported as malicious.
+ type: String
+ - contextPath: IP.Address
+ description: The IP address.
+ type: String
+ - contextPath: Domain.Name
+ description: 'The domain name. For example, "google.com".'
+ type: String
+ - contextPath: Domain.Malicious.Vendor
+ description: The vendor reporting the domain as malicious.
+ type: String
+ - contextPath: Domain.Malicious.Description
+ description: A description explaining why the domain was reported as malicious.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+    description: 'The type of the IOC. Can be: "Urls", "Hashes", "IpAddresses", or "Domains".'
+ isArray: false
+ name: type
+ predefined:
+ - Urls
+ - Hashes
+ - IpAddresses
+ - Domains
+ required: false
+ secret: false
+ - default: false
+ description: 'The maximum number of results from 1-1000. Default is 1000.'
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The severity level of the IOC. Can be: "High", "Medium", or "Low"'
+ isArray: false
+ name: severity
+ predefined:
+ - High
+ - Medium
+ - Low
+ required: false
+ secret: false
+ - default: false
+ description: The source of the IOC.
+ isArray: false
+ name: source-ID
+ required: false
+ secret: false
+ - default: false
+ description: 'Beginning of the date range when the IOC was first seen (MM/DD/YYYY). Default is 0.'
+ isArray: false
+ name: first-seen-from
+ required: false
+ secret: false
+ - default: false
+ description: 'End of the date range when the IOC was first seen (MM/DD/YYYY). Default is 0.'
+ isArray: false
+ name: first-seen-to
+ required: false
+ secret: false
+ - default: false
+ description: 'Beginning of the date range when the IOC was last seen (MM/DD/YYYY). Default is 0.'
+ isArray: false
+ name: last-seen-from
+ required: false
+ secret: false
+ - default: false
+ description: 'End of the date range when the IOC was last seen (MM/DD/YYYY). Default is 0.'
+ isArray: false
+ name: last-seen-to
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns count totals of the available IOCs.
+ execution: false
+ name: intsights-get-iocs
+ outputs:
+ - contextPath: IntSights.Iocs.ID
+ description: The ID of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.Value
+ description: The value of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.Type
+ description: The type of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.FirstSeen
+ description: The date the IOC was first seen.
+ type: date
+ - contextPath: IntSights.Iocs.LastSeen
+ description: The date the IOC was last seen.
+ type: date
+ - contextPath: IntSights.Iocs.SourceID
+ description: The source ID of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.SourceName
+ description: The source name of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.SourceConfidenceLevel
+ description: The confidence level of the IOC source.
+ type: string
+ - contextPath: IntSights.Iocs.Severity
+ description: The severity of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.AccountID
+ description: The account ID of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.Domain
+ description: The domain of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.Status
+ description: The status of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.Flags.IsInAlexa
+ description: Whether or not the IOC is in Alexa.
+ type: boolean
+ - contextPath: IntSights.Iocs.Enrichment.Status
+ description: The enrichment status of the IOC.
+ type: string
+ - contextPath: IntSights.Iocs.Enrichment.Data
+ description: The enrichment data of the IOC.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The type of the indicator.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: File.Name
+ description: The full file name (including file extension).
+ type: String
+ - contextPath: File.Malicious.Vendor
+ description: The vendor that reported the file as malicious.
+ type: String
+ - contextPath: File.Malicious.Description
+ description: A description explaining why the file was determined to be malicious.
+ type: String
+ - contextPath: File.MD5
+ description: The MD5 hash of the file.
+ type: String
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: String
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - contextPath: URL.Data
+ description: The URL.
+ type: String
+ - contextPath: URL.Malicious.Vendor
+ description: The vendor reporting the URL as malicious.
+ type: String
+ - contextPath: URL.Malicious.Description
+ description: A description of the malicious URL.
+ type: String
+ - contextPath: IP.Malicious.Vendor
+ description: The vendor reporting the IP address as malicious.
+ type: String
+ - contextPath: IP.Malicious.Description
+ description: A description explaining why the IP address was reported as malicious.
+ type: String
+ - contextPath: IP.Address
+ description: IP address.
+ type: String
+ - contextPath: Domain.Name
+ description: 'The domain name. For example, "google.com".'
+ type: String
+ - contextPath: Domain.Malicious.Vendor
+ description: The vendor reporting the domain as malicious.
+ type: String
+ - contextPath: Domain.Malicious.Description
+ description: A description explaining why the domain was reported as malicious.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+      description: 'The type of the alert. Can be: "AttackIndication", "DataLeakage", "Phishing", "BrandSecurity", "ExploitableData", or "VIP".'
+ isArray: false
+ name: alert-type
+ predefined:
+ - AttackIndication
+ - DataLeakage
+ - Phishing
+ - BrandSecurity
+ - ExploitableData
+ - VIP
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The severity of the alert. Can be: "High", "Medium", or "Low".'
+ isArray: false
+ name: severity
+ predefined:
+ - High
+ - Medium
+ - Low
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The source type of the alert. Can be: "ApplicationStores", "BlackMarkets", "HackingForums", "SocialMedia", "PasteSites", or "Others".'
+ isArray: false
+ name: source-type
+ predefined:
+ - ApplicationStores
+ - BlackMarkets
+ - HackingForums
+ - SocialMedia
+ - PasteSites
+ - Others
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: 'The network type of the alert. Can be: "ClearWeb" or "DarkWeb".'
+ isArray: false
+ name: network-type
+ predefined:
+ - ClearWeb
+ - DarkWeb
+ required: false
+ secret: false
+ - default: false
+      description: The start date for which to fetch, as a UNIX timestamp in milliseconds.
+ isArray: false
+ name: source-date-from
+ required: false
+ secret: false
+ - default: false
+      description: The end date for which to fetch, as a UNIX timestamp in milliseconds.
+ isArray: false
+ name: source-date-to
+ required: false
+ secret: false
+ - default: false
+      description: The start date for which to fetch, as a UNIX timestamp in milliseconds.
+ isArray: false
+ name: found-date-from
+ required: false
+ secret: false
+ - default: false
+      description: The end date for which to fetch, as a UNIX timestamp in milliseconds.
+ isArray: false
+ name: found-date-to
+ required: false
+ secret: false
+ - default: false
+ description: Whether to show assigned or unassigned alerts.
+ isArray: false
+ name: assigned
+ required: false
+ secret: false
+ - default: false
+ description: Whether to show flagged or unflagged alerts.
+ isArray: false
+ name: is-flagged
+ required: false
+ secret: false
+ - default: false
+      description: Whether to show closed or open alerts.
+ isArray: false
+ name: is-closed
+ required: false
+ secret: false
+ - default: false
+ description: Shows alerts within a specified time delta, given in days.
+ isArray: false
+ name: time-delta
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns alerts.
+ execution: false
+ name: intsights-get-alerts
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Severity
+ description: The severity of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Type
+ description: The type of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.FoundDate
+ description: The date that the alert was found.
+ type: date
+ - contextPath: IntSights.Alerts.SourceType
+ description: The source type of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.SourceURL
+ description: The source URL of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.SourceEmail
+ description: The source email of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.SourceNetworkType
+ description: The network type of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.IsClosed
+ description: Whether or not the alert is closed.
+ type: boolean
+ - contextPath: IntSights.Alerts.IsFlagged
+ description: Whether or not the alert is flagged.
+ type: boolean
+ - contextPath: IntSights.Alerts.Tags.CreatedBy
+      description: The name of the service that created the tag.
+ type: string
+ - contextPath: IntSights.Alerts.Tag.Name
+ description: Name of the tag.
+ type: string
+ - contextPath: IntSights.Alerts.Tag.ID
+ description: The ID of the tag.
+ type: string
+ - contextPath: IntSights.Alerts.Images
+ description: The ID of each image.
+ type: string
+ - contextPath: IntSights.Alerts.Description
+ description: The description of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Title
+ description: The title of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.TakedownStatus
+      description: The takedown status of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.SubType
+      description: The subtype of the alert.
+ type: string
+ - arguments:
+ - default: true
+ description: The ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Requests an alert takedown.
+ execution: false
+ name: intsights-alert-takedown-request
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - arguments:
+ - default: true
+ description: The ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the alert takedown status.
+ execution: false
+ name: intsights-get-alert-takedown-status
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.TakedownStatus
+ description: The status of the takedown.
+ type: string
+ - arguments:
+ - default: true
+ description: The ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ - default: false
+      description: 'A comma-separated list of IOC types. Options: Domains, IPs, URLs.'
+ isArray: false
+ name: type
+ required: true
+ secret: false
+ - default: false
+      description: A comma-separated list of IOC values.
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ - default: false
+      description: 'A comma-separated list of IOC blocklist statuses. Options: Sent, NotSent.'
+ isArray: false
+ name: blocklist-status
+ required: true
+ secret: false
+ deprecated: false
+ description: Updates the IOC blocklist status.
+ execution: false
+ name: intsights-update-ioc-blocklist-status
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Status
+ description: 'The status of the blocklist.'
+ type: string
+ - arguments:
+ - default: true
+ description: The ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the status of the IOC blocklist.
+ execution: false
+ name: intsights-get-ioc-blocklist-status
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Status
+ description: The status of the blocklist.
+ type: string
+ - arguments:
+ - default: false
+ description: The ID of the alert.
+ isArray: false
+ name: alert-id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The reason to close the alert. Can be: "ProblemSolved", "InformationalOnly",
+ "ProblemWeAreAlreadyAwareOf", "CompanyOwnedDomain", "LegitimateApplication/Profile", "NotRelatedToMyCompany", "FalsePositive", or "Other".'
+ isArray: false
+ name: reason
+ predefined:
+ - ProblemSolved
+ - InformationalOnly
+ - ProblemWeAreAlreadyAwareOf
+ - CompanyOwnedDomain
+ - LegitimateApplication/Profile
+ - NotRelatedToMyCompany
+ - FalsePositive
+ - Other
+ required: true
+ secret: false
+ - default: false
+ description: The comments in the alert.
+ isArray: false
+ name: free-text
+ required: false
+ secret: false
+ - default: false
+ defaultValue: 'False'
+      description: The hidden status of the alert. Deletes an alert from the account
+        instance (only when the reason is FalsePositive).
+ isArray: false
+ name: is-hidden
+ required: false
+ secret: false
+ - default: false
+ description: The rate of the alert.
+ isArray: false
+ name: rate
+ required: false
+ secret: false
+ deprecated: false
+    description: Closes an alert.
+ execution: false
+ name: intsights-close-alert
+ outputs:
+ - contextPath: IntSights.Alerts.ID
+ description: The ID of the alert.
+ type: string
+ - contextPath: IntSights.Alerts.Closed.Reason
+ description: The closed reason of the alert.
+ type: string
+ - deprecated: false
+    description: Returns all sub accounts of the Managed Security Service Provider (MSSP).
+ execution: false
+ name: intsights-mssp-get-sub-accounts
+ outputs:
+    - contextPath: IntSights.MsspAccount.ID
+      description: The ID of the IntSights MSSP sub account.
+      type: String
+    - contextPath: IntSights.MsspAccount.Status
+      description: The enabled status of the IntSights MSSP sub account.
+      type: String
+    - contextPath: IntSights.MsspAccount.AssetsCount
+      description: The asset count of the IntSights MSSP sub account.
+      type: Number
+    - contextPath: IntSights.MsspAccount.AssetLimit
+      description: The asset limit of the IntSights MSSP sub account.
+      type: Number
+    - contextPath: IntSights.MsspAccount.CompanyName
+      description: The company name of the IntSights MSSP sub account.
+      type: String
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- IntSights Test
diff --git a/Integrations/IntSight/IntSight_description.md b/Integrations/IntSight/IntSight_description.md
new file mode 100644
index 000000000000..efe4164cff87
--- /dev/null
+++ b/Integrations/IntSight/IntSight_description.md
@@ -0,0 +1,3 @@
+For the integration credentials, use your user ID as the username and your API key as the password.
+
+When changing `Last date to fetch`, it is recommended to reset the "last run" timestamp.
\ No newline at end of file
diff --git a/Integrations/IntSight/IntSight_image.png b/Integrations/IntSight/IntSight_image.png
new file mode 100644
index 000000000000..9896abc1a337
Binary files /dev/null and b/Integrations/IntSight/IntSight_image.png differ
diff --git a/Integrations/IntezerV2/IntezerV2.py b/Integrations/IntezerV2/IntezerV2.py
new file mode 100644
index 000000000000..3b0ed916f383
--- /dev/null
+++ b/Integrations/IntezerV2/IntezerV2.py
@@ -0,0 +1,256 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+
+''' IMPORTS '''
+
+import requests
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+API_KEY = demisto.getParam('APIKey')
+SERVER_URL = 'https://analyze.intezer.com/api'
+API_VERSION = '/v2-0'
+BASE_URL = SERVER_URL + API_VERSION
+IS_AVAILABLE_URL = 'is-available'
+ERROR_PREFIX = 'Error from Intezer:'
+ACCEPTABLE_HTTP_CODES = {200, 201, 202}
+USE_SSL = not demisto.params().get('insecure', False)
+
+http_status_to_error_message = {
+ 400: '400 Bad Request - Wrong or invalid parameters',
+ 401: '401 Unauthorized - Wrong or invalid api key',
+    403: '403 Forbidden - The account is not allowed to perform this task',
+ 404: '404 Not Found - Analysis was not found',
+ 410: '410 Gone - Analysis no longer exists in the service',
+ 500: '500 Internal Server Error - Internal error',
+ 503: '503 Service Unavailable'
+}
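+# dbot_score_by_verdict maps Intezer verdicts onto Demisto's DBot score scale,
+# where 0 = unknown, 1 = good, 2 = suspicious, and 3 = bad.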
+dbot_score_by_verdict = {
+ 'malicious': 3,
+ 'suspicious': 2,
+ 'trusted': 1,
+ 'neutral': 1,
+ 'no_threats': 1
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def handle_response(response, acceptable_http_status_codes):
+ if response.status_code not in acceptable_http_status_codes:
+        error_msg = http_status_to_error_message.get(response.status_code, "Failed to perform request")
+ return_error(f'{ERROR_PREFIX} {error_msg}')
+
+ try:
+ return response.json()
+ except json.decoder.JSONDecodeError:
+        # This error is unlikely, as the status code should have indicated an error beforehand
+ return_error(f'Response returned with no data. This might be an issue with Intezer.\nPlease try again later\n'
+ f'Response content:\n{response.content}')
+
+
+def get_session():
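+    # Exchange the static API key for an access token and attach it as a
+    # Bearer header on a reusable requests session for all later API calls.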
+ response = requests.post(BASE_URL + '/get-access-token', json={'api_key': API_KEY}, verify=USE_SSL)
+ response = handle_response(response, {200})
+ session = requests.session()
+ session.headers['Authorization'] = f'Bearer {response["result"]}'
+
+ return session
+
+
+''' COMMANDS '''
+
+
+def check_is_available():
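+    # Health check used by test-module; Demisto treats the literal 'ok' as success.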
+ url = f'{SERVER_URL}/{IS_AVAILABLE_URL}'
+ result = SESSION.get(url, verify=USE_SSL)
+
+ return 'ok' if result.json()['is_available'] else None
+
+
+def analyze_by_hash_command():
+ file_hash = demisto.getArg('file_hash')
+ response = make_analyze_by_hash_request(file_hash)
+ handle_analyze_by_hash_response(response, file_hash)
+
+
+def make_analyze_by_hash_request(file_hash):
+ data = {'hash': file_hash}
+ return SESSION.post(BASE_URL + '/analyze-by-hash', json=data, verify=USE_SSL)
+
+
+def handle_analyze_by_hash_response(response, file_hash):
+ if response.status_code == 404:
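+        # Unknown hash: report a neutral DBot score of 0 instead of failing the command.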
+ dbot = {
+ 'Vendor': 'Intezer',
+ 'Type': 'hash',
+ 'Indicator': file_hash,
+ 'Score': 0
+ }
+ hr = f'Hash {file_hash} does not exist on Intezer genome database'
+ ec = {'DBotScore': dbot}
+ return_outputs(hr, ec)
+ return
+
+ elif response.status_code == 400:
+ return_error('File hash is not valid.\nIntezer file hash reputation supports only SHA-256, '
+ 'SHA-1 and MD5 hash formats.\n')
+
+ handle_analyze_response(response)
+
+
+def analyze_by_uploaded_file_command():
+ response = make_analyze_by_file_request(demisto.getArg('file_entry_id'))
+ handle_analyze_response(response)
+
+
+def make_analyze_by_file_request(file_id):
+ file_data = demisto.getFilePath(file_id)
+ with open(file_data['path'], 'rb') as file_to_upload:
+ files = {'file': (file_data['name'], file_to_upload)}
+ return SESSION.post(BASE_URL + '/analyze', files=files, verify=USE_SSL)
+
+
+def handle_analyze_response(response):
+ response = handle_response(response, ACCEPTABLE_HTTP_CODES)
+
+ result_url = response['result_url']
+ analysis_id = result_url.rsplit('/', 1)[-1]
+
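+    # The DT key (obj.ID === val.ID) lets repeated runs update the same context
+    # entry for this analysis instead of appending duplicates.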
+ context_json = {'Intezer.Analysis(obj.ID === val.ID)': {'ID': analysis_id, 'Status': 'Created', 'type': 'File'}}
+
+ return_outputs('Analysis created successfully', context_json, response)
+
+
+def check_analysis_status_and_get_results_command():
+ analysis_type = demisto.args().get('analysis_type', 'File')
+ analysis_ids = argToList(demisto.args().get('analysis_id'))
+ indicator_name = demisto.args().get('indicator_name')
+
+ for analysis_id in analysis_ids:
+ response = make_analysis_status_request(analysis_id, analysis_type)
+ analysis_result = handle_analysis_result(response)
+
+ if analysis_result and analysis_type == 'Endpoint':
+ enrich_dbot_and_display_endpoint_analysis_results(analysis_result, indicator_name)
+ elif analysis_result and analysis_type == 'File':
+ enrich_dbot_and_display_file_analysis_results(analysis_result)
+
+
+def make_analysis_status_request(analysis_id, analysis_type):
+ analysis_endpoint = 'endpoint-analyses/' if analysis_type == 'Endpoint' else 'analyses/'
+ result_url = f'{BASE_URL}/{analysis_endpoint}{analysis_id}'
+ return SESSION.get(result_url, verify=USE_SSL)
+
+
+def handle_analysis_result(response):
+ json_response = handle_response(response, ACCEPTABLE_HTTP_CODES)
+
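+    # Any accepted non-200 status (e.g. 202) means the analysis is still queued
+    # or running, so surface it as InProgress without a final result.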
+ if response.status_code != 200:
+ result_url = json_response['result_url']
+ analysis_id = result_url.rsplit('/', 1)[-1]
+
+ context_json = {'Intezer.Analysis(val.ID === obj.ID)': {'ID': analysis_id,
+ 'Status': 'InProgress'}}
+
+ return_outputs('Analysis is still in progress', context_json)
+ return
+
+ return json_response['result']
+
+
+def enrich_dbot_and_display_file_analysis_results(result):
+ verdict = result.get('verdict')
+ sha256 = result.get('sha256')
+ analysis_id = result.get('analysis_id')
+
+ dbot = {
+ 'Vendor': 'Intezer',
+ 'Type': 'hash',
+ 'Indicator': sha256,
+ 'Score': dbot_score_by_verdict.get(verdict, 0)
+ }
+
+ file = {'SHA256': sha256, 'Metadata': result, 'ExistsInIntezer': True}
+
+ if verdict == 'malicious':
+ file['Malicious'] = {'Vendor': 'Intezer'}
+
+ presentable_result = '## Intezer File analysis result\n'
+ presentable_result += f' SHA256: {sha256}\n'
+ presentable_result += f' Verdict: **{verdict}** ({result["sub_verdict"]})\n'
+ if 'family_name' in result:
+ presentable_result += f'Family: **{result["family_name"]}**\n'
+ presentable_result += f'[Analysis Link]({result["analysis_url"]})\n'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {
+ outputPaths['dbotscore']: dbot,
+ outputPaths['file']: file,
+ 'Intezer.Analysis(val.ID === obj.ID)': {'ID': analysis_id, 'Status': 'Done'}},
+ 'HumanReadable': presentable_result,
+ 'ContentsFormat': formats['json'],
+ 'Contents': result
+ })
+
+
+def enrich_dbot_and_display_endpoint_analysis_results(result, indicator_name=None):
+ verdict = result['verdict']
+ computer_name = result['computer_name']
+ analysis_id = result['analysis_id']
+
+ dbot = {
+ 'Vendor': 'Intezer',
+ 'Type': 'hostname',
+ 'Indicator': indicator_name if indicator_name else computer_name,
+ 'Score': dbot_score_by_verdict.get(verdict, 0)
+ }
+
+ endpoint = {'Metadata': result}
+
+ presentable_result = '## Intezer Endpoint analysis result\n'
+ presentable_result += f'Host Name: {computer_name}\n'
+ presentable_result += f' Verdict: **{verdict}**\n'
+ if result.get('families') is not None:
+ presentable_result += f'Families: **{result["families"]}**\n'
+ presentable_result += f' Scan Time: {result["scan_start_time"]}\n'
+ presentable_result += f'[Analysis Link]({result["analysis_url"]})\n'
+ ec = {
+ 'DBotScore': dbot,
+ 'Endpoint': endpoint,
+ 'Intezer.Analysis(val.ID === obj.ID)': {'ID': analysis_id, 'Status': 'Done'}
+ }
+ return_outputs(presentable_result, ec, result)
+
+
+''' EXECUTION CODE '''
+
+
+try:
+ SESSION = get_session()
+except Exception as e:
+ return_error(str(e))
+
+
+def main():
+ try:
+ handle_proxy()
+ if demisto.command() == 'test-module':
+ demisto.results(check_is_available())
+ elif demisto.command() == 'intezer-analyze-by-hash':
+ analyze_by_hash_command()
+ elif demisto.command() == 'intezer-analyze-by-file':
+ analyze_by_uploaded_file_command()
+ elif demisto.command() == 'intezer-get-analysis-result':
+ check_analysis_status_and_get_results_command()
+ except Exception as e:
+ return_error(str(e))
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/IntezerV2/IntezerV2.yml b/Integrations/IntezerV2/IntezerV2.yml
new file mode 100644
index 000000000000..705f9cd035da
--- /dev/null
+++ b/Integrations/IntezerV2/IntezerV2.yml
@@ -0,0 +1,129 @@
+category: Forensics & Malware Analysis
+commonfields:
+ id: Intezer v2
+ version: -1
+configuration:
+- display: API Key
+ name: APIKey
+ required: true
+ type: 4
+- display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+description: Malware detection and analysis based on code reuse
+display: Intezer v2
+name: Intezer v2
+script:
+ commands:
+ - arguments:
+ - default: false
+      description: The hash of the file to query. Supports SHA256, MD5, and SHA1.
+ isArray: false
+ name: file_hash
+ required: true
+ secret: false
+ deprecated: false
+    description: Checks the file reputation of the given hash. Supports SHA256, SHA1, and MD5.
+ execution: false
+ name: intezer-analyze-by-hash
+ outputs:
+    - contextPath: Intezer.Analysis.ID
+      description: The Intezer analysis ID.
+      type: string
+    - contextPath: Intezer.Analysis.Status
+      description: The status of the analysis.
+      type: string
+    - contextPath: Intezer.Analysis.Type
+      description: The type of the analysis.
+      type: string
+ - arguments:
+ - default: true
+      description: The entry ID of the file to upload.
+ isArray: false
+ name: file_entry_id
+ required: true
+ secret: false
+ deprecated: false
+    description: Checks the file reputation of an uploaded file (up to 32MB).
+ execution: false
+ name: intezer-analyze-by-file
+ outputs:
+    - contextPath: Intezer.Analysis.ID
+      description: The Intezer analysis ID.
+      type: string
+    - contextPath: Intezer.Analysis.Status
+      description: The status of the analysis.
+      type: string
+    - contextPath: Intezer.Analysis.Type
+      description: The type of the analysis.
+      type: string
+ - arguments:
+ - default: false
+      description: The analysis ID for which to get results.
+ isArray: true
+ name: analysis_id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: File
+      description: The type of the analysis.
+ isArray: false
+ name: analysis_type
+ predefined:
+ - File
+ - Endpoint
+ required: false
+ secret: false
+ - default: false
+      description: The indicator to classify.
+ isArray: false
+ name: indicator_name
+ required: false
+ secret: false
+ deprecated: false
+    description: Checks the analysis status and gets the analysis result. Supports
+      file and endpoint analyses.
+ execution: false
+ name: intezer-get-analysis-result
+ outputs:
+    - contextPath: File.SHA256
+      description: The SHA256 hash of the file.
+      type: string
+    - contextPath: File.Malicious.Vendor
+      description: For malicious files, the vendor that made the decision.
+      type: string
+    - contextPath: DBotScore.Indicator
+      description: The indicator that was tested.
+      type: string
+    - contextPath: DBotScore.Type
+      description: The type of the indicator.
+      type: string
+    - contextPath: DBotScore.Vendor
+      description: The vendor used to calculate the score.
+      type: string
+    - contextPath: DBotScore.Score
+      description: The actual score.
+      type: number
+    - contextPath: File.Metadata
+      description: Metadata returned from the Intezer analysis (analysis id, analysis
+        url, family, family type, sha256, verdict, sub_verdict). Metadata will be
+        returned only for supported files.
+      type: Unknown
+    - contextPath: Endpoint.Metadata
+      description: Metadata returned from the Intezer analysis (endpoint analysis id,
+        endpoint analysis url, families, verdict, host_name).
+      type: Unknown
+ dockerimage: demisto/python3:3.7.3.286
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+ - Intezer Testing v2
diff --git a/Integrations/IntezerV2/IntezerV2_description.md b/Integrations/IntezerV2/IntezerV2_description.md
new file mode 100644
index 000000000000..30ed8fb104a8
--- /dev/null
+++ b/Integrations/IntezerV2/IntezerV2_description.md
@@ -0,0 +1,6 @@
+Intezer is a cloud-based malware detection and analysis service that provides a fast, in-depth understanding of any file by mapping its code DNA.
+See [https://analyze.intezer.com](https://analyze.intezer.com/#/account-details) for creating an API key and more details.
+
+To use the 'Intezer-scan host' playbook, add the latest version of the Intezer scanner tool (available at https://analyze.intezer.com).
+After downloading the scanner, add it to your Demisto agent tool library (Settings->Integrations->agent tools).
+Upload a zip file named 'Scanner' with the 'Scanner.exe' file inside it. (File names are case sensitive.)
diff --git a/Integrations/IntezerV2/IntezerV2_image.png b/Integrations/IntezerV2/IntezerV2_image.png
new file mode 100644
index 000000000000..7389b730e069
Binary files /dev/null and b/Integrations/IntezerV2/IntezerV2_image.png differ
diff --git a/Integrations/Ipstack/Ipstack.py b/Integrations/Ipstack/Ipstack.py
new file mode 100644
index 000000000000..f667a29eafad
--- /dev/null
+++ b/Integrations/Ipstack/Ipstack.py
@@ -0,0 +1,104 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import os
+import requests
+
+BASE_URL = 'http://api.ipstack.com'
+API_KEY = demisto.params().get('apikey')
+
+if not demisto.params()['proxy']:
+    # Drop any proxy environment variables; pop() avoids a KeyError when a
+    # variable is not set, unlike the bare del
+    for proxy_var in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
+        os.environ.pop(proxy_var, None)
+
+''' HELPER FUNCTIONS '''
+# Returns the result of an API call
+
+
+def http_request(method, path):
+ """
+ HTTP request helper function
+ """
+ url = BASE_URL + path
+ res = requests.request(
+ method=method,
+ url=url
+ )
+
+ if not res.ok:
+ txt = 'error in URL {} status code: {} reason: {}'.format(url, res.status_code, res.text)
+ demisto.error(txt)
+ raise Exception(txt)
+
+ try:
+ res_json = res.json()
+ if res_json.get('code'):
+ txt = 'error in URL {} status code: {} reason: {}'.format(url, res.status_code, res.text)
+ demisto.error(txt)
+ raise Exception(txt)
+ else:
+ return res_json
+
+ except Exception as ex:
+ demisto.debug(str(ex))
+ demisto.results({"Type": entryTypes["error"], "ContentsFormat": formats["text"], "Contents": res.text})
+
+
+''' Commands '''
+
+
+def do_ip(ip):
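+    # e.g. do_ip('1.2.3.4') issues GET http://api.ipstack.com/1.2.3.4?access_key=<API_KEY>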
+ path = "/{}?access_key={}".format(ip, API_KEY)
+ return http_request('GET', path)
+
+
+def do_ip_command():
+ ip = demisto.args().get('ip')
+ raw_response = do_ip(ip)
+ human_readable_data = {
+ "Address": raw_response.get('ip'),
+ "Country": raw_response.get('country_name'),
+ "Latitude": raw_response.get('latitude'),
+ "Longitude": raw_response.get('longitude')
+ }
+
+ outputs = {
+ 'IP(val.Address == obj.Address)': {
+ 'Address': raw_response.get('ip'),
+ 'Geo': {
+ 'Location': "{},{}".format(raw_response.get('latitude'), raw_response.get('longitude')),
+ 'Country': raw_response.get('country_name')
+ }
+ },
+ 'Ipstack.ip(val.ID==obj.ID)': {
+ 'address': raw_response.get('ip'),
+ 'type': raw_response.get('type'),
+ 'continent_name': raw_response.get('continent_name'),
+ 'latitude': raw_response.get('latitude'),
+ 'longitude': raw_response.get('longitude'),
+ }
+ }
+
+ headers = ['Address', 'Country', 'Latitude', 'Longitude']
+ human_readable = tableToMarkdown('Ipstack info on {}'.format(raw_response.get('ip')), human_readable_data, headers=headers)
+ return_outputs(human_readable, outputs, raw_response)
+
+
+def test_module():
+ path = "/1.2.3.4?access_key={}".format(API_KEY)
+ res = requests.request('GET', BASE_URL + path)
+ if res.json().get('ip') == '1.2.3.4':
+ demisto.results('ok')
+ else:
+        demisto.results('An error occurred. Reason: {}'.format(res.text))
+
+
+try:
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() == 'ip':
+ do_ip_command()
+except Exception as e:
+    return_error('Unable to perform command: {}. Reason: {}'.format(demisto.command(), e))
diff --git a/Integrations/Ipstack/Ipstack.yml b/Integrations/Ipstack/Ipstack.yml
new file mode 100644
index 000000000000..7677db3f0249
--- /dev/null
+++ b/Integrations/Ipstack/Ipstack.yml
@@ -0,0 +1,57 @@
+commonfields:
+ id: Ipstack
+ version: -1
+name: Ipstack
+display: ipstack
+category: Data Enrichment & Threat Intelligence
+description: One of the leading IP-to-geolocation APIs and global IP database services.
+configuration:
+- display: API Key
+ name: apikey
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: "true"
+ type: 8
+ required: false
+script:
+ script: '-'
+ type: python
+ commands:
+ - name: ip
+ arguments:
+ - name: ip
+ required: true
+ default: true
+ description: IP address to query.
+ outputs:
+ - contextPath: IP.Address
+ description: IP address.
+ type: string
+ - contextPath: IP.Geo.Location
+ description: Latitude and longitude of the IP address.
+ type: string
+ - contextPath: IP.Geo.Country
+ description: Country of origin of the IP address.
+ type: string
+ - contextPath: Ipstack.IP.address
+ description: IP address.
+ type: string
+ - contextPath: Ipstack.IP.type
+ description: IP type (ipv4 or ipv6).
+ type: string
+ - contextPath: Ipstack.IP.continent_name
+ description: Continent of the IP address.
+ type: string
+ - contextPath: Ipstack.IP.latitude
+ description: Latitude of the IP address.
+ type: string
+ - contextPath: Ipstack.IP.longitude
+ description: Longitude of the IP address.
+ type: string
+ description: Queries an IP address in ipstack.
+ runonce: false
+tests:
+ - Ipstack_Test
diff --git a/Integrations/Ipstack/Ipstack_description.md b/Integrations/Ipstack/Ipstack_description.md
new file mode 100644
index 000000000000..9e198c661462
--- /dev/null
+++ b/Integrations/Ipstack/Ipstack_description.md
@@ -0,0 +1,7 @@
+To configure an instance of the ipstack integration, you need your ipstack API Key.
+
+## How to Get Your API Key
+1. Go to ipstack.com.
+2. Log in using your ipstack credentials.
+3. Click the **Dashboard** tab.
+4. Locate and copy your API Key.
diff --git a/Integrations/Ipstack/Ipstack_image.png b/Integrations/Ipstack/Ipstack_image.png
new file mode 100644
index 000000000000..efd027f8f67d
Binary files /dev/null and b/Integrations/Ipstack/Ipstack_image.png differ
diff --git a/Integrations/Jask/Jask.py b/Integrations/Jask/Jask.py
new file mode 100644
index 000000000000..8ce0a0b213d6
--- /dev/null
+++ b/Integrations/Jask/Jask.py
@@ -0,0 +1,547 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import json
+from datetime import datetime
+import requests
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+URL = demisto.getParam('URL')
+if URL[-1] != '/':
+ URL += '/'
+QUERY = {'username': demisto.getParam('Username'), 'api_key': demisto.getParam('APIKey')}
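+# JASK authenticates via username/api_key query parameters, so this dict is
+# merged into the querystring of every request made by req() below.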
+FETCH_LIMIT = int(demisto.params().get('fetchLimit', 100))
+USE_SSL = not demisto.params().get('insecure', False)
+
+
+def req(method, path, query):
+ """
+ Send the request to JASK and return the JSON response
+ """
+ r = requests.request(method, URL + path, params=query, verify=USE_SSL)
+ if r.status_code != requests.codes.ok:
+ return_error('Error in API call to Jask service - %s' % r.text)
+ if not r.text:
+ return {}
+ return r.json()
+
+
+def to_readable(o, fields, translated):
+ """
+    Convert object properties to readable title-cased keys
+ """
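+    # e.g. to_readable({'risk_score': 5}, ['risk_score'], {}) -> {'RiskScore': 5}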
+ res = {}
+ if o:
+ for f in fields:
+ if o.get(f):
+ if translated.get(f):
+ res[translated.get(f)] = o.get(f)
+ else:
+ res[f.title().replace('_', '')] = o.get(f)
+ return res
+
+
+def entity_to_readable(entity_json):
+ """
+ Convert entity response JSON to nicely formatted object
+ """
+ entity = to_readable(entity_json,
+ ['id', 'name', 'source', 'hostname', 'risk_score', 'is_whitelisted', 'groups', 'asset_type',
+ 'firstSeen', 'lastSeen'],
+ {'asset_type': 'EntityType', 'firstSeen': 'FirstSeen', 'lastSeen': 'LastSeen'})
+ entity['PrimaryEntityType'] = demisto.get(entity_json, 'current_entity.primary_asset_type')
+ return entity
+
+
+def signal_to_readable(signal_json):
+ """
+ Convert signal response JSON to nicely formatted object
+ """
+ signal = to_readable(
+ signal_json, ['id', 'name', 'category', 'description', 'score', 'source_type', 'timestamp'], {})
+ threat_indicators = demisto.get(signal_json, 'threat_indicators')
+ if threat_indicators and isinstance(threat_indicators, dict):
+ signal['ThreatIndicators'] = [{
+ 'IndicatorType': ti.get('indicator_type'),
+ 'Value': ti.get('value')
+ } for ti in threat_indicators]
+ return signal
+
+
+def insight_to_readable(insight_json):
+ """
+ Convert insight response JSON to nicely formatted object
+ """
+ insight = to_readable(insight_json,
+ ['id', 'name', 'action', 'assigned_to', 'description', 'last_updated_by', 'last_updated',
+ 'severity', 'workflow_status'], {})
+ if insight_json.get('record_start_time'):
+ insight['InsightTime'] = datetime.utcfromtimestamp(insight_json.get('record_start_time')).isoformat()
+ if demisto.get(insight_json, 'ip') and demisto.get(insight_json['ip'], 'address'):
+ insight['IpAddress'] = demisto.get(insight_json['ip'], 'address')
+ return insight
+
+
+def convert_string_date_to_unix(dstr):
+ """
+ Convert a given string with MM/DD/YYYY format to millis since epoch
+ """
+ d = datetime.strptime(dstr, '%m/%d/%Y')
+ return int((d - datetime.utcfromtimestamp(0)).total_seconds() * 1000)
+
+
+def get_insight_details():
+ """
+ Get insight details
+ """
+ alert_id = demisto.getArg('insight-id')
+ resp_json = req('GET', 'alert/' + alert_id, QUERY)
+ ec = insight_to_readable(resp_json)
+
+ details_md = tableToMarkdown('Insight Details:', [ec],
+ ['Id', 'Name', 'Action', 'AssignedTo', 'Description', 'IpAddress', 'LastUpdated',
+ 'LastUpdatedBy', 'Severity', 'InsightTime', 'WorkflowStatus'])
+
+ entity_display = entity_to_readable(resp_json.get('asset_details'))
+ entity_display.update({'IpAddress': demisto.get(resp_json['asset_details']['ip'], 'address')})
+ ec['EntityDetails'] = entity_display
+ entity_display['Id'] = resp_json.get('id')
+
+ entity_markdown = tableToMarkdown('Insight\'s Main Entity Details:', [entity_display],
+ ['Id', 'EntityType', 'Hostname', 'Groups', 'FirstSeen', 'LastSeen', 'IpAddress',
+ 'IsWhitelisted', 'RiskScore', 'Source'])
+
+ related_assets_json = resp_json.get('related_assets') or []
+ results_assets_list = []
+ for rel_asset in related_assets_json:
+ results_asset = to_readable(rel_asset, ['id', 'is_whitelisted', 'risk_score', 'source', 'asset_type'],
+ {'asset_type': 'EntityType'})
+ if rel_asset.get('asset_type') == 'hostname':
+ results_asset['Name'] = rel_asset.get('hostname')
+ elif rel_asset.get('asset_type') == 'username':
+ results_asset['Name'] = rel_asset.get('username')
+
+ if demisto.get(rel_asset, 'ip.address'):
+ results_asset['IpAddress'] = demisto.get(rel_asset, 'ip.address')
+
+ results_assets_list.append(results_asset)
+ ec['RelatedEntityList'] = results_assets_list
+ rel_assets_md = tableToMarkdown('Related Entities:', results_assets_list,
+ ['Id', 'EntityType', 'IpAddress', 'Name', 'IsWhitelisted', 'RiskScore', 'Source'])
+
+ signal_list_json = resp_json.get('signals') or []
+ signal_list = []
+ threat_intel = 0
+ anomalies = 0
+ patterns = 0
+ for signal_item in signal_list_json:
+ result_signal = signal_to_readable(signal_item)
+ if result_signal['SourceType'] == 'threatintel':
+ threat_intel += 1
+ elif result_signal['SourceType'] == 'rule':
+ patterns += 1
+ elif result_signal['SourceType'] == 'anomaly':
+ anomalies += 1
+ signal_list.append(result_signal)
+ ec['SignalList'] = signal_list
+ signals_md = tableToMarkdown('Related Signals:', signal_list,
+ ['Id', 'Name', 'Description', 'Category', 'SourceType'])
+ ec['SignalListMetadata'] = {
+ 'Patterns': {
+ 'Count': patterns
+ },
+ 'Anomalies': {
+ 'Count': anomalies
+ },
+ 'ThreatIntel': {
+ 'Count': threat_intel
+ }
+ }
+ final_ec = {'Jask.Insight(val.Id === obj.Id)': ec}
+ signal_metadata_md = tableToMarkdown('Signal Metadata:', [
+ {'Pattern Count': patterns, 'Anomaly Count': anomalies, 'Threat Intel Count': threat_intel}],
+ ['Pattern Count', 'Anomaly Count', 'Threat Intel Count'])
+ combined_md = details_md + '\n\n' + entity_markdown + '\n\n' + rel_assets_md +\
+ '\n\n' + signals_md + '\n\n' + signal_metadata_md
+ link = URL.replace('/api/', '/insight/') + alert_id
+ md_link = "[" + link + "](" + link + ")"
+ combined_md += '\n\n' + md_link
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': final_ec,
+ 'HumanReadable': combined_md,
+ 'Contents': resp_json,
+ 'ContentsFormat': formats['json']
+ })
+
+
+def get_insight_comments():
+ """
+ Get comments for insight
+ """
+ alert_id = demisto.getArg('insight-id')
+ resp_json = req('GET', 'alert/%s/comments' % alert_id, QUERY)
+ comments = [to_readable(comment, ['id', 'alert_id', 'author', 'body', 'last_updated', 'timestamp'],
+ {'alert_id': 'InsightId'}) for comment in resp_json['objects']]
+ ec = {'Jask.Insight(val.Id == "%s").CommentList': comments}
+ md = tableToMarkdown('Insight Comments:', comments,
+ ['Id', 'InsightId', 'Author', 'Body', 'LastUpdated', 'Timestamp'])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': ec,
+ 'HumanReadable': md,
+ 'Contents': resp_json,
+ 'ContentsFormat': formats['json']
+ })
+
+
+def get_signal_details():
+ """
+ Get signal details
+ """
+ alert_id = demisto.getArg('signal-id')
+ resp_json = req('GET', 'signal/' + alert_id, QUERY)
+ signal = signal_to_readable(resp_json)
+ md = tableToMarkdown('Insight Signal Details:', [signal],
+ ['Id', 'Name', 'Category', 'Description', 'Score', 'SourceType', 'Timestamp'])
+
+ flow = 0
+ notice = 0
+ http = 0
+ if resp_json.get('extra_records'):
+ for record in resp_json.get('extra_records'):
+ if record.get('type') == 'http':
+ http += 1
+ elif record.get('type') == 'flow':
+ flow += 1
+ elif record.get('type') == 'notice':
+ notice += 1
+ record_types = [{'RecordType': 'flow', 'RecordCount': flow}, {'RecordType': 'notice', 'RecordCount': notice},
+ {'RecordType': 'http', 'RecordCount': http}]
+ if signal.get('ThreatIndicators'):
+ md = md + tableToMarkdown('Threat Indicators', signal.get('ThreatIndicators'), ['IndicatorType', 'Value'])
+ md = md + tableToMarkdown('Record Metadata', {'Flow Count': flow, 'Notice Count': notice, 'Http Count': http},
+ ['Flow Count', 'Notice Count', 'Http Count'])
+ signal['Metadata'] = record_types
+ ec = {'Jask.Signal(val.Id === obj.Id)': signal}
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': ec,
+ 'HumanReadable': md,
+ 'Contents': resp_json,
+ 'ContentsFormat': formats['json']
+ })
+
+
+def get_entity_details():
+ """
+ Get entity details
+ """
+ entity_id = demisto.getArg('entity-id')
+ resp_json = req('GET', 'asset/' + entity_id, QUERY)
+ entity = entity_to_readable(resp_json)
+ md = tableToMarkdown('Entity Details:', [entity],
+ ['Id', 'Name', 'FirstSeen', 'LastSeen', 'Source', 'EntityType', 'PrimaryEntityType',
+ 'Hostname', 'RiskScore', 'IsWhitelisted', 'Groups'])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': {'Jask.Entity': entity},
+ 'HumanReadable': md,
+ 'Contents': resp_json,
+ 'ContentsFormat': formats['json']
+ })
+
+
+def get_related_entities():
+ """
+ Get related entities
+ """
+ entity_id = demisto.getArg('entity-id')
+ resp_json = req('GET', 'asset/%s/related_assets' % entity_id, QUERY)
+ entities = [
+ to_readable(e,
+ ['id', 'name', 'email', 'source', 'username', 'hostname', 'active', 'admin', 'asset_type',
+ 'created_ts', 'firstSeen', 'given_name', 'is_whitelisted', 'lastSeen', 'last_name', 'risk_score',
+ 'groups'],
+ {
+ 'asset_type': 'EntityType', 'created_ts': 'CreatedTimestamp', 'firstSeen': 'FirstSeen',
+ 'lastSeen': 'LastSeen'
+ }) for e in resp_json['objects']
+ ]
+
+ ec = {'Jask.RelatedEntityList(val.Id === obj.Id)': entities}
+ md = tableToMarkdown('Related Entities:', entities, ['Id', 'Name', 'EntityType', 'FirstSeen', 'LastSeen', 'Source',
+ 'Hostname', 'Username', 'GivenName', 'Email', 'RiskScore',
+ 'IsWhitelisted', 'Groups', 'CreatedTimestamp', 'Admin'])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': ec,
+ 'HumanReadable': md,
+ 'Contents': resp_json,
+ 'ContentsFormat': formats['json']
+ })
+
+
+def get_whitelisted_entities():
+ """
+ Get whitelisted entities
+ """
+ resp_json = req('GET', 'asset/whitelisted', QUERY)
+ items = []
+ for whitelisted_item in resp_json.get('objects'):
+ w = to_readable(whitelisted_item, ['id', 'name'], {})
+ w['ModelId'] = demisto.get(whitelisted_item, 'history.model_id')
+ w['Timestamp'] = demisto.get(whitelisted_item, 'history.timestamp')
+ w['UserName'] = demisto.get(whitelisted_item, 'history.username')
+ items.append(w)
+ ec = {
+ 'Jask.WhiteListed.EntityList(val.Id === obj.Id)': items,
+ 'Jask.WhiteListed.Metadata.TotalCount': len(items)
+ }
+ md = tableToMarkdown('Whitelisted:', items,
+ ['Id', 'Name', 'ModelId', 'Timestamp', 'UserName']) + '\n' + '### Count: ' + str(len(items))
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'EntryContext': ec,
+ 'HumanReadable': md,
+ 'Contents': resp_json,
+ 'ContentsFormat': formats['json']
+ })
+
+
+def convert_date_to_unix(d):
+ """
+ Convert a given date to seconds
+ """
+ return int((d - datetime.utcfromtimestamp(0)).total_seconds() * 1000)
+
+
+def translate_last_seen(last):
+ """
+ Convert last-seen argument to querystring
+ """
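+    # Produces a Lucene-style open-ended range filter, e.g.
+    # 'timestamp:[<millis> TO *]' for everything newer than the cutoff.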
+ if not last or last == 'All time':
+ return ''
+ if last == 'Last week':
+ return 'timestamp:[%d TO *]' % (convert_date_to_unix(datetime.utcnow()) - 7 * 24 * 60 * 60 * 1000)
+ if last == 'Last 48 hours':
+ return 'timestamp:[%d TO *]' % (convert_date_to_unix(datetime.utcnow()) - 2 * 24 * 60 * 60 * 1000)
+ if last == 'Last 24 hours':
+ return 'timestamp:[%d TO *]' % (convert_date_to_unix(datetime.utcnow()) - 24 * 60 * 60 * 1000)
+
+
+def _add_list_to_q(q, translate):
+ """
+ Add arguments in the translate dictionary to querystring
+ """
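+    # e.g. translate={'workflow_status': 'status'} with status=new,inprogress
+    # appends ' AND workflow_status:(new OR inprogress)' to the query.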
+ for v in translate:
+ arg_list = argToList(demisto.getArg(translate[v]))
+ if len(arg_list) == 1:
+ q += ' AND ' + v + ':(%s)' % (arg_list[0])
+ elif len(arg_list) > 1:
+ q += ' AND ' + v + ':(%s)' % (' OR '.join(arg_list))
+ return q
+
+
+def _add_time_to_q(q):
+ """
+ Add the time filter to the query string
+ Defaults to All time if no fields specified
+ """
+ last_seen = demisto.getArg('last-seen')
+ time_from = demisto.getArg('time-from')
+ time_to = demisto.getArg('time-to')
+ if last_seen:
+ if time_from or time_to:
+ return_error('You cannot specify absolute times [time-to, time-from] with relative time [last-seen]')
+ else:
+ if translate_last_seen(last_seen) != '':
+ q += ' AND ' + translate_last_seen(last_seen)
+ elif time_from and time_to:
+ q += ' AND timestamp:[%d TO %d]' % (
+ convert_string_date_to_unix(time_from), convert_string_date_to_unix(time_to))
+ elif time_from or time_to:
+ return_error('You must specify both absolute times [time-to, time-from] or relative time [last-seen]')
+ return q
+
+
+def search_insights():
+ """
+ Search insights using available filters
+ """
+ q = _add_time_to_q('*')
+ q = _add_list_to_q(q, {'workflow_status': 'status', 'rating': 'rating', 'group_assigned_to': 'assigned_team',
+ 'assigned_to': 'assigned-user'})
+ query = QUERY.copy()
+ query['q'] = q
+ query['offset'] = demisto.getArg('offset')
+ query['limit'] = demisto.getArg('limit')
+ query['sort_by'] = demisto.getArg('sort')
+ resp_json = req('GET', 'search/alerts', query)
+ insights = []
+ for insight in resp_json['objects']:
+ readable_insight = insight_to_readable(insight)
+ readable_insight['IpAddress'] = demisto.get(insight, 'asset.ip')
+ readable_insight['InsightTime'] = demisto.get(insight, 'timestamp')
+ insights.append(readable_insight)
+ ec = {'Jask.Insight(val.Id === obj.Id)': insights}
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': resp_json,
+ 'EntryContext': ec,
+ 'HumanReadable': tableToMarkdown('Insights', insights,
+ ['Id', 'Name', 'Action', 'AssignedTo', 'Description', 'IpAddress',
+ 'LastUpdated', 'LastUpdatedBy', 'Severity', 'InsightTime', 'WorkflowStatus'])
+ })
+
+
+def search_signals():
+ """
+ Search signals using available filters
+ """
+ q = _add_time_to_q('*')
+ q = _add_list_to_q(q, {'source_type': 'source', 'category': 'category'})
+ query = QUERY.copy()
+ query['q'] = q
+ query['offset'] = demisto.getArg('offset')
+ query['limit'] = demisto.getArg('limit')
+ query['sort_by'] = demisto.getArg('sort')
+ resp_json = req('GET', 'search/signals', query)
+ signals = [signal_to_readable(signal) for signal in resp_json['objects']]
+ ec = {'Jask.Signal(val.Id === object.Id)': signals}
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': resp_json,
+ 'EntryContext': ec,
+ 'HumanReadable': tableToMarkdown('Signals', signals,
+ ['Id', 'Name', 'Category', 'Description', 'Score', 'SourceType', 'Timestamp',
+ 'ThreatIndicators'])
+ })
+
+
+def search_entities():
+ """
+ Search entities using the available filters
+ """
+ q = _add_time_to_q('*')
+ q = _add_list_to_q(q, {'asset_type': 'entity-type'})
+ query = QUERY.copy()
+ query['q'] = q
+ query['offset'] = demisto.getArg('offset')
+ query['limit'] = demisto.getArg('limit')
+ query['sort_by'] = demisto.getArg('sort')
+ resp_json = req('GET', 'search/assets', query)
+ entities = []
+ for entity in resp_json['objects']:
+ readable = entity_to_readable(entity)
+ readable['IpAddress'] = entity.get('ip')
+ entities.append(readable)
+ ec = {'Jask.Entity(val.Id === obj.Id)': entities}
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': resp_json,
+ 'EntryContext': ec,
+ 'HumanReadable': tableToMarkdown('Entities', entities, [
+ 'Id', 'Name', 'FirstSeen', 'LastSeen', 'Source', 'EntityType', 'PrimaryEntityType', 'Hostname', 'RiskScore',
+ 'IsWhitelisted', 'Groups', 'IpAddress'
+ ])
+ })
+
+
+def translate_severity(severity):
+ """
+ Translate from Jask insight severity to Demisto severity
+ """
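+    # Demisto severities run 0-4 (unknown to critical), so any higher JASK
+    # severity is capped at 4.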
+ if severity <= 4:
+ return severity
+ return 4
+
+
+def fetch_incidents():
+ """
+ Retrieve new incidents periodically based on pre-defined instance parameters
+ """
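+    # The first run looks back 24 hours; afterwards the newest processed
+    # timestamp is persisted via setLastRun so each fetch resumes from there.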
+ now = convert_date_to_unix(datetime.utcnow())
+ last_run_object = demisto.getLastRun()
+ if last_run_object and last_run_object.get('time'):
+ last_run = last_run_object.get('time')
+ else:
+ last_run = now - 24 * 60 * 60 * 1000
+ next_fetch = last_run
+ q = '* AND timestamp:[%d TO *]' % last_run
+ if demisto.getParam('fetchQuery'):
+ q += ' AND ' + demisto.getParam('fetchQuery')
+ else:
+ q += ' AND workflow_status:(new OR inprogress)'
+ query = QUERY.copy()
+ query['q'] = q
+ query['offset'] = 0
+ query['limit'] = FETCH_LIMIT
+ query['sort_by'] = 'timestamp:asc'
+ resp_json = req('GET', 'search/alerts', query)
+ incidents = []
+ for a in resp_json['objects']:
+ current_fetch = a.get('timestamp')
+ if current_fetch:
+ try:
+ current_fetch = datetime.strptime(current_fetch, "%Y-%m-%dT%H:%M:%S")
+ except ValueError:
+ current_fetch = datetime.strptime(current_fetch, "%Y-%m-%dT%H:%M:%S.%f")
+ current_fetch = convert_date_to_unix(current_fetch)
+ if current_fetch > last_run:
+ incidents.append({
+ 'name': a.get('name', 'No name') + ' - ' + a.get('id'),
+ 'occurred': a.get('timestamp') + 'Z',
+ 'details': a.get('description'),
+ 'severity': translate_severity(a.get('severity')),
+ 'rawJSON': json.dumps(a)
+ })
+ if current_fetch > next_fetch:
+ next_fetch = current_fetch
+
+ demisto.incidents(incidents)
+ demisto.setLastRun({'time': next_fetch})
+
+
+def main():
+ try:
+ handle_proxy()
+ if demisto.command() == 'test-module':
+ req('GET', 'asset/whitelisted', QUERY)
+ demisto.results('ok')
+ elif demisto.command() == 'jask-get-insight-details':
+ get_insight_details()
+ elif demisto.command() == 'jask-get-insight-comments':
+ get_insight_comments()
+ elif demisto.command() == 'jask-get-signal-details':
+ get_signal_details()
+ elif demisto.command() == 'jask-get-entity-details':
+ get_entity_details()
+ elif demisto.command() == 'jask-get-related-entities':
+ get_related_entities()
+ elif demisto.command() == 'jask-get-whitelisted-entities':
+ get_whitelisted_entities()
+ elif demisto.command() == 'jask-search-insights':
+ search_insights()
+ elif demisto.command() == 'jask-search-entities':
+ search_entities()
+ elif demisto.command() == 'jask-search-signals':
+ search_signals()
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+ else:
+ return_error('Unrecognized command: ' + demisto.command())
+ except Exception as e:
+ LOG(e)
+ LOG.print_log(False)
+        return_error(str(e))
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/Jask/Jask.yml b/Integrations/Jask/Jask.yml
new file mode 100644
index 000000000000..71d6085e06cd
--- /dev/null
+++ b/Integrations/Jask/Jask.yml
@@ -0,0 +1,652 @@
+category: Analytics & SIEM
+commonfields:
+ id: Jask
+ version: -1
+configuration:
+- display: Server URL
+ name: URL
+ required: true
+ type: 0
+- display: Username
+ name: Username
+ required: true
+ type: 0
+- display: API Key
+ name: APIKey
+ required: true
+ type: 4
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: 'true'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Override default fetch query
+ name: fetchQuery
+ required: false
+ type: 0
+- defaultvalue: '100'
+ display: Limit the maximum incidents amount per fetch
+ name: fetchLimit
+ required: false
+ type: 0
+description: Freeing the analyst with autonomous decisions
+display: Jask
+name: Jask
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: The insight to retrieve details for
+ isArray: false
+ name: insight-id
+ required: true
+ secret: false
+ deprecated: false
+    description: Get Insight details for a specific Insight ID. The Insight details
+      command references SmartAlerts, which are the highest-level abstractions in
+      JASK SIEM, consisting of multiple signals and records and relating to one or
+      more assets.
+ execution: false
+ name: jask-get-insight-details
+ outputs:
+ - contextPath: Jask.Insight.Id
+ description: The insight ID
+ type: string
+ - contextPath: Jask.Insight.Name
+ description: The insight name
+ type: string
+ - contextPath: Jask.Insight.Action
+ description: The insight action
+ type: string
+ - contextPath: Jask.Insight.Entity
+ description: The main entity related to the insight
+ type: string
+ - contextPath: Jask.Insight.AssignedTo
+ description: Who the insight was assigned to
+ type: string
+ - contextPath: Jask.Insight.Description
+ description: The insight description
+ type: string
+ - contextPath: Jask.Insight.IpAddress
+ description: The IP address of the insight
+ type: string
+ - contextPath: Jask.Insight.LastUpdated
+ description: The time the insight was last updated
+ type: date
+ - contextPath: Jask.Insight.LastUpdatedBy
+ description: The last person to update the insight
+ type: string
+ - contextPath: Jask.Insight.Severity
+      description: The severity of the insight
+ type: number
+ - contextPath: Jask.Insight.InsightTime
+ description: The time of the insight
+ type: date
+ - contextPath: Jask.Insight.WorkflowStatus
+ description: The status of the insight
+ type: string
+ - contextPath: Jask.Insight.RelatedEntityList.Id
+ description: The ID of the related entity
+ type: string
+ - contextPath: Jask.Insight.RelatedEntityList.EntityType
+ description: The type of the related entity
+ type: string
+ - contextPath: Jask.Insight.RelatedEntityList.Hostname
+ description: The hostname of the related entity
+ type: string
+ - contextPath: Jask.Insight.SignalList.Id
+ description: The ID of the signal
+ type: string
+ - contextPath: Jask.Insight.SignalList.Name
+ description: The name of the signal
+ type: string
+ - contextPath: Jask.Insight.SignalList.Category
+ description: The category of the signal
+ type: string
+ - contextPath: Jask.Insight.SignalList.SourceType
+ description: The source of the signal
+ type: string
+ - contextPath: Jask.Insight.SignalListMetadata.Patterns.Count
+ description: The number of signals of the category pattern
+ type: number
+ - contextPath: Jask.Insight.SignalListMetadata.Anomalies.Count
+ description: The number of signals of the category anomaly
+ type: number
+ - contextPath: Jask.Insight.SignalListMetadata.ThreatIntel.Count
+ description: The number of signals of the category threat intelligence
+ type: number
+ - contextPath: Jask.Insight.RelatedEntityList.IpAddress
+ description: IP address of related entity
+ type: string
+ - contextPath: Jask.Insight.RelatedEntityList.IsWhitelisted
+ description: Whether or not the entity is whitelisted
+ type: boolean
+ - contextPath: Jask.Insight.RelatedEntityList.RiskScore
+ description: The risk score of the related entity
+ type: number
+ - contextPath: Jask.Insight.RelatedEntityList.Source
+ description: The source of the related entity
+ type: string
+ - arguments:
+ - default: true
+      description: The insight ID for which to retrieve comments.
+ isArray: false
+ name: insight-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Get comments for a specific Insight ID. (Users can post and update
+ comments on the JASK SIEM portal for any Insight ID.)
+ execution: false
+ name: jask-get-insight-comments
+ outputs:
+ - contextPath: Jask.InsightCommentList.id
+ description: ID of comment
+ type: string
+ - contextPath: Jask.InsightCommentList.InsightId
+ description: ID of insight
+ type: string
+ - contextPath: Jask.InsightCommentList.Author
+ description: Author of comment
+ type: string
+ - contextPath: Jask.InsightCommentList.Body
+ description: The comment body
+ type: string
+ - contextPath: Jask.InsightCommentList.LastUpdated
+ description: The date the comment was last updated
+ type: date
+ - contextPath: Jask.InsightCommentList.Timestamp
+ description: The time of the comment
+ type: date
+ - arguments:
+ - default: true
+ description: The signal to retrieve details for
+ isArray: false
+ name: signal-id
+ required: true
+ secret: false
+ deprecated: false
+    description: Get Signal details for a specific Signal ID. The Signal details
+      command references signals in JASK, which are created when records exhibit
+      suspicious properties and match patterns or other detection logic.
+ execution: false
+ name: jask-get-signal-details
+ outputs:
+ - contextPath: Jask.Signal.Id
+ description: The signal ID
+ type: string
+ - contextPath: Jask.Signal.Name
+ description: The signal name
+ type: string
+ - contextPath: Jask.Signal.Category
+ description: The signal category
+ type: string
+ - contextPath: Jask.Signal.Description
+ description: The signal description
+ type: string
+ - contextPath: Jask.Signal.Score
+ description: The signal score
+ type: number
+ - contextPath: Jask.Signal.SourceType
+ description: The source type of the signal
+ type: string
+ - contextPath: Jask.Signal.Timestamp
+ description: The time of the signal
+ type: date
+ - contextPath: Jask.Signal.Metadata.RecordType
+ description: The record type
+ type: string
+ - contextPath: Jask.Signal.Metadata.RecordCount
+      description: The associated count of each record type
+ type: number
+    - contextPath: Jask.Signal.ThreatIndicators.IndicatorType
+ description: The type of threat indicator
+ type: string
+ - contextPath: Jask.Signal.ThreatIndicators.Value
+ description: The value of the threat indicator
+ type: string
+ - arguments:
+ - default: true
+ description: The entity to retrieve details for
+ isArray: false
+ name: entity-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Get entity details for a specific entity ID
+ execution: false
+ name: jask-get-entity-details
+ outputs:
+ - contextPath: Jask.Entity.Id
+ description: The entity ID
+ type: string
+ - contextPath: Jask.Entity.Name
+ description: The entity name
+ type: string
+ - contextPath: Jask.Entity.IpAddress
+ description: The IP address of the entity
+ type: string
+ - contextPath: Jask.Entity.FirstSeen
+ description: When the entity was first seen
+ type: date
+ - contextPath: Jask.Entity.LastSeen
+ description: The time the entity was last seen
+ type: date
+ - contextPath: Jask.Entity.Source
+ description: The source of the entity
+ type: string
+ - contextPath: Jask.Entity.AssetType
+ description: The asset type
+ type: string
+ - contextPath: Jask.Entity.PrimaryAssetType
+ description: The primary asset type
+ type: string
+ - contextPath: Jask.Entity.HostName
+ description: The hostname
+ type: string
+ - contextPath: Jask.Entity.RiskScore
+ description: The risk score
+ type: number
+ - contextPath: Jask.Entity.IsWhiteListed
+ description: Whether or not the entity is whitelisted
+ type: boolean
+ - arguments:
+ - default: true
+      description: The entity ID for which the related entities are retrieved.
+ isArray: false
+ name: entity-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Get entities related to a specific entity ID.
+ execution: false
+ name: jask-get-related-entities
+ outputs:
+ - contextPath: Jask.RelatedEntityList.Id
+ description: The entity ID
+ type: string
+ - contextPath: Jask.RelatedEntityList.Name
+ description: The entity name
+ type: string
+ - contextPath: Jask.RelatedEntityList.Email
+ description: The entity email
+ type: string
+ - contextPath: Jask.RelatedEntityList.Source
+ description: The entity source
+ type: string
+ - contextPath: Jask.RelatedEntityList.UserName
+ description: The username of the related entity
+ type: string
+ - contextPath: Jask.RelatedEntityList.HostName
+ description: The hostname of the entity
+ type: string
+ - contextPath: Jask.RelatedEntityList.Active
+ description: Whether or not the entity is active
+ type: boolean
+ - contextPath: Jask.RelatedEntityList.Admin
+ description: The entity admin
+ type: string
+ - contextPath: Jask.RelatedEntityList.AssetType
+ description: The asset type
+ type: string
+ - contextPath: Jask.RelatedEntityList.CreatedTimestamp
+ description: The time of creation
+ type: date
+ - contextPath: Jask.RelatedEntityList.FirstSeen
+ description: The time the entity was first seen
+ type: date
+ - contextPath: Jask.RelatedEntityList.GivenName
+      description: The given (first) name of the entity
+ type: string
+ - contextPath: Jask.RelatedEntityList.IsWhiteListed
+ description: Whether or not the entity is whitelisted
+ type: boolean
+ - contextPath: Jask.RelatedEntityList.LastSeen
+ description: The time the entity was last seen
+ type: date
+ - contextPath: Jask.RelatedEntityList.LastName
+      description: The last name of the entity
+ type: string
+ - contextPath: Jask.RelatedEntityList.RiskScore
+ description: The risk score of the entity
+ type: number
+ - deprecated: false
+ description: Get the whitelisted entities.
+ execution: false
+ name: jask-get-whitelisted-entities
+ outputs:
+ - contextPath: Jask.Whitelisted.EntityList.Id
+      description: The ID of the whitelisted entity
+ type: string
+ - contextPath: Jask.Whitelisted.EntityList.Name
+ description: Name of the whitelisted entity
+ type: string
+ - contextPath: Jask.Whitelisted.EntityList.UserName
+ description: Username of the whitelisted entity
+ type: string
+ - contextPath: Jask.Whitelisted.EntityList.ModelId
+      description: The model ID of the whitelisted entity
+ type: string
+ - contextPath: Jask.Whitelisted.EntityList.Timestamp
+      description: The time the entity was whitelisted
+ type: date
+ - contextPath: Jask.Whitelisted.EntityList.Metadata.TotalCount
+ description: The number of whitelisted entities
+ type: number
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: When the insight was last seen. Defaults to 'All time' if no time
+ arguments are specified.
+ isArray: false
+ name: last-seen
+ predefined:
+ - All time
+ - Last week
+ - Last 48 hours
+ - Last 24 hours
+ required: false
+ secret: false
+ - default: false
+      description: A comma-separated list of rating values between 1 and 5, inclusive
+ isArray: false
+ name: rating
+ required: false
+ secret: false
+ - default: false
+ defaultValue: new,inprogress
+      description: 'A comma-separated list of statuses from the options: new, inprogress,
+        closed'
+ isArray: false
+ name: status
+ required: false
+ secret: false
+ - default: false
+      description: A comma-separated list of assigned teams
+ isArray: false
+ name: assigned-team
+ required: false
+ secret: false
+ - default: false
+      description: A comma-separated list of assigned users
+ isArray: false
+ name: assigned-user
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: The page offset for the results
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+      description: The maximum number of results to retrieve
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ defaultValue: timestamp:desc
+      description: 'The field and direction to sort the results by, for example: timestamp:desc'
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - default: false
+      description: The start time for the search, in MM/DD/YYYY format
+ isArray: false
+ name: time-from
+ required: false
+ secret: false
+ - default: false
+      description: The end time for the search, in MM/DD/YYYY format
+ isArray: false
+ name: time-to
+ required: false
+ secret: false
+ deprecated: false
+ description: Search insights using the given filters.
+ execution: false
+ name: jask-search-insights
+ outputs:
+ - contextPath: Jask.Insight.Id
+ description: The ID of the insight
+ type: string
+ - contextPath: Jask.Insight.Name
+ description: The name of the insight
+ type: string
+ - contextPath: Jask.Insight.Action
+ description: The action to take on the insight
+ type: string
+ - contextPath: Jask.Insight.AssignedTo
+      description: Who the insight was assigned to
+ type: string
+ - contextPath: Jask.Insight.Description
+ description: The description of the insight
+ type: string
+ - contextPath: Jask.Insight.IpAddress
+ description: The IP address of the insight
+ type: string
+ - contextPath: Jask.Insight.LastUpdated
+      description: When the insight was last updated
+ type: date
+ - contextPath: Jask.Insight.LastUpdatedBy
+ description: Who the insight was last updated by
+ type: string
+ - contextPath: Jask.Insight.Severity
+ description: The severity of the insight
+ type: number
+ - contextPath: Jask.Insight.InsightTime
+ description: The time of the insight
+ type: date
+ - contextPath: Jask.WorkflowStatus
+ description: The status of the insight
+ type: string
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+      description: When the signal was last seen. Defaults to 'All time' if no time
+        arguments are specified.
+ isArray: false
+ name: last-seen
+ predefined:
+ - All time
+ - Last week
+ - Last 48 hours
+ - Last 24 hours
+ required: false
+ secret: false
+ - default: false
+      description: 'A comma-separated list of sources from the options: threatintel,
+        rule, anomaly'
+ isArray: false
+ name: source
+ required: false
+ secret: false
+ - default: false
+      description: 'A comma-separated list of categories from the options: Attack Stage,
+        C2, Defense Evasion, Discovery, Exfiltration, Exploitation, External Recon,
+        Internal Recon, Lateral Movement, Threat Intelligence, Traffic Anomaly'
+ isArray: false
+ name: category
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: The page offset for the results
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+      description: The maximum number of signals to retrieve
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ defaultValue: score:desc
+      description: 'The field and direction to sort the results by, for example: score:desc'
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - default: false
+      description: The start time for the search, in MM/DD/YYYY format
+ isArray: false
+ name: time-from
+ required: false
+ secret: false
+ - default: false
+      description: The end time for the search, in MM/DD/YYYY format
+ isArray: false
+ name: time-to
+ required: false
+ secret: false
+ deprecated: false
+ description: Search signals using the given filters.
+ execution: false
+ name: jask-search-signals
+ outputs:
+ - contextPath: Jask.Signal.Id
+ description: The ID of the signal
+ type: string
+ - contextPath: Jask.Signal.Name
+ description: The name of the signal
+ type: string
+ - contextPath: Jask.Signal.Category
+ description: The category of the signal
+ type: string
+ - contextPath: Jask.Signal.Description
+ description: The description of the signal
+ type: string
+ - contextPath: Jask.Signal.Score
+ description: The score of the signal
+ type: number
+ - contextPath: Jask.Signal.SourceType
+ description: The source type of the signal
+ type: string
+ - contextPath: Jask.Signal.Timestamp
+ description: The time of the signal
+ type: date
+ - contextPath: Jask.Signal.ThreatIndicators.IndicatorType
+ description: The type of threat indicator
+ type: string
+ - contextPath: Jask.Signal.ThreatIndicators.Value
+ description: The value of the threat indicator
+ type: string
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+      description: When the entity was last seen. Defaults to 'All time' if no time
+        arguments are specified.
+ isArray: false
+ name: last-seen
+ predefined:
+ - All time
+ - Last week
+ - Last 48 hours
+ - Last 24 hours
+ required: false
+ secret: false
+ - default: false
+      description: 'A comma-separated list of entity types from the options: username,
+        hostname, ip'
+ isArray: false
+ name: entity-type
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: The page offset for the results
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+      description: The maximum number of results to retrieve
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ defaultValue: risk_score:desc
+      description: 'The field and direction to sort the results by, for example: risk_score:desc'
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - default: false
+      description: The start time for the search, in MM/DD/YYYY format
+ isArray: false
+ name: time-from
+ required: false
+ secret: false
+ - default: false
+      description: The end time for the search, in MM/DD/YYYY format
+ isArray: false
+ name: time-to
+ required: false
+ secret: false
+ deprecated: false
+ description: Search entities using the given filters.
+ execution: false
+ name: jask-search-entities
+ outputs:
+ - contextPath: Jask.Entity.Id
+ description: The ID of the entity
+ type: string
+ - contextPath: Jask.Entity.Name
+ description: The name of the entity
+ type: string
+ - contextPath: Jask.Entity.FirstSeen
+ description: When the entity was first seen
+ type: date
+ - contextPath: Jask.Entity.LastSeen
+ description: When the entity was last seen
+ type: date
+ - contextPath: Jask.Entity.Source
+ description: The source of the entity
+ type: string
+ - contextPath: Jask.Entity.EntityType
+ description: The type of the entity
+ type: string
+ - contextPath: Jask.Entity.PrimaryEntityType
+ description: The primary entity type
+ type: string
+ - contextPath: Jask.Entity.HostName
+      description: The hostname of the entity
+ type: string
+ - contextPath: Jask.Entity.RiskScore
+ description: The risk score of the entity
+ type: number
+ - contextPath: Jask.Entity.IsWhiteListed
+ description: Whether or not the entity is whitelisted
+ type: boolean
+ - contextPath: Jask.Entity.Groups
+ description: The groups of the entity
+ type: string
+ - contextPath: Jask.Entity.Ip.Address
+ description: The IP address of the entity
+ type: string
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- No test
diff --git a/Integrations/Jask/Jask_description.md b/Integrations/Jask/Jask_description.md
new file mode 100644
index 000000000000..eb40d0f0f54a
--- /dev/null
+++ b/Integrations/Jask/Jask_description.md
@@ -0,0 +1,2 @@
+You can retrieve your personal API key from the following URL under the "Profile" tab:
+https://[CUSTOMER].portal.jask.ai/config
\ No newline at end of file
diff --git a/Integrations/Jask/Jask_image.png b/Integrations/Jask/Jask_image.png
new file mode 100644
index 000000000000..8c0b0ec88cf1
Binary files /dev/null and b/Integrations/Jask/Jask_image.png differ
diff --git a/Integrations/Jask/Jask_test.py b/Integrations/Jask/Jask_test.py
new file mode 100644
index 000000000000..9e9481ffdd7c
--- /dev/null
+++ b/Integrations/Jask/Jask_test.py
@@ -0,0 +1,28 @@
+import demistomock as demisto
+
+DATE_WITHOUT_MS = '2019-05-03T03:01:54'
+DATE_WITH_MS = '2019-05-03T03:02:54.123'
+
+
+def init_integration(mocker):
+ mocker.patch.object(demisto, 'params', return_value={
+ 'URL': 'mock.com'
+ })
+
+
+def test_fetch_incidents(mocker):
+ init_integration(mocker)
+ import Jask
+ from Jask import fetch_incidents
+ mocker.patch.object(Jask, 'req', return_value={
+ 'objects': [
+ {
+ 'timestamp': DATE_WITHOUT_MS
+ },
+ {
+ 'timestamp': DATE_WITH_MS
+ }
+ ]
+ })
+ # asserts there are no exceptions
+    assert fetch_incidents() is None
diff --git a/Integrations/Jask/Pipfile b/Integrations/Jask/Pipfile
new file mode 100644
index 000000000000..1367d68b8ad6
--- /dev/null
+++ b/Integrations/Jask/Pipfile
@@ -0,0 +1,14 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+
+[packages]
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/Jask/Pipfile.lock b/Integrations/Jask/Pipfile.lock
new file mode 100644
index 000000000000..ed5e480f2f24
--- /dev/null
+++ b/Integrations/Jask/Pipfile.lock
@@ -0,0 +1,240 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "73c4b534031f8318ec6903fbaa068b0bf0e2be5f86c3cfb708683f542b13df2e"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:0ef2bf9f07c3150929b25e8e61b5198c27b0dca195e156f0e4d5bdd89185ca1a",
+ "sha256:fc9b582dba0366e63540982c3944a9230cbc6f303641c51483fa547dcc22393a"
+ ],
+ "version": "==1.6.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==1.5"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:27594cf4fc279f321974061ac69164aaebd2749af962ac8686b20503ac0bcf2d",
+ "sha256:9d51fe0a382f05b6b117c5e601fc219fede4a8c71703324af3f7d883aef476a3"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==3.7.3"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265",
+ "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.0"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:18c796c2cd35eb1a1d3f012a214a542790a1aed95e29768bdcb9f2197eccbd0b",
+ "sha256:96151fca2c6e736503981896495d344781b60d18bfda78dc11b290c6125ebdb6"
+ ],
+ "version": "==4.3.15"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33",
+ "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39",
+ "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019",
+ "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088",
+ "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b",
+ "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e",
+ "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6",
+ "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b",
+ "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5",
+ "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff",
+ "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd",
+ "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7",
+ "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff",
+ "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d",
+ "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2",
+ "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35",
+ "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4",
+ "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514",
+ "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252",
+ "sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109",
+ "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f",
+ "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c",
+ "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92",
+ "sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577",
+ "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d",
+ "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d",
+ "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f",
+ "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a",
+ "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"
+ ],
+ "version": "==1.3.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1",
+ "sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==2.0.0"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742",
+ "sha256:5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.3"
+ },
+ "pbr": {
+ "hashes": [
+ "sha256:8257baf496c8522437e8a6cfe0f15e00aedc6c0e0e7c9d55eeeeab31e0853843",
+ "sha256:8c361cc353d988e4f5b998555c88098b9d5964c2e11acf7b0d21925a66bb5824"
+ ],
+ "version": "==5.1.3"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f",
+ "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746"
+ ],
+ "version": "==0.9.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:02c2b6d268695a8b64ad61847f92e611e6afcff33fd26c3a2125370c4662905d",
+ "sha256:ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93"
+ ],
+ "index": "pypi",
+ "version": "==1.9.4"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:592eaa2c33fae68c7d75aacf042efc9f77b27c08a6224a4f59beab8d9a420523",
+ "sha256:ad3ad5c450284819ecde191a654c09b0ec72257a2c711b9633d677c71c9850c4"
+ ],
+ "index": "pypi",
+ "version": "==4.3.1"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:4d0d06d173eecf172703219a71dbd4ade0e13904e6bbce1ce660e2e0dc78b5c4",
+ "sha256:bfdf02789e3d197bd682a758cae0a4a18706566395fbe2803badcd1335e0173e"
+ ],
+ "index": "pypi",
+ "version": "==1.10.1"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/JiraV2/CHANGELOG.md b/Integrations/JiraV2/CHANGELOG.md
new file mode 100644
index 000000000000..406c97f4b531
--- /dev/null
+++ b/Integrations/JiraV2/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+- API token parameter is now encrypted.
+
+## [19.8.2] - 2019-08-22
+- Added support for remote application links.
\ No newline at end of file
diff --git a/Integrations/JiraV2/JiraV2.py b/Integrations/JiraV2/JiraV2.py
new file mode 100644
index 000000000000..59e9bab10cf0
--- /dev/null
+++ b/Integrations/JiraV2/JiraV2.py
@@ -0,0 +1,656 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+import json
+import requests
+from base64 import b64encode
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+BASE_URL = demisto.getParam('url').rstrip('/') + '/'
+API_TOKEN = demisto.getParam('APItoken')
+USERNAME = demisto.getParam('username')
+PASSWORD = demisto.getParam('password')
+IS_OAUTH = demisto.getParam('consumerKey') and demisto.getParam('accessToken') and demisto.getParam('privateKey')
+IS_BASIC = USERNAME and (PASSWORD or API_TOKEN)
+
+# If not OAuth, check for valid basic auth parameters, i.e. a username with either a password or an API token
+if not IS_OAUTH and not IS_BASIC:
+    return_error('Please provide authorization information: Basic (username with password or API token) or OAuth 1.0')
+# Build the Basic auth header only when basic credentials were supplied
+B64_AUTH = (b64encode((USERNAME + ":" + (API_TOKEN if API_TOKEN else PASSWORD)).encode('ascii'))).decode('ascii') if IS_BASIC else ''
+BASIC_AUTH = 'Basic ' + B64_AUTH
+OAUTH = {
+ "ConsumerKey": demisto.getParam('consumerKey'),
+ "AccessToken": demisto.getParam('accessToken'),
+ "PrivateKey": demisto.getParam('privateKey')
+} if IS_OAUTH else ''
+
+HEADERS = {
+ 'Content-Type': 'application/json',
+}
+if not IS_OAUTH:
+ HEADERS['Authorization'] = BASIC_AUTH
+
+BASIC_AUTH_ERROR_MSG = "For cloud users: As of June 2019, Basic authentication with passwords for Jira is no" \
+                       " longer supported. Please use an API token or OAuth."
+USE_SSL = not demisto.params().get('insecure', False)
+
+
+def jira_req(method, resource_url, body='', link=False):
+ url = resource_url if link else (BASE_URL + resource_url)
+ result = requests.request(
+ method=method,
+ url=url,
+ data=body,
+ headers=HEADERS,
+ verify=USE_SSL,
+ params=OAUTH,
+ )
+ if not result.ok:
+ demisto.debug(result.text)
+ try:
+ rj = result.json()
+ if rj.get('errorMessages'):
+ return_error(f'Status code: {result.status_code}\nMessage: {",".join(rj["errorMessages"])}')
+ elif rj.get('errors'):
+ return_error(f'Status code: {result.status_code}\nMessage: {",".join(rj["errors"].values())}')
+ else:
+ return_error(f'Status code: {result.status_code}\nError text: {result.text}')
+ except ValueError as ve:
+ demisto.debug(str(ve))
+ if result.status_code == 401:
+            return_error('Unauthorized request, please check authentication related parameters. '
+ f'{BASIC_AUTH_ERROR_MSG if IS_BASIC else ""}')
+ elif result.status_code == 404:
+ return_error("Could not connect to the Jira server. Verify that the server URL is correct.")
+ else:
+ return_error(
+ f"Failed reaching the server. status code: {result.status_code}")
+
+ return result
+
+
+def run_query(query, start_at='', max_results=None):
+ # EXAMPLE
+ """
+ request = {
+ "jql": "project = HSP",
+ "startAt": 0,
+ "maxResults": 15,
+ "fields": [ <-- not supported yet, but easily attainable
+ "summary",
+ "status",
+ "assignee"
+ ]
+ }
+ """
+ demisto.debug(f'querying with: {query}')
+ url = BASE_URL + 'rest/api/latest/search/'
+ query_params = {
+ 'jql': query,
+ "startAt": start_at,
+ "maxResults": max_results,
+ }
+ if OAUTH:
+ query_params.update(OAUTH) # type: ignore
+
+ result = requests.get(
+ url=url,
+ headers=HEADERS,
+ verify=USE_SSL,
+ params=query_params
+ )
+ try:
+ rj = result.json()
+ if rj.get('issues'):
+ return rj
+
+ errors = ",".join(rj.get("errorMessages", ['could not fetch any issues, please check your query']))
+ return_error(f'No issues were found, error message from Jira: {errors}')
+
+ except ValueError as ve:
+ demisto.debug(str(ve))
+ return_error(f'Failed to send request, reason: {result.reason}')
+
+
+def get_id_offset():
+ """
+    Gets the ID offset, i.e., the first issue ID. Used to correctly fetch all issues.
+ """
+ query = "ORDER BY created ASC"
+ j_res = run_query(query=query, max_results=1)
+ first_issue_id = j_res.get('issues')[0].get('id')
+ return_outputs(
+ readable_output=f"ID Offset: {first_issue_id}",
+ outputs={'Ticket.idOffSet': first_issue_id},
+ )
+
+
+def expand_urls(data, depth=0):
+ if isinstance(data, dict) and depth < 10:
+        for key, value in list(data.items()):  # iterate over a copy, since the dict is mutated below
+ if key in ['_links', 'watchers', 'sla', 'request participants']:
+ # dictionary of links
+ if isinstance(value, dict):
+ for link_key, link_url in value.items():
+                        value[link_key + '_expanded'] = json.dumps(
+ jira_req(method='GET', resource_url=link_url, link=True).json())
+
+ # link
+ else:
+                data[key + '_expanded'] = json.dumps(jira_req(method='GET', resource_url=value, link=True).json())
+
+ # search deeper
+ else:
+ if isinstance(value, dict):
+                    expand_urls(value, depth + 1)  # recurse without returning, so the remaining keys are still processed
+
+
+def generate_md_context_get_issue(data):
+ get_issue_obj: dict = {"md": [], "context": []}
+ if not isinstance(data, list):
+ data = [data]
+
+ for element in data:
+ md_obj, context_obj = {}, {}
+
+ context_obj['Id'] = md_obj['id'] = demisto.get(element, 'id')
+ context_obj['Key'] = md_obj['key'] = demisto.get(element, 'key')
+ context_obj['Summary'] = md_obj['summary'] = demisto.get(element, 'fields.summary')
+ context_obj['Status'] = md_obj['status'] = demisto.get(element, 'fields.status.name')
+
+ assignee = demisto.get(element, 'fields.assignee')
+ context_obj['Assignee'] = md_obj['assignee'] = "{name}({email})".format(
+ name=assignee.get('displayName', 'null'),
+ email=assignee.get('emailAddress', 'null')
+ ) if assignee else 'null(null)'
+
+ creator = demisto.get(element, 'fields.creator')
+ context_obj['Creator'] = md_obj['creator'] = "{name}({email})".format(
+ name=creator.get('displayName', 'null'),
+ email=creator.get('emailAddress', 'null')
+ ) if creator else 'null(null)'
+
+ reporter = demisto.get(element, 'fields.reporter')
+ md_obj['reporter'] = "{name}({email})".format(
+ name=reporter.get('displayName', 'null'),
+ email=reporter.get('emailAddress', 'null')
+ ) if reporter else 'null(null)'
+
+ md_obj.update({
+ 'issueType': demisto.get(element, 'fields.issuetype.description'),
+ 'priority': demisto.get(element, 'fields.priority.name'),
+ 'project': demisto.get(element, 'fields.project.name'),
+ 'labels': demisto.get(element, 'fields.labels'),
+ 'description': demisto.get(element, 'fields.description'),
+ 'duedate': demisto.get(element, 'fields.duedate'),
+ 'ticket_link': demisto.get(element, 'self'),
+ 'created': demisto.get(element, 'fields.created'),
+ })
+ attachments = demisto.get(element, 'fields.attachment')
+ if isinstance(attachments, list):
+ md_obj['attachment'] = ','.join(attach.get('filename') for attach in attachments)
+
+ get_issue_obj['md'].append(md_obj)
+ get_issue_obj['context'].append(context_obj)
+
+ return get_issue_obj
+
+
+def generate_md_context_create_issue(data, project_name=None, project_key=None):
+ create_issue_obj = {"md": [], "context": {"Ticket": []}} # type: ignore
+ if project_name:
+ data["projectName"] = project_name
+
+ if project_key:
+ data["projectKey"] = project_key
+
+ elif demisto.getParam('projectKey'):
+ data["projectKey"] = demisto.getParam('projectKey')
+
+ create_issue_obj['md'].append(data) # type: ignore
+ create_issue_obj['context']['Ticket'].append({"Id": demisto.get(data, 'id'), "Key": demisto.get(data, 'key')}) # type: ignore
+ return create_issue_obj
+
+
+def generate_md_upload_issue(data, issue_id):
+ upload_md = []
+ if not isinstance(data, list):
+ data = [data]
+
+ for element in data:
+ md_obj = {
+ 'id': demisto.get(element, 'id'),
+ 'issueId': issue_id,
+ 'attachment_name': demisto.get(element, 'filename'),
+ 'attachment_link': demisto.get(element, 'self')
+ }
+ upload_md.append(md_obj)
+
+ return upload_md
+
+
+def create_incident_from_ticket(issue):
+ labels = [
+ {'type': 'issue', 'value': json.dumps(issue)}, {'type': 'id', 'value': str(issue.get('id'))},
+ {'type': 'lastViewed', 'value': str(demisto.get(issue, 'fields.lastViewed'))},
+ {'type': 'priority', 'value': str(demisto.get(issue, 'fields.priority.name'))},
+ {'type': 'status', 'value': str(demisto.get(issue, 'fields.status.name'))},
+ {'type': 'project', 'value': str(demisto.get(issue, 'fields.project.name'))},
+ {'type': 'updated', 'value': str(demisto.get(issue, 'fields.updated'))},
+ {'type': 'reportername', 'value': str(demisto.get(issue, 'fields.reporter.displayName'))},
+ {'type': 'reporteremail', 'value': str(demisto.get(issue, 'fields.reporter.emailAddress'))},
+ {'type': 'created', 'value': str(demisto.get(issue, 'fields.created'))},
+ {'type': 'summary', 'value': str(demisto.get(issue, 'fields.summary'))},
+ {'type': 'description', 'value': str(demisto.get(issue, 'fields.description'))}
+ ]
+
+    # Name the incident after the issue summary, falling back to the issue ID
+    name = demisto.get(issue, 'fields.summary')
+    if not name:
+        name = f"Jira issue: {issue.get('id')}"
+
+ severity = 0
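+    # Map the Jira priority name to a Demisto severity: 4 (Critical), 3 (High), 2 (Medium), 1 (Low); 0 means unknown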
+ if demisto.get(issue, 'fields.priority') and demisto.get(issue, 'fields.priority.name'):
+ if demisto.get(issue, 'fields.priority.name') == 'Highest':
+ severity = 4
+ elif demisto.get(issue, 'fields.priority.name') == 'High':
+ severity = 3
+ elif demisto.get(issue, 'fields.priority.name') == 'Medium':
+ severity = 2
+ elif demisto.get(issue, 'fields.priority.name') == 'Low':
+ severity = 1
+
+ return {
+ "name": name,
+ "labels": labels,
+ "details": demisto.get(issue, "fields.description"),
+ "severity": severity,
+ "rawJSON": json.dumps(issue)
+ }
+
+
+def get_project_id(project_key='', project_name=''):
+ result = jira_req('GET', 'rest/api/latest/issue/createmeta')
+
+ for project in result.json().get('projects'):
+ if project_key.lower() == project.get('key').lower() or project_name.lower() == project.get('name').lower():
+ return project.get('id')
+ return_error('Project not found')
+
+
+def get_issue_fields(issue_creating=False, **issue_args):
+ """
+ refactor issues's argument as received from demisto into jira acceptable format, and back.
+ :param issue_creating: flag that indicates this function is called when creating an issue
+ :param issue_args: issue argument
+ """
+ issue = {} # type: dict
+ if 'issueJson' in issue_args:
+ try:
+ issue = json.loads(issue_args['issueJson'])
+ except TypeError as te:
+ demisto.debug(str(te))
+ return_error("issueJson must be in a valid json format")
+
+ if not issue.get('fields'):
+ issue['fields'] = {}
+
+ if not issue['fields'].get('issuetype') and issue_creating:
+ issue['fields']['issuetype'] = {}
+
+ if issue_args.get('summary'):
+ issue['fields']['summary'] = issue_args['summary']
+
+ if not issue['fields'].get('project') and (issue_args.get('projectKey') or issue_args.get('projectName')):
+ issue['fields']['project'] = {}
+
+ if issue_args.get('projectKey'):
+ issue['fields']['project']['key'] = issue_args.get('projectKey', '')
+ if issue_args.get('projectName'):
+ issue['fields']['project']['name'] = issue_args.get('projectName', '')
+
+ if issue_creating:
+ # make sure the key & name are right, and get the corresponding project id & key
+ project_id = get_project_id(issue['fields']['project'].get('key', ''),
+ issue['fields']['project'].get('name', ''))
+ issue['fields']['project']['id'] = project_id
+
+ if issue_args.get('issueTypeName'):
+ issue['fields']['issuetype']['name'] = issue_args['issueTypeName']
+
+ if issue_args.get('issueTypeId'):
+ issue['fields']['issuetype']['id'] = issue_args['issueTypeId']
+
+ if issue_args.get('parentIssueId'):
+ if not issue['fields'].get('parent'):
+ issue['fields']['parent'] = {}
+ issue['fields']['parent']['id'] = issue_args['parentIssueId']
+
+ if issue_args.get('parentIssueKey'):
+ if not issue['fields'].get('parent'):
+ issue['fields']['parent'] = {}
+ issue['fields']['parent']['key'] = issue_args['parentIssueKey']
+
+ if issue_args.get('description'):
+ issue['fields']['description'] = issue_args['description']
+
+ if issue_args.get('labels'):
+ issue['fields']['labels'] = issue_args['labels'].split(",")
+
+ if issue_args.get('priority'):
+ if not issue['fields'].get('priority'):
+ issue['fields']['priority'] = {}
+ issue['fields']['priority']['name'] = issue_args['priority']
+
+ if issue_args.get('duedate'):
+ issue['fields']['duedate'] = issue_args['duedate']
+
+ if issue_args.get('assignee'):
+ if not issue['fields'].get('assignee'):
+ issue['fields']['assignee'] = {}
+ issue['fields']['assignee']['name'] = issue_args['assignee']
+
+ if issue_args.get('reporter'):
+ if not issue['fields'].get('reporter'):
+ issue['fields']['reporter'] = {}
+ issue['fields']['reporter']['name'] = issue_args['reporter']
+
+ return issue
+
+
+def get_issue(issue_id, headers=None, expand_links=False, is_update=False, get_attachments=False):
+ result = jira_req('GET', 'rest/api/latest/issue/' + issue_id)
+ j_res = result.json()
+ if expand_links == "true":
+ expand_urls(j_res)
+
+ attachments = demisto.get(j_res, 'fields.attachment') # list of all attachments
+ if get_attachments == 'true' and attachments:
+ attachments_zip = jira_req(method='GET', resource_url=f'secure/attachmentzip/{issue_id}.zip').content
+ demisto.results(fileResult(filename=f'{j_res.get("id")}_attachments.zip', data=attachments_zip))
+
+ md_and_context = generate_md_context_get_issue(j_res)
+ human_readable = tableToMarkdown(demisto.command(), md_and_context['md'], argToList(headers))
+ if is_update:
+ human_readable += f'Issue #{issue_id} was updated successfully'
+
+ contents = j_res
+ outputs = {'Ticket(val.Id == obj.Id)': md_and_context['context']}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+
+def issue_query_command(query, start_at='', max_results=None, headers=''):
+ j_res = run_query(query, start_at, max_results)
+ issues = demisto.get(j_res, 'issues')
+ md_and_context = generate_md_context_get_issue(issues)
+ human_readable = tableToMarkdown(demisto.command(), md_and_context['md'], argToList(headers))
+ contents = j_res
+ outputs = {'Ticket(val.Id == obj.Id)': md_and_context['context']}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+
+def create_issue_command():
+ url = 'rest/api/latest/issue'
+ issue = get_issue_fields(issue_creating=True, **demisto.args())
+ result = jira_req('POST', url, json.dumps(issue))
+ j_res = result.json()
+
+    md_and_context = generate_md_context_create_issue(j_res, project_key=demisto.getArg('projectKey'),
+                                                      project_name=demisto.getArg('projectName'))
+ human_readable = tableToMarkdown(demisto.command(), md_and_context['md'], "")
+ contents = j_res
+ outputs = md_and_context['context']
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+
+def edit_issue_command(issue_id, headers=None, status=None, **_):
+ url = f'rest/api/latest/issue/{issue_id}/'
+ issue = get_issue_fields(**demisto.args())
+ jira_req('PUT', url, json.dumps(issue))
+ if status:
+ edit_status(issue_id, status)
+ return get_issue(issue_id, headers, is_update=True)
+
+
+def edit_status(issue_id, status):
+ # check for all authorized transitions available for this user
+ # if the requested transition is available, execute it.
+ url = f'rest/api/2/issue/{issue_id}/transitions'
+ result = jira_req('GET', url)
+ j_res = result.json()
+ transitions = [transition.get('name') for transition in j_res.get('transitions')]
+ for i, transition in enumerate(transitions):
+ if transition.lower() == status.lower():
+ url = f'rest/api/latest/issue/{issue_id}/transitions?expand=transitions.fields'
+ json_body = {"transition": {"id": str(j_res.get('transitions')[i].get('id'))}}
+ return jira_req('POST', url, json.dumps(json_body))
+
+ return_error(f'Status "{status}" not found. \nValid transitions are: {transitions} \n')
+
+
+def get_comments_command(issue_id):
+ url = f'rest/api/latest/issue/{issue_id}/comment'
+ result = jira_req('GET', url)
+ body = result.json()
+ comments = []
+ if body.get("comments"):
+ for comment in body.get("comments"):
+ comments.append({
+ 'Comment': comment.get("body"),
+ 'User': demisto.get(comment, 'updateAuthor.name'),
+ 'Created': comment.get("created")
+ })
+
+ human_readable = tableToMarkdown("Comments", comments)
+ contents = body
+ outputs = {'Ticket(val.Id == obj.Id)': {'Id': issue_id, "Comment": comments}}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+ else:
+ demisto.results('No comments were found in the ticket')
+
+
+def add_comment_command(issue_id, comment, visibility=''):
+ url = f'rest/api/latest/issue/{issue_id}/comment'
+ comment = {
+ "body": comment
+ }
+ if visibility:
+ comment["visibility"] = {
+ "type": "role",
+ "value": visibility
+ }
+ result = jira_req('POST', url, json.dumps(comment))
+ data = result.json()
+ md_list = []
+ if not isinstance(data, list):
+ data = [data]
+ for element in data:
+ md_obj = {
+ 'id': demisto.get(element, 'id'),
+ 'key': demisto.get(element, 'updateAuthor.key'),
+ 'comment': demisto.get(element, 'body'),
+ 'ticket_link': demisto.get(element, 'self')
+ }
+ md_list.append(md_obj)
+
+ human_readable = tableToMarkdown(demisto.command(), md_list, "")
+ contents = data
+ return_outputs(readable_output=human_readable, outputs={}, raw_response=contents)
+
+
+def issue_upload_command(issue_id, upload):
+ j_res = upload_file(upload, issue_id)
+ md = generate_md_upload_issue(j_res, issue_id)
+ human_readable = tableToMarkdown(demisto.command(), md, "")
+ contents = j_res
+ return_outputs(readable_output=human_readable, outputs={}, raw_response=contents)
+
+
+def upload_file(entry_id, issue_id):
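+    # Jira requires the 'X-Atlassian-Token: no-check' header to bypass XSRF
+    # validation on multipart attachment uploads.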
+ headers = {
+ 'X-Atlassian-Token': 'no-check',
+ }
+ res = requests.post(
+ url=BASE_URL + f'rest/api/latest/issue/{issue_id}/attachments',
+ headers=headers,
+ files={'file': get_file(entry_id)},
+ auth=(USERNAME, API_TOKEN or PASSWORD),
+ verify=USE_SSL
+ )
+
+ if not res.ok:
+ return_error(
+ f'Failed to execute request, status code:{res.status_code}\nBody: {res.text}')
+
+ return res.json()
+
+
+def get_file(entry_id):
+ get_file_path_res = demisto.getFilePath(entry_id)
+ file_path = get_file_path_res["path"]
+ file_name = get_file_path_res["name"]
+ with open(file_path, 'rb') as fopen:
+ file_bytes = fopen.read()
+ return file_name, file_bytes
+
+
+def add_link_command(issue_id, title, url, summary=None, global_id=None, relationship=None,
+ application_type=None, application_name=None):
+ req_url = f'rest/api/latest/issue/{issue_id}/remotelink'
+ link = {
+ "object": {
+ "url": url,
+ "title": title
+ }
+ }
+
+ if summary:
+ link['summary'] = summary
+ if global_id:
+ link['globalId'] = global_id
+ if relationship:
+ link['relationship'] = relationship
+ if application_type or application_name:
+ link['application'] = {}
+ if application_type:
+ link['application']['type'] = application_type
+        if application_name:
+ link['application']['name'] = application_name
+
+ result = jira_req('POST', req_url, json.dumps(link))
+ data = result.json()
+ md_list = []
+ if not isinstance(data, list):
+ data = [data]
+ for element in data:
+ md_obj = {
+ 'id': demisto.get(element, 'id'),
+ 'key': demisto.get(element, 'updateAuthor.key'),
+ 'comment': demisto.get(element, 'body'),
+ 'ticket_link': demisto.get(element, 'self')
+ }
+ md_list.append(md_obj)
+ human_readable = tableToMarkdown(demisto.command(), md_list, "", removeNull=True)
+ contents = data
+ return_outputs(readable_output=human_readable, outputs={}, raw_response=contents)
+
+
+def delete_issue_command(issue_id_or_key):
+ url = f'rest/api/latest/issue/{issue_id_or_key}'
+ issue = get_issue_fields(**demisto.args())
+ result = jira_req('DELETE', url, json.dumps(issue))
+ if result.status_code == 204:
+ demisto.results('Issue deleted successfully.')
+ else:
+ demisto.results('Failed to delete issue.')
+
+
+def test_module():
+ """
+    Performs a basic GET request and runs the fetch query to verify connectivity and authentication
+ """
+ req_res = jira_req('GET', 'rest/api/latest/myself')
+ run_query(demisto.getParam('query'), max_results=1)
+ if req_res.ok:
+ demisto.results('ok')
+
+
+def fetch_incidents(query, id_offset=0, fetch_by_created=None, **_):
+ last_run = demisto.getLastRun()
+ demisto.debug(f"last_run: {last_run}" if last_run else 'last_run is empty')
+ id_offset = last_run.get("idOffset") if (last_run and last_run.get("idOffset")) else id_offset
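+    # The offset is the highest issue ID ingested so far: fetch issues with
+    # id >= offset and skip the offset issue itself, so each ticket becomes an
+    # incident exactly once across runs.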
+
+ incidents, max_results = [], 50
+ if id_offset:
+ query = f'{query} AND id >= {id_offset}'
+ if fetch_by_created:
+ query = f'{query} AND created>-1m'
+ res = run_query(query, '', max_results)
+ curr_id = id_offset
+ for ticket in res.get('issues'):
+ ticket_id = int(ticket.get("id"))
+ if ticket_id == curr_id:
+ continue
+
+ id_offset = max(int(id_offset), ticket_id)
+ incidents.append(create_incident_from_ticket(ticket))
+
+ demisto.setLastRun({"idOffset": id_offset})
+ demisto.incidents(incidents)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+demisto.debug('Command being called is %s' % (demisto.command()))
+try:
+ # Remove proxy if not set to true in params
+ handle_proxy()
+
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+
+ elif demisto.command() == 'fetch-incidents':
+ # Set and define the fetch incidents command to run after activated via integration settings.
+ fetch_incidents(**snakify(demisto.params()))
+
+ elif demisto.command() == 'jira-get-issue':
+ get_issue(**snakify(demisto.args()))
+
+ elif demisto.command() == 'jira-issue-query':
+ issue_query_command(**snakify(demisto.args()))
+
+ elif demisto.command() == 'jira-create-issue':
+ create_issue_command()
+
+ elif demisto.command() == 'jira-edit-issue':
+ edit_issue_command(**snakify(demisto.args()))
+
+ elif demisto.command() == 'jira-get-comments':
+ get_comments_command(**snakify(demisto.args()))
+
+ elif demisto.command() == 'jira-issue-add-comment':
+ add_comment_command(**snakify(demisto.args()))
+
+ elif demisto.command() == 'jira-issue-upload-file':
+ issue_upload_command(**snakify(demisto.args()))
+
+ elif demisto.command() == 'jira-issue-add-link':
+ add_link_command(**snakify(demisto.args()))
+
+ elif demisto.command() == 'jira-delete-issue':
+ delete_issue_command(**snakify(demisto.args()))
+
+ elif demisto.command() == 'jira-get-id-offset':
+ get_id_offset()
+
+
+except Exception as ex:
+ return_error(str(ex))
+
+finally:
+ LOG.print_log()
diff --git a/Integrations/JiraV2/JiraV2.yml b/Integrations/JiraV2/JiraV2.yml
new file mode 100644
index 000000000000..cd16168b06b9
--- /dev/null
+++ b/Integrations/JiraV2/JiraV2.yml
@@ -0,0 +1,305 @@
+commonfields:
+ id: jira-v2
+ version: -1
+name: jira-v2
+display: Atlassian Jira (v2)
+fromversion: 2.6.0
+category: Case Management
+description: Use the Jira integration to manage issues and create Demisto incidents from Jira projects.
+configuration:
+- display: 'Jira URL, for example: https://demisto.atlassian.net/'
+ name: url
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Username
+ name: username
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Password
+ name: password
+ defaultvalue: ""
+ type: 4
+ required: false
+- display: ConsumerKey
+ name: consumerKey
+ defaultvalue: OauthKey
+ type: 0
+ required: false
+- display: AccessToken
+ name: accessToken
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: PrivateKey
+ name: privateKey
+ defaultvalue: ""
+ type: 14
+ required: false
+- display: Query (in JQL) for fetching incidents
+ name: query
+ defaultvalue: status!=done
+ type: 0
+ required: false
+- display: Issue index to start fetching incidents from
+ name: idOffset
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: ''
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: "false"
+ type: 8
+ required: false
+- display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+- display: Use created field to fetch incidents
+ name: fetchByCreated
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: API token
+ name: APItoken
+ defaultvalue: ""
+ type: 4
+ required: false
+script:
+ script: ''
+ type: python
+ subtype: python3
+ commands:
+ - name: jira-issue-query
+ arguments:
+ - name: query
+ required: true
+ default: true
+ description: The JQL query string.
+ - name: startAt
+ description: The index (integer) of the first issue to return (0-based).
+ - name: maxResults
+ description: The maximum number of issues to return (default is 50). The maximum
+ allowed value is dictated by the JIRA property 'jira.search.views.default.max'.
+ If you specify a value that is higher than this number, your search results
+ will be truncated.
+ - name: headers
+ description: The headers to display in human readable format.
+ outputs:
+ - contextPath: Ticket.Id
+ description: The ID of the ticket.
+ - contextPath: Ticket.Key
+ description: The key of the ticket.
+ - contextPath: Ticket.Assignee
+ description: The user assigned to the ticket.
+ - contextPath: Ticket.Creator
+ description: The user who created the ticket.
+ - contextPath: Ticket.Summary
+ description: The summary of the ticket.
+ - contextPath: Ticket.Status
+ description: The status of the ticket.
+ description: Queries Jira issues.
+ - name: jira-get-issue
+ arguments:
+ - name: issueId
+ required: true
+ default: true
+ description: The ID of the issue.
+ - name: headers
+ description: Headers to display in human readable format.
+ - name: getAttachments
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ defaultValue: "false"
+ description: If "true", retrives the issue's attachments.
+ - name: expandLinks
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ defaultValue: "false"
+ description: If "true", expands the issue's links.
+ outputs:
+ - contextPath: Ticket.Id
+ description: The ID of the ticket.
+ - contextPath: Ticket.Key
+      description: The key of the ticket.
+ - contextPath: Ticket.Assignee
+ description: The user assigned to the ticket.
+ - contextPath: Ticket.Creator
+ description: The user who created the ticket.
+ - contextPath: Ticket.Summary
+ description: The summary of the ticket.
+ - contextPath: Ticket.Status
+ description: The status of the ticket.
+ - contextPath: File.Size
+ description: The size of the file.
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ - contextPath: File.Name
+ description: The name of the file.
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ description: Fetches an issue from Jira.
+ - name: jira-create-issue
+ arguments:
+ - name: issueJson
+ description: The issue object (in JSON format).
+ - name: summary
+ required: true
+ description: The summary of the issue.
+ - name: projectKey
+ description: The project key with which to associate the issue.
+ - name: issueTypeName
+      description: 'Select an issue type by name, for example: "Problem".'
+ - name: issueTypeId
+ description: Select an issue type by its numeric ID.
+ - name: projectName
+ description: The project name with which to associate the issue.
+ - name: description
+ description: A description of the issue.
+ - name: labels
+      description: A CSV list of labels.
+    - name: priority
+      description: 'The priority name, for example: "High" or "Medium".'
+    - name: dueDate
+      description: 'The due date for the issue (in the format: 2018-03-11).'
+ - name: assignee
+ description: The name of the assignee.
+ - name: reporter
+ description: The name of the reporter.
+ - name: parentIssueKey
+ description: The parent issue key (if you create a sub-task).
+ - name: parentIssueId
+ description: The parent issue ID (if you create a sub-task).
+ outputs:
+ - contextPath: Ticket.Id
+ description: The ID of the ticket.
+ - contextPath: Ticket.Key
+ description: The key of the ticket.
+ description: Creates a new issue in Jira.
+ - name: jira-issue-upload-file
+ arguments:
+ - name: issueId
+ required: true
+ description: The ID of the issue.
+ - name: upload
+ description: The entry ID to upload.
+ description: Uploads a file attachment to an issue.
+ - name: jira-issue-add-comment
+ arguments:
+ - name: issueId
+ required: true
+ default: true
+ description: The ID of the issue.
+ - name: comment
+ required: true
+ description: The actual comment body.
+ - name: visibility
+      description: 'The roles that can view the comment, for example: "Administrators".'
+ description: Adds a new comment to an existing Jira issue.
+ - name: jira-issue-add-link
+ arguments:
+ - name: globalId
+ description: If a globalId is provided and a remote issue link exists with that
+ globalId, the remote issue link is updated.
+ - name: relationship
+      description: 'The relationship of the object to the issue, for example: "causes".'
+ - name: url
+ required: true
+ description: The URL link.
+ - name: title
+ required: true
+ description: The title of the link.
+ - name: summary
+ description: The summary of the link.
+ - name: issueId
+ required: true
+ description: The ID of the issue.
+ - name: applicationType
+      description: 'The application type of the linked remote application, for example: "com.atlassian.confluence".'
+    - name: applicationName
+      description: 'The application name of the linked remote application, for example: "My Confluence Instance".'
+ description: Creates (or updates) an issue link.
+ - name: jira-edit-issue
+ arguments:
+ - name: issueId
+ required: true
+ description: The ID of the issue to edit.
+ - name: issueJson
+ description: The issue object (in JSON format).
+ - name: summary
+ description: The summary of the issue.
+ - name: description
+ description: The description of the issue.
+ - name: labels
+      description: A CSV list of labels.
+    - name: priority
+      description: 'A priority name, for example: "High" or "Medium".'
+ - name: dueDate
+ description: The due date for the issue (in the format 2018-03-11).
+ - name: assignee
+ description: The name of the assignee.
+ - name: status
+ description: The name of the status.
+ outputs:
+ - contextPath: Ticket.Id
+ description: The ID of the ticket.
+ - contextPath: Ticket.Key
+ description: The key of the ticket.
+ - contextPath: Ticket.Assignee
+      description: The user assigned to the ticket.
+ - contextPath: Ticket.Creator
+ description: The user who created the ticket.
+ - contextPath: Ticket.Summary
+ description: The summary of the ticket.
+ - contextPath: Ticket.Status
+ description: The status of the ticket.
+    description: Modifies an issue in Jira.
+ - name: jira-get-comments
+ arguments:
+ - name: issueId
+ required: true
+      description: The ID of the issue whose comments to return.
+ outputs:
+ - contextPath: Ticket.Comment.Comment
+ description: The text of the comment.
+ type: string
+ - contextPath: Ticket.Comment.Created
+      description: The comment creation date.
+ type: string
+ - contextPath: Ticket.Comment.User
+ description: The user that created the comment.
+ type: string
+ description: Returns the comments added to a ticket.
+ - name: jira-delete-issue
+ arguments:
+ - name: issueIdOrKey
+ required: true
+ description: The ID or key of the issue.
+ description: Deletes an issue in Jira.
+ - name: jira-get-id-offset
+ outputs:
+ - contextPath: Ticket.idOffSet
+ description: The ID offset.
+ type: string
+    description: Returns the ID offset, i.e., the ID of the first issue.
+ dockerimage: demisto/python3:3.7.2.200
+ isfetch: true
+ runonce: false
+tests:
+ - Jira-v2-Test
\ No newline at end of file
diff --git a/Integrations/JiraV2/JiraV2_description.md b/Integrations/JiraV2/JiraV2_description.md
new file mode 100644
index 000000000000..33df76dd56c9
--- /dev/null
+++ b/Integrations/JiraV2/JiraV2_description.md
@@ -0,0 +1,8 @@
+To fetch incidents, update the query parameter according to the [JQL documentation](https://confluence.atlassian.com/jiracoreserver073/advanced-searching-861257209.html).
+Specify the project you want to fetch from, for example: `project = soc AND status = open`.
+This fetches all tickets in your system (including past tickets) that are in the Open status and in the soc project. After the first run, incidents are created only for new tickets.
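+You can make the query as narrow as JQL allows; for example, a query such as `project = soc AND priority = High AND created >= -1d` (hypothetical project key) would fetch only high-priority tickets created in the last day.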
+
+If you want the first fetch to start from a specific issue, use the "Issue index to start fetching incidents from" parameter.
+
+To fetch incidents by creation time (using the Created field) instead of by issue ID, select the "Use created field to fetch incidents" checkbox.
+To use OAuth 1.0, follow [this tutorial](https://developer.atlassian.com/cloud/jira/platform/jira-rest-api-oauth-authentication/) to get the access token. Authorizing with OAuth 1.0 requires the Access Token, Private Key, and Consumer Key parameters.
diff --git a/Integrations/JiraV2/JiraV2_image.png b/Integrations/JiraV2/JiraV2_image.png
new file mode 100644
index 000000000000..4ac6414e6d65
Binary files /dev/null and b/Integrations/JiraV2/JiraV2_image.png differ
diff --git a/Integrations/JoeSecurity/JoeSecurity.py b/Integrations/JoeSecurity/JoeSecurity.py
new file mode 100644
index 000000000000..d201aa4eafea
--- /dev/null
+++ b/Integrations/JoeSecurity/JoeSecurity.py
@@ -0,0 +1,373 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+import time
+import shutil
+import requests
+from distutils.util import strtobool
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+
+''' GLOBAL VARS '''
+BASE_URL = urljoin(demisto.params().get('url'), 'api/')
+USE_SSL = not demisto.params().get('insecure', False)
+MAX_POLLS = int(demisto.params().get('maxpolls', 300))
+USE_PROXY = demisto.params().get('proxy', True)
+
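+# Note: this text is compared verbatim against the error message returned by the
+# Joe Sandbox API in http_post(), so it must not be reworded.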
+nothing_to_analyze_message = 'We found nothing to analyze in your uploaded email' \
+ '(possibly all elements where whitelisted, check Input filtering in your Settings).'
+nothing_to_analyze_output = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': 'We found nothing to analyze in your uploaded email',
+ 'HumanReadable': 'We found nothing to analyze in your uploaded email'
+}
+
+
+''' HELPER FUNCTIONS '''
+
+
+def http_post(url_suffix, data=None, files=None, parse_json=True):
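+    # The API key from the integration parameters is attached to every request;
+    # callers only supply endpoint-specific fields.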
+ data = {} if data is None else data
+
+ LOG('running request with url=%s\n\tdata=%s\n\tfiles=%s' % (BASE_URL + url_suffix,
+ data, files, ))
+ data.setdefault('apikey', demisto.params()['api_key'])
+
+ res = requests.post(BASE_URL + url_suffix, verify=USE_SSL, data=data, files=files)
+
+ if res.status_code == 403:
+ raise Exception('API Key is incorrect')
+
+ if res.status_code != 200:
+ error_msg = res.json()['errors'][0]['message']
+ if error_msg == nothing_to_analyze_message:
+ return 'nothing_to_analyze'
+
+        LOG('result is: %s' % (res.json(), ))
+ raise Exception('Your request failed with the following error: %s.\n%s' % (res.reason, error_msg, ))
+
+ if parse_json:
+ return res.json()
+ else:
+ return res.content
+
+
+def analysis_to_entry(title, info):
+ if not isinstance(info, list):
+ info = [info]
+
+ context = []
+ table = []
+ dbot_scores = []
+ for analysis in info:
+ analysis_info = {
+ 'ID': analysis['webid'], # for detonate generic polling
+ 'WebID': analysis['webid'],
+ 'SampleName': analysis['filename'],
+ 'Status': analysis['status'],
+ 'Comments': analysis['comments'],
+ 'Time': analysis['time'],
+ 'MD5': analysis['md5'],
+ 'SHA1': analysis['sha1'],
+ 'SHA256': analysis['sha256'],
+ 'Systems': [run['system'] for run in analysis['runs']],
+ 'Result': ', '.join([run['detection'] for run in analysis['runs']]),
+ 'Errors': [run['error'] for run in analysis['runs']],
+ }
+
+ analysis_context = dict(analysis_info)
+ analysis_context['Runs'] = analysis['runs']
+
+ analysis_table = dict(analysis_info)
+ if not any(analysis_table['Errors']):
+ analysis_table['Errors'] = None
+
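+        # DBot score scale: 0 = unknown, 1 = good, 2 = suspicious, 3 = bad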
+ dbot_score = 0
+ malicious = None
+ if 'malicious' in analysis_info['Result']:
+ dbot_score = 3
+ malicious = {
+ 'Vendor': 'JoeSecurity',
+ 'Detections': ', '.join(set([run['detection'] for run in analysis['runs']])),
+ 'SHA1': analysis_info['SHA1'],
+ }
+ elif 'suspicious' in analysis_info['Result']:
+ dbot_score = 2
+ elif 'clean' in analysis_info['Result']:
+ dbot_score = 1
+
+ dbot_scores.append({
+ 'Vendor': 'JoeSecurity',
+ 'Indicator': analysis_info['MD5'],
+ 'Type': 'file' if analysis_info['MD5'] else 'url',
+ 'Score': dbot_score,
+ 'Malicious': malicious,
+ })
+ context.append(analysis_context)
+ table.append(analysis_table)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, table, removeNull=True),
+ 'EntryContext': {'Joe.Analysis(val.ID && val.ID == obj.ID)': createContext(context, removeNull=True),
+ 'DBotScore': createContext(dbot_scores, removeNull=True), }
+ }
+
+ return entry
+
+
+def poll_webid(web_id):
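+    # poll the analysis status once per second, giving up after MAX_POLLS attempts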
+ result = {'data': {'status': 'pending'}}
+ max_polls = MAX_POLLS
+
+ while (max_polls >= 0) and result['data']['status'] != 'finished':
+ if result['data']['status'] != 'pending':
+ LOG('error while polling: result is %s' % (result, ))
+ result = info_request(web_id)
+ time.sleep(1)
+ max_polls -= 1
+
+ LOG('reached max_polls #%d' % (max_polls, ))
+ if max_polls < 0:
+ return analysis_to_entry('Polling timeout on Analysis #' + web_id, result['data'])
+ else:
+ return analysis_to_entry('Analysis #' + web_id, result['data'])
+
+
+''' FUNCTIONS '''
+
+
+def is_online():
+ cmd_url = 'v2/server/online'
+ res = http_post(cmd_url)
+ return res['data']['online']
+
+
+def list_analysis():
+ cmd_url = 'v2/analysis/list'
+ res = http_post(cmd_url)
+
+ data = [info_request(web_id['webid'])['data'] for web_id in res['data']]
+ return analysis_to_entry('All Analyses:', data)
+
+
+def analysis_info():
+ ids = demisto.args().get('webid')
+ if type(ids) in STRING_TYPES:
+ ids = ids.split(',')
+ LOG('info: web_id = %s' % (ids, ))
+ res = [info_request(webid)['data'] for webid in ids]
+ return analysis_to_entry('Analyses:', res)
+
+
+def info_request(web_id):
+ cmd_url = 'v2/analysis/info'
+ return http_post(cmd_url, data={'webid': web_id})
+
+
+def search():
+ cmd_url = 'v2/analysis/search'
+ query = demisto.args().get('query')
+ res = http_post(cmd_url, data={'q': query})
+ if len(res['data']) == 0:
+ return 'No Result was found.'
+
+ data = [info_request(web_id['webid'])['data'] for web_id in res['data']]
+ return analysis_to_entry('Analysis Search Results:', data)
+
+
+def analyse_url():
+ args = demisto.args()
+ url = args.get('url')
+ internet_access = bool(strtobool(args.get('internet-access', 'true')))
+ comments = args.get('comments')
+ systems = args.get('systems')
+
+ should_wait = bool(strtobool(demisto.get(args, 'should_wait')))
+
+ return analyse_url_request(url, should_wait, internet_access, comments, systems)
+
+
+def analyse_url_request(url, should_wait, internet_access, comments='', systems=''):
+ data = {
+ 'accept-tac': 1,
+ 'url': url,
+ 'internet-access': 1 if internet_access else 0,
+ }
+ if comments != '':
+ data['comments'] = comments
+ if systems != '':
+ data['systems[]'] = [s.strip() for s in systems.split(',')]
+ res = http_post('v2/analysis/submit', data=data)
+
+ if 'errors' in res:
+ LOG('Error! in command analyse_url: url=%s' % (url, ))
+ LOG('got the following errors:\n' + '\n'.join(e['message'] for e in res['errors']))
+ raise Exception('command failed to run.')
+
+ if should_wait:
+ return poll_webid(res['data']['webids'][0])
+
+ web_id = res['data']['webids'][0]
+ result = info_request(web_id)
+ return analysis_to_entry('Analysis #%s' % (web_id, ), result['data'])
+
+
+def analyse_sample():
+ args = demisto.args()
+ file_entry = args.get('file_id', '')
+ if type(file_entry) in STRING_TYPES:
+ file_entry = [f for f in file_entry.split(',') if f != '']
+ sample_url = args.get('sample_url', '')
+ if type(sample_url) in STRING_TYPES:
+ sample_url = [f for f in sample_url.split(',') if f != '']
+ internet_access = bool(strtobool(args.get('internet-access', 'true')))
+ should_wait = bool(strtobool(demisto.get(args, 'should_wait')))
+ comments = args.get('comments', '')
+ systems = args.get('systems', '')
+
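+    # exactly one of file_id / sample_url must be supplied -- reject none or both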
+ if (len(file_entry) == 0 and len(sample_url) == 0) or ([] not in [file_entry, sample_url]):
+ raise ValueError('You must specify one (and only one) of the following: sample_url, file_id.')
+
+ LOG('analysing sample')
+ if len(file_entry) != 0:
+ return [analyse_sample_file_request(f, should_wait, internet_access, comments, systems) for f in file_entry]
+ else:
+ return [analyse_sample_url_request(s, should_wait, internet_access, comments, systems) for s in sample_url]
+
+
+def analyse_sample_file_request(file_entry, should_wait, internet_access, comments='', systems=''):
+ data = {
+ 'accept-tac': 1,
+ 'internet-access': 1 if internet_access else 0,
+ }
+ if comments != '':
+ data['comments'] = comments
+ if systems != '':
+ data['systems[]'] = [s.strip() for s in systems.split(',')] # type: ignore
+
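+    # the server stores uploaded files under an internal path; copy the file to its
+    # original name so Joe Sandbox receives a meaningful sample filename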
+ shutil.copy(demisto.getFilePath(file_entry)['path'], demisto.getFilePath(file_entry)['name'])
+
+ with open(demisto.getFilePath(file_entry)['name'], 'rb') as f:
+ res = http_post('v2/analysis/submit', data=data, files={'sample': f})
+
+ if res == 'nothing_to_analyze':
+ return nothing_to_analyze_output
+
+ if 'errors' in res:
+ LOG('Error! in command sample file: file_entry=%s' % (file_entry, ))
+ LOG('got the following errors:\n' + '\n'.join(e['message'] for e in res['errors']))
+ raise Exception('command failed to run.')
+
+    if os.path.isfile(demisto.getFilePath(file_entry)['name']):
+        os.remove(demisto.getFilePath(file_entry)['name'])  # shutil.rmtree only removes directories, so use os.remove for the temp copy
+
+ if should_wait:
+ return poll_webid(res['data']['webids'][0])
+
+ web_id = res['data']['webids'][0]
+ result = info_request(web_id)
+ return analysis_to_entry('Analysis #%s' % (web_id, ), result['data'])
+
+
+def analyse_sample_url_request(sample_url, should_wait, internet_access, comments, systems):
+ data = {
+ 'accept-tac': 1,
+ 'sample-url': sample_url,
+ 'internet-access': 1 if internet_access else 0,
+ }
+ if comments != '':
+ data['comments'] = comments
+ if systems != '':
+ data['systems[]'] = [s.strip() for s in systems.split(',')]
+
+ res = http_post('v2/analysis/submit', data=data)
+
+ if res == 'nothing_to_analyze':
+ return nothing_to_analyze_output
+
+ if 'errors' in res:
+ LOG('Error! in command sample file: file url=%s' % (sample_url, ))
+ LOG('got the following errors:\n' + '\n'.join(e['message'] for e in res['errors']))
+ raise Exception('command failed to run.')
+
+ if should_wait:
+ return poll_webid(res['data']['webids'][0])
+
+ web_id = res['data']['webids'][0]
+    result = info_request(web_id)
+ return analysis_to_entry('Analysis #%s' % (web_id, ), result['data'])
+
+
+def download_report():
+ args = demisto.args()
+ webid = args.get('webid')
+ rsc_type = args.get('type')
+ return download_request(webid, rsc_type)
+
+
+def download_sample():
+ args = demisto.args()
+ webid = args.get('webid')
+ rsc_type = 'sample'
+ return download_request(webid, rsc_type)
+
+
+def download_request(webid, rsc_type):
+ res = http_post('v2/analysis/download', data={'webid': webid, 'type': rsc_type.lower()}, parse_json=False)
+
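+    # downloaded samples get a ".dontrun" extension so they cannot be executed by accident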
+    info = info_request(webid).get('data', {})  # unwrap the 'data' envelope so the filename lookup below can succeed
+    if rsc_type == 'sample':
+        return fileResult('%s.dontrun' % (info.get('filename', webid), ), res)
+    else:
+        return fileResult('%s_report.%s' % (info.get('filename', webid), rsc_type, ), res, entryTypes['entryInfoFile'])
+
+
+''' EXECUTION CODE '''
+LOG('command is %s' % (demisto.command(), ))
+try:
+ handle_proxy()
+ if demisto.command() in ['test-module', 'joe-is-online']:
+ # This is the call made when pressing the integration test button.
+ if is_online():
+ demisto.results('ok')
+ else:
+ demisto.results('not online')
+ elif demisto.command() == 'joe-list-analysis':
+ demisto.results(list_analysis())
+ elif demisto.command() == 'joe-analysis-info':
+ demisto.results(analysis_info())
+ elif demisto.command() == 'joe-analysis-submit-url':
+ demisto.results(analyse_url())
+ elif demisto.command() == 'joe-detonate-url':
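+        # the detonate commands are the submit commands forced to poll until the analysis finishes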
+ demisto.args()['should_wait'] = 'True'
+ demisto.results(analyse_url())
+ elif demisto.command() == 'joe-analysis-submit-sample':
+ demisto.results(analyse_sample())
+ elif demisto.command() == 'joe-detonate-file':
+ demisto.args()['should_wait'] = 'True'
+ demisto.results(analyse_sample())
+ elif demisto.command() == 'joe-download-report':
+ demisto.results(download_report())
+ elif demisto.command() == 'joe-download-sample':
+ demisto.results(download_sample())
+ elif demisto.command() == 'joe-search':
+ demisto.results(search())
+
+except Exception as e:
+ if demisto.params().get('verbose'):
+        LOG(str(e))  # str(e) works on both Python 2 and 3; e.message is Python 2 only
+ if demisto.command() != 'test-module':
+ LOG.print_log()
+
+ demisto.results({
+ 'Type': entryTypes['error'],
+ 'ContentsFormat': formats['text'],
+        'Contents': 'An error has occurred: %s' % (str(e), ),
+ })
diff --git a/Integrations/JoeSecurity/JoeSecurity.yml b/Integrations/JoeSecurity/JoeSecurity.yml
new file mode 100644
index 000000000000..da87713f0553
--- /dev/null
+++ b/Integrations/JoeSecurity/JoeSecurity.yml
@@ -0,0 +1,757 @@
+category: Forensics & Malware Analysis
+commonfields:
+ id: Joe Security
+ version: -1
+configuration:
+- defaultvalue: https://jbxcloud.joesecurity.org
+  display: Joe Security URL
+ name: url
+ required: true
+ type: 0
+- display: API Key
+ name: api_key
+ required: true
+ type: 4
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: '300'
+ display: 'Max. Polling Time (in seconds):'
+ name: maxpolls
+ required: false
+ type: 0
+- display: Verbose (show log in case of error)
+ name: verbose
+ required: false
+ type: 8
+- defaultvalue: 'true'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Joe Security's cloud-based malware analysis sandbox.
+display: Joe Security
+name: Joe Security
+script:
+ commands:
+ - deprecated: false
+ description: Check if Joe Sandbox is online or in maintenance mode.
+ execution: false
+ name: joe-is-online
+ - arguments:
+ - default: true
+      description: The URL to submit for analysis.
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+      description: Whether the command should poll for the result of the analysis.
+ isArray: false
+ name: should_wait
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+      description: Comments for the analysis.
+ isArray: false
+ name: comments
+ required: false
+ secret: false
+ - default: false
+ defaultValue: w7x64
+      description: 'Operating system to run the analysis on (comma-separated). Possible
+        values are: w7, w7x64, w7_1, w7_2, w7native, android2, android3, mac1, w7l,
+        w7x64l, w10, android4, w7x64native, w7_3, w10native, android5native_1, w7_4,
+        w7_5, w10x64, w7x64_hvm, android6, iphone1, w7_sec, macvm, w7_lang_packs,
+        w7x64native_hvm, lnxubuntu1, lnxcentos1, android7_nougat'
+ isArray: false
+ name: systems
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'True'
+      description: Enable full internet access. Default is "True".
+ isArray: false
+ name: internet-access
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Submit a url for analysis.
+ execution: false
+ name: joe-analysis-submit-url
+ outputs:
+ - contextPath: Joe.Analysis.WebID
+ description: Web ID
+ type: string
+ - contextPath: Joe.Analysis.SampleName
+      description: The sample data. Can be a file name or a URL.
+ type: string
+ - contextPath: Joe.Analysis.Status
+ description: Analysis Status
+ type: string
+ - contextPath: Joe.Analysis.Comments
+ description: Analysis Comments
+ type: string
+ - contextPath: Joe.Analysis.Time
+ description: Submitted Time
+ type: date
+ - contextPath: Joe.Analysis.Runs
+ description: Sub-Analysis Information
+ type: Unknown
+ - contextPath: Joe.Analysis.Result
+ description: Analysis Results
+ type: string
+ - contextPath: Joe.Analysis.Errors
+      description: Errors raised during the analysis
+ type: Unknown
+ - contextPath: Joe.Analysis.Systems
+ description: Analysis OS
+ type: Unknown
+ - contextPath: Joe.Analysis.MD5
+ description: MD5 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA1
+ description: SHA1 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA256
+ description: SHA256 of analysis sample
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The name of the sample file or URL
+ type: Unknown
+ - contextPath: DBotScore.Type
+ description: '''url'' for url samples, otherwise ''file'''
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: DBotScore.Malicious.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Malicious.Detections
+ description: The sub analysis detection statuses
+ type: string
+ - contextPath: DBotScore.Malicious.SHA1
+ description: The SHA1 of the file
+ type: string
+ - arguments:
+ - default: true
+      description: The URL to submit for analysis.
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ - default: false
+      description: Comments for the analysis.
+ isArray: false
+ name: comments
+ required: false
+ secret: false
+ - default: false
+ defaultValue: w7x64
+      description: 'Operating system to run the analysis on (comma-separated). Possible
+        values are: w7, w7x64, w7_1, w7_2, w7native, android2, android3, mac1, w7l,
+        w7x64l, w10, android4, w7x64native, w7_3, w10native, android5native_1, w7_4,
+        w7_5, w10x64, w7x64_hvm, android6, iphone1, w7_sec, macvm, w7_lang_packs,
+        w7x64native_hvm, lnxubuntu1, lnxcentos1, android7_nougat'
+ isArray: false
+ name: systems
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'True'
+      description: Enable full internet access. Default is "True".
+ isArray: false
+ name: internet-access
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: true
+ description: Submit a url for analysis.
+ execution: false
+ name: joe-detonate-url
+ outputs:
+ - contextPath: Joe.Analysis.WebID
+ description: Web ID
+ type: string
+ - contextPath: Joe.Analysis.SampleName
+      description: The sample data. Can be a file name or a URL.
+ type: string
+ - contextPath: Joe.Analysis.Status
+ description: Analysis Status
+ type: string
+ - contextPath: Joe.Analysis.Comments
+ description: Analysis Comments
+ type: string
+ - contextPath: Joe.Analysis.Time
+ description: Submitted Time
+ type: date
+ - contextPath: Joe.Analysis.Runs
+ description: Sub-Analysis Information
+ type: Unknown
+ - contextPath: Joe.Analysis.Result
+ description: Analysis Results
+ type: string
+ - contextPath: Joe.Analysis.Errors
+      description: Errors raised during the analysis
+ type: Unknown
+ - contextPath: Joe.Analysis.Systems
+ description: Analysis OS
+ type: Unknown
+ - contextPath: Joe.Analysis.MD5
+ description: MD5 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA1
+ description: SHA1 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA256
+ description: SHA256 of analysis sample
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The name of the sample file or URL
+ type: Unknown
+ - contextPath: DBotScore.Type
+ description: '''url'' for url samples, otherwise ''file'''
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: DBotScore.Malicious.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Malicious.Detections
+ description: The sub analysis detection statuses
+ type: string
+ - contextPath: DBotScore.Malicious.SHA1
+ description: The SHA1 of the file
+ type: string
+ - arguments:
+ - default: true
+      description: Web IDs; supports comma-separated lists.
+ isArray: true
+ name: webid
+ required: true
+ secret: false
+ deprecated: false
+ description: Show information about an analysis.
+ execution: false
+ name: joe-analysis-info
+ outputs:
+ - contextPath: Joe.Analysis.WebID
+ description: Web ID
+ type: string
+ - contextPath: Joe.Analysis.SampleName
+      description: The sample data. Can be a file name or a URL.
+ type: string
+ - contextPath: Joe.Analysis.Status
+ description: Analysis Status
+ type: string
+ - contextPath: Joe.Analysis.Comments
+ description: Analysis Comments
+ type: string
+ - contextPath: Joe.Analysis.Time
+ description: Submitted Time
+ type: date
+ - contextPath: Joe.Analysis.Runs
+ description: Sub-Analysis Information
+ type: Unknown
+ - contextPath: Joe.Analysis.Result
+ description: Analysis Results
+ type: string
+ - contextPath: Joe.Analysis.Errors
+      description: Errors raised during the analysis
+ type: Unknown
+ - contextPath: Joe.Analysis.Systems
+ description: Analysis OS
+ type: Unknown
+ - contextPath: Joe.Analysis.MD5
+ description: MD5 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA1
+ description: SHA1 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA256
+ description: SHA256 of analysis sample
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The name of the sample file or URL
+ type: Unknown
+ - contextPath: DBotScore.Type
+ description: '''url'' for url samples, otherwise ''file'''
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: DBotScore.Malicious.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Malicious.Detections
+ description: The sub analysis detection statuses
+ type: string
+ - contextPath: DBotScore.Malicious.SHA1
+ description: The SHA1 of the file
+ type: string
+ - deprecated: false
+ description: List all analyses.
+ execution: false
+ name: joe-list-analysis
+ outputs:
+ - contextPath: Joe.Analysis.WebID
+ description: Web ID
+ type: string
+ - contextPath: Joe.Analysis.SampleName
+      description: The sample data. Can be a file name or a URL.
+ type: string
+ - contextPath: Joe.Analysis.Status
+ description: Analysis Status
+ type: string
+ - contextPath: Joe.Analysis.Comments
+ description: Analysis Comments
+ type: string
+ - contextPath: Joe.Analysis.Time
+ description: Submitted Time
+ type: date
+ - contextPath: Joe.Analysis.Runs
+ description: Sub-Analysis Information
+ type: Unknown
+ - contextPath: Joe.Analysis.Result
+ description: Analysis Results
+ type: string
+ - contextPath: Joe.Analysis.Errors
+      description: Errors raised during the analysis
+ type: Unknown
+ - contextPath: Joe.Analysis.Systems
+ description: Analysis OS
+ type: Unknown
+ - contextPath: Joe.Analysis.MD5
+ description: MD5 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA1
+ description: SHA1 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA256
+ description: SHA256 of analysis sample
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The name of the sample file or URL
+ type: Unknown
+ - contextPath: DBotScore.Type
+ description: '''url'' for url samples, otherwise ''file'''
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: DBotScore.Malicious.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Malicious.Detections
+ description: The sub analysis detection statuses
+ type: string
+ - contextPath: DBotScore.Malicious.SHA1
+ description: The SHA1 of the file
+ type: string
+ - arguments:
+ - default: true
+ description: War Room entry of a file (for example, 3245@4)
+ isArray: false
+ name: file_id
+ required: false
+ secret: false
+ - default: false
+      description: URL to a sample file; supports comma-separated lists.
+ isArray: false
+ name: sample_url
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+      description: Whether the command should poll for the result of the analysis.
+ isArray: false
+ name: should_wait
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: Comments for the analysis
+ isArray: false
+ name: comments
+ required: false
+ secret: false
+ - default: false
+      description: 'Operating system to run the analysis on (comma-separated). Possible
+        values are: w7, w7x64, w7_1, w7_2, w7native, android2, android3, mac1, w7l,
+        w7x64l, w10, android4, w7x64native, w7_3, w10native, android5native_1, w7_4,
+        w7_5, w10x64, w7x64_hvm, android6, iphone1, w7_sec, macvm, w7_lang_packs,
+        w7x64native_hvm, lnxubuntu1, lnxcentos1, android7_nougat'
+ isArray: false
+ name: systems
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'True'
+      description: Enable full internet access. Default is "True".
+ isArray: false
+ name: internet-access
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Submit a sample for analysis.
+ execution: false
+ name: joe-analysis-submit-sample
+ outputs:
+ - contextPath: Joe.Analysis.WebID
+ description: Web ID
+ type: string
+ - contextPath: Joe.Analysis.SampleName
+      description: The sample data. Can be a file name or a URL.
+ type: string
+ - contextPath: Joe.Analysis.Status
+ description: Analysis Status
+ type: string
+ - contextPath: Joe.Analysis.Comments
+ description: Analysis Comments
+ type: string
+ - contextPath: Joe.Analysis.Time
+ description: Submitted Time
+ type: date
+ - contextPath: Joe.Analysis.Runs
+ description: Sub-Analysis Information
+ type: Unknown
+ - contextPath: Joe.Analysis.Result
+ description: Analysis Results
+ type: string
+ - contextPath: Joe.Analysis.Errors
+      description: Errors raised during the analysis
+ type: Unknown
+ - contextPath: Joe.Analysis.Systems
+ description: Analysis OS
+ type: Unknown
+ - contextPath: Joe.Analysis.MD5
+ description: MD5 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA1
+ description: SHA1 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA256
+ description: SHA256 of analysis sample
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The name of the sample file or URL
+ type: Unknown
+ - contextPath: DBotScore.Type
+ description: '''url'' for url samples, otherwise ''file'''
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: DBotScore.Malicious.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Malicious.Detections
+ description: The sub analysis detection statuses
+ type: string
+ - contextPath: DBotScore.Malicious.SHA1
+ description: The SHA1 of the file
+ type: string
+ - arguments:
+ - default: true
+ description: Web ID
+ isArray: false
+ name: webid
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: html
+ description: The resource type to download. Defaults to html.
+ isArray: false
+ name: type
+ predefined:
+ - html
+ - json
+ - pcap
+ - pdf
+ - xml
+ required: false
+ secret: false
+ deprecated: false
+ description: Download a resource belonging to a report. This can be the full report,
+ dropped binaries, etc.
+ execution: false
+ name: joe-download-report
+ outputs:
+ - contextPath: InfoFile.Name
+ description: FileName
+ type: string
+ - contextPath: InfoFile.EntryID
+ description: The EntryID of the report
+ type: string
+ - contextPath: InfoFile.Size
+ description: File Size
+ type: number
+ - contextPath: InfoFile.Type
+ description: File type e.g. "PE"
+ type: string
+ - contextPath: InfoFile.Info
+ description: Basic information of the file
+ type: string
+ - contextPath: File.Extension
+ description: File Extension
+ type: string
+ - arguments:
+ - default: true
+ description: War Room entry of a file (for example, 3245@4)
+ isArray: false
+ name: file_id
+ required: false
+ secret: false
+ - default: false
+      description: URL to a sample file
+ isArray: false
+ name: sample_url
+ required: false
+ secret: false
+ - default: false
+ description: Comments for the analysis
+ isArray: false
+ name: comments
+ required: false
+ secret: false
+ - default: false
+      description: 'Operating system to run the analysis on (comma-separated). Possible
+        values are: w7, w7x64, w7_1, w7_2, w7native, android2, android3, mac1, w7l,
+        w7x64l, w10, android4, w7x64native, w7_3, w10native, android5native_1, w7_4,
+        w7_5, w10x64, w7x64_hvm, android6, iphone1, w7_sec, macvm, w7_lang_packs,
+        w7x64native_hvm, lnxubuntu1, lnxcentos1, android7_nougat'
+ isArray: false
+ name: systems
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'True'
+      description: Enable full internet access. Default is "True".
+ isArray: false
+ name: internet-access
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: true
+ description: Submit a sample for analysis.
+ execution: false
+ name: joe-detonate-file
+ outputs:
+ - contextPath: Joe.Analysis.WebID
+ description: Web ID
+ type: string
+ - contextPath: Joe.Analysis.SampleName
+      description: The sample data. Can be a file name or a URL.
+ type: string
+ - contextPath: Joe.Analysis.Status
+ description: Analysis Status
+ type: string
+ - contextPath: Joe.Analysis.Comments
+ description: Analysis Comments
+ type: string
+ - contextPath: Joe.Analysis.Time
+ description: Submitted Time
+ type: date
+ - contextPath: Joe.Analysis.Runs
+ description: Sub-Analysis Information
+ type: Unknown
+ - contextPath: Joe.Analysis.Result
+ description: Analysis Results
+ type: string
+ - contextPath: Joe.Analysis.Errors
+      description: Errors raised during the analysis
+ type: Unknown
+ - contextPath: Joe.Analysis.Systems
+ description: Analysis OS
+ type: Unknown
+ - contextPath: Joe.Analysis.MD5
+ description: MD5 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA1
+ description: SHA1 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA256
+ description: SHA256 of analysis sample
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The name of the sample file or URL
+ type: Unknown
+ - contextPath: DBotScore.Type
+ description: '''url'' for url samples, otherwise ''file'''
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: DBotScore.Malicious.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Malicious.Detections
+ description: The sub analysis detection statuses
+ type: string
+ - contextPath: DBotScore.Malicious.SHA1
+ description: The SHA1 of the file
+ type: string
+ - arguments:
+ - default: true
+      description: 'The search string. Only the following fields are searched:
+        webid, md5, sha1, sha256, filename, URL, comments.'
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ deprecated: false
+ description: Search through all analyses.
+ execution: false
+ name: joe-search
+ outputs:
+ - contextPath: Joe.Analysis.WebID
+ description: Web ID
+ type: string
+ - contextPath: Joe.Analysis.SampleName
+      description: The sample data. Can be a file name or a URL.
+ type: string
+ - contextPath: Joe.Analysis.Status
+ description: Analysis Status
+ type: string
+ - contextPath: Joe.Analysis.Comments
+ description: Analysis Comments
+ type: string
+ - contextPath: Joe.Analysis.Time
+ description: Submitted Time
+ type: date
+ - contextPath: Joe.Analysis.Runs
+ description: Sub-Analysis Information
+ type: Unknown
+ - contextPath: Joe.Analysis.Result
+ description: Analysis Results
+ type: string
+ - contextPath: Joe.Analysis.Errors
+      description: Errors raised during the analysis
+ type: Unknown
+ - contextPath: Joe.Analysis.Systems
+ description: Analysis OS
+ type: Unknown
+ - contextPath: Joe.Analysis.MD5
+ description: MD5 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA1
+ description: SHA1 of analysis sample
+ type: string
+ - contextPath: Joe.Analysis.SHA256
+ description: SHA256 of analysis sample
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The name of the sample file or URL
+ type: Unknown
+ - contextPath: DBotScore.Type
+ description: '''url'' for url samples, otherwise ''file'''
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: DBotScore.Malicious.Vendor
+ description: 'The name of the vendor: JoeSecurity'
+ type: string
+ - contextPath: DBotScore.Malicious.Detections
+ description: The sub analysis detection statuses
+ type: string
+ - contextPath: DBotScore.Malicious.SHA1
+ description: The SHA1 of the file
+ type: string
+ - arguments:
+ - default: true
+ description: Web ID
+ isArray: false
+ name: webid
+ required: true
+ secret: false
+ deprecated: false
+    description: Download the sample file of an analysis. For security reasons,
+      the file extension will be "dontrun".
+ execution: false
+ name: joe-download-sample
+ outputs:
+ - contextPath: File.Size
+ description: File Size
+ type: number
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file
+ type: string
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file
+ type: string
+ - contextPath: File.Name
+ description: The sample name
+ type: string
+ - contextPath: File.SSDeep
+ description: SSDeep hash of the file
+ type: string
+ - contextPath: File.EntryID
+ description: War-Room Entry ID of the file
+ type: string
+ - contextPath: File.Info
+ description: Basic information of the file
+ type: string
+ - contextPath: File.Type
+ description: File type e.g. "PE"
+ type: string
+  - contextPath: File.MD5
+ description: MD5 hash of the file
+ type: string
+ - contextPath: File.Extension
+ description: File Extension
+ type: string
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- JoeSecurityTestPlaybook
+- JoeSecurityTestDetonation
diff --git a/Integrations/JoeSecurity/JoeSecurity_description.md b/Integrations/JoeSecurity/JoeSecurity_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/JoeSecurity/JoeSecurity_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/JoeSecurity/JoeSecurity_image.png b/Integrations/JoeSecurity/JoeSecurity_image.png
new file mode 100644
index 000000000000..4df0dfe7fc19
Binary files /dev/null and b/Integrations/JoeSecurity/JoeSecurity_image.png differ
diff --git a/Integrations/Kafka_V2/CHANGELOG.md b/Integrations/Kafka_V2/CHANGELOG.md
new file mode 100644
index 000000000000..927c550dd3ec
--- /dev/null
+++ b/Integrations/Kafka_V2/CHANGELOG.md
@@ -0,0 +1,14 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+ - Added partitions outputs to the ***kafka-print-topic*** command.
+ - Added the *Max number of messages to fetch* parameter.
+ - Added the *Use TLS for connection* parameter.
+ - Improved debug logging outputs.
+ - Improved fetch incidents implementation (breaks backward compatibility).
+
+
+
+## [19.9.1] - 2019-09-18
+-
diff --git a/Integrations/Kafka_V2/Kafka_V2.py b/Integrations/Kafka_V2/Kafka_V2.py
new file mode 100644
index 000000000000..1c8f5cb67dc3
--- /dev/null
+++ b/Integrations/Kafka_V2/Kafka_V2.py
@@ -0,0 +1,449 @@
+import demistomock as demisto
+from CommonServerPython import *
+
+''' IMPORTS '''
+import requests
+from pykafka import KafkaClient, SslConfig
+from pykafka.common import OffsetType
+import logging
+from cStringIO import StringIO
+import traceback
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+BROKERS = demisto.params().get('brokers')
+
+# Should we use SSL
+USE_SSL = demisto.params().get('use_ssl', False)
+
+# Certificates
+CA_CERT = demisto.params().get('ca_cert', None)
+CLIENT_CERT = demisto.params().get('client_cert', None)
+CLIENT_CERT_KEY = demisto.params().get('client_cert_key', None)
+PASSWORD = demisto.params().get('additional_password', None)
+
+# Logging
+log_stream = None
+log_handler = None
+
+''' HELPER FUNCTIONS '''
+
+
+def start_logging():
+ logging.raiseExceptions = False
+ global log_stream
+ global log_handler
+ if log_stream is None:
+ log_stream = StringIO()
+ log_handler = logging.StreamHandler(stream=log_stream)
+ log_handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
+ logger = logging.getLogger()
+ logger.addHandler(log_handler)
+ logger.setLevel(logging.DEBUG)
+
+
+def check_params(topic, old_offset=None, old_partition=None):
+ """
+    :param topic: topic to check
+    :type topic: :class:`pykafka.topic.Topic`
+    :param old_offset: offset to check for in the topic (cast to int if needed)
+    :type old_offset: int, str, unicode or None
+    :param old_partition: partition to check for in the topic (cast to int if needed)
+    :type old_partition: int, str, unicode or None
+ :returns: new_offset, new_partition
+ :rtype: int, int
+ """
+ partition = None
+ offset = None
+    if old_partition:
+        # Casting
+ if isinstance(old_partition, (unicode, str)):
+ if old_partition.isdigit():
+ partition = int(old_partition)
+ else:
+ return_error('Supplied partition is not a number')
+ if isinstance(old_partition, int):
+ partition = old_partition
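+    # NOTE: pykafka treats the supplied offset as the last-consumed offset, so
+    # consumption starts at offset + 1; the "- 1" adjustments below make the
+    # requested message itself the first one consumed.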
+ if old_offset:
+ # Casting
+ if isinstance(old_offset, (unicode, str)):
+ if old_offset.isdigit():
+ offset = int(old_offset)
+ offset = OffsetType.EARLIEST if offset == 0 else offset - 1
+ elif old_offset.lower() == 'earliest':
+ offset = OffsetType.EARLIEST
+ elif old_offset.lower() == 'latest':
+ offset = check_latest_offset(topic, partition_number=partition) - 1
+ else:
+ return_error('Supplied offset is not a number')
+ if check_latest_offset(topic, partition_number=partition) <= offset:
+ return_error('Offset is out of bounds')
+ else:
+ return_error('Offset is not a number, earliest or latest')
+ return offset, partition
+
+
+def create_incident(message, topic):
+ """
+ Creates incident
+ :param message: Kafka message to create incident from
+ :type message: :class:`pykafka.common.Message`
+ :param topic: Message's topic
+ :type topic: str
+ :return incident:
+ """
+ raw = {
+ 'Topic': topic,
+ 'Partition': message.partition_id,
+ 'Offset': message.offset,
+ 'Message': message.value
+ }
+ incident = {
+ 'name': 'Kafka {} partition:{} offset:{}'.format(topic, message.partition_id, message.offset),
+ 'details': message.value,
+ 'rawJSON': json.dumps(raw)
+ }
+ if message.timestamp_dt:
+ incident['occurred'] = message.timestamp_dt
+ return incident
+
+
+def check_latest_offset(topic, partition_number=None):
+ """
+ :param topic: topic to check the latest offset
+ :type topic: :class:`pykafka.topic.Topic`
+ :param partition_number: partition to take latest offset from
+ :type partition_number: int, str
+ :return latest_offset: last message offset
+ :rtype: int
+ """
+ partitions = topic.latest_available_offsets()
+ latest_offset = 0
+ if partition_number is not None:
+ partition = partitions.get(str(partition_number))
+        if partition:  # the requested partition must exist in the topic
+ latest_offset = partition[0][0]
+ else:
+ return_error('Partition does not exist')
+ else:
+ for partition in partitions.values():
+ if latest_offset < partition[0][0]:
+ latest_offset = partition[0][0]
+ return latest_offset - 1
+
+
+def create_certificate():
+ """
+    Builds an SSL configuration from the certificate parameters.
+    :returns: certificate configuration
+    :rtype: :class:`pykafka.connection.SslConfig`
+ """
+ ca_path = None
+ client_path = None
+ client_key_path = None
+ if CA_CERT:
+ ca_path = 'ca.cert' # type: ignore
+ with open(ca_path, 'wb') as file:
+ file.write(CA_CERT)
+ ca_path = os.path.abspath(ca_path)
+ if CLIENT_CERT:
+ client_path = 'client.cert'
+ with open(client_path, 'wb') as file:
+ file.write(CLIENT_CERT)
+ client_path = os.path.abspath(client_path)
+ if CLIENT_CERT_KEY:
+ client_key_path = 'client_key.key'
+ with open(client_key_path, 'wb') as file:
+ file.write(CLIENT_CERT_KEY)
+ return SslConfig(
+ cafile=ca_path,
+ certfile=client_path,
+ keyfile=client_key_path,
+ password=PASSWORD
+ )
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module(client):
+ """
+ If we got here, the instance is working without any error
+ """
+ if client.topics is not None:
+ demisto.results('ok')
+
+
+def print_topics(client):
+ """
+ Prints available topics in Broker
+ """
+
+ kafka_topics = client.topics.values()
+ if kafka_topics:
+ topics = []
+ for topic in kafka_topics:
+ partitions = []
+ for partition in topic.partitions.values():
+ partitions.append({
+ 'ID': partition.id,
+ 'EarliestOffset': partition.earliest_available_offset(),
+                    'LatestOffset': partition.latest_available_offset()
+ })
+
+ topics.append({
+ 'Name': topic.name,
+ 'Partitions': partitions
+ })
+
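+        # the (val.Name === obj.Name) suffix makes the server merge context entries
+        # that share the same topic Name instead of appending duplicates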
+ ec = {
+ 'Kafka.Topic(val.Name === obj.Name)': topics
+ }
+
+ md = tableToMarkdown('Kafka Topics', topics)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': topics,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': md,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results('No topics found.')
+
+
+def produce_message(client):
+ """
+ Producing message to kafka topic
+ """
+ topic = demisto.args().get('topic')
+ value = demisto.args().get('value')
+ partitioning_key = demisto.args().get('partitioning_key')
+
+ partitioning_key = str(partitioning_key)
+ if partitioning_key.isdigit():
+ partitioning_key = int(partitioning_key) # type: ignore
+ else:
+ partitioning_key = None # type: ignore
+
+ if topic in client.topics:
+ kafka_topic = client.topics[topic]
+ with kafka_topic.get_sync_producer() as producer:
+ producer.produce(
+ message=str(value),
+ partition_key=partitioning_key
+ )
+ demisto.results('Message was successfully produced to topic \'{}\''.format(topic))
+ else:
+ return_error('Topic {} was not found in Kafka'.format(topic))
+
+
+def consume_message(client):
+ """
+ Consuming one message from topic
+ """
+ topic = demisto.args().get('topic')
+ offset = demisto.args().get('offset')
+ partition = demisto.args().get('partition')
+
+ if topic in client.topics:
+ kafka_topic = client.topics[topic]
+ offset, partition = check_params(kafka_topic, old_offset=offset, old_partition=partition)
+ consumer = kafka_topic.get_simple_consumer(
+ auto_offset_reset=offset,
+ reset_offset_on_start=True
+ )
+ message = consumer.consume()
+ md = tableToMarkdown(
+ name='Message consumed from topic \'{}\''.format(topic),
+ t={
+ 'Offset': message.offset,
+ 'Message': message.value
+ },
+ headers=[
+ 'Offset',
+ 'Message'
+ ]
+ )
+ ec = {
+ 'Kafka.Topic(val.Name === obj.Name)': {
+ 'Name': topic,
+ 'Message': {
+ 'Value': message.value,
+ 'Offset': message.offset
+ }
+ }
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': {
+ 'Message': message.value,
+ 'Offset': message.offset
+ },
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': md,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+ else:
+ return_error('Topic {} was not found in Kafka'.format(topic))
+
+
+def fetch_partitions(client):
+ """
+ Fetching available partitions in given topic
+ """
+ topic = demisto.args().get('topic')
+ if topic in client.topics:
+ kafka_topic = client.topics[topic]
+ partitions = kafka_topic.partitions.keys()
+
+ md = tableToMarkdown(
+ name='Available partitions for topic \'{}\''.format(topic),
+ t=partitions,
+ headers='Partitions'
+ )
+ ec = {
+ 'Kafka.Topic(val.Name === obj.Name)': {
+ 'Name': topic,
+ 'Partition': partitions
+ }
+ }
+ contents = {
+ topic: partitions
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': contents,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': md,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+ else:
+ return_error('Topic {} was not found in Kafka'.format(topic))
+
+
+def fetch_incidents(client):
+ """
+ Fetches incidents
+ """
+ topic = demisto.params().get('topic', '')
+ partition_to_fetch_from = argToList(demisto.params().get('partition', ''))
+ offset_to_fetch_from = demisto.params().get('offset', -2)
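+    # -2 is the standard Kafka "earliest" sentinel (pykafka's OffsetType.EARLIEST)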
+ try:
+ offset_to_fetch_from = int(offset_to_fetch_from)
+ except ValueError as e:
+ demisto.error('Received invalid offset: {}. Using default of -2. Err: {}'.format(offset_to_fetch_from, e))
+ offset_to_fetch_from = -2
+ max_messages = demisto.params().get('max_messages', 50)
+ try:
+ max_messages = int(max_messages)
+ except ValueError:
+ max_messages = 50
+
+ last_fetched_partitions_offset = json.loads(demisto.getLastRun().get('last_fetched_partitions_offset', '{}'))
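+    # lastRun persists the highest offset consumed per partition, so each fetch
+    # resumes where the previous one stopped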
+ incidents = []
+
+ message_counter = 0
+
+ if topic in client.topics:
+ kafka_topic = client.topics[topic]
+
+ consumer_args = {
+ 'consumer_timeout_ms': 2000, # wait max 2 seconds for new messages
+ 'reset_offset_on_start': True
+ }
+
+ if partition_to_fetch_from:
+ partitions = []
+ for partition in kafka_topic.partitions.values():
+ partition_id = str(partition.id)
+ if partition_id in partition_to_fetch_from:
+ partitions.append(partition)
+ consumer_args['partitions'] = partitions # type: ignore
+
+ consumer = kafka_topic.get_simple_consumer(**consumer_args)
+
+ offsets = [(p, last_fetched_partitions_offset.get(str(p.id), offset_to_fetch_from)) for p in consumer._partitions]
+ consumer.reset_offsets(offsets)
+
+ for message in consumer:
+ if message and message.value:
+ incidents.append(create_incident(message=message, topic=kafka_topic.name))
+ if message.offset > last_fetched_partitions_offset.get(str(message.partition_id), offset_to_fetch_from):
+ last_fetched_partitions_offset[str(message.partition_id)] = message.offset
+ message_counter += 1
+ if message_counter == max_messages:
+ break
+ consumer.stop()
+ else:
+ return_error('No such topic \'{}\' to fetch incidents from.'.format(topic))
+
+ demisto.setLastRun({'last_fetched_partitions_offset': json.dumps(last_fetched_partitions_offset)})
+ demisto.incidents(incidents)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+
+def main():
+ LOG('Command being called is {0}'.format(demisto.command()))
+ global log_stream
+ try:
+
+ # Initialize KafkaClient
+ if USE_SSL:
+ ssl_config = create_certificate()
+ client = KafkaClient(hosts=BROKERS, ssl_config=ssl_config)
+ else:
+ client = KafkaClient(hosts=BROKERS)
+
+ if demisto.command() == 'test-module':
+ start_logging()
+ # This is the call made when pressing the integration test button.
+ test_module(client)
+ elif demisto.command() == 'kafka-print-topics':
+ print_topics(client)
+ elif demisto.command() == 'kafka-publish-msg':
+ produce_message(client)
+ elif demisto.command() == 'kafka-consume-msg':
+ consume_message(client)
+ elif demisto.command() == 'kafka-fetch-partitions':
+ fetch_partitions(client)
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents(client)
+
+ except Exception as e:
+ debug_log = 'Debug logs:\n\n{0}'.format(log_stream.getvalue() if log_stream else '')
+ error_message = str(e)
+ if demisto.command() != 'test-module':
+ stacktrace = traceback.format_exc()
+ if stacktrace:
+ debug_log += '\nFull stacktrace:\n\n{0}'.format(stacktrace)
+ return_error('{0}\n\n{1}'.format(error_message, debug_log))
+
+ finally:
+ if os.path.isfile('ca.cert'):
+ os.remove(os.path.abspath('ca.cert'))
+ if os.path.isfile('client.cert'):
+ os.remove(os.path.abspath('client.cert'))
+ if os.path.isfile('client_key.key'):
+ os.remove(os.path.abspath('client_key.key'))
+ if log_stream:
+ try:
+ logging.getLogger().removeHandler(log_handler) # type: ignore
+ log_stream.close()
+ log_stream = None
+ except Exception as e:
+ demisto.error('Kafka v2: unexpected exception when trying to remove log handler: {}'.format(e))
+
+
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/Kafka_V2/Kafka_V2.yml b/Integrations/Kafka_V2/Kafka_V2.yml
new file mode 100644
index 000000000000..41d02d5a5957
--- /dev/null
+++ b/Integrations/Kafka_V2/Kafka_V2.yml
@@ -0,0 +1,168 @@
+category: Messaging
+commonfields:
+ id: Kafka V2
+ version: -1
+configuration:
+- display: CSV list of Kafka brokers to connect to, e.g. 172.16.20.207:9092,172.16.20.234:9093
+ name: brokers
+ required: true
+ type: 0
+- display: Use TLS for connection
+ name: use_ssl
+ required: false
+ type: 8
+- display: |-
+ ┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉
+ ‎ Certificate Settings
+ CA certificate of Kafka server (.cer)
+ name: ca_cert
+ required: false
+ type: 12
+- display: Client certificate (.cer)
+ name: client_cert
+ required: false
+ type: 12
+- display: Client certificate key (.key)
+ name: client_cert_key
+ required: false
+ type: 12
+- display: Client certificate key password (if required)
+ name: additional_password
+ required: false
+ type: 4
+- display: |-
+ ┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉
+ ‎ Fetch Incidents Settings
+ Topic to fetch incidents from (Required for fetch incidents)
+ name: topic
+ required: false
+ type: 0
+- display: CSV list of partitions to fetch messages from
+ name: partition
+ required: false
+ type: 0
+- display: Offset to fetch messages from
+ name: offset
+ required: false
+ type: 0
+- defaultvalue: '50'
+ display: Max number of messages to fetch
+ name: max_messages
+ required: false
+  type: 0
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+description: An open-source distributed streaming platform.
+display: Kafka V2
+name: Kafka V2
+script:
+ commands:
+ - deprecated: false
+    description: Prints all topics and their partitions.
+ execution: false
+ name: kafka-print-topics
+ outputs:
+ - contextPath: Kafka.Topic.Name
+ description: Kafka topic name
+ type: String
+ - contextPath: Kafka.Topic.Partitions.ID
+ description: Topic partition ID
+ type: Number
+ - contextPath: Kafka.Topic.Partitions.EarliestOffset
+ description: Topic partition earliest offset
+ type: Number
+ - contextPath: Kafka.Topic.Partitions.LatestOffset
+ description: Topic partition latest offset
+ type: Number
+ - arguments:
+ - default: false
+ description: A topic to filter messages by
+ isArray: false
+ name: topic
+ required: true
+ secret: false
+ - default: false
+ description: Message value (string)
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ - default: false
+ description: Message partition key (number)
+ isArray: false
+ name: partitioning_key
+ required: false
+ secret: false
+ deprecated: false
+ description: Publishes a message to Kafka.
+ execution: false
+ name: kafka-publish-msg
+ - arguments:
+ - default: false
+ description: A topic to filter by
+ isArray: false
+ name: topic
+ required: true
+ secret: false
+ - default: false
+ defaultValue: Earliest
+      description: Message offset to filter by. Acceptable values are 'Earliest',
+        'Latest', or a specific offset number.
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ - default: false
+ description: Partition (number)
+ isArray: false
+ name: partition
+ required: false
+ secret: false
+ deprecated: false
+ description: Consumes a single Kafka message.
+ execution: false
+ name: kafka-consume-msg
+ outputs:
+ - contextPath: Kafka.Topic.Name
+ description: Name of the topic.
+ type: string
+ - contextPath: Kafka.Topic.Message.Value
+ description: Value of the message.
+ type: string
+ - contextPath: Kafka.Topic.Message.Offset
+ description: Offset of the value in the topic.
+ type: number
+ - arguments:
+ - default: false
+ description: A topic to filter by
+ isArray: false
+ name: topic
+ required: false
+ secret: false
+ deprecated: false
+    description: Prints all partitions of a topic.
+ execution: false
+ name: kafka-fetch-partitions
+ outputs:
+ - contextPath: Kafka.Topic.Name
+ description: Name of topic.
+ type: string
+ - contextPath: Kafka.Topic.Partition
+      description: The partitions of the topic.
+ type: number
+ dockerimage: demisto/pykafka:1.0.0.128
+ isfetch: true
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- No Test - Cannot connect to the instance remotely
diff --git a/Integrations/Kafka_V2/Kafka_V2_image.png b/Integrations/Kafka_V2/Kafka_V2_image.png
new file mode 100644
index 000000000000..790995dcca3d
Binary files /dev/null and b/Integrations/Kafka_V2/Kafka_V2_image.png differ
diff --git a/Integrations/Kafka_V2/Pipfile b/Integrations/Kafka_V2/Pipfile
new file mode 100644
index 000000000000..5aa5cbefbde3
--- /dev/null
+++ b/Integrations/Kafka_V2/Pipfile
@@ -0,0 +1,19 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+v = {version = "*",editable = true}
+flake8 = "*"
+
+[packages]
+pykafka = "*"
+requests = "*"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/Kafka_V2/Pipfile.lock b/Integrations/Kafka_V2/Pipfile.lock
new file mode 100644
index 000000000000..fdf776ccdd99
--- /dev/null
+++ b/Integrations/Kafka_V2/Pipfile.lock
@@ -0,0 +1,428 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "3e77c84f5f506aaaa30beb074493d62167f0df848b357763e8d5a55a5ff2eb42"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50",
+ "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef"
+ ],
+ "version": "==2019.9.11"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "kazoo": {
+ "hashes": [
+ "sha256:8db774f7bdece7d0dc7decb21539ff0852e42c2ffe1c28d7f1ff6f9292a1c3a4",
+ "sha256:a5fa2e400c5068cfee9e86b35cf0dab8232b574152d8e3590d823b3e2426ab5e"
+ ],
+ "version": "==2.5.0"
+ },
+ "pykafka": {
+ "hashes": [
+ "sha256:f0bbd394ae6970042a587c99fe4dc0966e67787249d963d4ce2f810dc9490577"
+ ],
+ "index": "pypi",
+ "version": "==2.8.0"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "index": "pypi",
+ "version": "==2.22.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "tabulate": {
+ "hashes": [
+ "sha256:d0097023658d4dea848d6ae73af84532d1e86617ac0925d1adf1dd903985dac3"
+ ],
+ "version": "==0.8.5"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398",
+ "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86"
+ ],
+ "version": "==1.25.6"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:ec20e7a4825331c1b5ebf261d111e16fa9612c1f7a5e1f884f12bd53a664dfd2",
+ "sha256:f913492e1663d3c36f502e5e9ba6cd13cf19d7fab50aa13239e420fef95e1396"
+ ],
+ "version": "==19.2.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50",
+ "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef"
+ ],
+ "version": "==2019.9.11"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c",
+ "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==4.0.2"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:7197aa736777caac513dbd800944c209a49765bf1979b12b037dce0277077ed3",
+ "sha256:9d2c67f18c1f9b6db1b46317f7f784aa82789d2ee5dea5d9c0f0f2a764eb862e"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.6.0"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548",
+ "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696"
+ ],
+ "index": "pypi",
+ "version": "==3.7.8"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.3'",
+ "version": "==1.0.2"
+ },
+ "functools32": {
+ "hashes": [
+ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0",
+ "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.3.post2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16",
+ "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.3.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26",
+ "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af"
+ ],
+ "markers": "python_version < '3.8'",
+ "version": "==0.23"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:02b260c8deb80db09325b99edf62ae344ce9bc64d68b7a634410b8e9a568edbf",
+ "sha256:18f9c401083a4ba6e162355873f906315332ea7035803d0fd8166051e3d402e3",
+ "sha256:1f2c6209a8917c525c1e2b55a716135ca4658a3042b5122d4e3413a4030c26ce",
+ "sha256:2f06d97f0ca0f414f6b707c974aaf8829c2292c1c497642f63824119d770226f",
+ "sha256:616c94f8176808f4018b39f9638080ed86f96b55370b5a9463b2ee5c926f6c5f",
+ "sha256:63b91e30ef47ef68a30f0c3c278fbfe9822319c15f34b7538a829515b84ca2a0",
+ "sha256:77b454f03860b844f758c5d5c6e5f18d27de899a3db367f4af06bec2e6013a8e",
+ "sha256:83fe27ba321e4cfac466178606147d3c0aa18e8087507caec78ed5a966a64905",
+ "sha256:84742532d39f72df959d237912344d8a1764c2d03fe58beba96a87bfa11a76d8",
+ "sha256:874ebf3caaf55a020aeb08acead813baf5a305927a71ce88c9377970fe7ad3c2",
+ "sha256:9f5caf2c7436d44f3cec97c2fa7791f8a675170badbfa86e1992ca1b84c37009",
+ "sha256:a0c8758d01fcdfe7ae8e4b4017b13552efa7f1197dd7358dc9da0576f9d0328a",
+ "sha256:a4def978d9d28cda2d960c279318d46b327632686d82b4917516c36d4c274512",
+ "sha256:ad4f4be843dace866af5fc142509e9b9817ca0c59342fdb176ab6ad552c927f5",
+ "sha256:ae33dd198f772f714420c5ab698ff05ff900150486c648d29951e9c70694338e",
+ "sha256:b4a2b782b8a8c5522ad35c93e04d60e2ba7f7dcb9271ec8e8c3e08239be6c7b4",
+ "sha256:c462eb33f6abca3b34cdedbe84d761f31a60b814e173b98ede3c81bb48967c4f",
+ "sha256:fd135b8d35dfdcdb984828c84d695937e58cc5f49e1c854eb311c4d6aa03f4f1"
+ ],
+ "version": "==1.4.2"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47",
+ "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108"
+ ],
+ "version": "==19.2"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db",
+ "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868"
+ ],
+ "markers": "python_version == '3.4.*' or python_version < '3'",
+ "version": "==2.3.5"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6",
+ "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34"
+ ],
+ "version": "==0.13.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42",
+ "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300"
+ ],
+ "index": "pypi",
+ "version": "==1.9.5"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:8fc39199bdda3d9d025d3b1f4eb99a192c20828030ea7c9a0d2840721de7d347",
+ "sha256:d100a02770f665f5dcf7e3f08202db29857fee6d15f34c942be0a511f39814f0"
+ ],
+ "index": "pypi",
+ "version": "==4.6.5"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:34520283d459cdf1d0dbb58a132df804697f1b966ecedf808bbf3d255af8f659",
+ "sha256:f1ab8aefe795204efe7a015900296d1719e7bf0f4a0558d71e8599da1d1309d0"
+ ],
+ "index": "pypi",
+ "version": "==1.11.1"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "index": "pypi",
+ "version": "==2.22.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:510df890afe08d36eca5bb16b4aa6308a6f85e3159ad3013bac8b9de7bd5a010",
+ "sha256:88d3402dd8b3c69a9e4f9d3a73ad11b15920c6efd36bc27bf1f701cf4a8e4646"
+ ],
+ "index": "pypi",
+ "version": "==1.7.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typing": {
+ "hashes": [
+ "sha256:91dfe6f3f706ee8cc32d38edbbf304e9b7583fb37108fef38229617f8b3eba23",
+ "sha256:c8cabb5ab8945cd2f54917be357d134db9cc1eb039e59d1606dc1e60cb1d9d36",
+ "sha256:f38d83c5a7a7086543a0f649564d661859c5146a85775ab90c0d2f93ffaa9714"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==3.7.4.1"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398",
+ "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86"
+ ],
+ "version": "==1.25.6"
+ },
+ "v": {
+ "hashes": [
+ "sha256:2d5a8f79a36aaebe62ef2c7068e3ec7f86656078202edabfdbf74715dc822d36",
+ "sha256:cd6b6b20b4a611f209c88bcdfb7211321f85662efb2bdd53a7b40314d0a84618"
+ ],
+ "index": "pypi",
+ "version": "==0.0.0"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
+ "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
+ ],
+ "version": "==0.6.0"
+ }
+ }
+}
diff --git a/Integrations/Kafka_V2/test_data/README.md b/Integrations/Kafka_V2/test_data/README.md
new file mode 100644
index 000000000000..d60c6dfdbfa7
--- /dev/null
+++ b/Integrations/Kafka_V2/test_data/README.md
@@ -0,0 +1,47 @@
+## Setting up a local instance of Kafka
+
+Use https://github.com/wurstmeister/kafka-docker
+
+```
+git clone https://github.com/wurstmeister/kafka-docker .
+```
+
+* Edit file `docker-compose-single-broker.yml`
+* Change KAFKA_ADVERTISED_HOST_NAME to your machine's IP. Use `ifconfig | grep 'inet '` to see the available IPs. It is recommended to use an IP which doesn't change; otherwise you will need to bring the cluster up again every time the IP changes. You can use the IP of the GlobalProtect interface gpd0 (on my machine it has a value of `10.196.100.168`).
+* Start a Kafka cluster:
+```
+docker-compose -f docker-compose-single-broker.yml up
+```
+
+## Setup via shell
+Run the following to start a shell:
+```
+./start-kafka-shell.sh host.docker.internal host.docker.internal:2181
+```
+In the shell run:
+* Create topic with 4 partitions: `$KAFKA_HOME/bin/kafka-topics.sh --zookeeper $ZK --create --topic mytest-topic --partitions 4 --replication-factor 1`
+* List topics: `$KAFKA_HOME/bin/kafka-topics.sh --zookeeper $ZK --list`
+* Produce 10 messages:
+```
+for i in `seq 1 10`; do echo '{"id":'$i',"user":"test","date":"'`date -R`'","message":"this is a test from kafka shell"}' | $KAFKA_HOME/bin/kafka-console-producer.sh --broker-list=`broker-list.sh` --topic mytest-topic; done
+```
+* Test consume of the messages (ctrl+c to exit):
+```
+$KAFKA_HOME/bin/kafka-console-consumer.sh --bootstrap-server=`broker-list.sh` --topic mytest-topic --from-beginning
+```
+
+Recommended utility: `kafkacat`
+
+* Install via: `brew install kafkacat`
+* Then do: `kafkacat -b localhost:9092 -t mytest-topic`
+
+Another good cmdline client: https://github.com/fgeller/kt
+* Install via: `go get -u github.com/fgeller/kt`
+
+
+
+## Stop the cluster
+Press control+c in the terminal running `docker-compose`.
+You can later bring the cluster up again with your configured topic by running: `docker-compose -f docker-compose-single-broker.yml up`.
+
+To fully delete the cluster from the disk, run: `docker-compose -f docker-compose-single-broker.yml down`.
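+
+For completeness, messages can also be produced from Python with `pykafka`, mirroring the consumer in `kafka_client_example.py`. This is a minimal sketch: it assumes the single-broker cluster above is reachable on `localhost:9092` and that `mytest-topic` already exists:
+
+```python
+import json
+import pykafka
+
+client = pykafka.KafkaClient(hosts='localhost:9092')
+topic = client.topics['mytest-topic']
+
+# get_sync_producer blocks until the broker acknowledges each message
+with topic.get_sync_producer() as producer:
+    for i in range(1, 11):
+        msg = {'id': i, 'user': 'test', 'message': 'produced from pykafka'}
+        producer.produce(json.dumps(msg).encode('utf-8'))
+```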
diff --git a/Integrations/Kafka_V2/test_data/kafka_client_example.py b/Integrations/Kafka_V2/test_data/kafka_client_example.py
new file mode 100644
index 000000000000..e471d234ca5b
--- /dev/null
+++ b/Integrations/Kafka_V2/test_data/kafka_client_example.py
@@ -0,0 +1,40 @@
+import pykafka
+from pykafka.common import OffsetType
+
+# Example demonstrating how to fetch 5 messages and then fetch another 5 from the point we left off
+# Run with: python kafka_client_example.py
+
+
+def consume_messages(topic, offsets, max_msgs):
+ part_offset_dict = {}
+
+ # default consumer starts from beginning
+ consumer = topic.get_simple_consumer(
+ consumer_timeout_ms=1000,
+ queued_max_messages=100,
+ )
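+    # Map each partition to a start offset: resume from the passed-in offsets, or fall back to EARLIEST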
+ offsets = [(p, offsets.get(p.id, OffsetType.EARLIEST)) for p in consumer._partitions]
+ consumer.reset_offsets(offsets)
+
+    i = 0
+ for msg in consumer:
+ i += 1
+        print('msg {}: partition: {}, offset: {}, value: {}'.format(i, msg.partition_id, msg.offset, msg.value))
+ if msg.offset > part_offset_dict.get(msg.partition_id, OffsetType.EARLIEST):
+ part_offset_dict[msg.partition_id] = msg.offset
+        if i >= max_msgs:
+ print("\nreached max messages. offsets received: {}\n".format(part_offset_dict))
+ break
+
+ consumer.stop()
+ return part_offset_dict
+
+
+# localhost works only on mac when connecting to a docker running kafka
+client = pykafka.KafkaClient(hosts='localhost:9092')
+print("topics: {}".format(client.topics))
+topic = client.topics['mytest-topic']
+
+last_offsets = consume_messages(topic, {}, 5)
+consume_messages(topic, last_offsets, 5)
diff --git a/Integrations/LogRhythmRest/CHANGELOG.md b/Integrations/LogRhythmRest/CHANGELOG.md
new file mode 100644
index 000000000000..3a3708dd8fb5
--- /dev/null
+++ b/Integrations/LogRhythmRest/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+ - Fixed an issue in the ***lr-get-alarm-events*** command when *DrillDownLogs* is empty.
+ - Improved handling of the ***lr-get-alarm-events-by-id*** command when there are no events for the alarm.
diff --git a/Integrations/LogRhythmRest/LogRhythmRest.py b/Integrations/LogRhythmRest/LogRhythmRest.py
new file mode 100644
index 000000000000..4dadb853b56e
--- /dev/null
+++ b/Integrations/LogRhythmRest/LogRhythmRest.py
@@ -0,0 +1,620 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import json
+import requests
+import random
+import string
+from datetime import datetime, timedelta
+import xml.etree.ElementTree as ET  # used to parse Windows event XML in execute_query
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+
+''' GLOBALS/PARAMS '''
+
+TOKEN = demisto.params().get('token', '')
+BASE_URL = demisto.params().get('url', '').strip('/')
+VERIFY_CERT = not demisto.params().get('insecure')
+CLUSTER_ID = demisto.params().get('cluster-id')
+
+# Headers to be sent in requests
+HEADERS = {
+ 'Authorization': 'Bearer ' + TOKEN
+}
+
+HOSTS_HEADERS = ["ID", "Name", "EntityId", "EntityName", "OS", "Status", "Location", "RiskLevel", "ThreatLevel",
+ "ThreatLevelComments", "DateUpdated", "HostZone"]
+LOGS_HEADERS = ["Level", "Computer", "Channel", "Keywords", "EventData"]
+PERSON_HEADERS = ["ID", "HostStatus", "IsAPIPerson", "FirstName", "LastName", "UserID", "UserLogin", "DateUpdated"]
+NETWORK_HEADERS = ["ID", "BeganIP", "EndIP", "HostStatus", "Name", "RiskLevel", "EntityId", "EntityName", "Location",
+ "ThreatLevel", "DateUpdated", "HostZone"]
+ALARM_SUMMARY_HEADERS = ["PIFType", "DrillDownSummaryLogs"]
+
+PIF_TYPES = {
+ "1": "Direction",
+ "2": "Priority",
+ "3": "Normal Message Date",
+ "4": "First Normal Message Date",
+ "5": "Last Normal Message Date",
+ "6": "Count",
+ "7": "MessageDate",
+ "8": "Entity",
+ "9": "Log Source",
+ "10": "Log Source Host",
+ "11": "Log Source Type",
+ "12": "Log Class Type",
+ "13": "Log Class",
+ "14": "Common Event",
+ "15": "MPE Rule",
+ "16": "Source",
+ "17": "Destination",
+ "18": "Service",
+ "19": "Known Host",
+ "20": "Known Host (Origin)",
+ "21": "Known Host (Impacted)",
+ "22": "Known Service",
+ "23": "IP",
+ "24": "IP Address (Origin)",
+ "25": "IP Address (Impacted)",
+ "26": "Host Name",
+ "27": "Host Name (Origin)",
+ "28": "Host Name (Impacted)",
+ "29": "Port (Origin)",
+ "30": "Port (Impacted)",
+ "31": "Protocol",
+ "32": "User (Origin)",
+ "33": "User (Impacted)",
+ "34": "Sender",
+ "35": "Recipient",
+ "36": "Subject",
+ "37": "Object",
+ "38": "Vendor Message ID",
+ "39": "Vendor Message Name",
+ "40": "Bytes In",
+ "41": "Bytes Out",
+ "42": "Items In",
+ "43": "Items Out",
+ "44": "Duration",
+ "45": "Time Start",
+ "46": "Time End",
+ "47": "Process",
+ "48": "Amount",
+ "49": "Quantity",
+ "50": "Rate",
+ "51": "Size",
+ "52": "Domain (Impacted)",
+ "53": "Group",
+ "54": "URL",
+ "55": "Session",
+ "56": "Sequence",
+ "57": "Network (Origin)",
+ "58": "Network (Impacted)",
+ "59": "Location (Origin)",
+ "60": "Country (Origin)",
+ "61": "Region (Origin)",
+ "62": "City (Origin)",
+ "63": "Location (Impacted)",
+ "64": "Country (Impacted)",
+ "65": "Region (Impacted)",
+ "66": "City (Impacted)",
+ "67": "Entity (Origin)",
+ "68": "Entity (Impacted)",
+ "69": "Zone (Origin)",
+ "70": "Zone (Impacted)",
+ "72": "Zone",
+ "73": "User",
+ "74": "Address",
+ "75": "MAC",
+ "76": "NATIP",
+ "77": "Interface",
+ "78": "NATPort",
+ "79": "Entity (Impacted or Origin)",
+ "80": "RootEntity",
+ "100": "Message",
+ "200": "MediatorMsgID",
+ "201": "MARCMsgID",
+ "1040": "MAC (Origin)",
+ "1041": "MAC (Impacted)",
+ "1042": "NATIP (Origin)",
+ "1043": "NATIP (Impacted)",
+ "1044": "Interface (Origin)",
+ "1045": "Interface (Impacted)",
+ "1046": "PID",
+ "1047": "Severity",
+ "1048": "Version",
+ "1049": "Command",
+ "1050": "ObjectName",
+ "1051": "NATPort (Origin)",
+ "1052": "NATPort (Impacted)",
+ "1053": "Domain (Origin)",
+ "1054": "Hash",
+ "1055": "Policy",
+ "1056": "Vendor Info",
+ "1057": "Result",
+ "1058": "Object Type",
+ "1059": "CVE",
+ "1060": "UserAgent",
+ "1061": "Parent Process Id",
+ "1062": "Parent Process Name",
+ "1063": "Parent Process Path",
+ "1064": "Serial Number",
+ "1065": "Reason",
+ "1066": "Status",
+ "1067": "Threat Id",
+ "1068": "Threat Name",
+ "1069": "Session Type",
+ "1070": "Action",
+ "1071": "Response Code",
+ "1072": "User (Origin) Identity ID",
+ "1073": "User (Impacted) Identity ID",
+ "1074": "Sender Identity ID",
+ "1075": "Recipient Identity ID",
+ "1076": "User (Origin) Identity",
+ "1077": "User (Impacted) Identity",
+ "1078": "Sender Identity",
+ "1079": "Recipient Identity",
+ "1080": "User (Origin) Identity Domain",
+ "1081": "User (Impacted) Identity Domain",
+ "1082": "Sender Identity Domain",
+ "1083": "Recipient Identity Domain",
+ "1084": "User (Origin) Identity Company",
+ "1085": "User (Impacted) Identity Company",
+ "1086": "Sender Identity Company",
+ "1087": "Recipient Identity Company",
+ "1088": "User (Origin) Identity Department",
+ "1089": "User (Impacted) Identity Department",
+ "1090": "Sender Identity Department",
+ "1091": "Recipient Identity Department",
+ "1092": "User (Origin) Identity Title",
+ "1093": "User (Impacted) Identity Title",
+ "1094": "Sender Identity Title",
+ "1095": "Recipient Identity Title",
+ "10001": "Source Or Destination",
+ "10002": "Port (Origin or Impacted)",
+ "10003": "Network (Origin or Impacted)",
+ "10004": "Location (Origin or Impacted)",
+ "10005": "Country (Origin or Impacted)",
+ "10006": "Region (Origin or Impacted)",
+ "10007": "City (Origin or Impacted)",
+ "10008": "Bytes In/Out",
+ "10009": "Items In/Out"
+}
+
+ALARM_STATUS = {
+ "0": "Waiting",
+ "1": "In queue",
+ "2": "Sent to SvcHost",
+ "3": "Queued for retry",
+ "4": "Completed",
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def fix_date_values(item):
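+    # Convert epoch-millisecond date fields to 'YYYY-MM-DD HH:MM:SS' strings, in place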
+ date_keys = ['normalDateMin', 'normalDate', 'normalMsgDateMax', 'logDate']
+
+ for key in date_keys:
+ if item.get(key):
+ item[key] = datetime.fromtimestamp(item.get(key) / 1000.0).\
+ strftime('%Y-%m-%d %H:%M:%S')
+
+
+def fix_location_value(items):
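+    # The API marks a missing location as {'id': -1}; normalize it to 'NA' for readability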
+ for item in items:
+        # Compare the parsed value directly; its string repr differs between Python 2 and Python 3
+        if item.get('location') == {'id': -1}:
+            item['location'] = 'NA'
+
+ return items
+
+
+def get_time_frame(time_frame, start_arg, end_arg):
+ start = datetime.now()
+ end = datetime.now()
+
+ if time_frame == 'Today':
+ start = datetime(end.year, end.month, end.day)
+ elif time_frame == 'Last2Days':
+ start = end - timedelta(days=2)
+ elif time_frame == 'LastWeek':
+ start = end - timedelta(days=7)
+ elif time_frame == 'LastMonth':
+ start = end - timedelta(days=30)
+ elif time_frame == 'Custom':
+ if not start_arg:
+ return_error('start-date argument is missing')
+ if not end_arg:
+ return_error('end-date argument is missing')
+ start = datetime.strptime(start_arg, '%Y-%m-%d')
+ end = datetime.strptime(end_arg, '%Y-%m-%d')
+
+ return start, end
+
+
+def http_request(method, url_suffix, data=None, headers=HEADERS):
+ try:
+ res = requests.request(
+ method,
+ BASE_URL + '/' + url_suffix,
+ headers=headers,
+            verify=VERIFY_CERT,
+ data=data
+ )
+ except Exception as e:
+ return_error(e)
+
+ # Handle error responses gracefully
+    if 'application/json' not in res.headers.get('Content-Type', ''):
+        return_error('invalid URL or port: ' + BASE_URL)
+
+ if res.status_code == 404:
+ if res.json().get('message'):
+ return_error(res.json().get('message'))
+ else:
+ return_error('No data returned')
+
+ if res.status_code not in {200, 201, 202, 207}:
+ return_error(
+ 'Error in API call to {}, status code: {}, reason: {}'.format(BASE_URL + '/' + url_suffix, res.status_code,
+ res.json()['message']))
+
+ return res.json()
+
+
+def get_host_by_id(host_id):
+ res = http_request('GET', 'lr-admin-api/hosts/' + host_id)
+ return fix_location_value([res])
+
+
+def update_hosts_keys(hosts):
+ new_hosts = []
+
+ for host in hosts:
+ tmp_host = {
+ 'EntityId': host.get('entity').get('id'),
+ 'EntityName': host.get('entity').get('name'),
+ 'OS': host.get('os'),
+ 'ThreatLevel': host.get('threatLevel'),
+ 'UseEventlogCredentials': host.get('useEventlogCredentials'),
+ 'Name': host.get('name'),
+ 'DateUpdated': host.get('dateUpdated'),
+ 'HostZone': host.get('hostZone'),
+ 'RiskLevel': host.get('riskLevel'),
+ 'Location': host.get('location'),
+ 'Status': host.get('recordStatusName'),
+ 'ThreatLevelComments': host.get('threatLevelComments'),
+ 'ID': host.get('id'),
+ 'OSType': host.get('osType')
+ }
+ new_hosts.append(tmp_host)
+ return new_hosts
+
+
+def update_networks_keys(networks):
+ new_networks = []
+
+ for network in networks:
+ tmp_network = {
+ 'EndIP': network.get('eip'),
+ 'HostStatus': network.get('recordStatusName'),
+ 'Name': network.get('name'),
+ 'RiskLevel': network.get('riskLevel'),
+ 'EntityId': network.get('entity').get('id'),
+ 'EntityName': network.get('entity').get('name'),
+ 'Location': network.get('location'),
+ 'ThreatLevel': network.get('threatLevel'),
+ 'DateUpdated': network.get('dateUpdated'),
+ 'HostZone': network.get('hostZone'),
+ 'ID': network.get('id'),
+ 'BeganIP': network.get('bip')
+ }
+ new_networks.append(tmp_network)
+ return new_networks
+
+
+def update_persons_keys(persons):
+ new_persons = []
+
+ for person in persons:
+ tmp_person = {
+ 'ID': person.get('id'),
+ 'DateUpdated': person.get('dateUpdated'),
+ 'HostStatus': person.get('recordStatusName'),
+ 'LastName': person.get('lastName'),
+ 'FirstName': person.get('firstName'),
+ 'IsAPIPerson': person.get('isAPIPerson'),
+ 'UserID': person.get('user').get('id'),
+ 'UserLogin': person.get('user').get('login')
+ }
+ new_persons.append(tmp_person)
+ return new_persons
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ http_request('GET', 'lr-admin-api/hosts')
+ demisto.results('ok')
+
+
+def add_host(data_args):
+ data = {
+ "id": -1,
+ "entity": {
+ "id": int(data_args.get('entity-id')),
+ "name": data_args.get('entity-name')
+ },
+ "name": data_args.get('name'),
+ "shortDesc": data_args.get('short-description'),
+ "longDesc": data_args.get('long-description'),
+ "riskLevel": data_args.get('risk-level'),
+ "threatLevel": data_args.get('threat-level'),
+ "threatLevelComments": data_args.get('threat-level-comments'),
+ "recordStatusName": data_args.get('host-status'),
+ "hostZone": data_args.get('host-zone'),
+ "os": data_args.get('os'),
+ "useEventlogCredentials": bool(data_args.get('use-eventlog-credentials')),
+ "osType": data_args.get('os-type')
+ }
+
+ res = http_request('POST', 'lr-admin-api/hosts/', json.dumps(data))
+ res = fix_location_value([res])
+ context = createContext(update_hosts_keys(res), removeNull=True)
+ outputs = {'Logrhythm.Host(val.ID === obj.ID)': context}
+ return_outputs(readable_output=data_args.get('name') + " added successfully to " + data_args.get('entity-name'),
+ outputs=outputs, raw_response=res)
+
+
+def get_hosts_by_entity(data_args):
+ res = http_request('GET', 'lr-admin-api/hosts?entity=' + data_args['entity-name'] + '&count=' + data_args['count'])
+ res = fix_location_value(res)
+ res = update_hosts_keys(res)
+ context = createContext(res, removeNull=True)
+ human_readable = tableToMarkdown('Hosts for ' + data_args.get('entity-name'), res, HOSTS_HEADERS)
+ outputs = {'Logrhythm.Host(val.Name && val.ID === obj.ID)': context}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=res)
+
+
+def get_hosts(data_args):
+ id = data_args.get('host-id')
+ if id:
+ res = get_host_by_id(id)
+ else:
+ res = http_request('GET', 'lr-admin-api/hosts?count=' + data_args['count'])
+
+ res = fix_location_value(res)
+ res = update_hosts_keys(res)
+ context = createContext(res, removeNull=True)
+ human_readable = tableToMarkdown('Hosts information:', res, HOSTS_HEADERS)
+ outputs = {'Logrhythm.Host(val.Name && val.ID === obj.ID)': context}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=res)
+
+
+def change_status(data_args):
+ data = [{
+ "hostId": int(data_args.get('host-id')),
+ "status": data_args.get('status')
+ }]
+
+ res = http_request('PUT', 'lr-admin-api/hosts/status', json.dumps(data))
+
+ host_info = get_host_by_id(data_args.get('host-id'))
+ context = createContext(update_hosts_keys(host_info), removeNull=True)
+ outputs = {'Logrhythm.Host(val.ID === obj.ID)': context}
+ return_outputs(readable_output='Status updated to ' + data_args.get('status'), outputs=outputs, raw_response=res)
+
+
+def execute_query(data_args):
+ # generate random string for request id
+ req_id = ''.join(random.choice(string.ascii_letters) for x in range(8))
+ start, end = get_time_frame(data_args.get('time-frame'), data_args.get('start-date'), data_args.get('end-date'))
+ delta = end - start
+ dates = []
+
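+    # The legacy search API keeps one Elasticsearch index per day ("logs-YYYY-MM-DD"); cover the whole range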
+ for i in range(delta.days + 1):
+ dates.append((start + timedelta(days=i)).strftime("logs-%Y-%m-%d"))
+
+ data = {
+ "indices": dates,
+ "searchType": "DFS_QUERY_THEN_FETCH",
+ "source": {
+ "size": data_args.get('page-size'),
+ "query": {
+ "query_string": {
+ "default_field": "logMessage",
+ "query": data_args.get('keyword')
+ }
+ },
+ "stored_fields": "logMessage",
+ "sort": [
+ {
+ "normalDate": {
+ "order": "asc"
+ }
+ }
+ ]
+ }
+ }
+
+ headers = dict(HEADERS)
+ headers['Content-Type'] = 'application/json'
+ headers['Request-Id'] = req_id
+ headers['Request-Origin-Date'] = str(datetime.now())
+ headers['x-gateway-route-to-tag'] = CLUSTER_ID
+
+ res = http_request('POST', 'lr-legacy-search-api/esquery', json.dumps(data), headers)
+ logs = res['hits']['hits']
+ logs_response = []
+
+ xml_ns = './/{http://schemas.microsoft.com/win/2004/08/events/event}'
+
+ for log in logs:
+ message = str(log['fields']['logMessage'])
+ message = message[3:-2]
+
+ try:
+ root = ET.fromstring(message)
+
+ log_item = {
+ "EventID": str(root.find(xml_ns + 'EventID').text), # type: ignore
+ "Level": str(root.find(xml_ns + 'Level').text), # type: ignore
+ "Task": str(root.find(xml_ns + 'Task').text), # type: ignore
+ "Opcode": str(root.find(xml_ns + 'Opcode').text), # type: ignore
+ "Keywords": str(root.find(xml_ns + 'Keywords').text), # type: ignore
+ "Channel": str(root.find(xml_ns + 'Channel').text), # type: ignore
+ "Computer": str(root.find(xml_ns + 'Computer').text), # type: ignore
+ "EventData": str(root.find(xml_ns + 'EventData').text) # type: ignore
+ .replace('\\r\\n', '\n').replace('\\t', '\t')
+ }
+ logs_response.append(log_item)
+ except Exception:
+ continue
+
+ context = createContext(logs_response, removeNull=True)
+ human_readable = tableToMarkdown('logs results', logs_response, LOGS_HEADERS)
+ outputs = {'Logrhythm.Log': context}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=logs_response)
+
+
+def get_persons(data_args):
+ id = data_args.get('person-id')
+ if id:
+ res = [http_request('GET', 'lr-admin-api/persons/' + id)]
+ else:
+ res = http_request('GET', 'lr-admin-api/persons?count=' + data_args['count'])
+ res = update_persons_keys(res)
+ context = createContext(res, removeNull=True)
+ outputs = {'Logrhythm.Person(val.ID === obj.ID)': context}
+ human_readable = tableToMarkdown('Persons information', context, PERSON_HEADERS)
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=res)
+
+
+def get_networks(data_args):
+ id = data_args.get('network-id')
+ if id:
+ res = [http_request('GET', 'lr-admin-api/networks/' + id)]
+ else:
+ res = http_request('GET', 'lr-admin-api/networks?count=' + data_args['count'])
+ res = fix_location_value(res)
+ res = update_networks_keys(res)
+ context = createContext(res, removeNull=True)
+ outputs = {'Logrhythm.Network(val.ID === obj.ID)': context}
+ human_readable = tableToMarkdown('Networks information', context, NETWORK_HEADERS)
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=res)
+
+
+def get_alarm_data(data_args):
+ id = data_args.get('alarm-id')
+ res = http_request('GET', 'lr-drilldown-cache-api/drilldown/' + id)
+
+ alarm_data = res['Data']['DrillDownResults']
+ alarm_summaries = res['Data']['DrillDownResults']['RuleBlocks']
+ del alarm_data['RuleBlocks']
+ aie_message = xml2json(str(alarm_data.get('AIEMsgXml'))).replace('\"@', '\"')
+ alarm_data['AIEMsgXml'] = json.loads(aie_message).get('aie')
+ alarm_data['Status'] = ALARM_STATUS[str(alarm_data['Status'])]
+ alarm_data['ID'] = alarm_data['AlarmID']
+ del alarm_data['AlarmID']
+
+ dds_summaries = []
+ for block in alarm_summaries:
+ for item in block['DDSummaries']:
+ item['PIFType'] = PIF_TYPES[str(item['PIFType'])]
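+            # DrillDownSummaryLogs holds raw JSON; keep only the "field" names for a readable summary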
+ m = re.findall(r'"field": "(([^"]|\\")*)"', item['DrillDownSummaryLogs'])
+ fields = [k[0] for k in m]
+ item['DrillDownSummaryLogs'] = ", ".join(fields)
+ del item['DefaultValue']
+ dds_summaries.append(item)
+
+ alarm_data['Summary'] = dds_summaries
+
+ context = createContext(alarm_data, removeNull=True)
+ outputs = {'Logrhythm.Alarm(val.ID === obj.ID)': context}
+
+ del alarm_data['AIEMsgXml']
+ del alarm_data['Summary']
+ human_readable = tableToMarkdown('Alarm information for alarm id ' + id, alarm_data) + tableToMarkdown(
+ 'Alarm summaries', dds_summaries, ALARM_SUMMARY_HEADERS)
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=res)
+
+
+def get_alarm_events(data_args):
+ id = data_args.get('alarm-id')
+ count = int(data_args.get('count'))
+ fields = data_args.get('fields')
+ show_log_message = data_args.get('get-log-message') == 'True'
+
+ res = http_request('GET', 'lr-drilldown-cache-api/drilldown/' + id)
+ res = res['Data']['DrillDownResults']['RuleBlocks']
+
+ events = []
+
+ for block in res:
+ if not block.get('DrillDownLogs'):
+ continue
+ logs = json.loads(block['DrillDownLogs'])
+ for log in logs:
+ fix_date_values(log)
+ if not show_log_message:
+ del log['logMessage']
+ events.append((log))
+
+ events = events[:count]
+ human_readable = tableToMarkdown('Events information for alarm ' + id, events)
+
+ if fields:
+        fields = fields.split(',')
+        for event in events:
+            # Iterate over a copy of the keys; deleting while iterating raises in Python 3
+            for key in list(event.keys()):
+                if key not in fields:
+                    del event[key]
+
+ ec = {"ID": int(id), "Event": events}
+ context = createContext(ec, removeNull=True)
+ outputs = {'Logrhythm.Alarm(val.ID === obj.ID)': context}
+
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=res)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+
+def main():
+ LOG('Command being called is %s' % (demisto.command()))
+
+ try:
+ handle_proxy()
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+ elif demisto.command() == 'lr-add-host':
+ add_host(demisto.args())
+ elif demisto.command() == 'lr-get-hosts-by-entity':
+ get_hosts_by_entity(demisto.args())
+ elif demisto.command() == 'lr-get-hosts':
+ get_hosts(demisto.args())
+ elif demisto.command() == 'lr-execute-query':
+ execute_query(demisto.args())
+ elif demisto.command() == 'lr-update-host-status':
+ change_status(demisto.args())
+ elif demisto.command() == 'lr-get-persons':
+ get_persons(demisto.args())
+ elif demisto.command() == 'lr-get-networks':
+ get_networks(demisto.args())
+ elif demisto.command() == 'lr-get-alarm-data':
+ get_alarm_data(demisto.args())
+ elif demisto.command() == 'lr-get-alarm-events':
+ get_alarm_events(demisto.args())
+ except Exception as e:
+        return_error('An error has occurred: {}'.format(str(e)))
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/LogRhythmRest/LogRhythmRest.yml b/Integrations/LogRhythmRest/LogRhythmRest.yml
new file mode 100644
index 000000000000..892a1d13cad3
--- /dev/null
+++ b/Integrations/LogRhythmRest/LogRhythmRest.yml
@@ -0,0 +1,676 @@
+category: Analytics & SIEM
+commonfields:
+ id: LogRhythmRest
+ version: -1
+configuration:
+- display: Hostname, IP address, or server URL.
+ name: url
+ required: true
+ type: 0
+- display: API Token
+ name: token
+ required: true
+ type: 4
+- defaultvalue: 'False'
+ display: Trust any certificate (unsecure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: 'False'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Search API cluster ID.
+ name: cluster-id
+ required: false
+ type: 0
+description: LogRhythm security intelligence.
+display: LogRhythmRest
+name: LogRhythmRest
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Filter log messages by this argument.
+ isArray: false
+ name: keyword
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: Number of logs to return.
+ isArray: false
+ name: page-size
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: Custom
+      description: The time frame for the query. If "Custom", specify the time
+        range with the start-date and end-date arguments.
+ isArray: false
+ name: time-frame
+ predefined:
+ - Today
+ - Last2Days
+ - LastWeek
+ - LastMonth
+ - Custom
+ required: false
+ secret: false
+ - default: false
+ description: 'Start date for the data query, for example: "2018-04-20". Only
+ use this argument if the time-frame argument is "Custom".'
+ isArray: false
+ name: start-date
+ required: false
+ secret: false
+ - default: false
+ description: 'End date for the data query, for example: "2018-04-20". Only use
+ this argument if the time-frame argument is "Custom".'
+ isArray: false
+ name: end-date
+ required: false
+ secret: false
+ deprecated: false
+ description: Executes a query for logs that match query parameters.
+ execution: false
+ name: lr-execute-query
+ outputs:
+ - contextPath: Logrhythm.Log.Channel
+ description: Channel
+ type: string
+ - contextPath: Logrhythm.Log.Computer
+ description: Computer
+ type: string
+ - contextPath: Logrhythm.Log.EventData
+ description: Event data
+ type: string
+ - contextPath: Logrhythm.Log.EventID
+ description: Event ID
+ type: string
+ - contextPath: Logrhythm.Log.Keywords
+ description: Keywords
+ type: string
+ - contextPath: Logrhythm.Log.Level
+ description: Level
+ type: string
+ - contextPath: Logrhythm.Log.Opcode
+ description: Opcode
+ type: string
+ - contextPath: Logrhythm.Log.Task
+ description: Task
+ type: string
+ - arguments:
+ - default: false
+ description: The entity name.
+ isArray: false
+ name: entity-name
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: Number of hosts to return.
+ isArray: false
+ name: count
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of hosts for a given entity, or an empty list if
+ none is found.
+ execution: false
+ name: lr-get-hosts-by-entity
+ outputs:
+ - contextPath: Logrhythm.Host.EntityId
+ description: The entity ID.
+ type: String
+ - contextPath: Logrhythm.Host.EntityName
+ description: The entity name.
+ type: String
+ - contextPath: Logrhythm.Host.OS
+ description: The host OS.
+ type: String
+ - contextPath: Logrhythm.Host.ThreatLevel
+ description: The host threat level.
+ type: String
+ - contextPath: Logrhythm.Host.UseEventlogCredentials
+ description: Use event log credentials
+ type: String
+ - contextPath: Logrhythm.Host.Name
+ description: The name of the host.
+ type: String
+ - contextPath: Logrhythm.Host.DateUpdated
+ description: The last update date of the host.
+ type: String
+ - contextPath: Logrhythm.Host.HostZone
+ description: The host zone.
+ type: String
+ - contextPath: Logrhythm.Host.RiskLevel
+ description: The risk level.
+ type: String
+ - contextPath: Logrhythm.Host.Location
+ description: The host location.
+ type: String
+ - contextPath: Logrhythm.Host.Status
+ description: The host status.
+ type: String
+ - contextPath: Logrhythm.Host.ID
+ description: The unique ID of the host object.
+ type: String
+ - contextPath: Logrhythm.Host.OSType
+ description: The type of the host OS.
+ type: String
+ - arguments:
+ - default: false
+ description: The entity ID.
+ isArray: false
+ name: entity-id
+ required: true
+ secret: false
+ - default: false
+ description: The entity name.
+ isArray: false
+ name: entity-name
+ required: true
+ secret: false
+ - default: false
+ description: The LogRhythm host name.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ defaultValue: None
+ description: The short description.
+ isArray: false
+ name: short-description
+ required: false
+ secret: false
+ - default: false
+ defaultValue: None
+ description: The long description.
+ isArray: false
+ name: long-description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      defaultValue: None
+      description: The host risk level.
+ isArray: false
+ name: risk-level
+ predefined:
+ - None
+ - Low-Low
+ - Low-Medium
+ - Low-High
+ - Medium-Low
+ - Medium-Medium
+ - Medium-High
+ - High-Low
+ - High-Medium
+ - High-High
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: None
+ description: The host threat level.
+ isArray: false
+ name: threat-level
+ predefined:
+ - None
+ - Low-Low
+ - Low-Medium
+ - Low-High
+ - Medium-Low
+ - Medium-Medium
+ - Medium-High
+ - High-Low
+ - High-Medium
+ - High-High
+ required: false
+ secret: false
+ - default: false
+ defaultValue: None
+ description: Comments for the host threat level.
+ isArray: false
+ name: threat-level-comments
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The host status.
+ isArray: false
+ name: host-status
+ predefined:
+ - New
+ - Retired
+ - Active
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The host zone.
+ isArray: false
+ name: host-zone
+ predefined:
+ - Unknown
+ - Internal
+ - DMZ
+ - External
+ required: true
+ secret: false
+ - default: false
+ description: The host OS.
+ isArray: false
+ name: os
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Use eventlog credentials.
+ isArray: false
+ name: use-eventlog-credentials
+ predefined:
+ - 'true'
+ - 'false'
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: Unknown
+      description: The host OS type.
+ isArray: false
+ name: os-type
+ predefined:
+ - Unknown
+ - Other
+ - WindowsNT4
+ - Windows2000Professional
+ - Windows2000Server
+ - Windows2003Standard
+ - Windows2003Enterprise
+ - Windows95
+ - WindowsXP
+ - WindowsVista
+ - Linux
+ - Solaris
+ - AIX
+ - HPUX
+ - Windows
+ required: false
+ secret: false
+ deprecated: false
+ description: Add a new host to an entity.
+ execution: false
+ name: lr-add-host
+ outputs:
+ - contextPath: Logrhythm.Host.EntityId
+ description: The entity ID.
+ type: string
+ - contextPath: Logrhythm.Host.EntityName
+ description: The entity name.
+ type: string
+ - contextPath: Logrhythm.Host.OS
+ description: The host OS.
+ type: string
+ - contextPath: Logrhythm.Host.ThreatLevel
+ description: The host threat level.
+ type: string
+ - contextPath: Logrhythm.Host.UseEventlogCredentials
+ description: Use event log credentials
+ type: string
+ - contextPath: Logrhythm.Host.Name
+ description: The name of the host.
+ type: string
+ - contextPath: Logrhythm.Host.DateUpdated
+ description: The last update date of the host.
+ type: string
+ - contextPath: Logrhythm.Host.HostZone
+ description: The host zone.
+ type: string
+ - contextPath: Logrhythm.Host.RiskLevel
+ description: The risk level.
+ type: string
+ - contextPath: Logrhythm.Host.Location
+ description: The host location.
+ type: string
+ - contextPath: Logrhythm.Host.Status
+ description: The host status.
+ type: string
+ - contextPath: Logrhythm.Host.ID
+ description: The unique ID of the host object.
+ type: string
+ - contextPath: Logrhythm.Host.OSType
+ description: The type of the host OS.
+ type: string
+ - arguments:
+ - default: false
+ description: The unique ID of the host.
+ isArray: false
+ name: host-id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The enumeration status of the host.
+ isArray: false
+ name: status
+ predefined:
+ - Retired
+ - Active
+ required: true
+ secret: false
+ deprecated: false
+    description: Updates a host status.
+ execution: false
+ name: lr-update-host-status
+ outputs:
+ - contextPath: Logrhythm.Host.EntityId
+ description: The entity ID.
+ type: string
+ - contextPath: Logrhythm.Host.EntityName
+ description: The entity name.
+ type: string
+ - contextPath: Logrhythm.Host.OS
+ description: The host OS.
+ type: string
+ - contextPath: Logrhythm.Host.ThreatLevel
+ description: The host threat level.
+ type: string
+ - contextPath: Logrhythm.Host.UseEventlogCredentials
+ description: Use event log credentials
+ type: string
+ - contextPath: Logrhythm.Host.Name
+ description: The name of the host.
+ type: string
+ - contextPath: Logrhythm.Host.DateUpdated
+ description: The last update date of the host.
+ type: string
+ - contextPath: Logrhythm.Host.HostZone
+ description: The host zone.
+ type: string
+ - contextPath: Logrhythm.Host.RiskLevel
+ description: The risk level.
+ type: string
+ - contextPath: Logrhythm.Host.Location
+ description: The host location.
+ type: string
+ - contextPath: Logrhythm.Host.Status
+ description: The host status.
+ type: string
+ - contextPath: Logrhythm.Host.ID
+ description: The unique ID of the host object.
+ type: string
+ - contextPath: Logrhythm.Host.OSType
+ description: The type of the host OS.
+ type: string
+ - arguments:
+ - default: false
+ description: The LogRhythm person ID.
+ isArray: false
+ name: person-id
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '30'
+ description: Number of persons to return. Default is 30.
+ isArray: false
+ name: count
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of persons.
+ execution: false
+ name: lr-get-persons
+ outputs:
+ - contextPath: Logrhythm.Person.DateUpdated
+ description: Date that the person was updated.
+ type: String
+ - contextPath: Logrhythm.Person.FirstName
+ description: First name.
+ type: String
+ - contextPath: Logrhythm.Person.LastName
+ description: Last name.
+ type: String
+ - contextPath: Logrhythm.Person.HostStatus
+ description: Host status.
+ type: string
+ - contextPath: Logrhythm.Person.ID
+ description: Person ID.
+ type: String
+ - contextPath: Logrhythm.Person.IsAPIPerson
+      description: Whether the person is an API person.
+ type: Boolean
+ - contextPath: Logrhythm.Person.UserID
+ description: User ID.
+ type: String
+ - contextPath: Logrhythm.Person.UserLogin
+ description: User login.
+ type: String
+ - arguments:
+ - default: false
+ description: The LogRhythm network ID.
+ isArray: false
+ name: network-id
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '30'
+ description: Number of networks to return.
+ isArray: false
+ name: count
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of networks.
+ execution: false
+ name: lr-get-networks
+ outputs:
+    - contextPath: Logrhythm.Network.BeganIP
+      description: Beginning IP address of the network range.
+ type: String
+ - contextPath: Logrhythm.Network.ThreatLevel
+ description: Threat level.
+ type: String
+ - contextPath: Logrhythm.Network.Name
+ description: Network name.
+ type: String
+    - contextPath: Logrhythm.Network.EndIP
+      description: End IP address of the network range.
+ type: String
+ - contextPath: Logrhythm.Network.DateUpdated
+ description: Date updated.
+ type: String
+ - contextPath: Logrhythm.Network.EntityName
+ description: Entity name.
+ type: String
+ - contextPath: Logrhythm.Network.HostZone
+ description: Host zone.
+ type: String
+ - contextPath: Logrhythm.Network.RiskLevel
+ description: Risk level.
+ type: String
+ - contextPath: Logrhythm.Network.Location
+ description: Network location.
+ type: String
+ - contextPath: Logrhythm.Network.HostStatus
+ description: Host status.
+ type: String
+ - contextPath: Logrhythm.Network.ID
+ description: Network ID.
+ type: String
+ - contextPath: Logrhythm.Network.EntityId
+ description: Entity ID.
+ type: String
+ - arguments:
+ - default: false
+ description: The LogRhythm host ID.
+ isArray: false
+ name: host-id
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '30'
+ description: Number of hosts to return. Default is 30.
+ isArray: false
+ name: count
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of hosts.
+ execution: false
+ name: lr-get-hosts
+ outputs:
+ - contextPath: Logrhythm.Host.EntityId
+ description: The entity ID.
+ type: String
+ - contextPath: Logrhythm.Host.EntityName
+ description: The entity name.
+ type: String
+ - contextPath: Logrhythm.Host.OS
+ description: The host OS.
+ type: String
+ - contextPath: Logrhythm.Host.ThreatLevel
+ description: The host threat level.
+ type: String
+ - contextPath: Logrhythm.Host.UseEventlogCredentials
+ description: Use event log credentials.
+ type: String
+ - contextPath: Logrhythm.Host.Name
+ description: The name of the host.
+ type: String
+ - contextPath: Logrhythm.Host.DateUpdated
+ description: Date that the host was last updated.
+ type: String
+ - contextPath: Logrhythm.Host.HostZone
+ description: The host zone.
+ type: String
+ - contextPath: Logrhythm.Host.RiskLevel
+ description: The risk level.
+ type: String
+ - contextPath: Logrhythm.Host.Location
+ description: The host location.
+ type: String
+ - contextPath: Logrhythm.Host.Status
+ description: The host status.
+ type: String
+ - contextPath: Logrhythm.Host.ID
+ description: The unique ID of the host object.
+ type: String
+ - contextPath: Logrhythm.Host.OSType
+ description: Host OS type.
+ type: String
+ - arguments:
+ - default: false
+ description: The alarm ID.
+ isArray: false
+ name: alarm-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns data for an alarm.
+ execution: false
+ name: lr-get-alarm-data
+ outputs:
+ - contextPath: Logrhythm.Alarm.Status
+ description: The alarm status.
+ type: String
+ - contextPath: Logrhythm.Alarm.EventID
+ description: The alarm event ID.
+ type: String
+ - contextPath: Logrhythm.Alarm.LastDxTimeStamp
+ description: The timestamp when the drilldown returned new results
+ from the Data Indexer.
+ type: String
+ - contextPath: Logrhythm.Alarm.DateInserted
+ description: The alarm date inserted.
+ type: String
+ - contextPath: Logrhythm.Alarm.AIERuleName
+ description: The alarm AI engine (AIE) rule.
+ type: String
+ - contextPath: Logrhythm.Alarm.Priority
+ description: The alarm priority.
+ type: String
+ - contextPath: Logrhythm.Alarm.AIERuleID
+ description: The alarm AI engine (AIE) rule ID.
+ type: String
+ - contextPath: Logrhythm.Alarm.ID
+ description: The alarm ID.
+ type: String
+ - contextPath: Logrhythm.Alarm.NotificationSent
+ description: Whether an alarm notification was sent.
+ type: Boolean
+ - contextPath: Logrhythm.Alarm.AlarmGuid
+ description: The alarm GUID.
+ type: String
+ - contextPath: Logrhythm.Alarm.RetryCount
+ description: The alarm retry count.
+ type: String
+ - contextPath: Logrhythm.Alarm.NormalMessageDate
+ description: The alarm message date.
+ type: String
+ - contextPath: Logrhythm.Alarm.WebConsoleIds
+ description: The alarm web console IDs.
+ type: String
+ - contextPath: Logrhythm.Alarm.Summary.PIFType
+ description: Alarm Primary Inspection Field (the original name for "Summary
+ Field").
+ type: String
+ - contextPath: Logrhythm.Alarm.Summary.DrillDownSummaryLogs
+ description: Drill down summary logs.
+ type: String
+ - arguments:
+ - default: false
+ description: The alarm ID.
+ isArray: false
+ name: alarm-id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Number of events to return. Default is 10.
+ isArray: false
+ name: count
+ required: false
+ secret: false
+ - default: false
+ description: A CSV list of fields (outputs) to return to the context.
+ If empty, all fields are returned.
+ isArray: false
+ name: fields
+ predefined:
+ - ''
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+      description: Whether to return the log message from the event.
+ isArray: false
+ name: get-log-message
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of events, by alarm ID.
+ execution: false
+ name: lr-get-alarm-events
+ outputs:
+ - contextPath: Logrhythm.Alarm.Event
+ description: Alarm event information.
+ type: String
+ - contextPath: Logrhythm.Alarm.ID
+ description: The alarm ID.
+ type: String
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- LogRhythm REST test
diff --git a/Integrations/LogRhythmRest/LogRhythmRest_description.md b/Integrations/LogRhythmRest/LogRhythmRest_description.md
new file mode 100644
index 000000000000..9b7980cefafb
--- /dev/null
+++ b/Integrations/LogRhythmRest/LogRhythmRest_description.md
@@ -0,0 +1,12 @@
+Integration with LogRhythm via the REST API. You can execute queries on logs, get host information, add new hosts, and update host status.
+
+## Configuration Parameters
+
+**Hostname**
+This is the network address of the LogRhythm server host.
+
+**API Token**
+The token entered here should be one created in the LogRhythm console for the REST API.
+
+**Search API cluster ID**
+Browse to `http://localhost:8500/ui/#/dc1/services/lr-legacy-search-api` on the LogRhythm host; the cluster ID appears under the `TAGS` header.
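+
+To verify the token outside of Demisto, you can call the same endpoint the integration's test button uses. This is a minimal sketch; the host name, the admin API port `8501`, and the token value are placeholders that vary per deployment:
+
+```python
+import requests
+
+BASE_URL = 'https://logrhythm.example.com:8501'  # placeholder host and port
+TOKEN = '<API_TOKEN>'  # placeholder token created in the LogRhythm console
+
+resp = requests.get(
+    BASE_URL + '/lr-admin-api/hosts?count=1',
+    headers={'Authorization': 'Bearer ' + TOKEN},
+    verify=False,  # equivalent to enabling "Trust any certificate"
+)
+resp.raise_for_status()
+print(resp.json())
+```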
diff --git a/Integrations/LogRhythmRest/LogRhythmRest_image.png b/Integrations/LogRhythmRest/LogRhythmRest_image.png
new file mode 100644
index 000000000000..f631d5943e55
Binary files /dev/null and b/Integrations/LogRhythmRest/LogRhythmRest_image.png differ
diff --git a/Integrations/LogRhythmRest/LogRhythmRest_test.py b/Integrations/LogRhythmRest/LogRhythmRest_test.py
new file mode 100644
index 000000000000..219b7e9600ee
--- /dev/null
+++ b/Integrations/LogRhythmRest/LogRhythmRest_test.py
@@ -0,0 +1,28 @@
+from datetime import date, timedelta
+
+
+def test_get_time_frame():
+ from LogRhythmRest import get_time_frame
+
+ date_format = "%Y-%m-%d"
+ today = date.today()
+
+ start, end = get_time_frame('Today', None, None)
+ assert end.strftime(date_format) == today.strftime(date_format)
+ assert start.strftime(date_format) == today.strftime(date_format)
+
+ start, end = get_time_frame('Last2Days', None, None)
+ assert end.strftime(date_format) == today.strftime(date_format)
+ assert start.strftime(date_format) == (today - timedelta(days=2)).strftime(date_format)
+
+ start, end = get_time_frame('LastWeek', None, None)
+ assert end.strftime(date_format) == today.strftime(date_format)
+ assert start.strftime(date_format) == (today - timedelta(days=7)).strftime(date_format)
+
+ start, end = get_time_frame('LastMonth', None, None)
+ assert end.strftime(date_format) == today.strftime(date_format)
+ assert start.strftime(date_format) == (today - timedelta(days=30)).strftime(date_format)
+
+ start, end = get_time_frame('Custom', '2019-04-01', '2019-04-20')
+ assert end.strftime(date_format) == '2019-04-20'
+ assert start.strftime(date_format) == '2019-04-01'
diff --git a/Integrations/Looker/Looker.py b/Integrations/Looker/Looker.py
new file mode 100644
index 000000000000..2c9becc8629d
--- /dev/null
+++ b/Integrations/Looker/Looker.py
@@ -0,0 +1,434 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+
+from typing import Dict
+import requests
+import traceback
+import json
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+
+''' CONSTANTS '''
+LAST_RUN_TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
+DEFAULT_RESULTS_LIMIT = 50
+MAX_TIMEOUT_MINUTES = 5
+
+
+''' GLOBALS/PARAMS '''
+SESSION_VALIDITY_THRESHOLD = timedelta(minutes=MAX_TIMEOUT_MINUTES)
+CLIENT_ID = demisto.params().get('client_id')
+CLIENT_SECRET = demisto.params().get('client_secret')
+# Remove trailing slash to prevent wrong URL path to service
+SERVER = demisto.params()['url'][:-1] if (demisto.params()['url']
+ and demisto.params()['url'].endswith('/')) else demisto.params()['url']
+# Should we use SSL
+USE_SSL = not demisto.params().get('unsecure', False)
+# How much time before the first fetch to retrieve incidents
+FETCH_TIME = demisto.params().get('fetch_time', '3 days')
+# Service base URL
+BASE_URL = SERVER + '/api/3.0'
+# Request headers (preparation)
+HEADERS: Dict[str, str] = {}
+
+
+''' HELPER FUNCTIONS '''
+
+
+def verify_url(url):
+ # validate url parameter format, extract port
+ try:
+ server, port = url.rsplit(':', 1)
+ assert 0 < int(port) < 65536
+
+ except (ValueError, AssertionError):
+ raise ValueError("Incorrect URL format. Use the following format: https://example.looker.com:19999\n"
+ "The default port for Looker API is 19999.")
+
+
+def http_request(method, url_suffix, params=None, data=None, response_type='json'):
+ # A wrapper for requests lib to send our requests and handle requests and responses better
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ headers=HEADERS
+ )
+
+ # Handle error responses gracefully
+ if res.status_code not in {200}:
+ error_message = f'Error in API call to Looker [{res.status_code}] - {res.reason}'
+
+ # Try to get detailed errors from looker json response
+ if res.status_code in (400, 422):
+ try:
+ error_json = res.json()
+ error_message += f"\n{error_json['message']}"
+
+ if res.status_code == 422:
+ validation_error_message = ""
+ for validation_error in error_json['errors']:
+ validation_error_message += f"\n{validation_error['field']} {validation_error['message']}"
+ error_message += validation_error_message
+ except (KeyError, ValueError):
+ pass
+
+ raise requests.exceptions.HTTPError(error_message)
+
+ # Return by expected type
+ if response_type != 'json':
+ return res.content
+
+ res_obj = res.json()
+
+ # Handle non-http type error messages from looker
+ if isinstance(res_obj, list) and len(res_obj) == 1 and \
+ isinstance(res_obj[0], dict) and 'looker_error' in res_obj[0]:
+ raise Exception(res_obj[0]['looker_error'])
+
+ return res_obj
+
+
+def get_new_token(client_id, client_secret):
+ data = {
+ 'client_id': client_id,
+ 'client_secret': client_secret
+ }
+
+ try:
+ response_json = http_request('POST', '/login', data=data)
+
+ return {
+ 'token': response_json['access_token'],
+ 'expires': datetime.utcnow().timestamp() + response_json['expires_in']
+ }
+
+ except requests.exceptions.HTTPError as ex:
+ if '[404]' in str(ex):
+ raise Exception("Got 404 from server - check 'API3 Client ID' and 'API3 Client Secret' fields "
+ "in the instance configuration.")
+ raise
+
+
+def get_session_token(client_id, client_secret):
+ ic = demisto.getIntegrationContext()
+
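+    # Reuse the cached token for this client_id unless it is missing or expires within the validity threshold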
+ if client_id not in ic or 'expires' not in ic[client_id] \
+ or datetime.fromtimestamp(ic[client_id]['expires']) < datetime.utcnow() + SESSION_VALIDITY_THRESHOLD:
+ ic[client_id] = get_new_token(client_id, client_secret)
+ if demisto.command() != 'test-module':
+ demisto.setIntegrationContext(ic)
+
+ return 'token {}'.format(ic[client_id]['token'])
+
+
+def get_limit():
+ try:
+ limit = int(demisto.args().get('limit', DEFAULT_RESULTS_LIMIT))
+ return None if limit == 0 else limit
+
+ except ValueError:
+ raise ValueError("limit must be a number")
+
+
+def get_look_id_from_name(name):
+ looks = search_looks({'title': name})
+ if len(looks) < 1:
+ raise Exception(f'No Look found with the name {name}.')
+ if len(looks) > 1:
+        raise Exception(f'There is more than one Look with the name "{name}".'
+                        f"\nUse the look ID instead - it can be found in the Look's URL or by running looker-search-looks.")
+
+ return looks[0]['ID']
+
+
+def full_path_headers(src_data, base_path):
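+    # Prefix each result key with its full context path so table headers show where values live in the context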
+ def to_full_path(k):
+ return f"{base_path}.{k}"
+
+ def full_path_headers_for_dict(src):
+ if not isinstance(src, dict):
+ return src
+
+ return {to_full_path(k): v for k, v in src.items()}
+
+ if not isinstance(src_data, list):
+ src_data = [src_data]
+
+ return [full_path_headers_for_dict(x) for x in src_data]
+
+
+def parse_filters_arg(filters_arg_value):
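+    # Parse 'field1=value1;field2=value2' into a dict, reporting the position of any malformed filter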
+ error_message = "'filters' argument format is invalid.\n"
+
+ filters_list = argToList(filters_arg_value, ';')
+ filters_list = [elem for elem in [x.strip() for x in filters_list] if elem] # Remove empty elems
+ if not filters_list:
+ return
+
+ filters = {}
+ filters_and_indices_list = zip(range(len(filters_list)), filters_list) # Track element index for error messages
+ for i, elem in filters_and_indices_list:
+ try:
+ k, v = elem.split('=', 1)
+ k = k.strip()
+ if not k:
+ raise ValueError(f"{error_message}Filter in position {i+1}: field is empty.")
+ v = v.strip()
+ if not v:
+ raise ValueError(f"{error_message}Filter in position {i+1} ({k}): value is empty.")
+ filters[k] = v
+ except ValueError:
+ raise ValueError(f"{error_message}Filter in position {i+1} is missing '=' separator")
+
+ return filters
+
+
+def get_entries_for_search_results(contents, look_id=None, result_format='json', look_name=''):
+ entries = []
+ if result_format == 'json':
+ camelized = camelize(contents, delim='_')
+ formatted_contents = replace_in_keys(camelized)
+ if not isinstance(formatted_contents, list):
+ formatted_contents = [formatted_contents]
+
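+        # The DT filter in the context key below lets repeated runs for the same look
+        # update the existing LookerResults entry instead of appending a duplicate.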
+ if look_id:
+ context = {
+ 'LookerResults(val.LookID && val.LookID === obj.LookID)': {
+ 'LookID': int(look_id),
+ 'Results': formatted_contents
+ }
+ }
+ hr_title = f'Results for look "{look_name}"' if look_name else f'Results for look #{look_id}'
+ full_path_header_content = full_path_headers(formatted_contents, 'LookerResults.Results')
+ else:
+ context = {'LookerResults.InlineQuery': formatted_contents}
+ hr_title = 'Inline Query Results'
+ full_path_header_content = full_path_headers(formatted_contents, 'LookerResults.InlineQuery')
+
+ entries.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(hr_title, full_path_header_content, removeNull=True),
+ 'EntryContext': context
+ })
+
+ if contents:
+ entries.append( # type: ignore
+ 'This command has dynamic output keys.\n'
+ 'To access them in the context, copy the key\'s path from the column header in the results table.'
+ )
+
+ elif result_format == 'csv':
+ entries.append(fileResult('look_result.csv' if look_id else 'inline_query_result.csv', contents,
+ entryTypes['entryInfoFile']))
+
+ return entries
+
+
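+# Sketch of the expected conversion (illustrative values):
+# get_query_args({'model': 'ecommerce', 'view': 'orders', 'fields': 'orders.id,orders.status',
+#                 'filters': 'orders.status=complete'})
+# returns {'fields': ['orders.id', 'orders.status'], 'model': 'ecommerce', 'view': 'orders',
+#          'filters': {'orders.status': 'complete'}}.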
+def get_query_args(demisto_args):
+ str_args = ('model', 'view')
+ list_args = ('fields', 'pivots', 'sorts')
+ args_dict = {k: argToList(demisto_args[k]) for k in list_args if k in demisto_args} # Parse list-type arguments
+ args_dict.update({k: demisto_args[k] for k in str_args}) # Add string-type arguments
+ filters = parse_filters_arg(demisto_args.get('filters')) # Handle special argument
+ if filters:
+ args_dict['filters'] = filters
+
+ return args_dict
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+    Performs a basic GET request to check connectivity and authentication.
+ """
+ http_request('GET', '/user')
+
+
+def run_look_command():
+ look_id = demisto.args().get('id')
+ look_name = demisto.args().get('name')
+ if not any((look_id, look_name)):
+        raise Exception('Provide a look ID or name.')
+ if look_name and not look_id:
+ look_id = get_look_id_from_name(look_name)
+
+ result_format = demisto.args()['result_format']
+ limit = get_limit()
+    fields = argToList(demisto.args().get('fields'))
+
+ contents = run_look(look_id, result_format, limit, fields)
+
+ demisto.results(get_entries_for_search_results(contents, look_id, result_format, look_name))
+
+
+def run_look(look_id, result_format, limit, fields):
+ endpoint_url = f'/looks/{look_id}/run/{result_format}'
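+    # Produces a call like GET /looks/42/run/json?limit=50 (the ID and params are illustrative).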
+ params = {}
+ if limit:
+ params['limit'] = limit
+ if fields:
+ params['fields'] = fields
+ return http_request('GET', endpoint_url, params=params, response_type=result_format)
+
+
+def search_looks_command():
+ command_args = ('space_id', 'user_id') # Possible command arguments
+ args_dict = {k: demisto.args()[k] for k in command_args if k in demisto.args()} # Get args that were passed
+
+ # Arguments with special logic
+ args_dict['limit'] = get_limit()
+ if 'name' in demisto.args():
+ args_dict['title'] = demisto.args()['name']
+
+ contents = search_looks(args_dict)
+ context = {f'Looker.Look(val.ID && val.ID === {look["ID"]})': look for look in contents}
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+        'HumanReadable': tableToMarkdown('Look search results', contents, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def search_looks(args):
+ endpoint_url = '/looks/search'
+ params = {k: v for k, v in args.items() if v}
+ params['fields'] = 'id, title, space, updated_at'
+ response = http_request('GET', endpoint_url, params=params)
+
+ if not isinstance(response, list):
+ response = [response]
+
+ return [
+ {
+ 'ID': look['id'],
+ 'Name': look['title'],
+ 'SpaceID': look['space']['id'],
+ 'SpaceName': look['space']['name'],
+ 'LastUpdated': look['updated_at'].replace('+00:00', 'Z')
+ } for look in response
+ ]
+
+
+def run_inline_query_command():
+ result_format = demisto.args()['result_format']
+ args_dict = get_query_args(demisto.args())
+
+ args_dict['limit'] = get_limit()
+
+ contents = run_inline_query(result_format, args_dict)
+
+ demisto.results(get_entries_for_search_results(contents, result_format=result_format))
+
+
+def run_inline_query(result_format, args_dict):
+ return http_request(
+ method='POST',
+ url_suffix=f'/queries/run/{result_format}',
+ data=json.dumps(args_dict),
+ response_type=result_format
+ )
+
+
+def create_look_command():
+ space_id = demisto.args()['look_space_id']
+ try:
+ space_id = int(space_id)
+ except ValueError:
+        raise ValueError(f'look_space_id: invalid number: {space_id}')
+
+ look_title = demisto.args()['look_title']
+ look_description = demisto.args().get('look_description')
+ args_dict = get_query_args(demisto.args())
+
+ create_query_response = create_query(args_dict)
+ query_id = create_query_response['id']
+
+ contents = create_look(query_id, space_id, look_title, look_description)
+
+ context = {f'Looker.Look(val.ID && val.ID === {contents["ID"]})': contents}
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(f'Look "{look_title}" created successfully', contents, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def create_query(args_dict):
+ return http_request(method='POST', url_suffix='/queries', data=json.dumps(args_dict))
+
+
+def create_look(query_id, space_id, look_title, look_description=""):
+ data = {
+ 'title': look_title,
+ 'query_id': query_id,
+ 'space_id': space_id
+ }
+ if look_description:
+ data['look_description'] = look_description
+
+ look = http_request(method='POST', url_suffix='/looks', data=json.dumps(data))
+
+ return {
+ 'ID': look['id'],
+ 'Name': look['title'],
+ 'SpaceID': look['space']['id'],
+ 'SpaceName': look['space']['name'],
+ 'LastUpdated': look['updated_at'].replace('+00:00', 'Z')
+ }
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+
+def main():
+ LOG('Command being called is %s' % (demisto.command()))
+ try:
+ handle_proxy()
+ verify_url(SERVER)
+ HEADERS['Authorization'] = get_session_token(CLIENT_ID, CLIENT_SECRET)
+
+ if demisto.command() == 'test-module':
+ test_module()
+ demisto.results('ok')
+ elif demisto.command() == 'looker-run-look':
+ run_look_command()
+ elif demisto.command() == 'looker-search-looks':
+ search_looks_command()
+ elif demisto.command() == 'looker-run-inline-query':
+ run_inline_query_command()
+ elif demisto.command() == 'looker-create-look':
+ create_look_command()
+
+ # Log exceptions
+ except Exception as e:
+ LOG(e)
+ LOG(traceback.format_exc())
+ LOG.print_log()
+ if demisto.command() == 'test-module':
+ demisto.results(e)
+ else:
+ return_error(str(e))
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/Looker/Looker.yml b/Integrations/Looker/Looker.yml
new file mode 100644
index 000000000000..4de21089d215
--- /dev/null
+++ b/Integrations/Looker/Looker.yml
@@ -0,0 +1,277 @@
+category: Analytics & SIEM
+commonfields:
+ id: Looker
+ version: -1
+configuration:
+- display: API URL and port (e.g., https://example.looker.com:19999)
+ name: url
+ required: true
+ type: 0
+- display: API3 Client ID
+ name: client_id
+ required: true
+ type: 0
+- display: API3 Client Secret
+ name: client_secret
+ required: true
+ type: 4
+- display: Trust any certificate (not secure)
+ name: unsecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Use the Looker integration to query an explore, save queries as looks,
+ run looks, and fetch look results as incidents.
+display: Looker
+name: Looker
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: ID of the look. Can be found in the look's URL, or by running the
+ 'looker-search-looks' command.
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ - default: false
+ description: Fields to return.
+ isArray: true
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: Name of the look.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '50'
+    description: Maximum number of results to return (0 for the Looker-determined limit).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: json
+ description: Format of the result.
+ isArray: false
+ name: result_format
+ predefined:
+ - json
+ - csv
+ required: true
+ secret: false
+ deprecated: false
+ description: Runs a saved look and returns the results in the specified format.
+ execution: false
+ name: looker-run-look
+ outputs:
+ - contextPath: LookerResults.LookID
+ description: Look ID.
+ type: Number
+ - contextPath: LookerResults.Results
+ description: Look results.
+ type: Unknown
+ - arguments:
+ - default: true
+ description: Match look name.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ description: Filter results by a particular space.
+ isArray: false
+ name: space_id
+ required: false
+ secret: false
+ - default: false
+    description: Filter by looks created by a particular user.
+ isArray: false
+ name: user_id
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '50'
+    description: Maximum number of looks to return (0 for the Looker-determined limit).
+ isArray: false
+ name: limit
+ predefined:
+ - ''
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves saved looks that match the search criteria.
+ execution: false
+ name: looker-search-looks
+ outputs:
+ - contextPath: Looker.Look.ID
+ description: Look ID.
+ type: Number
+ - contextPath: Looker.Look.Name
+ description: Look name.
+ type: String
+ - contextPath: Looker.Look.SpaceID
+ description: ID of the space that contains the look.
+ type: Number
+ - contextPath: Looker.Look.SpaceName
+ description: Name of the space that contains the look.
+ type: String
+ - contextPath: Looker.Look.LastUpdated
+ description: The time that the look was last updated.
+ type: Date
+ - arguments:
+ - default: false
+ description: Name of the model. Can be found in the explore's URL.
+ isArray: false
+ name: model
+ required: true
+ secret: false
+ - default: false
+ description: Name of the view or explore. Can be found in the explore's URL.
+ isArray: false
+ name: view
+ required: true
+ secret: false
+ - default: false
+ description: 'List of fields to display. (Field name format: "object_name.field_name")'
+ isArray: true
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: 'Filters for the query, passed as a semicolon-separated list with
+ the format: "field name=filter value;..." (Field name format: "object_name.field_name").'
+ isArray: true
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: 'List of pivots. (Field name format: "object_name.field_name")'
+ isArray: true
+ name: pivots
+ required: false
+ secret: false
+ - default: false
+ description: 'Sorting for the query results. (Field name format: "object_name.field_name").'
+ isArray: true
+ name: sorts
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '50'
+    description: Maximum number of results to return (0 for the Looker-determined limit).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: json
+ description: Format of the results.
+ isArray: false
+ name: result_format
+ predefined:
+ - json
+ - csv
+ required: true
+ secret: false
+ deprecated: false
+  description: Runs a query defined in the command arguments, rather than
+    a saved query in Looker.
+ execution: false
+ name: looker-run-inline-query
+ outputs:
+ - contextPath: LookerResults.InlineQuery
+ description: Inline query results.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: Name of the model. Can be found in the explore's URL.
+ isArray: false
+ name: model
+ required: true
+ secret: false
+ - default: false
+ description: Name of the view or explore. Can be found in the explore's URL.
+ isArray: false
+ name: view
+ required: true
+ secret: false
+ - default: false
+ description: 'List of fields to display. (Field name format: "object_name.field_name").'
+ isArray: true
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: 'Filters for the query, passed as a semicolon-separated list with
+ the format: "field name=filter value;..." (Field name format: "object_name.field_name").'
+ isArray: true
+ name: filters
+ required: false
+ secret: false
+ - default: false
+ description: 'List of pivots. (Field name format: "object_name.field_name").'
+ isArray: true
+ name: pivots
+ required: false
+ secret: false
+ - default: false
+ description: 'Sorting for the query results. (Field name format: "object_name.field_name").'
+ isArray: true
+ name: sorts
+ required: false
+ secret: false
+ - default: false
+ description: Title of the look.
+ isArray: false
+ name: look_title
+ required: true
+ secret: false
+ - default: false
+ description: Description of the look.
+ isArray: false
+ name: look_description
+ required: false
+ secret: false
+ - default: false
+    description: ID of the space that will contain the look.
+ isArray: false
+ name: look_space_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Creates a look from a query.
+ execution: false
+ name: looker-create-look
+ outputs:
+ - contextPath: Looker.Look.ID
+ description: Look ID.
+ type: Number
+ - contextPath: Looker.Look.Name
+ description: Look name.
+ type: String
+ - contextPath: Looker.Look.SpaceID
+ description: ID of the space that contains the look.
+ type: Number
+ - contextPath: Looker.Look.SpaceName
+ description: Name of the space that contains the look.
+ type: String
+ - contextPath: Looker.Look.LastUpdated
+ description: The time that the look was last updated.
+ type: Date
+ dockerimage: demisto/python3:3.7.3.221
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- Test-Looker
diff --git a/Integrations/Looker/Looker_description.md b/Integrations/Looker/Looker_description.md
new file mode 100644
index 000000000000..3448c485bd55
--- /dev/null
+++ b/Integrations/Looker/Looker_description.md
@@ -0,0 +1,32 @@
+#### Generate an API3 key for a Looker user:
+1. Log in to the Looker web interface with an account that is permitted to manage users.
+2. At the top of the page, click the "Admin" drop-down and select "Users".
+3. Select the user for whom you would like to generate the API3 key.
+4. Go to "API3 Keys" and select "Edit Keys".
+5. Click "New API3 Key".
+
+#### Get a look ID:
+**Usages:**
+- "Look name or ID to fetch incidents from" integration parameter
+- Look ID command arguments
+- Uniquely identify a Look (the name is not unique).
+
+**Option A:** Looker Web Interface
+1. Click on the desired look.
+2. Look at the URL - it should end with a number - that is the ID of the look.
+
+**Option B:** Demisto commands
+1. Configure Looker without fetching incidents or filling in the parameter.
+2. Run the looker-search-looks command.
+3. The ID will be part of the results (among other look details).
+
+#### Get model and view names from an explore's URL:
+1. Navigate to the desired explore.
+2. The URL will be formatted like so: `https://<looker-instance>/explore/<model_name>/<view_name>`
+
+#### Get a field's SQL name (for command arguments):
+1. Navigate to the desired explore.
+2. Click on the desired field.
+3. In the "DATA" tab, Click on "SQL".
+
+You will see the field name in the following format: "object_name.field_name"
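+
+For example, a hypothetical field "status" on the "order" object would appear as "order.status".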
\ No newline at end of file
diff --git a/Integrations/Looker/Looker_image.png b/Integrations/Looker/Looker_image.png
new file mode 100644
index 000000000000..1af67e983b11
Binary files /dev/null and b/Integrations/Looker/Looker_image.png differ
diff --git a/Integrations/Looker/Looker_test.py b/Integrations/Looker/Looker_test.py
new file mode 100644
index 000000000000..ee3db68256f8
--- /dev/null
+++ b/Integrations/Looker/Looker_test.py
@@ -0,0 +1,20 @@
+import demistomock as demisto
+
+
+def test_parse_filters_arg(mocker):
+ mocker.patch.object(demisto, 'params', return_value={'url': ''})
+
+ from Looker import parse_filters_arg
+
+ assert parse_filters_arg('') is None
+ assert parse_filters_arg('e=f') == {'e': 'f'}
+ assert parse_filters_arg('e=f, g') == {'e': 'f, g'}
+ assert parse_filters_arg('e=f; g = h') == {'e': 'f', 'g': 'h'}
+ assert parse_filters_arg('a_b.c_d= e f g h ') == {'a_b.c_d': 'e f g h'}
+
+ for test_input in ('a', 'a;', ' ; a', 'a; b', 'e=f; g = ', 'e=f; g'):
+ try:
+ parse_filters_arg(test_input)
+ raise AssertionError(f'Negative test failed on input: {test_input}')
+ except ValueError:
+ continue
diff --git a/Integrations/MISP_V2/CHANGELOG.md b/Integrations/MISP_V2/CHANGELOG.md
new file mode 100644
index 000000000000..16311e1a1656
--- /dev/null
+++ b/Integrations/MISP_V2/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+-
\ No newline at end of file
diff --git a/Integrations/MISP_V2/MISP_V2.py b/Integrations/MISP_V2/MISP_V2.py
new file mode 100644
index 000000000000..92c639308bee
--- /dev/null
+++ b/Integrations/MISP_V2/MISP_V2.py
@@ -0,0 +1,1149 @@
+import logging
+import warnings
+from typing import Union, List, Any, Tuple, Dict
+from urllib.parse import urlparse
+
+import requests
+from pymisp import ExpandedPyMISP, PyMISPError, MISPObject # type: ignore
+from pymisp.tools import EMailObject, GenericObjectGenerator # type: ignore
+
+from CommonServerPython import *
+
+logging.getLogger("pymisp").setLevel(logging.CRITICAL)
+
+
+def warn(*args, **kwargs):
+ """
+ Do nothing with warnings
+ """
+ pass
+
+
+# Disable requests warnings
+requests.packages.urllib3.disable_warnings()
+
+# Disable python warnings
+warnings.warn = warn
+
+''' GLOBALS/PARAMS '''
+MISP_KEY = demisto.params().get('api_key')
+MISP_URL = demisto.params().get('url')
+USE_SSL = not demisto.params().get('insecure')
+proxies = handle_proxy() # type: ignore
+MISP_PATH = 'MISP.Event(obj.ID === val.ID)'
+MISP = ExpandedPyMISP(MISP_URL, MISP_KEY, ssl=USE_SSL, proxies=proxies) # type: ExpandedPyMISP
+
+"""
+dict format :
+ MISP key:DEMISTO key
+"""
+PREDEFINED_FEEDS = {
+ 'CIRCL': {'name': 'CIRCL OSINT Feed',
+ 'url': 'https://www.circl.lu/doc/misp/feed-osint',
+ 'format': 'misp',
+ 'input': 'network'},
+ 'Botvrij.eu': {'name': 'The Botvrij.eu Data',
+ 'url': 'http://www.botvrij.eu/data/feed-osint',
+ 'format': 'misp',
+ 'input': 'network'}
+}
+ENTITIESDICT = {
+ 'deleted': 'Deleted',
+ 'category': 'Category',
+ 'comment': 'Comment',
+ 'uuid': 'UUID',
+ 'sharing_group_id': 'SharingGroupID',
+ 'timestamp': 'Timestamp',
+ 'to_ids': 'ToIDs',
+ 'value': 'Value',
+ 'event_id': 'EventID',
+ 'ShadowAttribute': 'ShadowAttribute',
+ 'disable_correlation': 'DisableCorrelation',
+ 'distribution': 'Distribution',
+ 'type': 'Type',
+ 'id': 'ID',
+ 'date': 'Date',
+ 'info': 'Info',
+ 'published': 'Published',
+ 'attribute_count': 'AttributeCount',
+ 'proposal_email_lock': 'ProposalEmailLock',
+ 'locked': 'Locked',
+ 'publish_timestamp': 'PublishTimestamp',
+ 'event_creator_email': 'EventCreatorEmail',
+ 'name': 'Name',
+ 'analysis': 'Analysis',
+ 'threat_level_id': 'ThreatLevelID',
+ 'old_id': 'OldID',
+ 'org_id': 'OrganisationID',
+ 'Org': 'Organisation',
+ 'Orgc': 'OwnerOrganisation',
+ 'orgc_uuid': 'OwnerOrganisation.UUID',
+ 'orgc_id': 'OwnerOrganisation.ID',
+ 'orgc_name': 'OwnerOrganisation.Name',
+ 'event_uuid': 'EventUUID',
+ 'proposal_to_delete': 'ProposalToDelete',
+ 'description': 'Description',
+ 'version': 'Version',
+ 'Object': 'Object',
+ 'object_id': 'ObjectID',
+ 'object_relation': 'ObjectRelation',
+ 'template_version': 'TemplateVersion',
+ 'template_uuid': 'TemplateUUID',
+ 'meta-category': 'MetaCategory'
+}
+
+THREAT_LEVELS_WORDS = {
+ '1': 'HIGH',
+ '2': 'MEDIUM',
+ '3': 'LOW',
+ '4': 'UNDEFINED'
+}
+
+THREAT_LEVELS_NUMBERS = {
+ 'high': 1,
+ 'medium': 2,
+ 'low': 3,
+ 'undefined': 4
+}
+
+ANALYSIS_WORDS = {
+ '0': 'Initial',
+ '1': 'Ongoing',
+ '2': 'Completed'
+}
+
+ANALYSIS_NUMBERS = {
+ 'initial': 0,
+ 'ongoing': 1,
+ 'completed': 2
+}
+
+DISTRIBUTION_NUMBERS = {
+ 'Your_organisation_only': 0,
+ 'This_community_only': 1,
+ 'Connected_communities': 2,
+ 'All_communities': 3
+}
+''' HELPER FUNCTIONS '''
+
+
+def extract_error(error: list) -> List[dict]:
+ """Extracting errors
+
+ Args:
+ error: list of responses from error section
+
+ Returns:
+ List[Dict[str, any]]: filtered response
+
+ Examples:
+ extract_error([
+ (403,
+ {
+ 'name': 'Could not add object',
+ 'message': 'Could not add object',
+ 'url': '/objects/add/156/',
+ 'errors': 'Could not save object as at least one attribute has failed validation (ip). \
+ {"value":["IP address has an invalid format."]}'
+ }
+ )
+ ])
+
+ Response:
+ [{
+ 'code': 403,
+ 'message': 'Could not add object',
+ 'errors': 'Could not save object as at least one attribute has failed validation (ip). \
+ {"value":["IP address has an invalid format."]}'
+ }]
+
+ """
+ return [{
+ 'code': err[0],
+ 'message': err[1].get('message'),
+ 'errors': err[1].get('errors')
+ } for err in error]
+
+
+def build_list_from_dict(args: dict) -> List[dict]:
+ """
+
+ Args:
+ args: dictionary describes MISP object
+
+ Returns:
+ list: list containing dicts that GenericObjectGenerator can take.
+
+ Examples:
+        >>> build_list_from_dict({'ip': '8.8.8.8', 'domain': 'google.com'})
+ [{'ip': '8.8.8.8'}, {'domain': 'google.com'}]
+ """
+ return [{k: v} for k, v in args.items()]
+
+
+def build_generic_object(template_name: str, args: List[dict]) -> GenericObjectGenerator:
+ """
+
+ Args:
+        template_name: template name, as described in the MISP objects documentation (https://www.misp-project.org/objects.html)
+ args: arguments to create the generic object
+
+ Returns:
+ GenericObjectGenerator: object created in MISP
+
+ Example:
+ args should look like:
+ [{'analysis_submitted_at': '2018-06-15T06:40:27'},
+         {'threat_score': {'value': 95, 'to_ids': False}},
+ {'permalink': 'https://panacea.threatgrid.com/mask/samples/2e445ef5389d8b'},
+ {'heuristic_raw_score': 7.8385159793597}, {'heuristic_score': 96},
+ {'original_filename': 'juice.exe'}, {'id': '2e445ef5389d8b'}] # guardrails-disable-line
+ """
+ misp_object = GenericObjectGenerator(template_name)
+ misp_object.generate_attributes(args)
+ return misp_object
+
+
+def convert_timestamp(timestamp: Union[str, int]) -> str:
+ """
+ Gets a timestamp from MISP response (1546713469) and converts it to human readable format
+ """
+ return datetime.utcfromtimestamp(int(timestamp)).strftime('%Y-%m-%d %H:%M:%S')
+
+
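+# Example: replace_keys({'event_id': '42', 'Tag': [{'name': 'tlp:white'}]}) returns
+# {'EventID': '42', 'Tag': [{'Name': 'tlp:white'}]} - only keys listed in
+# ENTITIESDICT are renamed; values and unknown keys are kept as-is.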
+def replace_keys(obj_to_build: Union[dict, list, str]) -> Union[dict, list, str]:
+ """
+    Replaces keys from MISP's format with Demisto's (as they appear in ENTITIESDICT).
+
+ Args:
+ obj_to_build (Union[dict, list, str]): object to replace keys in
+
+ Returns:
+ Union[dict, list, str]: same object type that got in
+ """
+ if isinstance(obj_to_build, list):
+ return [replace_keys(item) for item in obj_to_build]
+ if isinstance(obj_to_build, dict):
+ return {
+ (ENTITIESDICT[key] if key in ENTITIESDICT else key): replace_keys(value)
+ for key, value in obj_to_build.items()
+ }
+ return obj_to_build
+
+
+def build_context(response: Union[dict, requests.Response]) -> dict: # type: ignore
+ """
+    Gets a MISP response and builds the context output from it. Keys missing from the response are omitted.
+
+ Args:
+ response (requests.Response or dict):
+ Returns:
+ dict: context output
+ """
+ event_args = [
+ 'id',
+ 'date',
+ 'threat_level_id',
+ 'info',
+ 'published',
+ 'uuid',
+ 'analysis',
+ 'timestamp',
+ 'distribution',
+ 'proposal_email_lock',
+ 'locked',
+ 'publish_timestamp',
+ 'sharing_group_id',
+ 'disable_correlation',
+ 'event_creator_email',
+ 'Org',
+ 'Orgc',
+ 'Attribute',
+ 'ShadowAttribute',
+ 'RelatedEvent',
+ 'Galaxy',
+ 'Tag',
+ 'Object'
+ ]
+    # Sometimes PyMISP returns a str instead of a dict; json.loads() won't work unless we json.dumps it first
+ if isinstance(response, str):
+ response = json.loads(json.dumps(response))
+ # Remove 'Event' keyword
+ events = [event.get('Event') for event in response] # type: ignore
+ for i in range(0, len(events)):
+ # Filter object from keys in event_args
+ events[i] = {
+ key: events[i].get(key)
+ for key in event_args if key in events[i]
+ }
+
+ # Remove 'Event' keyword from 'RelatedEvent'
+ if events[i].get('RelatedEvent'):
+ events[i]['RelatedEvent'] = [
+ r_event.get('Event') for r_event in events[i].get('RelatedEvent')
+ ]
+
+ # Get only IDs from related event
+ events[i]['RelatedEvent'] = [
+ {
+ 'id': r_event.get('id')
+ } for r_event in events[i].get('RelatedEvent')
+ ]
+
+ # Build Galaxy
+ if events[i].get('Galaxy'):
+ events[i]['Galaxy'] = [
+ {
+ 'name': star.get('name'),
+ 'type': star.get('type'),
+ 'description': star.get('description')
+ } for star in events[i]['Galaxy']
+ ]
+
+ # Build tag
+ if events[i].get('Tag'):
+ events[i]['Tag'] = [
+ {'Name': tag.get('name')} for tag in events[i].get('Tag')
+ ]
+ events = replace_keys(events) # type: ignore
+ return events # type: ignore
+
+
+def get_misp_threat_level(threat_level_id: str) -> str: # type: ignore
+ """Gets MISP's thread level and returning it in Demisto's format
+
+ Args:
+        threat_level_id: str of threat level in MISP
+
+ Returns:
+ str: Threat-level in Demisto
+ """
+ if threat_level_id == '1':
+ return 'HIGH'
+ if threat_level_id == '2':
+ return 'MEDIUM'
+ if threat_level_id == '3':
+ return 'LOW'
+ if threat_level_id == '4':
+ return 'UNDEFINED'
+ return_error('Invalid MISP Threat Level with threat_level_id: ' + threat_level_id)
+
+
+def get_dbot_level(threat_level_id: str) -> int:
+ """
+ MISP to DBOT:
+ 4 = 0 (UNDEFINED to UNKNOWN)
+ 3 = 2 (LOW to SUSPICIOUS)
+ 1 | 2 = 3 (MED/HIGH to MALICIOUS)
+ Args:
+ threat_level_id (str):
+ Returns:
+ int: DBOT score
+ """
+ if threat_level_id in ('1', '2'):
+ return 3
+ if threat_level_id == '3':
+ return 2
+ if threat_level_id == '4':
+ return 0
+ return 0
+
+
+def check_file():
+ """
+    Gets a file hash from the command arguments and posts matching MISP events.
+
+ file_hash (str): File's hash from demisto
+
+ Returns:
+ dict: MISP's output formatted to demisto:
+ """
+ file_hash = demisto.args().get('file')
+ # hashFormat will be used only in output
+ hash_format = get_hash_type(file_hash).upper()
+    if hash_format == 'UNKNOWN':
+ return_error('Invalid hash length, enter file hash of format MD5, SHA-1 or SHA-256')
+
+ # misp_response will remain the raw output of misp
+ misp_response = MISP.search(value=file_hash)
+ if misp_response:
+ dbot_list = list()
+ file_list = list()
+ md_list = list()
+ for i_event in misp_response:
+ event = i_event['Event']
+        i_event['Event']['RelatedEvent'] = [r_event.get('Event') for r_event in event.get('RelatedEvent') or []]
+
+ for i_event in misp_response:
+ event = i_event['Event']
+ misp_organisation = f"MISP.{event.get('orgc_name')}"
+ dbot_score = get_dbot_level(event.get('threat_level_id'))
+ # Build RelatedEvent
+ # if dbot_score is suspicious or malicious
+ dbot_obj = {
+ 'Indicator': file_hash,
+ 'Type': 'hash',
+ 'Vendor': misp_organisation,
+ 'Score': dbot_score
+ }
+
+ file_obj = {
+ hash_format: file_hash
+ }
+ # if malicious, find file with given hash
+ if dbot_score == 3:
+ file_obj['Malicious'] = {
+ 'Vendor': misp_organisation,
+ 'Description': f'file hash found in MISP event with ID: {event.get("id")}'
+ }
+
+ md_obj = {
+ 'EventID': event.get('id'),
+ 'Threat Level': THREAT_LEVELS_WORDS[event.get('threat_level_id')],
+ 'Organisation': misp_organisation
+ }
+
+ file_list.append(file_obj)
+ dbot_list.append(dbot_obj)
+ md_list.append(md_obj)
+
+ # Building entry
+ ec = {
+ outputPaths.get('file'): file_list,
+ outputPaths.get('dbotscore'): dbot_list
+ }
+
+ md = tableToMarkdown(f'Results found in MISP for hash: {file_hash}', md_list)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': misp_response,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': md,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results(f"No events found in MISP for hash {file_hash}")
+
+
+def check_ip():
+ """
+    Gets an IP address and returns its reputation (if it exists)
+ ip (str): IP to check
+ """
+ ip = demisto.args().get('ip')
+ if not is_ip_valid(ip):
+ return_error("IP isn't valid")
+
+ misp_response = MISP.search(value=ip)
+
+ if misp_response:
+ dbot_list = list()
+ ip_list = list()
+ md_list = list()
+
+ for event_in_response in misp_response:
+ event = event_in_response.get('Event')
+ dbot_score = get_dbot_level(event.get('threat_level_id'))
+ misp_organisation = f'MISP.{event.get("Orgc").get("name")}'
+
+ dbot_obj = {
+ 'Indicator': ip,
+ 'Type': 'ip',
+ 'Vendor': misp_organisation,
+ 'Score': dbot_score
+ }
+ ip_obj = {'Address': ip}
+ # if malicious
+ if dbot_score == 3:
+ ip_obj['Malicious'] = {
+ 'Vendor': misp_organisation,
+                    'Description': f'IP found in MISP event: {event.get("id")}'
+ }
+ md_obj = {
+ 'EventID': event.get('id'),
+ 'Threat Level': THREAT_LEVELS_WORDS[event.get('threat_level_id')],
+ 'Organisation': misp_organisation
+ }
+
+ ip_list.append(ip_obj)
+ dbot_list.append(dbot_obj)
+ md_list.append(md_obj)
+
+ ec = {
+ outputPaths.get('ip'): ip_list,
+ outputPaths.get('dbotscore'): dbot_list,
+ MISP_PATH: build_context(misp_response)
+ }
+
+ md = tableToMarkdown(f'Results found in MISP for IP: {ip}', md_list)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': misp_response,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': md,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results(f'No events found in MISP for IP: {ip}')
+
+
+def upload_sample():
+ """
+    MISP needs to receive files in base64. In the old (JS) integration, a script did the conversion.
+ """
+ # Creating dict with Demisto's arguments
+ args = ['distribution', 'to_ids', 'category', 'info', 'analysis', 'comment', 'threat_level_id']
+ args = {key: demisto.args().get(key) for key in args if demisto.args().get(key)}
+ args['threat_level_id'] = THREAT_LEVELS_NUMBERS.get(demisto.args().get('threat_level_id')) if demisto.args().get(
+ 'threat_level_id') in THREAT_LEVELS_NUMBERS else demisto.args().get('threat_level_id')
+ args['analysis'] = ANALYSIS_NUMBERS.get(demisto.args().get('analysis')) if demisto.args().get(
+ 'analysis') in ANALYSIS_NUMBERS else demisto.args().get('analysis')
+ event_id = demisto.args().get('event_id')
+
+ file = demisto.getFilePath(demisto.args().get('fileEntryID'))
+ filename = file.get('name')
+ file = file.get('path')
+
+ if not file:
+ return_error(f'file {filename} is empty or missing')
+
+ if not event_id:
+ if not demisto.args().get('info'):
+ demisto.args()['info'] = filename
+ event_id = create_event(ret_only_event_id=True)
+
+ res = MISP.upload_sample(filename=filename, filepath_or_bytes=file, event_id=event_id, **args)
+ if res.get('name') == 'Failed':
+ ec = None
+ else:
+ ec = {f"MISP.UploadedSample": {filename: event_id}}
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': res,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable':
+ f"MISP upload sample \n* message: {res.get('message')}\n* event id: {event_id}\n* file name: {filename}",
+ 'EntryContext': ec,
+ })
+
+
+def get_time_now():
+ """
+ Returns:
+ str: time in year--month--day format
+ """
+ time_now = time.gmtime(time.time())
+ return f'{time_now.tm_year}--{time_now.tm_mon}--{time_now.tm_mday}'
+
+
+def create_event(ret_only_event_id: bool = False) -> Union[int, None]:
+ """Creating event in MISP with the given attribute
+
+ Args:
+ ret_only_event_id (bool): returning event ID if set to True
+
+ Returns:
+ int: event_id
+ """
+ d_args = demisto.args()
+    # new_event in the old integration gets some args that belong to the attribute, so after creating the basic
+    # event, we add the attribute separately
+ event_dic = {
+ 'distribution': d_args.get('distribution'),
+ 'threat_level_id': THREAT_LEVELS_NUMBERS.get(d_args.get('threat_level_id')) if d_args.get(
+ 'threat_level_id') in THREAT_LEVELS_NUMBERS else d_args.get('threat_level_id'),
+ 'analysis': ANALYSIS_NUMBERS.get(demisto.args().get('analysis')) if demisto.args().get(
+ 'analysis') in ANALYSIS_NUMBERS else demisto.args().get('analysis'),
+ 'info': d_args.get('info') if d_args.get('info') else 'Event from Demisto',
+ 'date': d_args.get('date') if d_args.get('date') else get_time_now(),
+ 'published': True if d_args.get('published') == 'true' else False,
+ 'orgc_id': d_args.get('orgc_id'),
+ 'org_id': d_args.get('org_id'),
+ 'sharing_group_id': d_args.get('sharing_group_id')
+ }
+
+ event = MISP.new_event(**event_dic)
+ event_id = event.get('id')
+ if isinstance(event_id, str) and event_id.isdigit():
+ event_id = int(event_id)
+ elif not isinstance(event_id, int):
+ return_error('EventID must be a number')
+
+ if ret_only_event_id:
+ return event_id
+
+ # add attribute
+ add_attribute(event_id=event_id, internal=True)
+
+ event = MISP.search(eventid=event_id)
+
+ md = f"## MISP create event\nNew event with ID: {event_id} has been successfully created.\n"
+ ec = {
+ MISP_PATH: build_context(event)
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': event,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': ec
+ })
+ return None
+
+
+def add_attribute(event_id: int = None, internal: bool = None):
+ """Adding attribute to given event
+
+ Args:
+ event_id (int): Event ID to add attribute to
+ internal(bool): if set to True, will not post results to Demisto
+ """
+ d_args = demisto.args()
+ args = {
+ 'id': d_args.get('id'),
+ 'type': d_args.get('type') if d_args.get('type') else 'other',
+ 'category': d_args.get('category'),
+ 'to_ids': True if d_args.get('to_ids') == 'true' else False,
+ 'distribution': d_args.get('distribution'),
+ 'comment': d_args.get('comment'),
+ 'value': d_args.get('value')
+ }
+ if event_id:
+ args['id'] = event_id # type: ignore
+ if isinstance(args.get('id'), str) and args.get('id').isdigit(): # type: ignore
+ args['id'] = int(args['id'])
+ elif not isinstance(args.get('id'), int):
+ return_error('Invalid MISP event ID, must be a number')
+ if args.get('distribution') is not None:
+ if not isinstance(args.get('distribution'), int):
+ if isinstance(args.get('distribution'), str) and args.get('distribution').isdigit(): # type: ignore
+ args['distribution'] = int(args['distribution'])
+ elif isinstance(args.get('distribution'), str) and args['distribution'] in DISTRIBUTION_NUMBERS:
+ args['distribution'] = DISTRIBUTION_NUMBERS.get(args['distribution'])
+ else:
+ return_error(
+ "Distribution can be 'Your_organisation_only', "
+ "'This_community_only', 'Connected_communities' or 'All_communities'"
+ )
+
+ event = MISP.get_event(args.get('id'))
+
+ # add attributes
+ event.add_attribute(**args)
+ MISP.update_event(event=event)
+ if internal:
+ return
+ event = MISP.search(eventid=args.get('id'))
+ md = f"## MISP add attribute\nNew attribute: {args.get('value')} was added to event id {args.get('id')}.\n"
+ ec = {
+ MISP_PATH: build_context(event)
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': {},
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': ec
+ })
+
+
+def download_file():
+ """
+ Will post results of given file's hash if present.
+ MISP's response should be in case of success:
+ (True, [EventID, filename, fileContent])
+ in case of failure:
+ (False, 'No hits with the given parameters.')
+ """
+ file_hash = demisto.args().get('hash')
+ event_id = demisto.args().get('eventID')
+ unzip = True if demisto.args().get('unzipped') == 'true' else False
+ all_samples = True if demisto.args().get('allSamples') in ('1', 'true') else False
+
+ response = MISP.download_samples(sample_hash=file_hash,
+ event_id=event_id,
+ all_samples=all_samples,
+ unzip=unzip
+ )
+ if not response[0]:
+ demisto.results(f"Couldn't find file with hash {file_hash}")
+ else:
+ if unzip:
+ files = list()
+ for f in response:
+ # Check if it's tuple. if so, f = (EventID, hash, fileContent)
+ if isinstance(f, tuple) and len(f) == 3:
+ filename = f[1]
+ files.append(fileResult(filename, f[2].getbuffer()))
+ demisto.results(files)
+ else:
+ file_buffer = response[1][0][2].getbuffer()
+ filename = response[1][0][1]
+ demisto.results(fileResult(filename, file_buffer)) # type: ignore
+
+
+def check_url():
+ url = demisto.args().get('url')
+ response = MISP.search(value=url, type_attribute='url')
+
+ if response:
+ dbot_list = list()
+ md_list = list()
+ url_list = list()
+
+ for event_in_response in response:
+ event = event_in_response.get('Event')
+ dbot_score = get_dbot_level(event.get('threat_level_id'))
+ misp_organisation = f"MISP.{event.get('Orgc').get('name')}"
+
+ dbot_obj = {
+ 'Indicator': url,
+ 'Type': 'url',
+ 'Vendor': misp_organisation,
+ 'Score': dbot_score
+ }
+
+ url_obj = {
+ 'Data': url,
+ }
+ if dbot_score == 3:
+ url_obj['Malicious'] = {
+ 'Vendor': misp_organisation,
+                'Description': f'URL found in MISP event: {event.get("id")}'
+ }
+ md_obj = {
+ 'EventID': event.get('id'),
+ 'Threat Level': THREAT_LEVELS_WORDS[event.get('threat_level_id')],
+ 'Organisation': misp_organisation
+ }
+ dbot_list.append(dbot_obj)
+ md_list.append(md_obj)
+ url_list.append(url_obj)
+ ec = {
+ outputPaths.get('url'): url_list,
+ outputPaths.get('dbotscore'): dbot_list,
+ MISP_PATH: build_context(response)
+ }
+ md = tableToMarkdown(f'MISP Reputation for URL: {url}', md_list)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': md,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results(f'No events found in MISP for URL: {url}')
+
+
+def search(post_to_warroom: bool = True) -> Tuple[dict, Any]:
+ """
+    Searches MISP with the given arguments.
+    Returns:
+        Tuple[dict, Any]: context output and the raw MISP response
+ """
+ d_args = demisto.args()
+ # List of all applicable search arguments
+ search_args = [
+ 'event_id',
+ 'value',
+ 'type',
+ 'category',
+ 'org',
+ 'tags',
+ 'from',
+ 'to',
+ 'last',
+ 'eventid',
+ 'uuid',
+ 'to_ids'
+ ]
+
+ args = dict()
+ # Create dict to pass into the search
+ for arg in search_args:
+ if arg in d_args:
+ args[arg] = d_args[arg]
+ # Replacing keys and values from Demisto to Misp's keys
+ if 'type' in args:
+ args['type_attribute'] = d_args.pop('type')
+ # search function 'to_ids' parameter gets 0 or 1 instead of bool.
+ if 'to_ids' in args:
+ args['to_ids'] = 1 if d_args.get('to_ids') in ('true', '1', 1) else 0
+
+ response = MISP.search(**args)
+ if response:
+ response_for_context = build_context(response)
+
+ # Prepare MD. getting all keys and values if exists
+ args_for_md = {key: value for key, value in args.items() if value}
+ if post_to_warroom:
+ md = tableToMarkdown('Results in MISP for search:', args_for_md)
+ md_event = response_for_context[0]
+ md += f'Total of {len(response_for_context)} events found\n'
+ event_highlights = {
+ 'Info': md_event.get('Info'),
+ 'Timestamp': convert_timestamp(md_event.get('Timestamp')),
+ 'Analysis': ANALYSIS_WORDS[md_event.get('Analysis')],
+ 'Threat Level ID': THREAT_LEVELS_WORDS[md_event.get('ThreatLevelID')],
+ 'Event Creator Email': md_event.get('EventCreatorEmail'),
+ 'Attributes': json.dumps(md_event.get('Attribute'), indent=4),
+ 'Related Events': md_event.get('RelatedEvent')
+ }
+ md += tableToMarkdown(f'Event ID: {md_event.get("ID")}', event_highlights)
+ if md_event.get('Galaxy'):
+                md += tableToMarkdown('Galaxy:', md_event.get('Galaxy'))
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': md,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'EntryContext': {
+ MISP_PATH: response_for_context
+ }
+ })
+ return response_for_context, response
+ else:
+ demisto.results(f"No events found in MISP for {args}")
+ return {}, {}
+
+
+def delete_event():
+ """
+ Gets an event id and deletes it.
+ """
+ event_id = demisto.args().get('event_id')
+ event = MISP.delete_event(event_id)
+ if 'errors' in event:
+        return_error(f'Event ID: {event_id} was not found in MISP.\nError message: {event}')
+ else:
+ md = f'Event {event_id} has been deleted'
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': event,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md
+ })
+
+
+def add_tag():
+ """
+    Adds a tag to the event or attribute with the given UUID.
+ """
+ uuid = demisto.args().get('uuid')
+ tag = demisto.args().get('tag')
+
+ MISP.tag(uuid, tag)
+ event = MISP.search(uuid=uuid)
+ ec = {
+ MISP_PATH: build_context(event)
+ }
+ md = f'Tag {tag} has been successfully added to event {uuid}'
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': event,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': ec
+ })
+
+
+def add_sighting():
+ """Adds sighting to MISP attribute
+
+ """
+ sighting = {
+ 'sighting': 0,
+ 'false_positive': 1,
+ 'expiration': 2
+ }
+ kargs = {
+ 'id': demisto.args().get('id'),
+ 'uuid': demisto.args().get('uuid'),
+ 'type': sighting.get(demisto.args().get('type'))
+ }
+ att_id = demisto.args().get('id', demisto.args().get('uuid'))
+ if att_id:
+ MISP.set_sightings(kargs)
+ demisto.results(f'Sighting \'{demisto.args().get("type")}\' has been successfully added to attribute {att_id}')
+ else:
+ return_error('ID or UUID not specified')
+
+
+def test():
+ """
+ Test module.
+ """
+ if MISP.test_connection():
+ demisto.results('ok')
+ else:
+        return_error('MISP connection failed.')
+
+
+def add_events_from_feed():
+ """Gets an OSINT feed from url and publishing them to MISP
+ urls with feeds for example: `https://www.misp-project.org/feeds/`
+ feed format must be MISP.
+ """
+ headers = {'Accept': 'application/json'}
+ url = demisto.getArg('feed') # type: str
+ url = url[:-1] if url.endswith('/') else url
+ if PREDEFINED_FEEDS.get(url):
+ url = PREDEFINED_FEEDS[url].get('url') # type: ignore
+ limit = demisto.getArg('limit') # type: str
+ limit_int = int(limit) if limit.isdigit() else 0
+
+ osint_url = f'{url}/manifest.json'
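+    # A MISP feed's manifest.json is keyed by event UUID; each event document lives at
+    # <feed-url>/<uuid>.json, which is what the loop below fetches one by one.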
+ not_added_counter = 0
+ try:
+ uri_list = requests.get(osint_url, verify=USE_SSL, headers=headers, proxies=proxies).json()
+ events_numbers = list() # type: List[Dict[str, int]]
+ for num, uri in enumerate(uri_list, 1):
+ req = requests.get(f'{url}/{uri}.json', verify=USE_SSL, headers=headers, proxies=proxies).json()
+ event = MISP.add_event(req)
+ if 'id' in event:
+ events_numbers.append({'ID': event['id']})
+ else:
+ not_added_counter += 1
+ # If limit exists
+ if limit_int == num:
+ break
+
+ entry_context = {MISP_PATH: events_numbers}
+ human_readable = tableToMarkdown(
+            f'Total of {len(events_numbers)} events were added to MISP.',
+ events_numbers,
+ headers='Event IDs'
+ )
+ if not_added_counter:
+ human_readable = f'{human_readable}\n' \
+                             f'{not_added_counter} events were not added. They might have been added earlier.'
+
+ return_outputs(human_readable, outputs=entry_context)
+ except ValueError:
+ return_error(f'URL [{url}] is not a valid MISP feed')
+
+
+def add_object(event_id: str, obj: MISPObject):
+ """Sending object to MISP and returning outputs
+
+ Args:
+ obj: object to add to MISP
+ event_id: ID of event
+ """
+ response = MISP.add_object(event_id, misp_object=obj)
+ if 'errors' in response:
+ errors = extract_error(response["errors"])
+ error_string = str()
+ for err in errors:
+ error_string += f'' \
+ f'\n\tError code: {err["code"]} ' \
+ f'\n\tMessage: {err["message"]}' \
+ f'\n\tErrors: {err["errors"]}\n'
+ return_error(f'Error in `{command}` command: {error_string}')
+ for ref in obj.ObjectReference:
+ response = MISP.add_object_reference(ref)
+ formatted_response = replace_keys(response)
+ entry_context = {
+ MISP_PATH:
+ {
+ 'ID': event_id
+ }
+ }
+ entry_context[MISP_PATH].update(formatted_response) # type: ignore
+ human_readable = f'Object has been added to MISP event ID {event_id}'
+
+ return_outputs(
+ human_readable,
+ entry_context,
+ response
+ ) # type: ignore
+
+
+def add_email_object():
+ entry_id = demisto.getArg('entry_id')
+ event_id = demisto.getArg('event_id')
+ email_path = demisto.getFilePath(entry_id).get('path')
+ obj = EMailObject(email_path)
+ add_object(event_id, obj)
+
+
+def add_domain_object():
+ """Adds a domain object to MISP
+ domain-ip description: https://www.misp-project.org/objects.html#_domain_ip
+ """
+ template = 'domain-ip'
+ args = [
+        'text',
+        'creation_date',
+ 'first_seen',
+ 'last_seen'
+ ]
+ event_id = demisto.getArg('event_id')
+ domain = demisto.getArg('name')
+ obj = MISPObject(template)
+ ips = argToList(demisto.getArg('dns'))
+ for ip in ips:
+ obj.add_attribute('ip', value=ip)
+ obj.add_attribute('domain', value=domain)
+ for arg in args:
+ value = demisto.getArg(arg)
+ if value:
+ obj.add_attribute(arg, value=value)
+ add_object(event_id, obj)
+
+
+def add_url_object():
+ """Building url object in MISP scheme
+ Scheme described https://www.misp-project.org/objects.html#_url
+ """
+ template = 'url'
+ url_args = [
+ 'text',
+ 'last_seen',
+ 'first_seen'
+ ]
+ event_id = demisto.getArg('event_id')
+ url = demisto.getArg('url')
+ url_parse = urlparse(url)
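+    # For a hypothetical 'https://user:pass@example.com:8080/a/b?x=1#frag', urlparse yields
+    # scheme='https', netloc='user:pass@example.com:8080', path='/a/b', query='x=1',
+    # fragment='frag', port=8080, username='user' and password='pass'.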
+ url_obj = [
+ {'url': url}
+ ]
+ if url_parse.scheme:
+ url_obj.append({'scheme': url_parse.scheme})
+ if url_parse.path:
+ url_obj.append({'resource_path': url_parse.path})
+ if url_parse.query:
+ url_obj.append({'query_string': url_parse.query})
+ if url_parse.netloc:
+ url_obj.append({'domain': url_parse.netloc})
+ if url_parse.fragment:
+ url_obj.append({'fragment': url_parse.fragment})
+ if url_parse.port:
+ url_obj.append({'port': url_parse.port})
+ if url_parse.username and url_parse.password:
+ url_obj.append({'credential': (url_parse.username, url_parse.password)})
+ for arg in url_args:
+ new_arg = demisto.getArg(arg)
+ if new_arg:
+ url_obj.append({arg.replace('_', '-'): new_arg})
+
+ g_object = build_generic_object(template, url_obj)
+ add_object(event_id, g_object)
+
+
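+# Sketch of the conversion this command performs (hypothetical input): an 'attributes'
+# argument of '{"ip": "8.8.8.8", "dst-port": "8080"}' is parsed with json.loads and,
+# being a dict, turned into [{'ip': '8.8.8.8'}, {'dst-port': '8080'}] for the
+# GenericObjectGenerator template.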
+def add_generic_object_command():
+ event_id = demisto.getArg('event_id')
+ template = demisto.getArg('template')
+ attributes = demisto.getArg('attributes') # type: str
+ attributes = attributes.replace("'", '"')
+ try:
+ args = json.loads(attributes)
+ if not isinstance(args, list):
+ args = build_list_from_dict(args)
+ obj = build_generic_object(template, args)
+ add_object(event_id, obj)
+ except ValueError as e:
+        return_error(f'`attributes` argument could not be decoded; it may not be valid JSON.\nattributes: {attributes}',
+ str(e))
+
+
+def add_ip_object():
+ template = 'ip-port'
+ event_id = demisto.getArg('event_id')
+ args = [
+ 'dst_port',
+ 'src_port',
+ 'domain',
+ 'hostname',
+ 'ip_src',
+ 'ip_dst'
+ ]
+ attr = [{arg.replace('_', '-'): demisto.getArg(arg)} for arg in args if demisto.getArg(arg)]
+ ips = argToList(demisto.getArg('ip'))
+ for ip in ips:
+ attr.append({'ip': ip})
+ if attr:
+ non_req_args = [
+ 'first_seen',
+ 'last_seen',
+ ]
+ attr.extend({arg.replace('_', '-'): demisto.getArg(arg)} for arg in non_req_args if demisto.getArg(arg))
+ if demisto.getArg('comment'):
+ attr.append({'text': demisto.getArg('comment')})
+ obj = build_generic_object(template, attr)
+ add_object(event_id, obj)
+ else:
+        return_error(f'None of the required arguments were provided. The {command} command requires at least one of: {args}')
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+command = demisto.command()
+
+
+def main():
+ LOG(f'command is {command}')
+
+ demisto.info(f'command is {command}')
+ try:
+ if command == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test()
+ elif command == 'misp-upload-sample':
+ upload_sample()
+ elif command == 'misp-download-sample':
+ download_file()
+ elif command in ('internal-misp-create-event', 'misp-create-event'):
+ create_event()
+ elif command in ('internal-misp-add-attribute', 'misp-add-attribute'):
+ add_attribute()
+ elif command == 'misp-search':
+ search()
+ elif command == 'misp-delete-event':
+ delete_event()
+ elif command == 'misp-add-sighting':
+ add_sighting()
+ elif command == 'misp-add-tag':
+ add_tag()
+ elif command == 'misp-add-events-from-feed':
+ add_events_from_feed()
+ elif command == 'file':
+ check_file()
+ elif command == 'url':
+ check_url()
+ elif command == 'ip':
+ check_ip()
+ # Object commands
+ elif command == 'misp-add-email-object':
+ add_email_object()
+ elif command == 'misp-add-domain-object':
+ add_domain_object()
+ elif command == 'misp-add-url-object':
+ add_url_object()
+ elif command == 'misp-add-ip-object':
+ add_ip_object()
+ elif command == 'misp-add-object':
+ add_generic_object_command()
+ except PyMISPError as e:
+ return_error(e.message)
+ except Exception as e:
+ return_error(str(e))
+
+
+if __name__ in ('__builtin__', 'builtins'):
+ main()
+
+# TODO: in 5.0
+# * Add !file (need docker change).
diff --git a/Integrations/MISP_V2/MISP_V2.yml b/Integrations/MISP_V2/MISP_V2.yml
new file mode 100644
index 000000000000..d8d947616556
--- /dev/null
+++ b/Integrations/MISP_V2/MISP_V2.yml
@@ -0,0 +1,1931 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: MISP V2
+ version: -1
+configuration:
+- display: MISP server URL (e.g., https://192.168.0.1)
+ name: url
+ required: true
+ type: 0
+- display: API Key
+ name: api_key
+ required: true
+ type: 4
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (unsecure)
+ name: insecure
+ required: false
+ type: 8
+description: Malware information sharing platform and threat sharing.
+display: MISP V2
+name: MISP V2
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The attribute type. Use any valid MISP attribute.
+ isArray: false
+ name: type
+ required: false
+ secret: false
+ - default: false
+ description: Search for the specified value in the attributes' value field.
+ isArray: false
+ name: value
+ required: false
+ secret: false
+ - default: false
+ description: The attribute category. Use any valid MISP attribute category.
+ isArray: false
+ name: category
+ required: false
+ secret: false
+ - default: false
+ description: Search by creator organization by supplying the organization ID.
+ isArray: false
+ name: org
+ required: false
+ secret: false
+ - default: false
+    description: Supply a tag name to include a tag in the results. To exclude a
+      tag, prepend the tag name with a '!'. You can also chain several tag commands
+      together with the '&&' operator. Note that you must use semicolons ";" in
+      the tag search. You cannot use colons ":".
+ isArray: false
+ name: tags
+ required: false
+ secret: false
+ - default: false
+ description: Event search start date (2015-02-15)
+ isArray: false
+ name: from
+ required: false
+ secret: false
+ - default: false
+ description: Event search end date (2015-02-15)
+ isArray: false
+ name: to
+ required: false
+ secret: false
+ - default: false
+    description: Events published within the last "x" amount of time. Valid time
+      values are days, hours, and minutes (for example, "5d", "12h", "30m"). This
+      filter uses the published timestamp of the event.
+ isArray: false
+ name: last
+ required: false
+ secret: false
+ - default: false
+ description: The events to include or exclude from the search
+ isArray: true
+ name: eventid
+ required: false
+ secret: false
+ - default: false
+ description: Return events that include an attribute with the given UUID. Alternatively
+ the event's UUID must match the value(s) passed, e.g., 59523300-4be8-4fa6-8867-0037ac110002
+ isArray: false
+ name: uuid
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: '''false'''
+ description: Return only the attributes set with the "to_ids" flag
+ isArray: false
+ name: to_ids
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Search for events in MISP.
+ execution: false
+ name: misp-search
+ outputs:
+ - contextPath: MISP.Event.ID
+ description: MISP event ID
+ type: number
+ - contextPath: MISP.Event.Distribution
+ description: MISP event distribution
+ type: number
+ - contextPath: MISP.Event.ThreatLevelID
+ description: Threat level of the MISP event (1 High, 2 Medium, 3 Low, 4 Undefined)
+ type: number
+ - contextPath: MISP.Event.PublishTimestamp
+ description: Timestamp of the publish time (if published)
+ type: number
+ - contextPath: MISP.Event.EventCreatorEmail
+ description: Email address of the event creator
+ type: string
+ - contextPath: MISP.Event.Date
+ description: Event creation date
+ type: date
+ - contextPath: MISP.Event.Locked
+ description: Is the event locked
+ type: boolean
+ - contextPath: MISP.Event.OwnerOrganisation.ID
+ description: Owner organization ID
+ type: number
+ - contextPath: MISP.Event.OwnerOrganisation.Name
+ description: Owner organization name
+ type: string
+ - contextPath: MISP.Event.OwnerOrganisation.UUID
+ description: Owner organization UUID
+ type: string
+ - contextPath: MISP.Event.RelatedEvent.ID
+ description: Event IDs of related events (can be a list)
+ type: number
+ - contextPath: MISP.Event.ProposalEmailLock
+ description: If email lock proposed
+ type: boolean
+ - contextPath: MISP.Event.Timestamp
+ description: Timestamp of the event
+ type: number
+ - contextPath: MISP.Event.Galaxy.Description
+ description: Event's galaxy description
+ type: string
+ - contextPath: MISP.Event.Galaxy.Name
+ description: Galaxy name
+ type: string
+ - contextPath: MISP.Event.Galaxy.Type
+ description: Galaxy type
+ type: number
+ - contextPath: MISP.Event.Published
+ description: Is the event published
+ type: boolean
+ - contextPath: MISP.Event.DisableCorrelation
+ description: Is correlation disabled
+ type: boolean
+ - contextPath: MISP.Event.UUID
+ description: Event UUID
+ type: string
+ - contextPath: MISP.Event.ShadowAttribute
+ description: Event shadow attributes
+ type: Unknown
+ - contextPath: MISP.Event.Attribute.Distribution
+ description: Attribute distribution
+ type: number
+ - contextPath: MISP.Event.Attribute.Value
+ description: Attribute value
+ type: string
+ - contextPath: MISP.Event.Attribute.EventID
+ description: Attribute event ID
+ type: number
+ - contextPath: MISP.Event.Attribute.Timestamp
+ description: Attribute timestamp
+ type: number
+ - contextPath: MISP.Event.Attribute.Deleted
+ description: Is the attribute deleted
+ type: boolean
+ - contextPath: MISP.Event.Attribute.DisableCorrelation
+ description: Is attribute correlation disabled
+ type: boolean
+ - contextPath: MISP.Event.Attribute.Type
+ description: Attribute type
+ type: string
+ - contextPath: MISP.Event.Attribute.ID
+ description: Attribute ID
+ type: number
+ - contextPath: MISP.Event.Attribute.UUID
+ description: Attribute UUID
+ type: string
+ - contextPath: MISP.Event.Attribute.ShadowAttribute
+ description: Attribute shadow attribute
+ type: Unknown
+ - contextPath: MISP.Event.Attribute.ToIDs
+ description: Is Intrusion Detection System flag set
+ type: boolean
+ - contextPath: MISP.Event.Attribute.Category
+ description: Attribute category
+ type: string
+ - contextPath: MISP.Event.Attribute.SharingGroupID
+ description: Attribute sharing group ID
+ type: number
+ - contextPath: MISP.Event.Attribute.Comment
+ description: Attribute comment
+ type: string
+ - contextPath: MISP.Event.Analysis
+ description: Event analysis (0 Initial, 1 Ongoing, 2 Completed)
+ type: number
+ - contextPath: MISP.Event.SharingGroupID
+ description: Event sharing group ID
+ type: number
+ - contextPath: MISP.Event.Tag.Name
+ description: All tag names in the event
+ type: string
+ - contextPath: MISP.Event.Object.MetaCategory
+ description: Object Meta Category
+ type: String
+ - contextPath: MISP.Event.Object.Distribution
+ description: Distribution of object
+ type: Number
+ - contextPath: MISP.Event.Object.Name
+ description: Name of the object
+ type: String
+ - contextPath: MISP.Event.Object.TemplateVersion
+ description: Template version of object
+ type: Number
+ - contextPath: MISP.Event.Object.EventID
+    description: ID of the event in which the object was first created
+ type: Number
+ - contextPath: MISP.Event.Object.TemplateUUID
+ description: UUID of template
+ type: String
+ - contextPath: MISP.Event.Object.Timestamp
+ description: Timestamp of object creation
+ type: String
+ - contextPath: MISP.Event.Object.Deleted
+ description: Is object deleted
+ type: Boolean
+ - contextPath: MISP.Event.Object.ID
+ description: ID of object
+ type: Number
+ - contextPath: MISP.Event.Object.UUID
+ description: UUID of object
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.Value
+ description: Value of attribute
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.EventID
+ description: ID of the first event from which the object originated
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.Timestamp
+ description: Timestamp of object creation
+ type: Date
+ - contextPath: MISP.Event.Object.Attribute.Deleted
+ description: Is object deleted
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.ObjectID
+ description: ID of object
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.DisableCorrelation
+ description: If correlation is disabled
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.ID
+ description: ID of attribute
+ type: Unknown
+ - contextPath: MISP.Event.Object.Attribute.ObjectRelation
+ description: Relation of object
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.Type
+ description: Type of object
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.UUID
+ description: UUID of attribute
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.ToIDs
+ description: Is ToIDs flag on
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.Category
+ description: Category of attribute
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.SharingGroupID
+ description: ID of sharing group
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.Comment
+ description: Comment of the attribute
+ type: String
+ - contextPath: MISP.Event.Object.Description
+ description: Description of the object
+ type: String
+ - arguments:
+ - default: true
+ description: Hash of the file to query. Supports MD5, SHA-1, and SHA-256. You
+ can pass CSV values to retrieve multiple responses.
+ isArray: false
+ name: file
+ required: false
+ secret: false
+ deprecated: false
+ description: Checks the file reputation of the given hash.
+ execution: false
+ name: file
+ outputs:
+ - contextPath: File.MD5
+ description: The MD5 hash of the file.
+ type: String
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: String
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - contextPath: File.Malicious.Vendor
+ description: The vendor that reported the file as malicious.
+ type: String
+ - contextPath: File.Malicious.Description
+ description: A description explaining why the file was determined to be malicious.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: Number
+ - arguments:
+ - default: true
+ description: URL to check
+ isArray: false
+ name: url
+ required: false
+ secret: false
+ deprecated: false
+ description: Checks if the URL is in MISP events.
+ execution: false
+ name: url
+ outputs:
+ - contextPath: URL.Data
+ description: The URL
+ type: String
+ - contextPath: URL.Malicious.Vendor
+ description: The vendor reporting the URL as malicious.
+ type: String
+ - contextPath: URL.Malicious.Description
+ description: A description of the malicious URL.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - arguments:
+ - default: true
+ description: IP address to check
+ isArray: false
+ name: ip
+ required: false
+ secret: false
+ deprecated: false
+ description: Checks the reputation of an IP address
+ execution: false
+ name: ip
+ outputs:
+ - contextPath: IP.Address
+ description: IP address
+ type: String
+ - contextPath: IP.Malicious.Vendor
+ description: The vendor reporting the IP address as malicious.
+ type: String
+ - contextPath: IP.Malicious.Description
+ description: A description explaining why the IP address was reported as malicious.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: Number
+ - arguments:
+ - default: false
+ description: Event type of the new event
+ isArray: false
+ name: type
+ required: false
+ secret: false
+ - default: false
+ defaultValue: External analysis
+ description: Category of the new event
+ isArray: false
+ name: category
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Creates an event with the Intrusion Detection System flag.
+ isArray: false
+ name: to_ids
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: Where to distribute
+ isArray: false
+ name: distribution
+ required: false
+ secret: false
+ - default: false
+ description: Comment
+ isArray: false
+ name: comment
+ required: false
+ secret: false
+ - default: false
+ description: Value to add
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ - default: true
+ description: Event name
+ isArray: false
+ name: info
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Publish the event
+ isArray: false
+ name: published
+ predefined:
+ - 'false'
+ - 'true'
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '1'
+ description: MISP threat level ID, default is 1
+ isArray: false
+ name: threat_level_id
+ required: false
+ secret: false
+ deprecated: true
+ description: Creates a new MISP event.
+ execution: false
+ name: internal-misp-create-event
+ outputs:
+ - contextPath: MISP.Event.ID
+ description: MISP event ID
+ type: number
+ - contextPath: MISP.Event.Distribution
+ description: MISP event distribution
+ type: number
+ - contextPath: MISP.Event.ThreatLevelID
+ description: Threat level of the MISP event (1 High, 2 Medium, 3 Low, 4 Undefined)
+ type: number
+ - contextPath: MISP.Event.PublishTimestamp
+ description: Timestamp of the publish time (if published)
+ type: number
+ - contextPath: MISP.Event.EventCreatorEmail
+ description: Email address of the event creator
+ type: string
+ - contextPath: MISP.Event.Date
+ description: Event creation date
+ type: date
+ - contextPath: MISP.Event.Locked
+ description: If the event is locked
+ type: boolean
+ - contextPath: MISP.Event.OwnerOrganisation.ID
+ description: Owner organization ID
+ type: number
+ - contextPath: MISP.Event.OwnerOrganisation.Name
+ description: Owner organization name
+ type: string
+ - contextPath: MISP.Event.OwnerOrganisation.UUID
+ description: Owner organization UUID
+ type: string
+ - contextPath: MISP.Event.RelatedEvent.ID
+ description: Event IDs of related events (can be a list)
+ type: number
+ - contextPath: MISP.Event.ProposalEmailLock
+ description: Is email lock proposed
+ type: boolean
+ - contextPath: MISP.Event.Timestamp
+ description: Timestamp of the event
+ type: number
+ - contextPath: MISP.Event.Galaxy.Description
+ description: Event galaxy description
+ type: string
+ - contextPath: MISP.Event.Galaxy.Name
+ description: Galaxy name
+ type: string
+ - contextPath: MISP.Event.Galaxy.Type
+ description: Galaxy type
+ type: number
+ - contextPath: MISP.Event.Published
+ description: Is the event published
+ type: boolean
+ - contextPath: MISP.Event.DisableCorrelation
+ description: Is correlation disabled
+ type: boolean
+ - contextPath: MISP.Event.UUID
+ description: Event UUID
+ type: string
+ - contextPath: MISP.Event.ShadowAttribute
+ description: Event shadow attributes
+ type: Unknown
+ - contextPath: MISP.Event.Attribute.Distribution
+ description: Attribute distribution
+ type: number
+ - contextPath: MISP.Event.Attribute.Value
+ description: Attribute value
+ type: string
+ - contextPath: MISP.Event.Attribute.EventID
+ description: Attribute event ID
+ type: number
+ - contextPath: MISP.Event.Attribute.Timestamp
+ description: Attribute timestamp
+ type: number
+ - contextPath: MISP.Event.Attribute.Deleted
+ description: Is the attribute deleted
+ type: boolean
+ - contextPath: MISP.Event.Attribute.DisableCorrelation
+ description: Is attribute correlation disabled
+ type: boolean
+ - contextPath: MISP.Event.Attribute.Type
+ description: Attribute type
+ type: string
+ - contextPath: MISP.Event.Attribute.ID
+ description: Attribute ID
+ type: number
+ - contextPath: MISP.Event.Attribute.UUID
+ description: Attribute UUID
+ type: string
+ - contextPath: MISP.Event.Attribute.ShadowAttribute
+ description: Attribute shadow attribute
+ type: Unknown
+ - contextPath: MISP.Event.Attribute.ToIDs
+ description: Is the Intrusion Detection System flag set
+ type: boolean
+ - contextPath: MISP.Event.Attribute.Category
+ description: Attribute category
+ type: string
+ - contextPath: MISP.Event.Attribute.SharingGroupID
+ description: Attribute sharing group ID
+ type: number
+ - contextPath: MISP.Event.Attribute.Comment
+ description: Attribute comment
+ type: string
+ - contextPath: MISP.Event.Analysis
+ description: Event analysis (0 Initial, 1 Ongoing, 2 Completed)
+ type: number
+ - contextPath: MISP.Event.SharingGroupID
+ description: Event sharing group ID
+ type: number
+ - contextPath: MISP.Event.Tag.Name
+ description: All tag names in the event
+ type: string
+ - arguments:
+ - default: false
+ defaultValue: other
+ description: Event type of the new event
+ isArray: false
+ name: type
+ required: false
+ secret: false
+ - default: false
+ defaultValue: External analysis
+ description: Category of the new event
+ isArray: false
+ name: category
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Create the event with the IDS flag
+ isArray: false
+ name: to_ids
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: Where to distribute
+ isArray: false
+ name: distribution
+ required: false
+ secret: false
+ - default: false
+ description: Comment
+ isArray: false
+ name: comment
+ required: false
+ secret: false
+ - default: false
+ description: Value to add
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ - default: true
+ description: Event name
+ isArray: false
+ name: info
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Publish the event
+ isArray: false
+ name: published
+ predefined:
+ - 'false'
+ - 'true'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: high
+ description: MISP Threat level ID, default is "high"
+ isArray: false
+ name: threat_level_id
+ predefined:
+ - high
+ - medium
+ - low
+ - undefined
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: initial
+ description: The analysis level, default is "initial"
+ isArray: false
+ name: analysis
+ predefined:
+ - initial
+ - ongoing
+ - completed
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new MISP event.
+ execution: false
+ name: misp-create-event
+ outputs:
+ - contextPath: MISP.Event.ID
+ description: MISP event ID
+ type: number
+ - contextPath: MISP.Event.Distribution
+ description: MISP event distribution
+ type: number
+ - contextPath: MISP.Event.ThreatLevelID
+ description: Threat level of the MISP event (1 High, 2 Medium, 3 Low, 4 Undefined)
+ type: number
+ - contextPath: MISP.Event.PublishTimestamp
+ description: Timestamp of the publish time (if published)
+ type: number
+ - contextPath: MISP.Event.EventCreatorEmail
+ description: Email address of the event creator
+ type: string
+ - contextPath: MISP.Event.Date
+ description: Event creation date
+ type: date
+ - contextPath: MISP.Event.Locked
+ description: Is the event locked
+ type: boolean
+ - contextPath: MISP.Event.OwnerOrganisation.ID
+ description: Owner organization ID
+ type: number
+ - contextPath: MISP.Event.OwnerOrganisation.Name
+ description: Owner organization name
+ type: string
+ - contextPath: MISP.Event.OwnerOrganisation.UUID
+ description: Owner organization UUID
+ type: string
+ - contextPath: MISP.Event.RelatedEvent.ID
+ description: Event IDs of related events (can be a list)
+ type: number
+ - contextPath: MISP.Event.ProposalEmailLock
+ description: Is email lock proposed
+ type: boolean
+ - contextPath: MISP.Event.Timestamp
+ description: Timestamp of the event
+ type: number
+ - contextPath: MISP.Event.Galaxy.Description
+ description: Event's galaxy description
+ type: string
+ - contextPath: MISP.Event.Galaxy.Name
+ description: Galaxy name
+ type: string
+ - contextPath: MISP.Event.Galaxy.Type
+ description: Galaxy type
+ type: number
+ - contextPath: MISP.Event.Published
+ description: Is the event published
+ type: boolean
+ - contextPath: MISP.Event.DisableCorrelation
+ description: Is correlation disabled
+ type: boolean
+ - contextPath: MISP.Event.UUID
+ description: Event UUID
+ type: string
+ - contextPath: MISP.Event.ShadowAttribute
+ description: Event shadow attributes
+ type: Unknown
+ - contextPath: MISP.Event.Attribute.Distribution
+ description: Attribute distribution
+ type: number
+ - contextPath: MISP.Event.Attribute.Value
+ description: Attribute value
+ type: string
+ - contextPath: MISP.Event.Attribute.EventID
+ description: Attribute event ID
+ type: number
+ - contextPath: MISP.Event.Attribute.Timestamp
+ description: Attribute timestamp
+ type: number
+ - contextPath: MISP.Event.Attribute.Deleted
+ description: Is the attribute deleted
+ type: boolean
+ - contextPath: MISP.Event.Attribute.DisableCorrelation
+ description: Is attribute correlation disabled
+ type: boolean
+ - contextPath: MISP.Event.Attribute.Type
+ description: Attribute type
+ type: string
+ - contextPath: MISP.Event.Attribute.ID
+ description: Attribute ID
+ type: number
+ - contextPath: MISP.Event.Attribute.UUID
+ description: Attribute UUID
+ type: string
+ - contextPath: MISP.Event.Attribute.ShadowAttribute
+ description: Attribute shadow attribute
+ type: Unknown
+ - contextPath: MISP.Event.Attribute.ToIDs
+ description: Is the Intrusion Detection System flag set
+ type: boolean
+ - contextPath: MISP.Event.Attribute.Category
+ description: Attribute category
+ type: string
+ - contextPath: MISP.Event.Attribute.SharingGroupID
+ description: Attribute sharing group ID
+ type: number
+ - contextPath: MISP.Event.Attribute.Comment
+ description: Attribute comment
+ type: string
+ - contextPath: MISP.Event.Analysis
+ description: Event analysis (0 Initial, 1 Ongoing, 2 Completed)
+ type: number
+ - contextPath: MISP.Event.SharingGroupID
+ description: Event sharing group ID
+ type: number
+ - contextPath: MISP.Event.Tag.Name
+ description: All tag names in the event
+ type: string
+ - arguments:
+ - default: false
+ description: MISP event ID
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ description: Attribute type, e.g., "other" or "url"
+ isArray: false
+ name: type
+ required: true
+ secret: false
+ - default: false
+ description: Attribute category
+ isArray: false
+ name: category
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: Set the IDS flag on or off
+ isArray: false
+ name: to_ids
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Where to distribute
+ isArray: false
+ name: distribution
+ required: false
+ secret: false
+ - default: false
+ description: Attribute comment
+ isArray: false
+ name: comment
+ required: true
+ secret: false
+ - default: false
+ description: Attribute value
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ deprecated: true
+ description: Adds an attribute to an existing MISP event.
+ execution: false
+ name: internal-misp-add-attribute
+ - arguments:
+ - default: false
+ description: 'A hash in MD5 format. If the "allSamples" argument is supplied,
+ this can be any one of the following: md5, sha1, and sha256.'
+ isArray: false
+ name: hash
+ required: true
+ secret: false
+ - default: false
+ description: If set, it will only fetch data from the given event ID
+ isArray: false
+ name: eventID
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: If set, it will return all samples from events that match the hash
+ supplied in the "hash" argument.
+ isArray: false
+ name: allSamples
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: true
+ defaultValue: 'false'
+ description: Return one zipped file, or all files unzipped, default is "false"
+ (one zipped file)
+ isArray: false
+ name: unzip
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Downloads a file sample from MISP.
+ execution: true
+ name: misp-download-sample
+ - arguments:
+ - default: false
+ description: MISP event ID
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ description: Attribute type
+ isArray: false
+ name: type
+ required: true
+ secret: false
+ - default: false
+ description: Attribute category
+ isArray: false
+ name: category
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: Return only events set with the "to_ids" flag, default is "true"
+ isArray: false
+ name: to_ids
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Where to distribute
+ isArray: false
+ name: distribution
+ required: false
+ secret: false
+ - default: false
+ description: Comment
+ isArray: false
+ name: comment
+ required: true
+ secret: false
+ - default: false
+ description: Attribute value
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds an attribute to an existing MISP event.
+ execution: false
+ name: misp-add-attribute
+ outputs:
+ - contextPath: MISP.Event.ID
+ description: MISP event ID
+ type: number
+ - contextPath: MISP.Event.Distribution
+ description: MISP event distribution
+ type: number
+ - contextPath: MISP.Event.ThreatLevelID
+ description: Threat level of the MISP event (1 High, 2 Medium, 3 Low, 4 Undefined)
+ type: number
+ - contextPath: MISP.Event.PublishTimestamp
+ description: Timestamp of the publish time (if published)
+ type: number
+ - contextPath: MISP.Event.EventCreatorEmail
+ description: Email address of the event creator
+ type: string
+ - contextPath: MISP.Event.Date
+ description: Event creation date
+ type: date
+ - contextPath: MISP.Event.Locked
+ description: Is the event locked
+ type: boolean
+ - contextPath: MISP.Event.OwnerOrganisation.ID
+ description: Owner organization ID
+ type: number
+ - contextPath: MISP.Event.OwnerOrganisation.Name
+ description: Owner organization name
+ type: string
+ - contextPath: MISP.Event.OwnerOrganisation.UUID
+ description: Owner organization UUID
+ type: string
+ - contextPath: MISP.Event.RelatedEvent.ID
+ description: Event IDs of related events (can be a list)
+ type: number
+ - contextPath: MISP.Event.ProposalEmailLock
+ description: Is email lock proposed
+ type: boolean
+ - contextPath: MISP.Event.Timestamp
+ description: Timestamp of the event
+ type: number
+ - contextPath: MISP.Event.Galaxy.Description
+ description: Event's galaxy description
+ type: string
+ - contextPath: MISP.Event.Galaxy.Name
+ description: Galaxy name
+ type: string
+ - contextPath: MISP.Event.Galaxy.Type
+ description: Galaxy type
+ type: number
+ - contextPath: MISP.Event.Published
+ description: Is the event published
+ type: boolean
+ - contextPath: MISP.Event.DisableCorrelation
+ description: Is correlation disabled
+ type: boolean
+ - contextPath: MISP.Event.UUID
+ description: Event UUID
+ type: string
+ - contextPath: MISP.Event.ShadowAttribute
+ description: Event shadow attributes
+ type: Unknown
+ - contextPath: MISP.Event.Attribute.Distribution
+ description: Attribute distribution
+ type: number
+ - contextPath: MISP.Event.Attribute.Value
+ description: Attribute value
+ type: string
+ - contextPath: MISP.Event.Attribute.EventID
+ description: Attribute event ID
+ type: number
+ - contextPath: MISP.Event.Attribute.Timestamp
+ description: Attribute timestamp
+ type: number
+ - contextPath: MISP.Event.Attribute.Deleted
+ description: Is the attribute deleted
+ type: boolean
+ - contextPath: MISP.Event.Attribute.DisableCorrelation
+ description: Is attribute correlation disabled
+ type: boolean
+ - contextPath: MISP.Event.Attribute.Type
+ description: Attribute type
+ type: string
+ - contextPath: MISP.Event.Attribute.ID
+ description: Attribute ID
+ type: number
+ - contextPath: MISP.Event.Attribute.UUID
+ description: Attribute UUID
+ type: string
+ - contextPath: MISP.Event.Attribute.ShadowAttribute
+ description: Attribute shadow attribute
+ type: Unknown
+ - contextPath: MISP.Event.Attribute.ToIDs
+ description: Is the Intrusion Detection System flag set
+ type: boolean
+ - contextPath: MISP.Event.Attribute.Category
+ description: Attribute category
+ type: string
+ - contextPath: MISP.Event.Attribute.SharingGroupID
+ description: Attribute sharing group ID
+ type: number
+ - contextPath: MISP.Event.Attribute.Comment
+ description: Attribute comment
+ type: string
+ - contextPath: MISP.Event.Analysis
+ description: Event analysis (0 Initial, 1 Ongoing, 2 Completed)
+ type: number
+ - contextPath: MISP.Event.SharingGroupID
+ description: Event sharing group ID
+ type: number
+ - contextPath: MISP.Event.Tag.Name
+ description: All tag names in the event
+ type: string
+ - arguments:
+ - default: false
+ description: Entry ID of the file to upload
+ isArray: false
+ name: fileEntryID
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the event to add the uploaded file to
+ isArray: false
+ name: event_id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The distribution setting used for the attributes and for the newly
+ created event, if relevant (0-3)
+ isArray: false
+ name: distribution
+ predefined:
+ - '0'
+ - '1'
+ - '2'
+ - '3'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether to mark all attributes created during the transaction with
+ the "to_ids" flag.
+ isArray: false
+ name: to_ids
+ predefined:
+ - '0'
+ - '1'
+ required: false
+ secret: false
+ - default: false
+ description: The category that will be assigned to the uploaded samples (Payload
+ delivery, Artifacts dropped, Payload Installation, External Analysis)
+ isArray: false
+ name: category
+ required: false
+ secret: false
+ - default: false
+ description: Used to populate the event info field if no event ID is supplied.
+ If not supplied, MISP will generate a message showing that it is a malware
+ sample collection generated on the given day.
+ isArray: false
+ name: info
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: initial
+ description: The analysis level, default is "initial"
+ isArray: false
+ name: analysis
+ predefined:
+ - initial
+ - ongoing
+ - completed
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: high
+ description: The threat level ID of the newly created event, default is "high"
+ isArray: false
+ name: threat_level_id
+ predefined:
+ - high
+ - medium
+ - low
+ - undefined
+ required: false
+ secret: false
+ - default: false
+ description: This will populate the comment field of any attribute created using
+ this API
+ isArray: false
+ name: comment
+ required: false
+ secret: false
+ deprecated: false
+ description: Uploads a file sample to MISP.
+ execution: true
+ name: misp-upload-sample
+ outputs:
+ - contextPath: MISP.UploadedSample
+ description: 'Object containing {filename: event id} of the uploaded file'
+ type: Unknown
+ - arguments:
+ - default: false
+ description: Event ID to delete
+ isArray: false
+ name: event_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes an event by its event ID.
+ execution: false
+ name: misp-delete-event
+ - arguments:
+ - default: false
+ description: UUID of the attribute or event, e.g., 59575300-4be8-4ff6-8767-0037ac110032
+ isArray: false
+ name: uuid
+ required: true
+ secret: false
+ - default: false
+ description: Tag to add to the attribute or event.
+ isArray: false
+ name: tag
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds a tag to the event or attribute with the given UUID.
+ execution: false
+ name: misp-add-tag
+ outputs:
+ - contextPath: MISP.Event.ID
+ description: MISP event ID
+ type: number
+ - contextPath: MISP.Event.Distribution
+ description: MISP event distribution
+ type: number
+ - contextPath: MISP.Event.ThreatLevelID
+ description: Threat level of the MISP event (1 High, 2 Medium, 3 Low, 4 Undefined)
+ type: number
+ - contextPath: MISP.Event.PublishTimestamp
+ description: Timestamp of the publish time (if published)
+ type: number
+ - contextPath: MISP.Event.EventCreatorEmail
+ description: Email address of the event creator
+ type: string
+ - contextPath: MISP.Event.Date
+ description: Event creation date
+ type: date
+ - contextPath: MISP.Event.Locked
+ description: Is the event locked
+ type: boolean
+ - contextPath: MISP.Event.OwnerOrganisation.ID
+ description: Owner organization ID
+ type: number
+ - contextPath: MISP.Event.OwnerOrganisation.Name
+ description: Owner organization name
+ type: string
+ - contextPath: MISP.Event.OwnerOrganisation.UUID
+ description: Owner organization UUID
+ type: string
+ - contextPath: MISP.Event.RelatedEvent.ID
+ description: Event IDs of related events (can be a list)
+ type: number
+ - contextPath: MISP.Event.ProposalEmailLock
+ description: Is email lock proposed
+ type: boolean
+ - contextPath: MISP.Event.Timestamp
+ description: Timestamp of the event
+ type: number
+ - contextPath: MISP.Event.Galaxy.Description
+ description: Event's galaxy description
+ type: string
+ - contextPath: MISP.Event.Galaxy.Name
+ description: Galaxy name
+ type: string
+ - contextPath: MISP.Event.Galaxy.Type
+ description: Galaxy type
+ type: number
+ - contextPath: MISP.Event.Published
+ description: Is the event published
+ type: boolean
+ - contextPath: MISP.Event.DisableCorrelation
+ description: Is correlation disabled
+ type: boolean
+ - contextPath: MISP.Event.UUID
+ description: Event UUID
+ type: string
+ - contextPath: MISP.Event.ShadowAttribute
+ description: Event shadow attributes
+ type: Unknown
+ - contextPath: MISP.Event.Attribute.Distribution
+ description: Attribute distribution
+ type: number
+ - contextPath: MISP.Event.Attribute.Value
+ description: Attribute value
+ type: string
+ - contextPath: MISP.Event.Attribute.EventID
+ description: Attribute event ID
+ type: number
+ - contextPath: MISP.Event.Attribute.Timestamp
+ description: Attribute timestamp
+ type: number
+ - contextPath: MISP.Event.Attribute.Deleted
+ description: Is the attribute deleted
+ type: boolean
+ - contextPath: MISP.Event.Attribute.DisableCorrelation
+ description: Is attribute correlation disabled
+ type: boolean
+ - contextPath: MISP.Event.Attribute.Type
+ description: Attribute type
+ type: string
+ - contextPath: MISP.Event.Attribute.ID
+ description: Attribute ID
+ type: number
+ - contextPath: MISP.Event.Attribute.UUID
+ description: Attribute UUID
+ type: string
+ - contextPath: MISP.Event.Attribute.ShadowAttribute
+ description: Attribute shadow attribute
+ type: Unknown
+ - contextPath: MISP.Event.Attribute.ToIDs
+ description: Is the Intrusion Detection System flag set
+ type: boolean
+ - contextPath: MISP.Event.Attribute.Category
+ description: Attribute category
+ type: string
+ - contextPath: MISP.Event.Attribute.SharingGroupID
+ description: Attribute sharing group ID
+ type: number
+ - contextPath: MISP.Event.Attribute.Comment
+ description: Attribute comment
+ type: string
+ - contextPath: MISP.Event.Analysis
+ description: Event analysis (0 Initial, 1 Ongoing, 2 Completed)
+ type: number
+ - contextPath: MISP.Event.SharingGroupID
+ description: Event sharing group ID
+ type: number
+ - contextPath: MISP.Event.Tag.Name
+ description: All tag names in the event
+ type: string
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Type of sighting to add
+ isArray: false
+ name: type
+ predefined:
+ - sighting
+ - false_positive
+ - expiration
+ required: true
+ secret: false
+ - default: false
+ description: ID of the attribute to add the sighting to (must be filled if UUID
+ is empty). Can be retrieved from misp-search
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ - default: false
+ description: UUID of the attribute to add the sighting to (must be filled if ID
+ is empty). Can be retrieved from misp-search
+ isArray: false
+ name: uuid
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds a sighting to an attribute.
+ execution: false
+ name: misp-add-sighting
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: URL of the feed to add.
+ isArray: false
+ name: feed
+ predefined:
+ - CIRCL
+ - Botvrij.eu
+ required: false
+ secret: false
+ - default: true
+ defaultValue: '0'
+ description: Maximum number of files to add.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds events from an OSINT feed.
+ execution: false
+ name: misp-add-events-from-feed
+ outputs:
+ - contextPath: MISP.Event.ID
+ description: IDs of newly created events.
+ type: number
+ - arguments:
+ - default: false
+ description: Entry ID of the email.
+ isArray: false
+ name: entry_id
+ required: true
+ secret: false
+ - default: false
+ description: ID of the event to add the object to
+ isArray: false
+ name: event_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds an email object to the specified event ID.
+ execution: false
+ name: misp-add-email-object
+ outputs:
+ - contextPath: MISP.Event.ID
+ description: MISP event ID
+ type: number
+ - contextPath: MISP.Event.Object.MetaCategory
+ description: Object meta category.
+ type: String
+ - contextPath: MISP.Event.Object.Distribution
+ description: Distribution of object.
+ type: Number
+ - contextPath: MISP.Event.Object.Name
+ description: Name of the object.
+ type: String
+ - contextPath: MISP.Event.Object.TemplateVersion
+ description: Template version of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.EventID
+ description: ID of the event in which the object was first created.
+ type: Number
+ - contextPath: MISP.Event.Object.TemplateUUID
+ description: UUID of the template.
+ type: String
+ - contextPath: MISP.Event.Object.Timestamp
+ description: Timestamp when the object was created.
+ type: String
+ - contextPath: MISP.Event.Object.Deleted
+ description: Whether the object was deleted.
+ type: Boolean
+ - contextPath: MISP.Event.Object.ID
+ description: ID of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.UUID
+ description: UUID of the object.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.Value
+ description: Value of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.EventID
+ description: ID of the first event from which the object originated.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.Timestamp
+ description: Timestamp when the object was created.
+ type: Date
+ - contextPath: MISP.Event.Object.Attribute.Deleted
+ description: Whether the object was deleted.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.ObjectID
+ description: ID of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.DisableCorrelation
+ description: Whether correlation is disabled.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.ID
+ description: ID of the attribute.
+ type: Unknown
+ - contextPath: MISP.Event.Object.Attribute.ObjectRelation
+ description: Relation of the object.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.Type
+ description: Object type.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.UUID
+ description: UUID of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.ToIDs
+ description: Whether the ToIDs flag is on.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.Category
+ description: Category of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.SharingGroupID
+ description: ID of the sharing group.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.Comment
+ description: Comment of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Description
+ description: Description of the object.
+ type: String
+ - arguments:
+ - default: false
+ description: ID of a MISP event.
+ isArray: false
+ name: event_id
+ required: true
+ secret: false
+ - default: false
+ description: 'The domain name, for example: "google.com". '
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: A list (array) of IP addresses resolved by DNS.
+ isArray: true
+ name: dns
+ required: true
+ secret: false
+ - default: false
+ description: Date that the domain was created.
+ isArray: false
+ name: creation_date
+ required: false
+ secret: false
+ - default: false
+ description: 'Datetime that the domain was last seen, for example: `2019-02-03`.'
+ isArray: false
+ name: last_seen
+ required: false
+ secret: false
+ - default: false
+ description: 'Datetime that the domain was first seen, for example: `2019-02-03`.'
+ isArray: false
+ name: first_seen
+ required: false
+ secret: false
+ - default: false
+ description: A description of the domain.
+ isArray: false
+ name: text
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds a domain object.
+ execution: false
+ name: misp-add-domain-object
+ outputs:
+ - contextPath: MISP.Event.ID
+ description: MISP event ID.
+ type: number
+ - contextPath: MISP.Event.Object.MetaCategory
+ description: Object meta category.
+ type: String
+ - contextPath: MISP.Event.Object.Distribution
+ description: Distribution of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.Name
+ description: Name of the object.
+ type: String
+ - contextPath: MISP.Event.Object.TemplateVersion
+ description: Template version of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.EventID
+ description: ID of the event in which the object was first created.
+ type: Number
+ - contextPath: MISP.Event.Object.TemplateUUID
+ description: UUID of template.
+ type: String
+ - contextPath: MISP.Event.Object.Timestamp
+ description: Timestamp when the object was created.
+ type: String
+ - contextPath: MISP.Event.Object.Deleted
+ description: Whether the object was deleted.
+ type: Boolean
+ - contextPath: MISP.Event.Object.ID
+ description: ID of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.UUID
+ description: UUID of the object.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.Value
+ description: Value of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.EventID
+ description: ID of the first event from which the object originated.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.Timestamp
+ description: Timestamp when the object was created.
+ type: Date
+ - contextPath: MISP.Event.Object.Attribute.Deleted
+ description: Whether the object was deleted.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.ObjectID
+ description: ID of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.DisableCorrelation
+ description: Whether correlation is disabled.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.ID
+ description: ID of the attribute.
+ type: Unknown
+ - contextPath: MISP.Event.Object.Attribute.ObjectRelation
+ description: Relation of the object.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.Type
+ description: Object type.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.UUID
+ description: UUID of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.ToIDs
+ description: Whether the ToIDs flag is on.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.Category
+ description: Category of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.SharingGroupID
+ description: ID of the sharing group.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.Comment
+ description: Comment of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Description
+ description: Description of the object.
+ type: String
+ - arguments:
+ - default: false
+ description: Full URL to add to the event.
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ - default: false
+ description: 'Date that this URL was first seen, for example: `2019-02-03`.'
+ isArray: false
+ name: first_seen
+ required: false
+ secret: false
+ - default: false
+ description: Description of the URL.
+ isArray: false
+ name: text
+ required: false
+ secret: false
+ - default: false
+ description: 'Date that this URL was last seen, for example: `2019-02-03`.'
+ isArray: false
+ name: last_seen
+ required: false
+ secret: false
+ - default: false
+ description: ID of the event.
+ isArray: false
+ name: event_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds a URL object to a MISP event.
+ execution: false
+ name: misp-add-url-object
+ outputs:
+ - contextPath: MISP.Event.ID
+ description: MISP event ID.
+ type: number
+ - contextPath: MISP.Event.Object.MetaCategory
+ description: Object meta category.
+ type: String
+ - contextPath: MISP.Event.Object.Distribution
+ description: Distribution of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.Name
+ description: Name of the object.
+ type: String
+ - contextPath: MISP.Event.Object.TemplateVersion
+ description: Template version of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.EventID
+ description: ID of the event in which the object was first created.
+ type: Number
+ - contextPath: MISP.Event.Object.TemplateUUID
+ description: UUID of the template.
+ type: String
+ - contextPath: MISP.Event.Object.Timestamp
+ description: Timestamp when the object was created.
+ type: String
+ - contextPath: MISP.Event.Object.Deleted
+ description: Whether the object was deleted.
+ type: Boolean
+ - contextPath: MISP.Event.Object.ID
+ description: ID of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.UUID
+ description: UUID of the object.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.Value
+ description: Value of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.EventID
+ description: ID of the first event from which the object originated.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.Timestamp
+ description: Timestamp when the object was created.
+ type: Date
+ - contextPath: MISP.Event.Object.Attribute.Deleted
+ description: Whether the object was deleted.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.ObjectID
+ description: ID of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.DisableCorrelation
+ description: Whether correlation is disabled.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.ID
+ description: ID of the attribute.
+ type: Unknown
+ - contextPath: MISP.Event.Object.Attribute.ObjectRelation
+ description: Relation of the object.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.Type
+ description: Object type.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.UUID
+ description: UUID of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.ToIDs
+ description: Whether the ToIDs flag is on.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.Category
+ description: Category of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.SharingGroupID
+ description: ID of the sharing group.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.Comment
+ description: Comment of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Description
+ description: Description of the object.
+ type: String
+ - arguments:
+ - default: false
+ description: ID of the event to add the object to.
+ isArray: false
+ name: event_id
+ required: true
+ secret: false
+ - default: false
+ description: Template name (can be found at https://www.misp-project.org/objects.html).
+ isArray: false
+ name: template
+ required: true
+ secret: false
+ - default: false
+ description: Attributes of the object.
+ isArray: false
+ name: attributes
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds any other object to MISP.
+ execution: false
+ name: misp-add-object
+ outputs:
+ - contextPath: MISP.Event.ID
+ description: MISP event ID.
+ type: number
+ - contextPath: MISP.Event.Object.MetaCategory
+ description: Object meta category.
+ type: String
+ - contextPath: MISP.Event.Object.Distribution
+ description: Distribution of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.Name
+ description: Name of the object.
+ type: String
+ - contextPath: MISP.Event.Object.TemplateVersion
+ description: Template version of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.EventID
+ description: ID of the event in which the object was first created.
+ type: Number
+ - contextPath: MISP.Event.Object.TemplateUUID
+ description: UUID of template.
+ type: String
+ - contextPath: MISP.Event.Object.Timestamp
+ description: Timestamp when the object was created.
+ type: String
+ - contextPath: MISP.Event.Object.Deleted
+ description: Whether the object was deleted.
+ type: Boolean
+ - contextPath: MISP.Event.Object.ID
+ description: ID of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.UUID
+ description: UUID of the object.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.Value
+ description: Value of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.EventID
+ description: ID of the first event from which the object originated.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.Timestamp
+ description: Timestamp when the object was created.
+ type: Date
+ - contextPath: MISP.Event.Object.Attribute.Deleted
+ description: Whether the object was deleted.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.ObjectID
+ description: ID of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.DisableCorrelation
+ description: Whether correlation is disabled.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.ID
+ description: ID of the attribute.
+ type: Unknown
+ - contextPath: MISP.Event.Object.Attribute.ObjectRelation
+ description: Relation of the object.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.Type
+ description: Object type.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.UUID
+ description: UUID of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.ToIDs
+ description: Whether the ToIDs flag is on.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.Category
+ description: Category of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.SharingGroupID
+ description: ID of the sharing group.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.Comment
+ description: Comment of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Description
+ description: Description of the object.
+ type: String
+ - arguments:
+ - default: false
+ description: ID of an event.
+ isArray: false
+ name: event_id
+ required: true
+ secret: false
+ - default: false
+ description: IP address (at least one of the optional arguments must be supplied)
+ isArray: false
+ name: ip
+ required: false
+ secret: false
+ - default: false
+ description: Destination port number.
+ isArray: false
+ name: dst_port
+ required: false
+ secret: false
+ - default: false
+ description: Source port number.
+ isArray: false
+ name: src_port
+ required: false
+ secret: false
+ - default: false
+ description: Domain.
+ isArray: false
+ name: domain
+ required: false
+ secret: false
+ - default: false
+ description: Hostname.
+ isArray: false
+ name: hostname
+ required: false
+ secret: false
+ - default: false
+ description: IP source.
+ isArray: false
+ name: ip_src
+ required: false
+ secret: false
+ - default: false
+ description: IP destination.
+ isArray: false
+ name: ip_dst
+ required: false
+ secret: false
+ - default: false
+ description: Date when the tuple was first seen.
+ isArray: false
+ name: first_seen
+ required: false
+ secret: false
+ - default: false
+ description: Date when the tuple was last seen.
+ isArray: false
+ name: last_seen
+ required: false
+ secret: false
+ - default: false
+ description: A description of the object.
+ isArray: false
+ name: comment
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Adds an IP Object to MISP event. The following arguments are optional,
+ but at least one must be supplied for the command to run successfully: "ip",
+ "dst_port", "src_port", "domain", "hostname", "ip_src", and "ip_dst".'
+ execution: false
+ name: misp-add-ip-object
+ outputs:
+ - contextPath: MISP.Event.ID
+ description: MISP event ID.
+ type: number
+ - contextPath: MISP.Event.Object.MetaCategory
+ description: Object meta category.
+ type: String
+ - contextPath: MISP.Event.Object.Distribution
+ description: Distribution of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.Name
+ description: Name of the object.
+ type: String
+ - contextPath: MISP.Event.Object.TemplateVersion
+ description: Template version of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.EventID
+ description: ID of the event in which the object was first created.
+ type: Number
+ - contextPath: MISP.Event.Object.TemplateUUID
+ description: UUID of the template.
+ type: String
+ - contextPath: MISP.Event.Object.Timestamp
+ description: Timestamp when the object was created.
+ type: String
+ - contextPath: MISP.Event.Object.Deleted
+ description: Whether the object was deleted.
+ type: Boolean
+ - contextPath: MISP.Event.Object.ID
+ description: ID of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.UUID
+ description: UUID of the object.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.Value
+ description: Value of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.EventID
+ description: ID of the first event from which the object originated.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.Timestamp
+ description: Timestamp when the object was created.
+ type: Date
+ - contextPath: MISP.Event.Object.Attribute.Deleted
+ description: Whether the object was deleted.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.ObjectID
+ description: ID of the object.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.DisableCorrelation
+ description: Whether correlation is disabled.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.ID
+ description: ID of the attribute.
+ type: Unknown
+ - contextPath: MISP.Event.Object.Attribute.ObjectRelation
+ description: Relation of the object.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.Type
+ description: Object type.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.UUID
+ description: UUID of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.ToIDs
+ description: Whether the ToIDs flag is on.
+ type: Boolean
+ - contextPath: MISP.Event.Object.Attribute.Category
+ description: Category of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Attribute.SharingGroupID
+ description: ID of the sharing group.
+ type: Number
+ - contextPath: MISP.Event.Object.Attribute.Comment
+ description: Comment of the attribute.
+ type: String
+ - contextPath: MISP.Event.Object.Description
+ description: Description of the object.
+ type: String
+ dockerimage: demisto/pymisp:1.0.0.52
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- MISP V2 Test
diff --git a/Integrations/MISP_V2/MISP_V2_description.md b/Integrations/MISP_V2/MISP_V2_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/MISP_V2/MISP_V2_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/MISP_V2/MISP_V2_image.png b/Integrations/MISP_V2/MISP_V2_image.png
new file mode 100644
index 000000000000..edcac9814fbb
Binary files /dev/null and b/Integrations/MISP_V2/MISP_V2_image.png differ
diff --git a/Integrations/MISP_V2/MISP_V2_test.py b/Integrations/MISP_V2/MISP_V2_test.py
new file mode 100644
index 000000000000..404da94739d7
--- /dev/null
+++ b/Integrations/MISP_V2/MISP_V2_test.py
@@ -0,0 +1,73 @@
+def mock_misp(mocker):
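+ """Patch ExpandedPyMISP.__init__ so MISP_V2 can be imported without a live MISP server."""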
+ from pymisp import ExpandedPyMISP
+ mocker.patch.object(ExpandedPyMISP, '__init__', return_value=None)
+
+
+def test_get_misp_threat_level(mocker):
+ mock_misp(mocker)
+ from MISP_V2 import get_misp_threat_level
+ assert get_misp_threat_level('1') == 'HIGH'
+ assert get_misp_threat_level('2') == 'MEDIUM'
+ assert get_misp_threat_level('3') == 'LOW'
+ assert get_misp_threat_level('4') == 'UNDEFINED'
+
+
+def test_get_dbot_level(mocker):
+ mock_misp(mocker)
+ from MISP_V2 import get_dbot_level
+ assert get_dbot_level('1') == 3
+ assert get_dbot_level('2') == 3
+ assert get_dbot_level('3') == 2
+ assert get_dbot_level('4') == 0
+ assert get_dbot_level('random') == 0
+
+
+def test_convert_timestamp(mocker):
+ mock_misp(mocker)
+ from MISP_V2 import convert_timestamp
+ assert convert_timestamp(1546713469) == "2019-01-05 18:37:49"
+
+
+def test_build_list_from_dict(mocker):
+ mock_misp(mocker)
+ from MISP_V2 import build_list_from_dict
+ lst = build_list_from_dict({'ip': '8.8.8.8', 'domain': 'google.com'})
+ assert lst == [{'ip': '8.8.8.8'}, {'domain': 'google.com'}]
+
+
+def test_extract_error(mocker):
+ mock_misp(mocker)
+ from MISP_V2 import extract_error
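+ # extract_error normalizes a list of (status_code, response_dict) error tuples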
+ error_response = [
+ (
+ 403,
+ {
+ 'name': 'Could not add object',
+ 'message': 'Could not add object',
+ 'url': '/objects/add/156/',
+ 'errors': 'Could not save object as at least one attribute has failed validation (ip). \
+ {"value":["IP address has an invalid format."]}'
+ }
+ )
+ ]
+ expected_response = [
+ {
+ 'code': 403,
+ 'message': 'Could not add object',
+ 'errors': 'Could not save object as at least one attribute has failed validation (ip). '
+ ' {"value":["IP address has an invalid format."]}'
+ }
+ ]
+ err = extract_error(error_response)
+ assert err == expected_response
+
+ error_response = [(404, {'name': 'Invalid event.', 'message': 'Invalid event.', 'url': '/objects/add/1546'})]
+ expected_response = [{'code': 404, 'message': 'Invalid event.', 'errors': None}]
+ err = extract_error(error_response)
+ assert err == expected_response
+
+ # Empty error
+ err = extract_error([])
+ assert err == []
+
+ # TODO check errors
diff --git a/Integrations/MailSenderNew/CHANGELOG.md b/Integrations/MailSenderNew/CHANGELOG.md
new file mode 100644
index 000000000000..af32e7fd263a
--- /dev/null
+++ b/Integrations/MailSenderNew/CHANGELOG.md
@@ -0,0 +1,8 @@
+## [Unreleased]
+The integration ignores the FQDN configuration parameter if it is empty or contains only white spaces.
+
+## [19.9.0] - 2019-09-04
+Improved debug failure logging when testing the integration instance configuration.
+
+## [19.8.2] - 2019-08-22
+Added the *additionalHeader* argument, which enables you to add custom headers to an email.
diff --git a/Integrations/MailSenderNew/MailSenderNew.py b/Integrations/MailSenderNew/MailSenderNew.py
new file mode 100644
index 000000000000..32e805693095
--- /dev/null
+++ b/Integrations/MailSenderNew/MailSenderNew.py
@@ -0,0 +1,356 @@
+import demistomock as demisto
+from CommonServerPython import *
+from email.mime.audio import MIMEAudio
+from email.mime.base import MIMEBase
+from email.mime.image import MIMEImage
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+from email.message import Message
+from email.header import Header
+from smtplib import SMTP
+from smtplib import SMTPRecipientsRefused
+import base64
+import json
+import mimetypes
+from email import encoders
+import re
+import random
+import string
+import smtplib
+
+SERVER = None
+UTF_8 = 'utf-8'
+
+
+def randomword(length):
+ """
+ Generate a random string of given length
+ """
+ letters = string.ascii_lowercase
+ return ''.join(random.choice(letters) for i in range(length))
+
+
+def return_error_mail_sender(data):
+ """
+ Return error as result and exit
+ """
+ if SERVER:
+ try:
+ SERVER.quit() # quit may throw if the connection was closed already
+ except Exception:
+ pass
+ return_error(data)
+
+
+def guess_type(filename):
+ """
+ Return the maintype and subtype guessed based on the extension
+ """
+ ctype, encoding = mimetypes.guess_type(filename)
+ if ctype is None or encoding is not None:
+ # No guess could be made, or the file is encoded (compressed), so
+ # use a generic bag-of-bits type.
+ ctype = 'application/octet-stream'
+ return ctype.split('/', 1)
+
+
+def handle_file(msg, filename, maintype, subtype, cid, data):
+ """
+ Add the attachment to the message and add the relevant header
+ """
+ if maintype == 'text':
+ # UTF-8 is a pretty safe bet
+ att = MIMEText(data, subtype, UTF_8) # type: MIMEBase
+ elif maintype == 'image':
+ att = MIMEImage(data, subtype)
+ elif maintype == 'audio':
+ att = MIMEAudio(data, subtype)
+ else:
+ att = MIMEBase(maintype, subtype)
+ att.set_payload(data)
+ # Encode the payload using Base64
+ encoders.encode_base64(att)
+ # Set the filename parameter
+ if cid:
+ att.add_header('Content-Disposition', 'inline', filename=filename)
+ att.add_header('Content-ID', '<' + cid + '>')
+ else:
+ att.add_header('Content-Disposition', 'attachment', filename=filename)
+ msg.attach(att)
+
+
+def handle_html(htmlBody):
+ """
+ Extract all data-url content from within the html and return as separate attachments.
+ Due to security implications, we support only images here
+ We might not have Beautiful Soup so just do regex search
+ """
+ attachments = []
+ cleanBody = ''
+ lastIndex = 0
+ for i, m in enumerate(re.finditer(r'<img.+?src=\"(data:(image\/.+?);base64,([a-zA-Z0-9+/=\r\n]+?))\"', htmlBody, re.I)):
+ maintype, subtype = m.group(2).split('/', 1)
+ att = {
+ 'maintype': maintype,
+ 'subtype': subtype,
+ 'data': base64.b64decode(m.group(3)),
+ 'name': 'image%d.%s' % (i, subtype)
+ }
+ att['cid'] = '%s@%s.%s' % (att['name'], randomword(8), randomword(8))
+ attachments.append(att)
+ cleanBody += htmlBody[lastIndex:m.start(1)] + 'cid:' + att['cid']
+ lastIndex = m.end() - 1
+ cleanBody += htmlBody[lastIndex:]
+ return cleanBody, attachments
+
+
+def collect_attachments():
+ """
+ Collect all attachments into a list with all data
+ """
+ attachments = []
+ attachIDs = argToList(demisto.getArg('attachIDs'))
+ attachNames = argToList(demisto.getArg('attachNames'))
+ attachCIDs = argToList(demisto.getArg('attachCIDs'))
+ for i, aid in enumerate(attachIDs):
+ try:
+ fileRes = demisto.getFilePath(aid)
+ path = fileRes['path']
+ if len(attachNames) > i and attachNames[i]:
+ filename = attachNames[i]
+ else:
+ filename = fileRes['name']
+ if len(attachCIDs) > i and attachCIDs[i]:
+ cid = attachCIDs[i]
+ else:
+ cid = ''
+ maintype, subtype = guess_type(filename)
+ if maintype == 'text':
+ with open(path) as fp:
+ data = fp.read()
+ else:
+ with open(path, 'rb') as fp:
+ data = fp.read()
+ attachments.append({
+ 'name': filename,
+ 'maintype': maintype,
+ 'subtype': subtype,
+ 'data': data,
+ 'cid': cid
+ })
+ except Exception as ex:
+ demisto.error("Invalid entry {} with exception: {}".format(aid, ex))
+ return_error_mail_sender('Entry %s is not valid or is not a file entry' % (aid))
+
+ # handle transient files
+ args = demisto.args()
+ f_names = args.get('transientFile', [])
+ f_names = f_names if isinstance(f_names, (list, tuple)) else f_names.split(',')
+ f_contents = args.get('transientFileContent', [])
+ f_contents = f_contents if isinstance(f_contents, (list, tuple)) else f_contents.split(',')
+ f_cids = args.get('transientFileCID', [])
+ f_cids = f_cids if isinstance(f_cids, (list, tuple)) else f_cids.split(',')
+
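+ # map(None, ...) is a Python 2 idiom: it zips the lists, padding the shorter ones with None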
+ for name, data, cid in map(None, f_names, f_contents, f_cids):
+ if name is None or data is None:
+ break
+ maintype, subtype = guess_type(name)
+ attachments.append({
+ 'name': name,
+ 'maintype': maintype,
+ 'subtype': subtype,
+ 'data': data,
+ 'cid': cid
+ })
+
+ return attachments
+
+
+def template_params():
+ """
+ Translate the template params if they exist from the context
+ """
+ actualParams = {}
+ paramsStr = demisto.getArg('templateParams')
+ if paramsStr:
+ try:
+ params = json.loads(paramsStr)
+ except ValueError as e:
+ return_error_mail_sender('Unable to parse templateParams: %s' % (str(e)))
+ # Build a simple key/value
+ for p in params:
+ if params[p].get('value'):
+ actualParams[p] = params[p]['value']
+ elif params[p].get('key'):
+ actualParams[p] = demisto.dt(demisto.context(), params[p]['key'])
+ return actualParams
+
+
+def header(s):
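+    """
+    Wrap a header value in email.header.Header with UTF-8 charset.
+    Newlines are collapsed to guard against header injection, and non-ASCII
+    values end up RFC 2047 encoded (e.g. '=?utf-8?b?...?=')
+    """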
+ if not s:
+ return None
+ s_no_newlines = ' '.join(s.splitlines())
+ return Header(s_no_newlines, UTF_8)
+
+
+def create_msg():
+ """
+ Will get args from demisto object
+ Return: a string representation of the message, to, cc, bcc
+ """
+ # Collect all parameters
+ to = argToList(demisto.getArg('to'))
+ cc = argToList(demisto.getArg('cc'))
+ bcc = argToList(demisto.getArg('bcc'))
+ additional_header = argToList(demisto.getArg('additionalHeader'))
+ subject = demisto.getArg('subject') or ''
+ body = demisto.getArg('body') or ''
+ htmlBody = demisto.getArg('htmlBody') or ''
+ replyTo = demisto.getArg('replyTo')
+ templateParams = template_params()
+ if templateParams:
+ body = body.format(**templateParams)
+ htmlBody = htmlBody.format(**templateParams)
+
+ # Basic validation - we allow pretty much everything but you have to have at least a recipient
+ # We allow messages without subject and also without body
+ if not to and not cc and not bcc:
+ return_error_mail_sender('You must have at least one recipient')
+
+ attachments = collect_attachments()
+ attachments.extend(collect_manual_attachments())
+
+ # Let's see what type of message we are talking about
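+    # Resulting MIME structure:
+    #   body only        -> text/plain (wrapped in multipart/mixed when there are attachments)
+    #   htmlBody only    -> text/html (same wrapping rule)
+    #   body + htmlBody  -> multipart/alternative, plain part first so MIME-aware
+    #                       clients prefer the HTML part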
+ if not htmlBody:
+ # This is a simple text message - we cannot have CIDs here
+ if len(attachments) > 0:
+ # This is multipart - default is mixed
+ msg = MIMEMultipart() # type: Message
+ msg.preamble = 'The message is only available on a MIME-aware mail reader.\n'
+ msg.attach(MIMEText(body, 'plain', UTF_8))
+ for att in attachments:
+ handle_file(msg, att['name'], att['maintype'], att['subtype'], None, att['data'])
+ else:
+ # Just text, how boring
+ msg = MIMEText(body, 'plain', UTF_8)
+ else:
+ htmlBody, htmlAttachments = handle_html(htmlBody)
+ attachments += htmlAttachments
+ if len(attachments) > 0:
+ msg = MIMEMultipart()
+ msg.preamble = 'The message is only available on a MIME-aware mail reader.\n'
+ if body:
+ alt = MIMEMultipart('alternative')
+ alt.attach(MIMEText(body, 'plain', UTF_8))
+ alt.attach(MIMEText(htmlBody, 'html', UTF_8))
+ msg.attach(alt)
+ else:
+ msg.attach(MIMEText(htmlBody, 'html', UTF_8))
+ for att in attachments:
+ handle_file(msg, att['name'], att['maintype'], att['subtype'], att['cid'], att['data'])
+ else:
+ if body:
+ msg = MIMEMultipart('alternative')
+ msg.preamble = 'The message is only available on a MIME-aware mail reader.\n'
+ msg.attach(MIMEText(body, 'plain', UTF_8))
+ msg.attach(MIMEText(htmlBody, 'html', UTF_8))
+ else:
+ msg = MIMEText(htmlBody, 'html', UTF_8)
+
+ # Add the relevant headers to the most outer message
+ msg['Subject'] = header(subject)
+ msg['From'] = header(demisto.getParam('from'))
+ if replyTo:
+ msg['Reply-To'] = header(replyTo)
+ if to:
+ msg['To'] = header(','.join(to))
+ if cc:
+ msg['CC'] = header(','.join(cc))
+ if additional_header:
+ for h in additional_header:
+            header_name_and_value = h.split('=', 1)  # split once, so header values may contain '='
+ msg[header_name_and_value[0]] = header(header_name_and_value[1])
+    # Notice: we deliberately do not add a BCC header, since Python 2 does not filter it out before sending
+ return msg.as_string(), to, cc, bcc
+
+
+def main():
+ # Following methods raise exceptions so no need to check for return codes
+ # But we do need to catch them
+ global SERVER
+ FROM = demisto.getParam('from')
+ FQDN = demisto.params().get('fqdn')
+ FQDN = (FQDN and FQDN.strip()) or None
+ stderr_org = smtplib.stderr # type: ignore
+ try:
+ if demisto.command() == 'test-module':
+ smtplib.stderr = LOG # type: ignore
+ smtplib.SMTP.debuglevel = 1
+ SERVER = SMTP(demisto.getParam('host'), int(demisto.params().get('port', 0)), local_hostname=FQDN)
+ SERVER.ehlo()
+ # TODO - support for non-valid certs
+ if demisto.getParam('tls'):
+ SERVER.starttls()
+ if demisto.getParam('credentials') and demisto.getParam('credentials').get('identifier') and demisto.getParam('credentials').get('password'): # noqa: E501
+ SERVER.login(demisto.getParam('credentials')['identifier'], demisto.getParam('credentials')['password'])
+ except Exception as e:
+ # also reset at the bottom finally
+ smtplib.stderr = stderr_org # type: ignore
+ smtplib.SMTP.debuglevel = 0
+ return_error_mail_sender(e)
+ return # so mypy knows that we don't continue after this
+ # -- COMMANDS --
+ try:
+ if demisto.command() == 'test-module':
+ msg = MIMEText('This is a test mail from Demisto\nRegards\nDBot') # type: Message
+ msg['Subject'] = 'Test mail from Demisto'
+ msg['From'] = FROM
+ msg['To'] = FROM
+ SERVER.sendmail(FROM, [FROM], msg.as_string())
+ SERVER.quit()
+ demisto.results('ok')
+ elif demisto.command() == 'send-mail':
+ (str_msg, to, cc, bcc) = create_msg()
+ SERVER.sendmail(FROM, to + cc + bcc, str_msg) # type: ignore
+ SERVER.quit() # type: ignore
+ demisto.results('Mail sent successfully')
+ else:
+ return_error_mail_sender('Command not recognized')
+ except SMTPRecipientsRefused as e:
+ error_msg = ''.join('{}\n'.format(val) for key, val in e.recipients.iteritems())
+ return_error_mail_sender("Encountered error: {}".format(error_msg))
+ except Exception as e:
+ return_error_mail_sender(e)
+ finally:
+ smtplib.stderr = stderr_org # type: ignore
+ smtplib.SMTP.debuglevel = 0
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/MailSenderNew/MailSenderNew.yml b/Integrations/MailSenderNew/MailSenderNew.yml
new file mode 100644
index 000000000000..33364884f3fb
--- /dev/null
+++ b/Integrations/MailSenderNew/MailSenderNew.yml
@@ -0,0 +1,108 @@
+commonfields:
+ id: Mail Sender (New)
+ version: -1
+name: Mail Sender (New)
+display: Mail Sender (New)
+category: Messaging
+description: Send emails. Implemented in Python, with embedded image support
+configuration:
+- display: Mail server hostname or IP address
+ name: host
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: SMTP Port
+ name: port
+ defaultvalue: "25"
+ type: 0
+ required: true
+- display: Credentials
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: false
+- display: Sender address
+ name: from
+ defaultvalue: noreply@demisto.com
+ type: 0
+ required: true
+- display: Fully Qualified Domain Name (FQDN) - used in EHLO
+ name: fqdn
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Use TLS for connection
+ name: tls
+ defaultvalue: "false"
+ type: 8
+ required: false
+- display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: "false"
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ subtype: python2
+ commands:
+ - name: send-mail
+ arguments:
+ - name: to
+ default: true
+ description: Email addresses for the 'To' field
+ isArray: true
+ - name: cc
+ description: Email addresses for the 'Cc' field
+ isArray: true
+ - name: bcc
+ description: Email addresses for the 'Bcc' field
+ isArray: true
+ - name: subject
+ description: Subject for the email to be sent
+ - name: body
+ description: The contents (body) of the email to be sent in plain text
+ - name: htmlBody
+ description: The contents (body) of the email to be sent in HTML format
+ - name: replyTo
+ description: Address that should be used to reply to the message
+ - name: attachIDs
+ description: A comma-separated list of IDs of war room entries that contain
+ the files that should be attached to the email
+ isArray: true
+ - name: attachNames
+      description: A comma-separated list of new names for the files of the corresponding
+        attachment IDs (e.g. to rename the first two files - attachNames=file_name1,file_name2;
+        to rename the first and third files - attachNames=file_name1,,file_name3)
+ isArray: true
+ - name: attachCIDs
+ description: A comma-separated list of CIDs to embed attachments inside the
+ email itself
+ isArray: true
+ - name: transientFile
+      description: Desired name for the attached file. Multiple files are supported
+        as a comma-separated list. (e.g. transientFile="t1.txt,temp.txt,t3.txt" transientFileContent="test
+        2,temporary file content,third file content" transientFileCID="t1.txt@xxx.yyy,t2.txt@xxx.zzz")
+ isArray: true
+ - name: transientFileContent
+      description: Content for the attached file. Multiple files are supported as
+        a comma-separated list. (e.g. transientFile="t1.txt,temp.txt,t3.txt" transientFileContent="test
+        2,temporary file content,third file content" transientFileCID="t1.txt@xxx.yyy,t2.txt@xxx.zzz")
+ isArray: true
+ - name: transientFileCID
+      description: CID for the attached file, if it should appear inline in the email.
+        Multiple files are supported as a comma-separated list. (e.g. transientFile="t1.txt,temp.txt,t3.txt"
+        transientFileContent="test 2,temporary file content,third file content" transientFileCID="t1.txt@xxx.yyy,t2.txt@xxx.zzz")
+ isArray: true
+ - name: templateParams
+ description: 'Replace {varname} variables with values from this argument. Expected
+ values are in the form of a JSON document like {"varname": {"value": "some
+ value", "key": "context key"}}. Each var name can either be provided with
+ the value or a context key to retrieve the value from.'
+ - name: additionalHeader
+ description: 'A CSV list of additional headers in the format: headerName=headerValue. For example: "headerName1=headerValue1,headerName2=headerValue2".'
+ isArray: true
+ description: Send an email
+ runonce: false
+tests:
+ - Mail Sender (New) Test
diff --git a/Integrations/MailSenderNew/MailSenderNew_description.md b/Integrations/MailSenderNew/MailSenderNew_description.md
new file mode 100644
index 000000000000..321160515715
--- /dev/null
+++ b/Integrations/MailSenderNew/MailSenderNew_description.md
@@ -0,0 +1,8 @@
+## SMTP Sender
+ Send emails, including rich emails with HTML bodies and embedded files.
+ - Most fields are optional and we allow empty subject and empty body
+ - If both body and htmlBody are provided, the message is built as a multipart/alternative envelope
+ - Preconfigured template attachments can be supported by using [data URLs](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs)
+ - Support for template replacement variables using {var} syntax
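+ - For example, with templateParams={"name": {"value": "John"}}, a body of "Hello {name}" is sent as "Hello John"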
+
+ Implemented in Python to support extensions.
diff --git a/Integrations/MailSenderNew/MailSenderNew_image.png b/Integrations/MailSenderNew/MailSenderNew_image.png
new file mode 100644
index 000000000000..5b071c2accc8
Binary files /dev/null and b/Integrations/MailSenderNew/MailSenderNew_image.png differ
diff --git a/Integrations/MailSenderNew/MailSenderNew_test.py b/Integrations/MailSenderNew/MailSenderNew_test.py
new file mode 100644
index 000000000000..84898db81533
--- /dev/null
+++ b/Integrations/MailSenderNew/MailSenderNew_test.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+import MailSenderNew
+import demistomock as demisto
+import pytest
+
+RETURN_ERROR_TARGET = 'MailSenderNew.return_error'
+
+
+@pytest.mark.parametrize('subject,subj_include,headers', [
+ (u'testbefore\ntestafter', 'testafter', 'foo=baz'),
+ ('testbefore\ntestafter', 'testafter', 'foo=baz'),
+ ('\xd7\xa2\xd7\x91\xd7\xa8\xd7\x99\xd7\xaa', '=?utf-8?', 'foo=baz'), # non-ascii char utf-8 encoded
+ (u'עברית', '=?utf-8?', 'foo=baz')
+ ]) # noqa: E124
+def test_create_msg(mocker, subject, subj_include, headers):
+ mocker.patch.object(demisto, 'args', return_value={
+ 'to': 'test@test.com,test1@test.com', # disable-secrets-detection
+ 'from': 'test@test.com',
+ 'bcc': 'bcc@test.com', # disable-secrets-detection
+ 'cc': 'cc@test.com', # disable-secrets-detection
+ 'subject': subject,
+ 'body': 'this is the body',
+ 'additionalHeader': headers
+ })
+ mocker.patch.object(demisto, 'params', return_value={
+ 'from': 'test@test.com',
+ })
+ (msg, to, cc, bcc) = MailSenderNew.create_msg()
+ assert to == ['test@test.com', 'test1@test.com'] # disable-secrets-detection
+ assert cc == ['cc@test.com'] # disable-secrets-detection
+ assert bcc == ['bcc@test.com'] # disable-secrets-detection
+ lines = msg.splitlines()
+ subj = [x for x in lines if 'Subject' in x][0]
+ assert subj_include in subj
+ assert 'foo' in msg
+
+
+def test_debug_smtp(mocker):
+ '''
+ Test that when we do test-module and fail we collect the server debug log
+ '''
+ mocker.patch.object(demisto, 'params', return_value={
+ 'from': 'test@test.com',
+ 'host': 'localhost',
+ 'port': '2025'
+ })
+ mocker.patch.object(demisto, 'command', return_value='test-module')
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET)
+ MailSenderNew.main()
+ assert return_error_mock.call_count == 1
+ # LOG should at least contain: "connect: " with port
+ assert MailSenderNew.LOG.messages and '2025' in MailSenderNew.LOG.messages[0]
diff --git a/Integrations/MailSenderNew/Pipfile b/Integrations/MailSenderNew/Pipfile
new file mode 100644
index 000000000000..e1d880847b54
--- /dev/null
+++ b/Integrations/MailSenderNew/Pipfile
@@ -0,0 +1,16 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+flake8 = "*"
+autopep8 = "*"
+
+[packages]
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/MailSenderNew/Pipfile.lock b/Integrations/MailSenderNew/Pipfile.lock
new file mode 100644
index 000000000000..43900376f510
--- /dev/null
+++ b/Integrations/MailSenderNew/Pipfile.lock
@@ -0,0 +1,318 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "5ae90a0109f28819533c6d6963d45b316ff0ad62b09bbb4bb3b6b2b6d4471f78"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "autopep8": {
+ "hashes": [
+ "sha256:4d8eec30cc81bc5617dbf1218201d770dc35629363547f17577c61683ccfb3ee"
+ ],
+ "index": "pypi",
+ "version": "==1.4.4"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==1.5"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==3.7.4"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661",
+ "sha256:a796a115208f5c03b18f332f7c11729812c8c3ded6c46319c59b53efd3819da8"
+ ],
+ "index": "pypi",
+ "version": "==3.7.7"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "functools32": {
+ "hashes": [
+ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0",
+ "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.3.post2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265",
+ "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.0"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7",
+ "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db"
+ ],
+ "version": "==0.18"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:c40744b6bc5162bbb39c1257fe298b7a393861d50978b565f3ccd9cb9de0182a",
+ "sha256:f57abacd059dc3bd666258d1efb0377510a89777fda3e3274e3c01f7c03ae22d"
+ ],
+ "version": "==4.3.20"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
+ "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
+ ],
+ "version": "==19.0"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742",
+ "sha256:5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.3"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:02c2b6d268695a8b64ad61847f92e611e6afcff33fd26c3a2125370c4662905d",
+ "sha256:ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93"
+ ],
+ "index": "pypi",
+ "version": "==1.9.4"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a",
+ "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03"
+ ],
+ "version": "==2.4.0"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:4a784f1d4f2ef198fe9b7aef793e9fa1a3b2f84e822d9b3a64a181293a572d45",
+ "sha256:926855726d8ae8371803f7b2e6ec0a69953d9c6311fa7c3b6c1b929ff92d27da"
+ ],
+ "index": "pypi",
+ "version": "==4.6.3"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typing": {
+ "hashes": [
+ "sha256:38566c558a0a94d6531012c8e917b1b8518a41e418f7f15f00e129cc80162ad3",
+ "sha256:53765ec4f83a2b720214727e319607879fec4acde22c4fbb54fa2604e79e44ce",
+ "sha256:84698954b4e6719e912ef9a42a2431407fe3755590831699debda6fba92aac55"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==3.7.4"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:8c1019c6aad13642199fbe458275ad6a84907634cc9f0989877ccc4a2840139d",
+ "sha256:ca943a7e809cc12257001ccfb99e3563da9af99d52f261725e96dfe0f9275bc3"
+ ],
+ "version": "==0.5.1"
+ }
+ }
+}
diff --git a/Integrations/MaxMind_GeoIP2/MaxMind_GeoIP2.png b/Integrations/MaxMind_GeoIP2/MaxMind_GeoIP2.png
new file mode 100644
index 000000000000..5dfab76301f5
Binary files /dev/null and b/Integrations/MaxMind_GeoIP2/MaxMind_GeoIP2.png differ
diff --git a/Integrations/MaxMind_GeoIP2/MaxMind_GeoIP2.py b/Integrations/MaxMind_GeoIP2/MaxMind_GeoIP2.py
new file mode 100644
index 000000000000..d142bfb847c9
--- /dev/null
+++ b/Integrations/MaxMind_GeoIP2/MaxMind_GeoIP2.py
@@ -0,0 +1,179 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import requests
+from collections import defaultdict
+from requests.auth import HTTPBasicAuth
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+'''GLOBAL VARS'''
+BASE_URL = demisto.params().get('url')
+APIKEY = demisto.params().get('apikey')
+ACCOUNT_ID = demisto.params().get('account')
+MODE = demisto.params().get('mode')
+USE_SSL = not demisto.params().get('insecure', False)
+PROXY = demisto.params().get('proxy')
+API_VERSION = 'geoip/v2.1'
+
+HR_HEADERS = [
+ 'IP',
+ 'Domain',
+ 'ASN',
+ 'Organization',
+ 'ISP',
+ 'Location',
+ 'Accuracy Radius',
+ 'User Type',
+ 'Continent',
+ 'ISO Code',
+ 'Country',
+ 'Registered Country',
+ 'TimeZone',
+ 'City',
+ 'Subdivision',
+ 'Is TOR Exit Node',
+ 'Is Hosting Provider',
+ 'Is Anonymous']
+
+HEADERS = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
+
+'''HELPER FUNCTIONS'''
+
+
+def http_request(query):
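+    # With the default parameters this issues, e.g.:
+    #   GET https://geoip.maxmind.com/geoip/v2.1/city/8.8.8.8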
+ r = requests.request(
+ 'GET',
+ BASE_URL + API_VERSION + '/' + MODE + '/' + query,
+ headers=HEADERS,
+ verify=USE_SSL,
+ auth=HTTPBasicAuth(ACCOUNT_ID, APIKEY)
+ )
+ if r.status_code != 200:
+ return_error(
+ 'Error in API call to MaxMind, got status code - {} and a reason: {}'.format(r.status_code, r.reason))
+ return r
+
+
+def create_map_entry(lat, lng):
+ demisto.results({
+ 'Type': entryTypes['map'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': {'lat': lat, 'lng': lng}
+ })
+
+
+def format_results(res_json):
+ hr = defaultdict() # type: dict
+ maxmind_ec = defaultdict(lambda: defaultdict(int)) # type: dict
+ ip_ec = defaultdict(lambda: defaultdict(int)) # type: dict
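+    # hr feeds the human-readable table, ip_ec the standard IP context,
+    # and maxmind_ec the MaxMind-specific context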
+ if 'continent' in res_json:
+ continent = res_json['continent']
+ hr['Continent'] = continent['names']['en']
+ maxmind_ec['Geo']['Continent'] = continent['names']['en']
+ if 'city' in res_json:
+ city = res_json['city']
+ hr['City'] = city['names']['en']
+ maxmind_ec['Geo']['City'] = city['names']['en']
+ if 'country' in res_json:
+ country = res_json['country']
+ hr['Country'] = country['names']['en']
+ maxmind_ec['Geo']['Country'] = country['names']['en']
+ ip_ec['Geo']['Country'] = country['names']['en']
+ if 'location' in res_json:
+ location = res_json['location']
+ ip_ec['Geo']['Location'] = str(location['latitude']) + ', ' + str(location['longitude'])
+ maxmind_ec['Geo']['Location'] = str(location['latitude']) + ', ' + str(location['longitude'])
+ create_map_entry(location['latitude'], location['longitude'])
+ if 'time_zone' in location:
+ hr['TimeZone'] = location['time_zone']
+ maxmind_ec['Geo']['TimeZone'] = location['time_zone']
+ if 'accuracy_radius' in location:
+ hr['Accuracy Radius'] = location['accuracy_radius']
+ maxmind_ec['Geo']['Accuracy'] = location['accuracy_radius']
+ if 'registered_country' in res_json:
+ hr['ISO Code'] = res_json['registered_country']['iso_code']
+ maxmind_ec['ISO_Code'] = res_json['registered_country']['iso_code']
+ registration = res_json['registered_country']['names']['en']
+ hr['Registered Country'] = registration
+ maxmind_ec['RegisteredCountry'] = registration
+ if 'subdivisions' in res_json:
+ subs = res_json['subdivisions'][0]
+ hr['Subdivision'] = subs['names']['en']
+ maxmind_ec['Geo']['Subdivision'] = subs['names']['en']
+ if 'traits' in res_json:
+ traits = res_json['traits']
+ if 'user_type' in traits:
+ hr['User Type'] = traits['user_type']
+ maxmind_ec['UserType'] = traits['user_type']
+ if 'domain' in traits:
+ hr['Domain'] = traits['domain']
+ maxmind_ec['Domain'] = traits['domain']
+ if 'is_anonymous' in traits:
+ hr['Is Anonymous'] = traits['is_anonymous']
+ maxmind_ec['Anonymous'] = traits['is_anonymous']
+ if 'is_hosting_provider' in traits:
+ hr['Is Hosting Provider'] = traits['is_hosting_provider']
+ maxmind_ec['Host'] = traits['is_hosting_provider']
+ if 'is_tor_exit_node' in traits:
+ hr['Is TOR Exit Node'] = traits['is_tor_exit_node']
+ maxmind_ec['Tor'] = traits['is_tor_exit_node']
+ if 'autonomous_system_number' in traits:
+ hr['ASN'] = traits['autonomous_system_number']
+ ip_ec['ASN'] = traits['autonomous_system_number']
+ maxmind_ec['ASN'] = traits['autonomous_system_number']
+ if 'autonomous_system_organization' in traits:
+ hr['Organization'] = traits['autonomous_system_organization']
+ maxmind_ec['Organization'] = traits['autonomous_system_organization']
+ hr['IP'] = traits['ip_address']
+ ip_ec['Address'] = traits['ip_address']
+ maxmind_ec['Address'] = traits['ip_address']
+ if 'isp' in traits:
+ hr['ISP'] = traits['isp']
+ maxmind_ec['ISP'] = traits['isp']
+ return hr, ip_ec, maxmind_ec
+
+
+''' FUNCTIONS '''
+
+
+def get_geo_ip(query):
+ raw = http_request(query)
+ res_json = raw.json()
+ return res_json
+
+
+def geo_ip_command():
+ ip_query = demisto.args().get('ip')
+ res_json = get_geo_ip(ip_query)
+ hr, ip_ec, maxmind_ec = format_results(res_json)
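+    # the DT filters below link these results to existing context entries with the same Address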
+ ec = ({
+ 'IP(val.Address && val.Address == obj.Address)': ip_ec,
+ 'MaxMind(val.Address && val.Address == obj.Address)': maxmind_ec
+ })
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': res_json,
+ 'HumanReadable': tableToMarkdown('{} - Scan Results'.format(ip_query), hr, HR_HEADERS, removeNull=True),
+ 'EntryContext': ec
+ })
+
+
+''' EXECUTION CODE '''
+LOG('command is %s' % (demisto.command(),))
+try:
+ handle_proxy()
+ if demisto.command() == 'ip':
+ geo_ip_command()
+ if demisto.command() == 'test-module':
+        http_request('8.8.8.8')  # any successful lookup verifies the credentials
+ demisto.results('ok')
+except Exception as e:
+ LOG(e)
+ LOG.print_log()
+ return_error(str(e))
diff --git a/Integrations/MaxMind_GeoIP2/MaxMind_GeoIP2.yml b/Integrations/MaxMind_GeoIP2/MaxMind_GeoIP2.yml
new file mode 100644
index 000000000000..e32c2792ef64
--- /dev/null
+++ b/Integrations/MaxMind_GeoIP2/MaxMind_GeoIP2.yml
@@ -0,0 +1,111 @@
+commonfields:
+ id: MaxMind GeoIP2
+ version: -1
+name: MaxMind GeoIP2
+display: MaxMind GeoIP2
+category: Data Enrichment & Threat Intelligence
+description: Enriches IP addresses
+configuration:
+- display: API Key
+ name: apikey
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Account ID
+ name: account
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Use system proxy
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Service Version
+ name: mode
+ defaultvalue: city
+ type: 15
+ required: true
+ options:
+ - country
+ - city
+ - insights
+- display: Base URL
+ name: url
+ defaultvalue: https://geoip.maxmind.com/
+ type: 0
+ required: true
+script:
+ script: ''
+ type: python
+ commands:
+ - name: ip
+ arguments:
+ - name: ip
+ required: true
+ description: IP Address to be queried
+ outputs:
+ - contextPath: IP.Address
+ description: The IP address
+ type: string
+ - contextPath: IP.Geo.Country
+ description: The IP country
+ type: string
+ - contextPath: IP.Geo.Location
+ description: The IP geographic location in coordinates
+ type: string
+ - contextPath: IP.ASN
+ description: The IP ASN
+ type: string
+ - contextPath: MaxMind.Geo.TimeZone
+      description: The time zone in which the IP is located
+ type: string
+ - contextPath: MaxMind.Geo.Accuracy
+      description: The accuracy radius of the location, in kilometers
+ type: number
+ - contextPath: MaxMind.Geo.Continent
+ description: The IP Continent
+ type: string
+ - contextPath: MaxMind.Geo.Subdivision
+ description: The IP subdivision
+ type: string
+ - contextPath: MaxMind.Organization
+ description: The IP organization
+ type: string
+ - contextPath: MaxMind.Tor
+      description: Is the IP a Tor exit node?
+ type: boolean
+ - contextPath: MaxMind.Host
+      description: Whether the IP belongs to a hosting provider
+      type: boolean
+ - contextPath: MaxMind.Anonymous
+ description: Is the IP anonymous?
+ type: boolean
+ - contextPath: MaxMind.UserType
+ description: The IP user type
+ type: string
+ - contextPath: MaxMind.ISP
+ description: The IP ISP
+ type: string
+ - contextPath: MaxMind.Domain
+      description: The domain associated with the IP
+ type: string
+ - contextPath: MaxMind.ISO_Code
+      description: ISO code of the country in which the IP is registered
+ type: string
+ - contextPath: MaxMind.RegisteredCountry
+      description: The country in which the IP is registered
+ type: string
+ - contextPath: MaxMind.City
+ description: City the IP is located in
+ type: string
+    description: Checks IP reputation and geolocation using the MaxMind GeoIP2 service
+      (when information is available, returns a JSON with details)
+ runonce: false
+tests:
+ - MaxMind Test
diff --git a/Integrations/MaxMind_GeoIP2/MaxMind_GeoIP2_desc.md b/Integrations/MaxMind_GeoIP2/MaxMind_GeoIP2_desc.md
new file mode 100644
index 000000000000..23f79a6ea6ae
--- /dev/null
+++ b/Integrations/MaxMind_GeoIP2/MaxMind_GeoIP2_desc.md
@@ -0,0 +1 @@
+The MaxMind GeoIP2 integration allows you to query the MaxMind API service and retrieve a JSON object with the full details of an IP address.
\ No newline at end of file
diff --git a/Integrations/McAfee-TIE/CHANGELOG.md b/Integrations/McAfee-TIE/CHANGELOG.md
new file mode 100644
index 000000000000..16311e1a1656
--- /dev/null
+++ b/Integrations/McAfee-TIE/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+-
\ No newline at end of file
diff --git a/Integrations/McAfee-TIE/McAfee-TIE.py b/Integrations/McAfee-TIE/McAfee-TIE.py
new file mode 100644
index 000000000000..171d1d4d92e5
--- /dev/null
+++ b/Integrations/McAfee-TIE/McAfee-TIE.py
@@ -0,0 +1,306 @@
+import demistomock as demisto
+from CommonServerPython import *
+from dxlclient.client_config import DxlClientConfig
+from dxlclient.client import DxlClient
+from dxlclient.broker import Broker
+from dxltieclient import TieClient
+from dxltieclient.constants import HashType
+from datetime import datetime
+
+VENDOR_NAME = 'McAfee Threat Intelligence Exchange'
+
+broker_ca_bundle = './brokercerts.crt'
+with open(broker_ca_bundle, "w") as text_file:
+ text_file.write(demisto.params()['broker_ca_bundle'])
+
+cert_file = './cert_file.crt'
+with open(cert_file, "w") as text_file:
+ text_file.write(demisto.params()['cert_file'])
+
+private_key = './private_key.key'
+with open(private_key, "w") as text_file:
+ text_file.write(demisto.params()['private_key'])
+
+broker_urls = demisto.params()['broker_urls'].split(',')
+
+HASH_TYPE_KEYS = {
+ 'md5': HashType.MD5,
+ 'sha1': HashType.SHA1,
+ 'sha256': HashType.SHA256
+}
+
+TRUST_LEVELS = {
+ '0': 'NOT_SET',
+ '1': 'KNOWN_MALICIOUS',
+ '15': 'MOST_LIKELY_MALICIOUS',
+ '30': 'MIGHT_BE_MALICIOUS',
+ '50': 'UNKNOWN',
+ '70': 'MIGHT_BE_TRUSTED',
+ '85': 'MOST_LIKELY_TRUSTED',
+ '99': 'KNOWN_TRUSTED',
+ '100': 'KNOWN_TRUSTED_INSTALLER'
+}
+
+PROVIDER = {
+ '1': 'Global Threat Intelligence (GTI)',
+ '3': 'Enterprise reputation',
+ '5': 'Advanced Threat Defense (ATD)',
+ '7': 'Web Gateway (MWG)'
+}
+
+
+def create_error_entry(contents):
+ return {'ContentsFormat': formats['text'], 'Type': entryTypes['error'], 'Contents': contents}
+
+
+def get_client_config():
+ config = DxlClientConfig(
+ broker_ca_bundle=broker_ca_bundle,
+ cert_file=cert_file,
+ private_key=private_key,
+ brokers=[Broker.parse(url) for url in broker_urls]
+ )
+
+ config.connect_retries = 1
+ config.reconnect_delay = 1
+ config.reconnect_delay_max = 10
+
+ return config
+
+
+def get_provider(provider_id):
+ provider_id_str = str(provider_id)
+    return PROVIDER.get(provider_id_str, provider_id_str)
+
+
+def parse_reputation(rep):
+ # get trust level
+ trust_level = str(rep.get('trustLevel'))
+ verbose_trust_level = TRUST_LEVELS.get(trust_level, trust_level)
+
+ # get provider
+ provider_id = rep.get('providerId')
+ provider = get_provider(provider_id)
+
+ # get date
+ create_date = rep.get('createDate')
+ create_date_str = str(datetime.fromtimestamp(create_date))
+
+ res = {
+ 'Trust level': trust_level,
+ 'Trust level (verbose)': verbose_trust_level,
+ 'Provider ID': provider_id,
+ 'Provider (verbose)': provider,
+ 'Created date': create_date_str
+ }
+
+ return res
+
+
+def parse_reference(reference):
+ agent_guid = reference.get('agentGuid')
+ return {
+ 'Date': str(datetime.fromtimestamp(reference.get('date'))),
+ 'AgentGuid': agent_guid.replace('{', '').replace('}', '') # remove brackets if exist
+ }
+
+
+def reputations_to_table(reputations):
+ return [parse_reputation(rep) for rep in reputations]
+
+
+def references_to_table(references):
+ return [parse_reference(ref) for ref in references]
+
+
+def trust_level_to_score(trust_level):
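+    # Map a TIE trust level (0-100) to a DBot reputation score:
+    # 0 = unknown, 1 = good, 2 = suspicious, 3 = bad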
+ if (trust_level >= 70):
+ return 1
+ elif (trust_level == 30):
+ return 2
+ elif (trust_level == 0 or trust_level == 50):
+ return 0
+ elif (trust_level < 30):
+ return 3
+ else:
+        # Shouldn't reach here, as the API doesn't support values 31-69 (except 50)
+ return 0
+
+
+def get_trust_level_and_score(reputations):
+    trust_level = 101  # more than the highest possible trust level
+ vendor = VENDOR_NAME
+
+ for rep in reputations:
+ rep_trust_level = rep.get('trustLevel', 0)
+ if rep_trust_level != 0 and rep_trust_level < trust_level:
+ trust_level = rep.get('trustLevel')
+ vendor = get_provider(rep.get('providerId'))
+
+ if trust_level == 101:
+ # no trust_level found
+ return {
+ 'trust_level': 0,
+ 'score': 0,
+ 'vendor': vendor
+ }
+
+ score = trust_level_to_score(trust_level)
+
+ if (vendor == 'Enterprise reputation'):
+ vendor = VENDOR_NAME
+ return {
+ 'trust_level': trust_level,
+ 'score': score,
+ 'vendor': vendor
+ }
+
+
+def test():
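+    """
+    Verify connectivity by opening and closing a DXL client connection
+    """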
+ config = get_client_config()
+ with DxlClient(config) as client:
+ client.connect()
+ client.disconnect()
+
+
+def file(hash):
+ config = get_client_config()
+ with DxlClient(config) as client:
+ client.connect()
+ # Create the McAfee Threat Intelligence Exchange (TIE) client
+ tie_client = TieClient(client)
+
+ hash_type = get_hash_type(hash)
+ hash_type_key = HASH_TYPE_KEYS.get(hash_type)
+ if not hash_type_key:
+            return create_error_entry('file argument must be sha1(40 characters), sha256(64 characters) or md5(32 characters)')
+
+ hash_param = {}
+ hash_param[hash_type_key] = hash
+
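+        # get_file_reputation returns a dict keyed by provider ID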
+ res = tie_client.get_file_reputation(hash_param)
+ reputations = res.values()
+
+ table = reputations_to_table(reputations)
+
+        # create context
+ context_file = {}
+ hash_type_uppercase = hash_type.upper()
+        tl_score = get_trust_level_and_score(reputations)
+
+ context_file[hash_type_uppercase] = hash
+ context_file['TrustLevel'] = tl_score['trust_level']
+ context_file['Vendor'] = tl_score['vendor']
+
+ dbot_score = {'Indicator': hash, 'Type': 'hash', 'Vendor': tl_score['vendor'], 'Score': tl_score['score']}
+ if tl_score['score'] >= 2:
+ context_file['Malicious'] = {
+ 'Vendor': tl_score['vendor'],
+ 'Score': tl_score['score'],
+ 'Description': 'Trust level is ' + str(tl_score['trust_level'])
+ }
+ ec = {
+ 'DBotScore': dbot_score
+ }
+ ec[outputPaths['file']] = context_file
+
+ return {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': reputations,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('McAfee TIE Hash Reputations For %s:' % (hash,), table),
+ 'EntryContext': ec
+ }
+
+
+def file_references(hash):
+ config = get_client_config()
+ with DxlClient(config) as client:
+ client.connect()
+ # Create the McAfee Threat Intelligence Exchange (TIE) client
+ tie_client = TieClient(client)
+
+ hash_type = get_hash_type(hash)
+ hash_type_key = HASH_TYPE_KEYS.get(hash_type)
+ if not hash_type_key:
+            return create_error_entry('file argument must be sha1(40 characters), sha256(64 characters) or md5(32 characters)')
+
+ hash_param = {}
+ hash_param[hash_type_key] = hash
+
+ references = tie_client.get_file_first_references(hash_param)
+
+ table = references_to_table(references)
+
+        # create context
+ context_file = {}
+ hash_type_uppercase = hash_type.upper()
+
+ context_file[hash_type_uppercase] = hash
+ context_file['References'] = table
+ ec = {}
+ ec[outputPaths['file']] = context_file
+ return {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': references,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('References for hash %s' % (hash,), table),
+ 'EntryContext': ec
+ }
+
+
+def set_file_reputation(hash, trust_level, filename, comment):
+ config = get_client_config()
+
+ # find trust_level key
+ trust_level_key = None
+ for k, v in TRUST_LEVELS.iteritems():
+ if v == trust_level:
+ trust_level_key = k
+
+ if not trust_level_key:
+        return create_error_entry('illegal argument trust_level %s. Choose a value from the predefined values' % (trust_level, ))
+
+ with DxlClient(config) as client:
+ client.connect()
+ tie_client = TieClient(client)
+
+ hash_type = get_hash_type(hash)
+ hash_type_key = HASH_TYPE_KEYS.get(hash_type)
+ if not hash_type_key:
+            return create_error_entry('file argument must be sha1(40 characters), sha256(64 characters) or md5(32 characters)')
+
+ hash_param = {}
+ hash_param[hash_type_key] = hash
+
+ try:
+ tie_client.set_file_reputation(trust_level_key, hash_param, filename, comment)
+            return 'Successfully set file reputation'
+ except Exception as ex:
+ return create_error_entry(str(ex))
+
+
+args = demisto.args()
+if demisto.command() == 'test-module':
+ test()
+ demisto.results('ok')
+ sys.exit(0)
+elif demisto.command() == 'file':
+ results = file(args.get('file'))
+ demisto.results(results)
+ sys.exit(0)
+elif demisto.command() == 'tie-file-references':
+ results = file_references(args.get('file'))
+ demisto.results(results)
+ sys.exit(0)
+elif demisto.command() == 'tie-set-file-reputation':
+ results = set_file_reputation(
+ args.get('file'),
+ args.get('trust_level'),
+ args.get('filename'),
+ args.get('comment')
+ )
+ demisto.results(results)
+ sys.exit(0)
diff --git a/Integrations/McAfee-TIE/McAfee-TIE.yml b/Integrations/McAfee-TIE/McAfee-TIE.yml
new file mode 100644
index 000000000000..21f3b7ba90bc
--- /dev/null
+++ b/Integrations/McAfee-TIE/McAfee-TIE.yml
@@ -0,0 +1,157 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: McAfee Threat Intelligence Exchange
+ version: -1
+configuration:
+- display: Broker CA certificates content (see `brokercerts.crt` in Integration Tips)
+ name: broker_ca_bundle
+ required: true
+ type: 12
+- display: Client certificates content (see `client.crt` in Integration Tips)
+ name: cert_file
+ required: true
+ type: 12
+- display: Client private key content (see `client.key` in Integration Tips)
+ name: private_key
+ required: true
+ type: 14
+- display: 'A CSV list of broker URLs in the format: [ssl://]<hostname>[:port]. Get
+    the hostname and port from the `brokerlist.properties` file (in the instructions).
+    The broker must be reachable from the Demisto server.'
+ name: broker_urls
+ required: true
+ type: 0
+description: Connect to McAfee TIE using the McAfee DXL client.
+display: McAfee Threat Intelligence Exchange
+name: McAfee Threat Intelligence Exchange
+script:
+ commands:
+ - arguments:
+ - default: true
+      description: Hash of the file to query. Supports MD5, SHA1, and SHA256
+ isArray: false
+ name: file
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the reputations for the specified hash. Can be "MD5", "SHA1",
+ or "SHA256".
+ execution: false
+ name: file
+ outputs:
+ - contextPath: File.MD5
+      description: The MD5 hash of the file.
+ type: String
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: String
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - contextPath: File.TrustLevel
+      description: The file's lowest reported trust level.
+ type: String
+ - contextPath: File.Vendor
+ description: The vendor that reported the file as malicious.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: DBotScore.Indicator
+      description: The indicator that was tested.
+ type: String
+ - contextPath: File.Malicious.Description
+ description: A description explaining why the file was determined to be malicious.
+ type: String
+ - contextPath: File.Malicious.Vendor
+ description: The vendor that reported the file as malicious.
+ type: String
+ - contextPath: File.Malicious.Score
+ description: The actual score.
+ type: Number
+ - arguments:
+ - default: true
+ description: Hash of the file for which to set the reputation. Can be "MD5",
+ "SHA1", or "SHA256".
+ isArray: false
+ name: file
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The new trust level for the file.
+ isArray: false
+ name: trust_level
+ predefined:
+ - NOT_SET
+ - KNOWN_MALICIOUS
+ - MOST_LIKELY_MALICIOUS
+ - MIGHT_BE_MALICIOUS
+ - UNKNOWN
+ - MIGHT_BE_TRUSTED
+ - MOST_LIKELY_TRUSTED
+ - KNOWN_TRUSTED
+ - KNOWN_TRUSTED_INSTALLER
+ required: true
+ secret: false
+ - default: false
+ description: A file name to associate with the file.
+ isArray: false
+ name: filename
+ required: false
+ secret: false
+ - default: false
+ description: A comment to associate with the file.
+ isArray: false
+ name: comment
+ required: false
+ secret: false
+ deprecated: false
+    description: Sets the "Enterprise" reputation (trust level) of a specified file.
+      Permissions are required to invoke this method. See the 'How-to' in the instance
+      instructions.
+ execution: false
+ name: tie-set-file-reputation
+ - arguments:
+ - default: true
+      description: Hash of the file for which to search. Can be "MD5", "SHA1", or
+        "SHA256".
+ isArray: false
+ name: file
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the set of systems which have referenced (typically executed)
+ the specified file.
+ execution: false
+ name: tie-file-references
+ outputs:
+ - contextPath: File.MD5
+ description: MD5 hash of the file (if supplied).
+ type: Unknown
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file (if supplied).
+ type: Unknown
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file (if supplied).
+ type: Unknown
+ - contextPath: File.References.AgentGuid
+ description: The GUID of the system that referenced the file.
+ type: Unknown
+ - contextPath: File.References.Date
+ description: The time the system first referenced the file.
+ type: Unknown
+ dockerimage: demisto/dxl
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- McAfee-TIE Test
diff --git a/Integrations/McAfee-TIE/McAfee-TIE_description.md b/Integrations/McAfee-TIE/McAfee-TIE_description.md
new file mode 100644
index 000000000000..0ef7ae5adcb7
--- /dev/null
+++ b/Integrations/McAfee-TIE/McAfee-TIE_description.md
@@ -0,0 +1,13 @@
+## Prerequisites - Connect to McAfee Threat Intelligence Exchange (TIE) using the DXL TIE Client
+To connect to McAfee TIE using the DXL TIE client, you need to create certificates and configure DXL. For more information, see the [OpenDXL documentation](https://opendxl.github.io/opendxl-client-python/pydoc/index.html). After you complete this configuration, you will have the following files:
+ * Broker CA certificates ('brokercerts.crt' file)
+ * Client certificate ('client.crt' file)
+ * Client private key ('client.key' file)
+ * Broker list properties file ('brokerlist.properties' file)
+
+To use the **tie-set-file-reputation** command, you need to authorize the client (Demisto) to run the command. Follow the instructions in the [OpenDXL documentation](https://opendxl.github.io/opendxl-client-python/pydoc/marsendauth.html). In step #4, instead of selecting **Active Response Server API**, select **TIE Server Set Enterprise Reputation**.
+
+## Dependencies (Python packages)
+You don't need to install these packages; they are included in the Docker image.
+ - dxlclient [docs](https://opendxl.github.io/opendxl-client-python/pydoc/index.html)
+ - dxltieclient [docs](https://opendxl.github.io/opendxl-tie-client-python/pydoc/)
diff --git a/Integrations/McAfee-TIE/McAfee-TIE_image.png b/Integrations/McAfee-TIE/McAfee-TIE_image.png
new file mode 100644
index 000000000000..5bf83f1bfbec
Binary files /dev/null and b/Integrations/McAfee-TIE/McAfee-TIE_image.png differ
diff --git a/Integrations/McAfee-TIE/Pipfile b/Integrations/McAfee-TIE/Pipfile
new file mode 100644
index 000000000000..a4d3c52309a8
--- /dev/null
+++ b/Integrations/McAfee-TIE/Pipfile
@@ -0,0 +1,19 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+
+[packages]
+dxlclient = "==3.1.0.587"
+dxlmarclient = "==0.1.2"
+dxltieclient = "==0.1.0"
+virtualenv = "==15.0.3"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/McAfee-TIE/Pipfile.lock b/Integrations/McAfee-TIE/Pipfile.lock
new file mode 100644
index 000000000000..05b1f6d4af8b
--- /dev/null
+++ b/Integrations/McAfee-TIE/Pipfile.lock
@@ -0,0 +1,340 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "f13e13fdacb7cae2f83d3e39d8e3b91ac6dfd4d2bb208df1d5422e0640f17f2b"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "dxlclient": {
+ "hashes": [
+ "sha256:c82bddc6470a3c7d6d95dba66c232a5440457e92745e33cd79426bab88eb14d5"
+ ],
+ "index": "pypi",
+ "version": "==3.1.0.587"
+ },
+ "dxlmarclient": {
+ "hashes": [
+ "sha256:976e8e04f2b96fa150f2b16aa6ade64b45ef5a7d5770fc89b07b46faa95f613d",
+ "sha256:e64c6266eab1f01679fdd857ca559fbeefc9247d2f310c167fccb3e9f1aa33a4"
+ ],
+ "index": "pypi",
+ "version": "==0.1.2"
+ },
+ "dxltieclient": {
+ "hashes": [
+ "sha256:a01edf1ee27ff6b13d7ef45bd5e9ea36b0ad8d657ad29f0a1a39487fd9c408cf",
+ "sha256:a085086149872dd832705dc6116d96f232214a2cbaa1e06fe391e5952be18e71"
+ ],
+ "index": "pypi",
+ "version": "==0.1.0"
+ },
+ "virtualenv": {
+ "hashes": [
+ "sha256:6d9c760d3fc5fa0894b0f99b9de82a4647e1164f0b700a7f99055034bf548b1d",
+ "sha256:cc8164362fc9611d478f784bbc066f3ee74526c50336ec61a6e75d5af97926c8"
+ ],
+ "index": "pypi",
+ "version": "==15.0.3"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==3.7.4"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16",
+ "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.3.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8",
+ "sha256:80d2de76188eabfbfcf27e6a37342c2827801e59c4cc14b0371c56fed43820e3"
+ ],
+ "version": "==0.19"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
+ "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
+ ],
+ "version": "==19.1"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42",
+ "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300"
+ ],
+ "index": "pypi",
+ "version": "==1.9.5"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae",
+ "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6"
+ ],
+ "index": "pypi",
+ "version": "==4.6.4"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "version": "==2.22.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/McAfee_Advanced_Threat_Defense/CHANGELOG.md b/Integrations/McAfee_Advanced_Threat_Defense/CHANGELOG.md
new file mode 100644
index 000000000000..ffaab3eeb88a
--- /dev/null
+++ b/Integrations/McAfee_Advanced_Threat_Defense/CHANGELOG.md
@@ -0,0 +1,11 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+Bug Fix - Improved handling of DBotScore outputs when file detonation via the ***atd-file-upload*** command fails.
+
+## [19.8.2] - 2019-08-22
+-
+
+## [19.8.0] - 2019-08-06
+-
diff --git a/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense.py b/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense.py
new file mode 100644
index 000000000000..12c2f7b85cd7
--- /dev/null
+++ b/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense.py
@@ -0,0 +1,793 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+import json
+import re
+import base64
+import time
+import requests
+import sys
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' PREREQUISITES '''
+
+
+def load_server_url():
+ """
+ Cleans and loads the server url from the configuration
+ """
+ url = demisto.params().get('baseUrl')
+    url = re.sub(r'/+$', '', url)
+ return url
+
+
+''' GLOBALS '''
+USERNAME = demisto.params().get('username')
+PASSWORD = demisto.params().get('password')
+USE_SSL = not demisto.params().get('unsecure')
+BASE_URL = load_server_url()
+LOGIN_HEADERS = {
+ 'Accept': 'application/vnd.ve.v1.0+json',
+ 'Content-Type': 'application/json',
+ 'VE-SDK-API': base64.b64encode(USERNAME + ':' + PASSWORD)
+}
+HEARTBEAT_HEADERS = {
+ 'Accept': 'application/vnd.ve.v1.0+json',
+ 'Content-Type': 'application/json'
+}
+API_HEADERS = None
+
+''' HELPERS '''
+
+
+def get_session_credentials():
+ result = http_request('php/session.php', 'get', LOGIN_HEADERS)
+ if not result:
+ return_error('Failed getting session credentials.')
+ return result['results']
+
+
+@logger
+def heart_beat():
+ return http_request('php/heartbeat.php', 'get', API_HEADERS, HEARTBEAT_HEADERS)
+
+
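+# ATD's VE-SDK-API uses a two-step scheme: the initial session request is
+# authenticated with base64(username:password), and every subsequent call is
+# authenticated with base64(session:userId) returned by that request.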
+def get_headers():
+ sess = get_session_credentials()
+ return {
+ 'Accept': 'application/vnd.ve.v1.0+json',
+ 'VE-SDK-API': base64.b64encode(sess['session'] + ':' + sess['userId'])
+ }
+
+
+def http_request(uri, method, headers=None, body=None, params=None, files=None):
+ """
+ Makes an API call with the supplied uri, method, headers, body
+ """
+    LOG('running request with uri=%s' % uri)
+ url = '%s/%s' % (BASE_URL, uri)
+ res = requests.request(
+ method,
+ url,
+ headers=headers,
+ data=body,
+ verify=USE_SSL,
+ params=params,
+ files=files
+ )
+ if res.status_code < 200 or res.status_code >= 300:
+ if res.status_code == 401:
+ return_error(
+ 'Request Failed with status: 401 Unauthorized - Invalid Username or Password')
+ elif res.status_code == 415:
+ return_error(
+ 'Request Failed with status: 415 - Invalid accept header or content type header')
+ else:
+ return_error(
+ 'Request Failed with status: ' + str(res.status_code)
+ + '. Reason is: ' + str(res.reason))
+ result = res.content
+
+ if not uri.startswith('php/showreport.php?'):
+        # parsing ints as strings is vital for long taskId/jobId values that json.loads would otherwise round
+ try:
+ result = json.loads(result, parse_int=str)
+ except ValueError:
+ LOG('result is: %s' % result)
+ return_error('Response Parsing failed')
+ if 'success' in result: # type: ignore
+ if result['success'] == 'false': # type: ignore
+ return_error('ATD Api call to ' + uri + ' failed. Reason is: ' + str(res.reason))
+ return result
+
+
+def prettify_current_user_res(current_user):
+ pretty_current_user = {
+ 'APIVersion': current_user['apiVersion'],
+ 'IsAdmin': 'True' if current_user['isAdmin'] == '1' else 'False',
+ 'SessionId': current_user['session'],
+ 'UserId': current_user['userId']
+ }
+ return pretty_current_user
+
+
+def prettify_list_users_res(users):
+    if not users:
+        return []
+    pretty_users = []
+
+ for user in users:
+ pretty_users.append({
+ 'FullName': user['fullName'],
+ 'UserId': user['idx'],
+ 'LoginId': user['loginId'],
+ 'UserType': user['userType']
+ })
+
+ return pretty_users
+
+
+def prettify_list_profiles_res(profiles):
+ pretty_profiles = []
+ for profile in profiles:
+ pretty_profiles.append({
+ 'Name': profile['name'],
+ 'AnalyzerProfileId': profile['vmProfileid'],
+ 'Description': profile['vmDesc'],
+ 'Sandbox': 'True' if profile['sandbox'] == 1 else 'False',
+ 'Internet': 'True' if profile['internet'] == 1 else 'False',
+ 'LocalBlackList': 'True' if profile['locBlackList'] == 1 else 'False'
+ })
+ return pretty_profiles
+
+
+def prettify_task_status_by_task_id(task_status):
+ pretty_task_status = {
+ 'taskId': task_status['taskid'],
+ 'jobId': task_status['jobid'],
+ 'status': task_status['status'],
+ 'filename': task_status['filename'],
+ 'MD5': task_status['md5'],
+ 'submitTime': task_status['submitTime']
+ }
+ return pretty_task_status
+
+
+def prettify_file_upload_res(file_upload_res):
+ pretty_file_upload = {
+ 'taskId': file_upload_res['results'][0]['taskId'],
+ 'jobId': file_upload_res['subId'],
+ 'messageId': file_upload_res['results'][0]['messageId'],
+ 'url': file_upload_res['results'][0]['url'],
+ 'srcIp': file_upload_res['results'][0]['srcIp'],
+ 'destIp': file_upload_res['results'][0]['destIp'],
+ 'MD5': file_upload_res['results'][0]['md5'],
+ 'SHA1': file_upload_res['results'][0]['sha1'],
+ 'SHA256': file_upload_res['results'][0]['sha256'],
+ }
+ return pretty_file_upload
+
+
+''' FUNCTIONS '''
+
+
+def test_get_session():
+ get_session()
+
+
+@logger
+def get_session():
+ result = http_request('php/session.php', 'get', LOGIN_HEADERS)
+ return result
+
+
+def get_session_command():
+ result = get_session()
+ result = result['results']
+ human_readable = tableToMarkdown('ATD Current User', prettify_current_user_res(result),
+ ['APIVersion', 'IsAdmin', 'SessionId', 'UserId'])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'ATD.Session(val.SessionId == obj.SessionId)': prettify_current_user_res(result)
+ }
+ })
+
+
+@logger
+def list_users(user_type):
+ user_type = user_type if user_type else 'STAND_ALONE'
+ result = http_request('php/briefUserList.php?userType=' + user_type, 'get', API_HEADERS)
+ users = result['results']
+ return users
+
+
+def list_users_command():
+ users = list_users(demisto.args()['userType'])
+
+ pretty_users = prettify_list_users_res(users)
+ human_readable = tableToMarkdown(
+ 'ATD User List',
+ pretty_users,
+ ['FullName', 'UserId', 'LoginId', 'UserType']
+ )
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': users,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'ATD.Users(val.UserId == obj.UserId)': pretty_users,
+ }
+ })
+
+
+@logger
+def list_profiles():
+ result = http_request('php/vmprofiles.php', 'get', API_HEADERS)
+ return result['results']
+
+
+def list_profiles_command():
+ result = list_profiles()
+
+ human_readable = tableToMarkdown(
+ 'ATD Analyzers Profile List', prettify_list_profiles_res(result),
+ ['Name', 'AnalyzerProfileId', 'Description',
+ 'Sandbox', 'Internet', 'LocalBlackList'])
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'ATD.ListAnalyzerProfiles(val.AnalyzerProfileId == obj.AnalyzerProfileId)':
+ prettify_list_profiles_res(result)
+ }
+ })
+
+
+@logger
+def check_task_status_by_task_id(task_ids):
+ result = {} # type: dict
+ multiple_results = []
+ tasks = []
+
+ for task_id in task_ids:
+ request_suffix = 'iTaskId=' + str(task_id)
+ result = http_request('php/samplestatus.php?' + request_suffix, 'get', API_HEADERS)
+
+ # when you use TaskID, you get results in res.results
+ tasks.append(prettify_task_status_by_task_id(result['results']))
+ multiple_results.append(result['results'])
+
+ status = result['results']['status'] # backward compatibility
+ return {
+ 'status': status,
+ 'tasks': tasks,
+ 'multipleResults': multiple_results
+ }
+
+
+@logger
+def check_task_status_by_job_id(job_ids):
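+    # Resolve each jobId to its taskId(s) first, then reuse the taskId status flow.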
+ task_ids = []
+ for job_id in job_ids:
+ result = http_request('php/getTaskIdList.php?jobId=' + job_id, 'get', API_HEADERS)
+ task_id = result['result']['taskIdList']
+ task_ids.append(task_id)
+ return check_task_status_by_task_id(task_ids)
+
+
+def check_task_status_command():
+ result = {} # type: dict
+ args = demisto.args()
+
+ if ('jobId' not in args and 'taskId' not in args) or ('jobId' in args and 'taskId' in args):
+ return_error('You must specify one (and only one) of the following: jobId, taskId.')
+
+ if 'jobId' in args:
+ ids = argToList(args['jobId'])
+ result = check_task_status_by_job_id(ids)
+
+ elif 'taskId' in args:
+ ids = argToList(args['taskId'])
+ result = check_task_status_by_task_id(ids)
+
+ human_readable = tableToMarkdown(
+ 'ATD Sandbox Task Status',
+ result['tasks'],
+ (result['tasks'][0]).keys()
+ )
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result['multipleResults'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'ATD.status': result['status'], # backward compatibility
+ 'ATD.Task(val.taskId == obj.taskId)': result['tasks']
+ }
+ })
+
+
+@logger
+def get_task_ids(job_ids):
+ results = []
+ for job_id in job_ids:
+ result = http_request('php/getTaskIdList.php?jobId=' + str(job_id), 'get', API_HEADERS)
+ results.append(result)
+ return results
+
+
+def get_task_ids_command():
+ job_ids = argToList(demisto.args()['jobId'])
+ results = get_task_ids(job_ids)
+
+ multiple_human_readable = []
+ entry_context = []
+ for i, result in enumerate(results):
+ multiple_human_readable.append({
+ 'taskId': result['result']['taskIdList'],
+ 'jobId': job_ids[i]
+ })
+ entry_context.append({
+ 'taskId': result['result']['taskIdList'],
+ 'jobId': job_ids[i]
+ })
+
+ human_readable = tableToMarkdown(
+ 'ATD TaskIds and JobIds List', multiple_human_readable, ['taskId', 'jobId'])
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': results,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'ATD.Task(val.jobId == obj.jobId)': entry_context
+ }
+ })
+
+
+@logger
+def file_upload_raw(body, file_entry_id, filename_to_upload):
+ uri = 'php/fileupload.php'
+ if not filename_to_upload: # first priority for the file name is user's argument
+ # second priority for the file name is the file name in the context
+ filename_dq = demisto.dt(
+ demisto.context(), 'File(val=val.EntryID=="' + file_entry_id + '")=val.Name')
+        if filename_dq and filename_dq[0]:
+            # demisto.dt may return a list of matches; take the first one
+            filename_to_upload = filename_dq[0] if isinstance(filename_dq, list) else filename_dq
+ else:
+ # last priority for the file name is demisto's entryID
+ filename_to_upload = file_entry_id
+
+ with open(demisto.getFilePath(file_entry_id)['path'], 'rb') as file_to_upload:
+ file_up = {'amas_filename': file_to_upload}
+ result = http_request(
+ uri,
+ 'post',
+ API_HEADERS,
+ body,
+ '',
+ files=file_up,
+ )
+
+ if not result['success']:
+ return_error('Failed to upload sample due to: ' + result['errorMessage'])
+ return result
+
+
+def url_upload_raw(body):
+ uri = 'php/fileupload.php'
+ res = http_request(
+ uri,
+ 'post',
+ API_HEADERS,
+ body
+ )
+
+ if not res['success']:
+ return_error('Failed to upload sample due to: ' + res['errorMessage'])
+ return res
+
+
+def file_upload(submit_type, sample, vm_profile_list,
+ skip_task_id=None, analyze_again=None, x_mode=None, message_id=None,
+ file_priority_q=None, src_ip=None, dest_ip=None, file_name=None):
+ body = {} # type: dict
+ body['data'] = {}
+ data = {} # type: dict
+ data['data'] = {}
+ # Add missing prefix to url
+ if submit_type != 0:
+ if not sample.startswith('http://') and not sample.startswith('https://'):
+ if sample.startswith('www.'):
+ sample = "http://" + sample
+ else:
+ sample = "http://www." + sample # disable-secrets-detection
+
+ data['data']['vmProfileList'] = vm_profile_list
+ data['data']['submitType'] = submit_type
+ data['data']['messageId'] = message_id
+ data['data']['srcIp'] = src_ip
+ data['data']['destIp'] = dest_ip
+ data['data']['url'] = '' if submit_type == 0 else sample
+ data['data']['skipTaskId'] = int(skip_task_id) if skip_task_id else None
+ data['data']['analyzeAgain'] = analyze_again
+ data['data']['xMode'] = x_mode
+ data['data']['filePriorityQ'] = file_priority_q if file_priority_q else 'run_now'
+
+ body['data'] = json.dumps(data)
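+    # As implemented here, the fileupload endpoint receives all submission
+    # parameters JSON-encoded under a single 'data' form field, alongside the
+    # file part (if any).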
+ file_entry_id = sample if submit_type == 0 else ''
+ filename_to_upload = file_name if (submit_type == 0 and file_name) else ''
+ if submit_type == 0:
+ result_obj = file_upload_raw(body, file_entry_id, filename_to_upload)
+ elif submit_type == 1:
+ result_obj = url_upload_raw(body)
+ return {
+ 'taskId': result_obj['results'][0]['taskId'],
+ 'resultObj': result_obj
+ }
+
+
+def file_upload_command():
+ args = demisto.args()
+
+ if ('entryID' in args and 'url' in args) or ('entryID' not in args and 'url' not in args):
+ return_error('You must submit one and only one of the following: url, entryID')
+ if ('entryID' in args and args['submitType'] != '0') or\
+ ('url' in args and args['submitType'] != '1'):
+ return_error(
+            'To detonate a file, submitType must be 0'
+            ' and an entryID of a file must be given.\n'
+            'To detonate a URL, submitType must be 1'
+            ' and a URL must be given.')
+
+ sample = args['entryID'] if 'entryID' in args else args['url']
+ vm_profile_list = int(args['vmProfileList']) if 'vmProfileList' in args else None
+ analyze_again = int(args['analyze_again']) if 'analyze_again' in args else None
+ skip_task_id = int(args['skip_task_id']) if 'skip_task_id' in args else None
+ x_mode = int(args['x_mode']) if 'x_mode' in args else None
+ message_id = args['messageId'] if 'messageId' in args else None
+ file_priority_q = args['file_priority_q'] if 'file_priority_q' in args else None
+ src_ip = args['src_ip'] if 'src_ip' in args else None
+ dest_ip = args['dest_ip'] if 'dest_ip' in args else None
+ file_name = args['file_name'] if 'file_name' in args else None
+
+ result = file_upload(int(args['submitType']), sample, vm_profile_list,
+ skip_task_id, analyze_again, x_mode, message_id, file_priority_q,
+ src_ip, dest_ip, file_name)
+ human_readable = tableToMarkdown(
+ 'ATD sandbox sample submission', prettify_file_upload_res(result['resultObj']),
+        ['taskId', 'jobId', 'messageId', 'url', 'destIp', 'srcIp', 'MD5', 'SHA1', 'SHA256'],
+ removeNull=True)
+
+ upload_file_output = {
+ 'ATD.Task(val.taskId == obj.taskId)': prettify_file_upload_res(result['resultObj']),
+ 'ATD.taskId': result['taskId'] # backward compatibility
+ }
+ if 'url' in args:
+ upload_file_output[outputPaths['url']] = sample
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result['resultObj'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': upload_file_output
+ })
+
+
+def build_report_context(report_summary, upload_data, status, threshold, task_id):
+ context = {} # type: dict
+ if report_summary and report_summary['Subject']:
+ subject = report_summary['Subject']
+ context = {
+ 'DBotScore': {
+ 'Vendor': 'McAfee Advanced Threat Defense',
+ 'Score': 0
+ }
+ }
+
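+        # DBotScore mapping used below: a severity above the threshold yields a
+        # score of 3 (malicious); at or below it yields 1 (good); 0 is the
+        # unknown default set above.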
+ if 'FileType' in subject:
+ context['DBotScore']['Indicator'] = subject['md5']
+ context['DBotScore']['Type'] = 'hash'
+ # default threshold for McAfee ATD is 3
+            if int(report_summary['Verdict']['Severity']) > int(threshold):
+ context['DBotScore']['Score'] = 3
+ if subject['Type'] == 'application/url':
+ context['URL(val.Name == obj.Data)'] = {
+ 'Type': subject['Type'],
+ 'MD5': subject['md5'],
+ 'SHA1': subject['sha-1'],
+ 'SHA256': subject['sha-256'],
+ 'Size': subject['size'],
+ 'Name': subject['Name'],
+ 'Malicious': {
+ 'Vendor': 'McAfee Advanced Threat Defense',
+                        'Description': 'Severity: ' + str(report_summary['Verdict']['Severity'])
+ }
+ }
+ else:
+ context['File(val.MD5 == obj.MD5)'] = {
+ 'Type': subject['Type'],
+ 'MD5': subject['md5'],
+ 'SHA1': subject['sha-1'],
+ 'SHA256': subject['sha-256'],
+ 'Size': subject['size'],
+ 'Name': subject['Name'],
+ 'Malicious': {
+ 'Vendor': 'McAfee Advanced Threat Defense',
+                        'Description': 'Severity: ' + str(report_summary['Verdict']['Severity'])
+ }
+ }
+ else:
+ context['DBotScore']['Score'] = 1
+
+ else: # detonation did not return any data
+ # retrieve submission url by the task ID, if exist
+ submission_dt = demisto.dt(
+ demisto.context(), 'ATD.Task(val.taskId === "{}")'.format(task_id))
+ if isinstance(submission_dt, list):
+ submission = submission_dt[0]
+ else:
+ submission = submission_dt
+ if isinstance(submission, dict):
+ if submission.get('url') and len(str(submission.get('url'))) > 0:
+ context['DBotScore']['Type'] = 'application/url'
+ context['DBotScore']['Indicator'] = submission.get('url')
+ else: # if does not exist, submission is a file
+ if submission.get('SHA256') and len(str(submission.get('SHA256'))) > 0:
+ context['DBotScore']['Indicator'] = submission.get('SHA256')
+ context['DBotScore']['Type'] = 'hash'
+ elif submission.get('SHA1') and len(str(submission.get('SHA1'))) > 0:
+ context['DBotScore']['Indicator'] = submission.get('SHA1')
+ context['DBotScore']['Type'] = 'hash'
+
+ context['IP'] = {}
+ if 'Ips' in report_summary:
+ ip_addresses = []
+        for ip_entry in report_summary['Ips']:
+            ip_addresses.append(ip_entry['Ipv4'])
+ context['IP']['Address'] = ip_addresses
+
+ if upload_data:
+ context['ATD'] = {}
+ context['ATD']['Task(val.taskId == obj.taskId)'] = {
+ 'status': status,
+ 'taskId': upload_data['taskId'],
+ 'jobId': upload_data['subId'] if 'subId' in upload_data else None,
+ 'messageId': upload_data['messageId'],
+ 'url': upload_data['url'],
+ 'srcIp': upload_data['srcIp'],
+ 'destIp': upload_data['destIp'],
+ 'MD5': upload_data['md5'],
+ 'SHA1': upload_data['sha1'],
+ 'SHA256': upload_data['sha256'],
+ 'Report': {
+                'Attachments': report_summary['Attachments'] if 'Attachments' in report_summary else None,
+ 'Environment': report_summary['Environment'] if 'Environment' in report_summary else None,
+ 'Ips': report_summary['Ips'] if 'Ips' in report_summary else None,
+ 'Verdict': report_summary['Verdict'] if 'Verdict' in report_summary else None,
+ 'Data': report_summary['Data'] if 'Data' in report_summary else None,
+ 'Selectors': report_summary['Selectors'] if 'Selectors' in report_summary else None
+ }
+ }
+ return context
+
+
+@logger
+def get_report(uri_suffix, task_id, report_type, upload_data, status, threshold):
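+    # The JSON report is always fetched first; its Summary section feeds the
+    # entry context regardless of the report type ultimately returned.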
+ json_res = http_request('php/showreport.php?' + uri_suffix + '&iType=json', 'get', API_HEADERS)
+ if not json_res:
+ return_error(
+ 'You cannot download this report because you do not have the same permissions'
+ ' as the user that uploaded the submission to McAfee ATD.\n'
+ 'Make sure you have the same permissions as the user that uploaded the submissions.'
+ ' Admin users have full permissions.')
+ json_res = json.loads(json_res)
+ summary = json_res['Summary']
+ summary['VerdictDescription'] = summary['Verdict']['Description']
+ summary['VerdictSeverity'] = summary['Verdict']['Severity']
+ entry_context = build_report_context(summary, upload_data, status, threshold, task_id)
+ json_res_string = json.dumps(json_res)
+ if report_type == 'json':
+ human_readable = tableToMarkdown(
+ 'McAfee ATD Sandbox Report', summary, summary.keys(), None, removeNull=True)
+ return {
+ 'content': json_res_string,
+ 'md': human_readable,
+ 'ec': entry_context
+ }
+
+ result = http_request(
+ 'php/showreport.php?' + uri_suffix + '&iType=' + report_type, 'get', API_HEADERS)
+
+ if report_type == 'pdf' or report_type == 'zip':
+ filename = str(task_id) + '.' + report_type
+ return {
+ 'content': result,
+ 'filename': filename,
+ 'ec': entry_context
+ }
+
+ if report_type == 'sample':
+ return {
+ 'content': result,
+            'filename': str(task_id) + '.zip',
+ 'ec': entry_context
+ }
+ return result
+
+
+def get_report_command():
+ uri_suffix = job_or_task_id()
+ args = demisto.args()
+ report_type = args['type'] if 'type' in args else 'pdf'
+ threshold = args['threshold']
+
+ filename = args['jobId'] if 'jobId' in args else args['taskId']
+
+ return_report(uri_suffix, filename, report_type, '', '', threshold)
+
+
+def job_or_task_id():
+ args = demisto.args()
+ if ('jobId' not in args and 'taskId' not in args) or ('jobId' in args and 'taskId' in args):
+ return_error('You must specify one (and only one) of the following: jobId, taskId.')
+
+ if 'jobId' in args:
+ uri_suffix = 'jobId=' + str(args['jobId'])
+ else:
+ uri_suffix = 'iTaskId=' + str(args['taskId'])
+
+ return uri_suffix
+
+
+def detonate(submit_type, sample, timeout, report_type, threshold, file_name):
+    result = file_upload(submit_type, sample, None, file_name=file_name)
+ task_id = result['taskId']
+ upload_data = result['resultObj']['results'][0]
+
+ timeout = int(timeout)
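+    # Poll once per second until the sandbox reports 'Completed' or the
+    # user-supplied timeout (in seconds) elapses.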
+ while timeout > 0:
+ status = str(check_task_status_by_task_id([task_id])['status'])
+ if status == 'Completed':
+ uri_suffix = 'iTaskId=' + str(task_id)
+ return_report(uri_suffix, task_id, report_type, upload_data, status, threshold)
+ sys.exit(0)
+ time.sleep(1)
+ timeout -= 1
+
+ return_error("Timeout due to no answer after " + demisto.args()['timeout']
+ + "seconds. Check the status using '!atd-check-status' in a while"
+ " and if 'completed' execute '!atd-get-report'.")
+
+
+def return_report(uri_suffix, task_id, report_type, upload_data, status, threshold):
+ current_status = check_task_status_by_task_id([task_id])['status']
+ if current_status != 'Completed':
+        demisto.results(
+            'The sample is still being analyzed. Please wait before downloading the report.')
+ else:
+ res = get_report(uri_suffix, task_id, report_type, upload_data, status, threshold)
+
+ if report_type == 'json':
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': res['content'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': res['md'],
+ 'EntryContext': res['ec']
+ })
+
+ elif report_type == 'pdf' or report_type == 'zip':
+ file_type = entryTypes['entryInfoFile']
+ result = fileResult(res['filename'], res['content'],
+ file_type) # will be saved under 'InfoFile' in the context.
+ result['EntryContext'] = res['ec']
+ demisto.results(result)
+
+ elif report_type == 'sample':
+ # used to retrieve a sample from McAfee ATD to demisto
+ file_type = entryTypes['file']
+ # will be saved under 'File' in the context, can be farther investigated.
+ result = fileResult(res['filename'], res['content'], file_type)
+ demisto.results(result)
+
+ else:
+ demisto.results(res)
+
+
+@logger
+def logout():
+    http_request('php/session.php', 'delete', API_HEADERS)
+
+
+''' EXECUTION '''
+
+
+def main():
+ LOG('command is %s' % (demisto.command(),))
+ global API_HEADERS
+ API_HEADERS = get_headers()
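+    # A session is established once per command execution and is always closed
+    # in the finally block below via logout().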
+
+ try:
+ # Remove proxy if not set to true in params
+ handle_proxy()
+
+ if demisto.command() == 'test-module':
+ test_get_session()
+ demisto.results('ok')
+
+ elif demisto.command() == 'atd-login':
+ get_session_command()
+
+ elif demisto.command() == 'atd-list-analyzer-profiles':
+ list_profiles_command()
+
+ elif demisto.command() == 'atd-list-user':
+ list_users_command()
+
+ elif demisto.command() == 'atd-check-status':
+ check_task_status_command()
+
+ elif demisto.command() == 'atd-get-task-ids':
+ get_task_ids_command()
+
+ elif demisto.command() == 'atd-file-upload':
+ file_upload_command()
+
+ elif demisto.command() == 'atd-get-report':
+ get_report_command()
+
+ # deprecated, please use 'ATD - Detonate File' playbook
+ elif demisto.command() == 'detonate-file':
+ detonate(
+ 0, demisto.args().get('upload'), demisto.args().get('timeout'),
+ demisto.args().get('format'), demisto.args().get('threshold'),
+ demisto.args().get('fileName'))
+ # submit type for regular file is 0
+
+ # deprecated, please use 'Detonate URL - McAfee ATD_python' playbook
+ elif demisto.command() == 'detonate-url':
+ detonate(
+ 1, demisto.args().get('url'), demisto.args().get('timeout'),
+ demisto.args().get('format'), demisto.args().get('threshold'),
+ demisto.args().get('fileName'))
+ # submit type for url submission is 1
+
+ # elif demisto.command() == 'detonate-file-remote':
+ # return detonate(3, args.url, args.timeout, args.format, args.threshold);
+ # submit type for url-download is 3
+
+ except Exception, ex:
+ return_error(ex)
+
+ finally:
+ LOG.print_log()
+ logout()
+
+
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense.yml b/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense.yml
new file mode 100644
index 000000000000..dec77447183d
--- /dev/null
+++ b/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense.yml
@@ -0,0 +1,683 @@
+category: Forensics & Malware Analysis
+commonfields:
+ id: McAfee Advanced Threat Defense
+ version: -1
+configuration:
+- display: Server URL (e.g. https://192.168.0.1)
+ name: baseUrl
+ required: true
+ type: 0
+- display: Username
+ name: username
+ required: true
+ type: 0
+- display: Password
+ name: password
+ required: true
+ type: 4
+- defaultvalue: 'true'
+ display: Trust any certificate (unsecure)
+ name: unsecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: 'Integrated advanced threat detection: Enhancing protection from network
+ edge to endpoint'
+display: McAfee Advanced Threat Defense
+name: McAfee Advanced Threat Defense
+script:
+ commands:
+ - arguments:
+ - default: false
+      description: Analyzer profile ID. The profile ID number can be found on the UI Policy/Analyzer Profile page, or by running the atd-list-analyzer-profiles command (under the vmProfileid key in the result).
+ isArray: false
+ name: vmProfileList
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: This parameter accepts four values, '0', '1', '2' and '3'. • 0 - Regular file upload • 1 - URL submission (the URL link is processed inside the analyzer VM) • 2 - Submit file with URL • 3 - URL download (the file is first downloaded from the URL and then analyzed)
+ isArray: false
+ name: submitType
+ predefined:
+ - '0'
+ - '1'
+ - '2'
+ - '3'
+ required: true
+ secret: false
+ - default: false
+ description: Any valid web URL.
+ isArray: false
+ name: url
+ required: false
+ secret: false
+ - default: false
+ description: Maximum 128-character string.
+ isArray: false
+ name: messageId
+ required: false
+ secret: false
+ - default: false
+ description: ' IPv4 address of the source system or gateway from where the file
+ is downloaded.'
+ isArray: false
+ name: srcIp
+ required: false
+ secret: false
+ - default: false
+ description: ' IPv4 address of the target endpoint.'
+ isArray: false
+ name: dstIp
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Value '0' returns the corresponding taskId in the API response. Value '1' returns -1 as the taskId in the API response.
+ isArray: false
+ name: skipTaskId
+ predefined:
+ - '0'
+ - '1'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Value '0' skips sample analysis if the sample was previously analyzed. Value '1' analyzes the sample even if it was previously analyzed.
+ isArray: false
+ name: analyzeAgain
+ predefined:
+ - '0'
+ - '1'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Value '0' indicates no user interaction is needed during sample
+ analysis. Value '1' indicates user interaction is needed during sample analysis.
+ isArray: false
+ name: xMode
+ predefined:
+ - '0'
+ - '1'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: 'This parameter indicates the priority of sample analysis: run_now assigns the highest priority (the sample is analyzed right away), while add_to_q puts the sample in a waiting state if there is a queue of waiting samples. The default is run_now.'
+ isArray: false
+ name: filePriorityQ
+ predefined:
+ - run_now
+ - add_to_q
+ required: false
+ secret: false
+ - default: false
+      description: Entry ID of the file to upload.
+ isArray: false
+ name: entryID
+ required: false
+ secret: false
+ - default: false
+ description: The name of the file
+ isArray: false
+ name: fileName
+ required: false
+ secret: false
+ deprecated: false
+    description: Uploads a file/web URL for dynamic analysis using the provided analyzer profile. Only a single file/web URL can be submitted at a time.
+ execution: false
+ name: atd-file-upload
+ outputs:
+ - contextPath: ATD.Task.taskId
+ description: The task ID of the sample uploaded
+ type: string
+ - contextPath: ATD.Task.jobId
+ description: The job ID of the sample uploaded
+ type: number
+ - contextPath: ATD.Task.messageId
+ description: The message Id relevant to the sample uploaded
+ type: string
+ - contextPath: ATD.Task.url
+ description: The URL detonated
+ type: string
+ - contextPath: ATD.Task.srcIp
+ description: Source IPv4 address
+ type: string
+ - contextPath: ATD.Task.destIp
+ description: Destination IPv4 address
+ type: string
+ - contextPath: ATD.Task.MD5
+ description: MD5 of the sample uploaded
+ type: string
+ - contextPath: ATD.Task.SHA1
+ description: SHA1 of the sample uploaded
+ type: string
+ - contextPath: ATD.Task.SHA256
+ description: SHA256 of the sample uploaded
+ type: string
+ - contextPath: ATD.taskId
+ description: The task ID of the sample uploaded
+ type: string
+ - contextPath: URL.Data
+      description: For a URL upload, the submitted URL.
+ type: string
+ - arguments:
+ - default: false
+ description: Serves as an identifier for the previously submitted file.
+ isArray: true
+ name: jobId
+ required: true
+ secret: false
+ deprecated: false
+    description: Fetches the list of task IDs associated with a job ID.
+ execution: false
+ name: atd-get-task-ids
+ outputs:
+ - contextPath: ATD.Task.taskId
+ description: The corresponding taskId of the jobId sent
+ type: string
+ - contextPath: ATD.Task.jobId
+ description: The jobId sent
+ type: number
+ - arguments:
+ - default: false
+      description: The TaskId value returned in the File/URL submission step.
+ isArray: false
+ name: taskId
+ required: false
+ secret: false
+ - default: false
+ description: Job id
+ isArray: false
+ name: jobId
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: pdf
+      description: 'Type can be one of the following: • html - HTML report • txt - Text report • xml - XML report • zip - All files packaged in a single zip file • json - Same as xml but in JSON format • ioc - Indicators of Compromise format • stix - Structured Threat Information eXpression (STIX generation is disabled by default; use set stixreportstatus enable to enable it) • pdf - Portable Document Format • sample - Download the sample from McAfee Advanced Threat Defense'
+ isArray: false
+ name: type
+ predefined:
+ - html
+ - txt
+ - xml
+ - zip
+ - json
+ - ioc
+ - stix
+ - pdf
+ - sample
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: '3'
+      description: If the ATD severity is greater than the threshold, the sample is considered malicious.
+ isArray: false
+ name: threshold
+ predefined:
+ - '1'
+ - '2'
+ - '3'
+ - '4'
+ - '5'
+ required: false
+ secret: false
+ deprecated: false
+    description: Downloads the analysis report files.
+ execution: false
+ name: atd-get-report
+ outputs:
+ - contextPath: File.Name
+ description: Filename (only in case of report type=json)
+ type: string
+ - contextPath: File.Type
+ description: File type e.g. "PE" (only in case of report type=json)
+ type: string
+ - contextPath: File.Size
+ description: File size (only in case of report type=json)
+ type: number
+ - contextPath: File.MD5
+ description: MD5 hash of the file (only in case of report type=json)
+ type: string
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file (only in case of report type=json)
+ type: string
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file (only in case of report type=json)
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the reason for the vendor to make the decision
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator we tested (only in case of report type=json)
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type of the indicator (only in case of report type=json)
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score (only in case of report type=json)
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score (only in case of report type=json)
+ type: number
+ - contextPath: File.EntryID
+ description: The Entry ID of the sample
+ type: string
+ - contextPath: IP.Address
+      description: IPs relevant to the sample
+ type: string
+ - contextPath: InfoFile.EntryID
+ description: The EntryID of the report file
+ type: string
+ - contextPath: InfoFile.Extension
+ description: The extension of the report file
+ type: string
+ - contextPath: InfoFile.Name
+      description: The name of the report file, which you can use later, for example, to send the report in an email as part of a playbook task. The report name is the taskId, for example, 7173.pdf.
+ type: string
+ - contextPath: InfoFile.Info
+ description: The info of the report file
+ type: string
+ - contextPath: InfoFile.Size
+ description: The size of the report file
+ type: number
+ - contextPath: InfoFile.Type
+ description: The type of the report file
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious urls, the reason for the vendor to make the decision
+ type: string
+ - deprecated: false
+    description: Displays the analyzer profiles. Only the analyzer profiles to which the user has access are displayed.
+ execution: false
+ name: atd-list-analyzer-profiles
+ outputs:
+ - contextPath: ATD.ListAnalyzerProfiles.Name
+ description: The Analyzer's profile name
+ type: string
+ - contextPath: ATD.ListAnalyzerProfiles.AnalyzerProfileId
+ description: The Analyzer's profile id
+ type: number
+ - contextPath: ATD.ListAnalyzerProfiles.Description
+ description: The Analyzer's profile description
+ type: string
+ - contextPath: ATD.ListAnalyzerProfiles.Sandbox
+ description: If the Analyzer's profile has access to the sandbox
+ type: boolean
+ - contextPath: ATD.ListAnalyzerProfiles.Internet
+ description: If the Analyzer's profile has access to the internet
+ type: boolean
+ - contextPath: ATD.ListAnalyzerProfiles.LocalBlackList
+ description: If the Analyzer's profile has access to the local black list
+ type: boolean
+ - arguments:
+ - auto: PREDEFINED
+ default: true
+ defaultValue: STAND_ALONE
+      description: The user type associated with a user profile, for example NSP, MWG, or STAND_ALONE (default).
+ isArray: true
+ name: userType
+ predefined:
+ - STAND_ALONE
+ - MWG
+ - NSP
+ required: false
+ secret: false
+ deprecated: false
+    description: Displays the user profile information present in McAfee Advanced Threat Defense.
+ execution: false
+ name: atd-list-user
+ outputs:
+    - contextPath: ATD.Users.FullName
+      description: The user's full name
+      type: string
+    - contextPath: ATD.Users.UserId
+      description: The user's ID
+      type: number
+    - contextPath: ATD.Users.LoginId
+      description: The user's login ID
+      type: string
+ - contextPath: ATD.Users.UserType
+ description: The user type
+ type: string
+ - deprecated: false
+ description: Returns the current session details
+ execution: false
+ name: atd-login
+ outputs:
+ - contextPath: ATD.Session.APIVersion
+ description: The API version used in the session
+ type: string
+ - contextPath: ATD.Session.IsAdmin
+ description: If the current user is admin
+ type: boolean
+ - contextPath: ATD.Session.SessionId
+ description: The session id
+ type: string
+ - contextPath: ATD.Session.UserId
+      description: The UserId of the user logged in to the session
+ type: number
+ - arguments:
+ - default: false
+ description: ID of the entry containing the file to detonate
+ isArray: false
+ name: upload
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '210'
+ description: Total wait time (in seconds)
+ isArray: false
+ name: timeout
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: json
+      description: Optional - the requested report format.
+ isArray: false
+ name: format
+ predefined:
+ - html
+ - txt
+ - xml
+ - zip
+ - json
+ - ioc
+ - stix
+ - pdf
+ - sample
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: '3'
+      description: If the ATD severity is greater than the threshold, the sample is considered malicious.
+ isArray: false
+ name: threshold
+ predefined:
+ - '1'
+ - '2'
+ - '3'
+ - '4'
+ - '5'
+ required: false
+ secret: false
+ - default: false
+ description: The name of the file
+ isArray: false
+ name: fileName
+ required: false
+ secret: false
+ deprecated: false
+ description: Deprecated, use detonate playbook instead.
+ execution: false
+ name: detonate-file
+ outputs:
+ - contextPath: InfoFile.Name
+ description: Filename (only in case of report type=json)
+ type: string
+ - contextPath: InfoFile.Type
+ description: File type e.g. "PE" (only in case of report type=json)
+ type: string
+ - contextPath: InfoFile.Size
+ description: File size (only in case of report type=json)
+ type: number
+ - contextPath: InfoFile.MD5
+ description: MD5 hash of the file (only in case of report type=json)
+ type: string
+ - contextPath: InfoFile.SHA1
+ description: SHA1 hash of the file (only in case of report type=json)
+ type: string
+ - contextPath: InfoFile.SHA256
+ description: SHA256 hash of the file (only in case of report type=json)
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator we tested (only in case of report type=json)
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type of the indicator (only in case of report type=json)
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score (only in case of report type=json)
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score (only in case of report type=json)
+ type: number
+ - contextPath: ATD.Task.taskId
+      description: The TaskId value returned in the File/URL submission step.
+      type: number
+    - contextPath: ATD.Task.jobId
+      description: The JobId value returned in the File/URL submission step.
+ type: number
+ - contextPath: ATD.Task.status
+ description: The task ID status (Completed or Analyzing)
+ type: string
+ - contextPath: ATD.Task.messageId
+ description: The message Id relevant to the sample uploaded
+ type: string
+ - contextPath: ATD.Task.url
+ description: The URL detonated
+ type: string
+ - contextPath: ATD.Task.srcIp
+ description: Source IPv4 address
+ type: string
+ - contextPath: ATD.Task.destIp
+ description: Destination IPv4 address
+ type: string
+ - contextPath: ATD.Task.MD5
+ description: MD5 of the sample uploaded
+ type: string
+ - contextPath: ATD.Task.SHA256
+ description: SHA256 of the sample uploaded
+ type: string
+ - contextPath: ATD.Task.SHA1
+ description: SHA1 of the sample uploaded
+ type: string
+ - contextPath: IP.Address
+      description: IPs relevant to the sample
+ type: string
+ - arguments:
+ - default: false
+ description: URL to detonate
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '200'
+ description: Total wait time (in seconds)
+ isArray: false
+ name: timeout
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: json
+      description: Optional - the requested report format.
+ isArray: false
+ name: format
+ predefined:
+ - html
+ - txt
+ - xml
+ - zip
+ - json
+ - ioc
+ - stix
+ - pdf
+ - sample
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: '3'
+      description: If the ATD severity is greater than the threshold, the sample is considered malicious.
+ isArray: false
+ name: threshold
+ predefined:
+ - '1'
+ - '2'
+ - '3'
+ - '4'
+ - '5'
+ required: false
+ secret: false
+ deprecated: false
+ description: Deprecated, use detonate playbook instead.
+ execution: false
+ name: detonate-url
+ outputs:
+ - contextPath: InfoFile.Name
+ description: Filename (only in case of report type=json)
+ type: string
+ - contextPath: InfoFile.Type
+ description: File type e.g. "PE" (only in case of report type=json)
+ type: string
+ - contextPath: InfoFile.Size
+ description: File size (only in case of report type=json)
+ type: number
+ - contextPath: InfoFile.MD5
+ description: MD5 hash of the file (only in case of report type=json)
+ type: string
+ - contextPath: InfoFile.SHA1
+ description: SHA1 hash of the file (only in case of report type=json)
+ type: string
+ - contextPath: InfoFile.SHA256
+ description: SHA256 hash of the file (only in case of report type=json)
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator we tested (only in case of report type=json)
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type of the indicator (only in case of report type=json)
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score (only in case of report type=json)
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score (only in case of report type=json)
+ type: number
+ - contextPath: ATD.Task.taskId
+      description: The TaskId value returned in the File/URL submission step.
+      type: number
+    - contextPath: ATD.Task.jobId
+      description: The JobId value returned in the File/URL submission step.
+ type: number
+ - contextPath: ATD.Task.status
+ description: The task ID status (Completed or Analyzing)
+ type: string
+ - contextPath: ATD.Task.messageId
+ description: The message Id relevant to the sample uploaded
+ type: string
+ - contextPath: ATD.Task.url
+ description: The URL detonated
+ type: string
+ - contextPath: ATD.Task.srcIp
+ description: Source IPv4 address
+ type: string
+ - contextPath: ATD.Task.destIp
+ description: Destination IPv4 address
+ type: string
+ - contextPath: ATD.Task.MD5
+ description: MD5 of the sample uploaded
+ type: string
+ - contextPath: ATD.Task.SHA256
+ description: SHA256 of the sample uploaded
+ type: string
+ - contextPath: ATD.Task.SHA1
+ description: SHA1 of the sample uploaded
+ type: string
+ - contextPath: IP.Address
+      description: IPs relevant to the sample
+ type: string
+ - arguments:
+ - default: false
+      description: The TaskId value returned in the File/URL submission step.
+ isArray: true
+ name: taskId
+ required: false
+ secret: false
+ - default: false
+ description: Job Id
+ isArray: true
+ name: jobId
+ required: false
+ secret: false
+ deprecated: false
+ description: Checks the analysis status of up to 100 jobIDs/taskIDs
+ execution: false
+ name: atd-check-status
+ outputs:
+ - contextPath: ATD.status
+ description: The task ID status (Completed or Analyzing)
+ type: string
+ - contextPath: ATD.Task.taskId
+      description: The TaskId value returned in the File/URL submission step.
+      type: string
+    - contextPath: ATD.Task.jobId
+      description: The JobId value returned in the File/URL submission step.
+ type: number
+ - contextPath: ATD.Task.status
+ description: The task ID status (Completed or Analyzing)
+ type: string
+ - contextPath: ATD.Task.filename
+ description: The name of the uploaded sample
+ type: string
+ - contextPath: ATD.Task.MD5
+ description: The MD5 of the sample
+ type: string
+ - contextPath: ATD.Task.submitTime
+ description: Submission time of the sample
+ type: string
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- Test Playbook McAfee ATD
+fromversion: 3.5.0
diff --git a/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense_description.md b/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense_description.md
new file mode 100644
index 000000000000..a32d22861723
--- /dev/null
+++ b/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense_description.md
@@ -0,0 +1,3 @@
+Users must have the "Allow Multiple Logins" capability.
+Go to Manage -> ATD Configuration -> ATD Users -> User Configuration
+Enable the "Allow Multiple Logins" checkbox for the relevant user.
\ No newline at end of file
diff --git a/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense_image.png b/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense_image.png
new file mode 100644
index 000000000000..5bf83f1bfbec
Binary files /dev/null and b/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense_image.png differ
diff --git a/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense_test.py b/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense_test.py
new file mode 100644
index 000000000000..57e63ee7b9c8
--- /dev/null
+++ b/Integrations/McAfee_Advanced_Threat_Defense/McAfee_Advanced_Threat_Defense_test.py
@@ -0,0 +1,32 @@
+import pytest
+
+import demistomock as demisto
+
+integration_params = {
+ 'baseUrl': 'demi.demi.com',
+ 'username': 'bark',
+ 'password': 'my_password'
+}
+
+
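+# The integration module reads demisto.params() at import time (USERNAME,
+# PASSWORD, BASE_URL globals), so params must be mocked before each test
+# imports it.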
+@pytest.fixture(autouse=True)
+def set_params(mocker):
+ mocker.patch.object(demisto, 'params', return_value=integration_params)
+
+
+def test_prettify_current_user_res():
+ from McAfee_Advanced_Threat_Defense import prettify_current_user_res
+    expected_user_dict = {
+        'APIVersion': "1.0", 'IsAdmin': "True", 'SessionId': "42", 'UserId': 101}
+ prettify_user_res = prettify_current_user_res(
+ {'apiVersion': "1.0", 'isAdmin': "1", 'session': "42", 'userId': 101})
+ assert expected_user_dict == prettify_user_res
+
+
+def test_prettify_task_status_by_taskId_res():
+ from McAfee_Advanced_Threat_Defense import prettify_task_status_by_task_id
+    expected_task_status = {
+        'taskId': "41", 'jobId': "42", 'status': "finished", 'filename': "my_name", 'MD5': "my_md5", 'submitTime': "010101"}
+    prettify_task_status_res = prettify_task_status_by_task_id(
+        {'taskid': "41", 'jobid': "42", 'status': "finished", 'filename': "my_name", 'md5': "my_md5", 'submitTime': "010101"})
+    assert expected_task_status == prettify_task_status_res
diff --git a/Integrations/McAfee_Advanced_Threat_Defense/Pipfile b/Integrations/McAfee_Advanced_Threat_Defense/Pipfile
new file mode 100644
index 000000000000..5d33264615f8
--- /dev/null
+++ b/Integrations/McAfee_Advanced_Threat_Defense/Pipfile
@@ -0,0 +1,16 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+flake8 = "*"
+autopep8 = "*"
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/McAfee_Advanced_Threat_Defense/Pipfile.lock b/Integrations/McAfee_Advanced_Threat_Defense/Pipfile.lock
new file mode 100644
index 000000000000..27fb5db65dfd
--- /dev/null
+++ b/Integrations/McAfee_Advanced_Threat_Defense/Pipfile.lock
@@ -0,0 +1,387 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "044778e20682923f75aa6dcac76202d8c6b19a4e0bf72738fa9e081affe78db6"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "arrow": {
+ "hashes": [
+ "sha256:3397e5448952e18e1295bf047014659effa5ae8da6a5371d37ff0ddc46fa6872",
+ "sha256:6f54d9f016c0b7811fac9fb8c2c7fa7421d80c54dbdd75ffb12913c55db60b8a"
+ ],
+ "version": "==0.13.1"
+ },
+ "asn1crypto": {
+ "hashes": [
+ "sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87",
+ "sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
+ ],
+ "version": "==0.24.0"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5",
+ "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"
+ ],
+ "version": "==2019.3.9"
+ },
+ "cffi": {
+ "hashes": [
+ "sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774",
+ "sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d",
+ "sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90",
+ "sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b",
+ "sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63",
+ "sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45",
+ "sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25",
+ "sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3",
+ "sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b",
+ "sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647",
+ "sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016",
+ "sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4",
+ "sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb",
+ "sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753",
+ "sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7",
+ "sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9",
+ "sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f",
+ "sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8",
+ "sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f",
+ "sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc",
+ "sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42",
+ "sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3",
+ "sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909",
+ "sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45",
+ "sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d",
+ "sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512",
+ "sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff",
+ "sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201"
+ ],
+ "version": "==1.12.3"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "crypto": {
+ "hashes": [
+ "sha256:8f2ee9756a0265c18845ac097ae447c75cfbde158abe1361b7491619f866a9bd",
+ "sha256:985120aa86f71545388199f96a2a0e00f7ccfe5ecd14c56355eb399e1a63d164"
+ ],
+ "index": "pypi",
+ "version": "==1.4.1"
+ },
+ "cryptography": {
+ "hashes": [
+ "sha256:066f815f1fe46020877c5983a7e747ae140f517f1b09030ec098503575265ce1",
+ "sha256:210210d9df0afba9e000636e97810117dc55b7157c903a55716bb73e3ae07705",
+ "sha256:26c821cbeb683facb966045e2064303029d572a87ee69ca5a1bf54bf55f93ca6",
+ "sha256:2afb83308dc5c5255149ff7d3fb9964f7c9ee3d59b603ec18ccf5b0a8852e2b1",
+ "sha256:2db34e5c45988f36f7a08a7ab2b69638994a8923853dec2d4af121f689c66dc8",
+ "sha256:409c4653e0f719fa78febcb71ac417076ae5e20160aec7270c91d009837b9151",
+ "sha256:45a4f4cf4f4e6a55c8128f8b76b4c057027b27d4c67e3fe157fa02f27e37830d",
+ "sha256:48eab46ef38faf1031e58dfcc9c3e71756a1108f4c9c966150b605d4a1a7f659",
+ "sha256:6b9e0ae298ab20d371fc26e2129fd683cfc0cfde4d157c6341722de645146537",
+ "sha256:6c4778afe50f413707f604828c1ad1ff81fadf6c110cb669579dea7e2e98a75e",
+ "sha256:8c33fb99025d353c9520141f8bc989c2134a1f76bac6369cea060812f5b5c2bb",
+ "sha256:9873a1760a274b620a135054b756f9f218fa61ca030e42df31b409f0fb738b6c",
+ "sha256:9b069768c627f3f5623b1cbd3248c5e7e92aec62f4c98827059eed7053138cc9",
+ "sha256:9e4ce27a507e4886efbd3c32d120db5089b906979a4debf1d5939ec01b9dd6c5",
+ "sha256:acb424eaca214cb08735f1a744eceb97d014de6530c1ea23beb86d9c6f13c2ad",
+ "sha256:c8181c7d77388fe26ab8418bb088b1a1ef5fde058c6926790c8a0a3d94075a4a",
+ "sha256:d4afbb0840f489b60f5a580a41a1b9c3622e08ecb5eec8614d4fb4cd914c4460",
+ "sha256:d9ed28030797c00f4bc43c86bf819266c76a5ea61d006cd4078a93ebf7da6bfd",
+ "sha256:e603aa7bb52e4e8ed4119a58a03b60323918467ef209e6ff9db3ac382e5cf2c6"
+ ],
+ "index": "pypi",
+ "version": "==2.6.1"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "jmespath": {
+ "hashes": [
+ "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6",
+ "sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c"
+ ],
+ "version": "==0.9.4"
+ },
+ "naked": {
+ "hashes": [
+ "sha256:12b76b8a14595d07039422f1d2219ca8fbef8b237f9cdf5d8e947c03e148677e",
+ "sha256:19de9961f4edb29e75cf837e8e031d6b52fbba4f0033515893d26f69c74b3b1f"
+ ],
+ "version": "==0.1.31"
+ },
+ "pancloud": {
+ "hashes": [
+ "sha256:374ca770405f9bfda69489ad9cd1ef3d716287f584771566cf9ff6d22f189a4e"
+ ],
+ "index": "pypi",
+ "version": "==1.5.1"
+ },
+ "pycparser": {
+ "hashes": [
+ "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
+ ],
+ "version": "==2.19"
+ },
+ "python-dateutil": {
+ "hashes": [
+ "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb",
+ "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"
+ ],
+ "version": "==2.8.0"
+ },
+ "pyyaml": {
+ "hashes": [
+ "sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c",
+ "sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95",
+ "sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2",
+ "sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4",
+ "sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad",
+ "sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba",
+ "sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1",
+ "sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e",
+ "sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673",
+ "sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13",
+ "sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19"
+ ],
+ "version": "==5.1"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
+ "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
+ ],
+ "index": "pypi",
+ "version": "==2.21.0"
+ },
+ "shellescape": {
+ "hashes": [
+ "sha256:3ff2aeb6ce2c5a4e6059fe4a2a745a824f5a3834fe8365a39c5ea691073cfdb6",
+ "sha256:e618b2bc13f2553315ca1669995dc10fcc2cae5f1e0fda49035ef02d56f0b358"
+ ],
+ "version": "==3.4.1"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "tinydb": {
+ "hashes": [
+ "sha256:260b1f69168a24518af63f0270c72dc026476607016a0105aef6a966d8d2fbdc",
+ "sha256:a05c4c81e6e867c4f8a2e51c5236d0d897019aa5e9296f5947455b0bdd3c519d"
+ ],
+ "version": "==3.13.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4",
+ "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb"
+ ],
+ "version": "==1.24.3"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661",
+ "sha256:a796a115208f5c03b18f332f7c11729812c8c3ded6c46319c59b53efd3819da8"
+ ],
+ "index": "pypi",
+ "version": "==3.7.7"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:1349c6f7c2a0f7539f5f2ace51a9a8e4a37086ce4de6f78f5f53fb041d0a3cd5",
+ "sha256:f09911f6eb114e5592abe635aded8bf3d2c3144ebcfcaf81ee32e7af7b7d1870"
+ ],
+ "version": "==4.3.18"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:2112d2ca570bb7c3e53ea1a35cd5df42bb0fd10c45f0fb97178679c3c03d64c7",
+ "sha256:c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a"
+ ],
+ "markers": "python_version > '2.7'",
+ "version": "==7.0.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:25a1bc1d148c9a640211872b4ff859878d422bccb59c9965e04eed468a0aa180",
+ "sha256:964cedd2b27c492fbf0b7f58b3284a09cf7f99b0f715941fb24a439b3af1bd1a"
+ ],
+ "version": "==0.11.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:1a8aa4fa958f8f451ac5441f3ac130d9fc86ea38780dd2715e6d5c5882700b24",
+ "sha256:b8bf138592384bd4e87338cb0f256bf5f615398a649d4bd83915f0e4047a5ca6"
+ ],
+ "index": "pypi",
+ "version": "==4.5.0"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:132eae51d6ef3ff4a8c47c393a4ef5ebf0d1aecc96880eb5d6c8ceab7017cc9b",
+ "sha256:18141c1484ab8784006c839be8b985cfc82a2e9725837b0ecfa0203f71c4e39d",
+ "sha256:2baf617f5bbbfe73fd8846463f5aeafc912b5ee247f410700245d68525ec584a",
+ "sha256:3d90063f2cbbe39177e9b4d888e45777012652d6110156845b828908c51ae462",
+ "sha256:4304b2218b842d610aa1a1d87e1dc9559597969acc62ce717ee4dfeaa44d7eee",
+ "sha256:4983ede548ffc3541bae49a82675996497348e55bafd1554dc4e4a5d6eda541a",
+ "sha256:5315f4509c1476718a4825f45a203b82d7fdf2a6f5f0c8f166435975b1c9f7d4",
+ "sha256:6cdfb1b49d5345f7c2b90d638822d16ba62dc82f7616e9b4caa10b72f3f16649",
+ "sha256:7b325f12635598c604690efd7a0197d0b94b7d7778498e76e0710cd582fd1c7a",
+ "sha256:8d3b0e3b8626615826f9a626548057c5275a9733512b137984a68ba1598d3d2f",
+ "sha256:8f8631160c79f53081bd23446525db0bc4c5616f78d04021e6e434b286493fd7",
+ "sha256:912de10965f3dc89da23936f1cc4ed60764f712e5fa603a09dd904f88c996760",
+ "sha256:b010c07b975fe853c65d7bbe9d4ac62f1c69086750a574f6292597763781ba18",
+ "sha256:c908c10505904c48081a5415a1e295d8403e353e0c14c42b6d67f8f97fae6616",
+ "sha256:c94dd3807c0c0610f7c76f078119f4ea48235a953512752b9175f9f98f5ae2bd",
+ "sha256:ce65dee7594a84c466e79d7fb7d3303e7295d16a83c22c7c4037071b059e2c21",
+ "sha256:eaa9cfcb221a8a4c2889be6f93da141ac777eb8819f077e1d09fb12d00a09a93",
+ "sha256:f3376bc31bad66d46d44b4e6522c5c21976bf9bca4ef5987bb2bf727f4506cbb",
+ "sha256:f9202fa138544e13a4ec1a6792c35834250a85958fde1251b6a22e07d1260ae7"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.3.5"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/McAfee_ESM-v10/McAfee_ESM-v10.py b/Integrations/McAfee_ESM-v10/McAfee_ESM-v10.py
new file mode 100644
index 000000000000..f4c2798948b0
--- /dev/null
+++ b/Integrations/McAfee_ESM-v10/McAfee_ESM-v10.py
@@ -0,0 +1,1091 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+""" IMPORTS """
+import base64
+import json
+import re
+import sys
+import requests
+import time
+import traceback
+from datetime import datetime, timedelta
+from distutils.util import strtobool
+
+MAX_CASES_PER_FETCH = 30
+
+# by default filters only "Closed" cases
+FILTERED_OUT_STATUSES = [2, ]
+VERIFY = demisto.params()['insecure'] is False
+requests.packages.urllib3.disable_warnings()
+VERSION = demisto.params()['version']
+IS_V2_API = VERSION in ['10.2', '10.3', '11.1']
+
+ESM_URL = demisto.params()['ip'] + ":" + demisto.params()['port']
+USERNAME = demisto.params()['credentials']['identifier']
+PASSWORD = demisto.params()['credentials']['password']
+ESM_DATE_FORMAT = demisto.params()['time_format']
+TIMEZONE = float(demisto.params().get('timezone'))
+
+
+@logger
+def parse_time(time_str):
+ if ESM_DATE_FORMAT != 'auto-discovery':
+ return ESM_DATE_FORMAT
+
+ regex_to_format = {
+        r'\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2}': '%Y/%m/%d %H:%M:%S',  # e.g. '2018/12/31 16:54:32'
+        r'\d{2}/\d{2}/\d{4} \d{2}:\d{2}:\d{2}': '%d/%m/%Y %H:%M:%S',  # e.g. '31/12/2018 16:54:32'
+ }
+
+ selected_format = '%Y/%m/%d %H:%M:%S'
+ for regex, time_format in regex_to_format.items():
+ if re.match(regex, time_str):
+ selected_format = time_format
+ break
+
+ return selected_format
+
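+# When the instance time_format is auto-discovery, illustrative examples (assumed sample inputs):
+#   parse_time('2018/12/31 16:54:32')  ->  '%Y/%m/%d %H:%M:%S'
+#   parse_time('31/12/2018 16:54:32')  ->  '%d/%m/%Y %H:%M:%S'
+#   any other input falls back to the default '%Y/%m/%d %H:%M:%S'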
+
+@logger
+def search_result_to_md(res):
+ columns = res['columns']
+ rows = res['rows']
+
+ md = "### results:\n"
+
+ if len(rows) == 0:
+ return md + "No matching search result were found"
+
+ # headers
+ column_names = list(map(lambda column: column['name'], columns))
+ md = md + ' | '.join(column_names) + '\n'
+ md = md + ' | '.join(list(map(lambda column: "---", columns))) + '\n'
+
+ # body
+ for row in rows:
+ md = md + ' | '.join(r.replace('|', '\\|') for r in row['values']) + '\n'
+
+ return md
+
+
+@logger
+def search_results_to_context(res):
+ columns = res['columns']
+ rows = res['rows']
+ fixed_searches = []
+ for row in rows:
+ values = row['values']
+ i = 0
+ for column in columns:
+ if len(values[i]) != 0:
+ column_string = column['name'].replace(".", "")
+ column_string = column_string.replace(")", "")
+ column_string = column_string.replace("(", "")
+ fixed_searches.append({
+ column_string: values[i]
+ })
+ i += 1
+ context = {'SearchResults(val.ID && val.ID == obj.ID)': fixed_searches}
+ return context
+
+
+def severity_to_level(severity):
+ if severity > 65:
+ return 3
+ elif severity > 32:
+ return 2
+ else:
+ return 1
+
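+# A quick worked mapping: severity_to_level(80) -> 3 (high), severity_to_level(50) -> 2 (medium),
+# severity_to_level(10) -> 1 (low), matching Demisto severity levels.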
+
+class NitroESM(object):
+ def __init__(self, esmhost, user, passwd):
+ """ Init instance attributes """
+ self.esmhost = esmhost
+ self.user = user
+ self.passwd = passwd
+ self.url = 'https://{}/rs/esm/'.format(self.esmhost)
+ self.session_headers = {'Content-Type': 'application/json'}
+ self.is_logged_in = False
+ self._case_statuses = None
+
+ def __repr__(self):
+ return 'NitroESM("{}", "{}")'.format(self.url, self.user)
+
+ def login(self):
+ b64_user = base64.b64encode(self.user.encode('utf-8')).decode()
+ b64_passwd = base64.b64encode(self.passwd.encode('utf-8')).decode()
+ params = {
+ "username": b64_user,
+ "password": b64_passwd,
+ "locale": "en_US",
+ "os": "Win32"
+ }
+ login_response = requests.post(self.url + 'login',
+ json=params,
+ headers=self.session_headers,
+ verify=VERIFY)
+ jwttoken = login_response.cookies.get('JWTToken')
+ xsrf_token = login_response.headers.get('Xsrf-Token')
+ if jwttoken is None or xsrf_token is None:
+ raise Exception("Failed login\nurl: {}\n response status: {}\nresponse: {}\n".format(
+ self.url + 'login',
+ login_response.status_code,
+ login_response.text))
+
+ self.session_headers = {
+ 'Cookie': 'JWTToken=' + jwttoken,
+ 'X-Xsrf-Token': xsrf_token,
+ 'Content-Type': 'application/json'
+ }
+ self.is_logged_in = True
+
+ def logout(self):
+ if self.is_logged_in:
+ try:
+ url = self.url + ('v2/logout' if IS_V2_API else 'logout')
+ requests.delete(url,
+ headers=self.session_headers,
+ data=json.dumps(''),
+ verify=VERIFY
+ )
+ self.is_logged_in = False
+ except Exception as e:
+ demisto.error('McAfee ESM logout failed with the following error: %s' % (str(e),))
+
+ @logger
+ def cmdquery(self, cmd, query=None, params=None, no_answer=False, no_validation=False):
+ """ Send query to ESM, return JSON result """
+ LOG('querying endpoint: {}'.format(cmd))
+ result = requests.post(self.url + cmd,
+ headers=self.session_headers,
+ params=params,
+ data=query, verify=VERIFY)
+ if not no_validation:
+ if no_answer:
+ if result.status_code != 200:
+ raise ValueError(
+ 'Error - ESM replied with:\n - status code: {} \n - body: {}'.format(result.status_code,
+ result.text))
+ else:
+ try:
+ res = result.json()
+ if VERSION != '10.0' and not cmd.startswith('v2'):
+ res = res['return']
+ return res
+            except Exception as e:
+ LOG(str(e))
+ raise ValueError(
+ 'Error - ESM replied with:\n - status code: {} \n - body: {}'.format(result.status_code,
+ result.text))
+
+ @logger
+ def execute_query(self, time_range, custom_start, custom_end, filters, fields, query_type):
+ if time_range == 'CUSTOM' and (not custom_start or not custom_end):
+ raise ValueError('you must specify customStart and customEnd when timeRange is CUSTOM')
+
+ cmd = '%sqryExecuteDetail?reverse=false&type=%s' % ('v2/' if IS_V2_API else '', query_type,)
+
+ if time_range == 'CUSTOM':
+ cmd = cmd + '&customStart=' + custom_start + '&customEnd=' + custom_end
+
+ q = {
+ 'config': {
+ 'timeRange': time_range,
+ 'filters': filters,
+ }
+ }
+ if fields is not None:
+ q['config']['fields'] = [{'name': v} for v in argToList(fields)]
+ query = json.dumps(q)
+
+ res = self.cmdquery(cmd, query)
+ return res['resultID']
+
+ @logger
+ def get_query_result(self, result_id):
+ cmd = '%sqryGetStatus' % ('v2/' if IS_V2_API else '',)
+ query = json.dumps({'resultID': result_id})
+
+ res = self.cmdquery(cmd, query)
+ return res['complete']
+
+ def wait_for_results(self, result_id, max_wait):
+
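+        # Worst case: ~15 seconds of initial back-off (5 tries x 3s) plus up to
+        # `max_wait` minutes (one try per minute) before raising.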
+ # initial back off, sleep 3 sec between each time
+ for i in range(5):
+ ready = self.get_query_result(result_id)
+ if ready:
+ return
+ else:
+ time.sleep(3)
+
+ # wait for response - 1 min between each try
+ for i in range(max_wait):
+ ready = self.get_query_result(result_id)
+ if ready:
+ return
+ else:
+ time.sleep(60)
+
+ raise ValueError('Waited more than {} min for query results : {}'.format(max_wait, result_id))
+
+ @logger
+ def fetch_results(self, result_id):
+ cmd = '%sqryGetResults?startPos=0&reverse=false&numRows=10000' % ('v2/' if IS_V2_API else '',)
+
+ query = json.dumps({'resultID': result_id})
+
+ res = self.cmdquery(cmd, query)
+ return res
+
+ @logger
+ def search(self, time_range, custom_start, custom_end, filters, fields, query_type, max_wait):
+ # execute command
+ result_id = self.execute_query(time_range, custom_start, custom_end, filters, fields, query_type)
+
+ # wait for result to be ready
+ self.wait_for_results(result_id, max_wait)
+
+ # fetch result
+ res = self.fetch_results(result_id)
+
+ table = search_result_to_md(res)
+ context = search_results_to_context(res)
+
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': table,
+ 'EntryContext': context
+ }
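+    # Illustrative usage of search() (assumed, minimal filter list):
+    #   entry = esm.search('LAST_24_HOURS', None, None, [], 'SrcIP,DstIP', 'EVENT', 30)
+    # executes the query, polls until it completes, and returns a Demisto entry dict.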
+
+ @logger
+ def fetch_all_fields(self):
+ res = self.cmdquery('%sqryGetFilterFields' % ('v2/' if IS_V2_API else '',))
+
+ # convert to an appropriate table
+ for x in res:
+ x['types'] = ','.join(x['types'])
+
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tblToMd('Result:', res),
+ }
+
+ # alarms commands
+ @logger
+ def fetch_alarms(self, time_range, custom_start, custom_end, assigned_user):
+ if time_range == 'CUSTOM' and (not custom_start or not custom_end):
+ raise ValueError('you must specify customStart and customEnd when timeRange is CUSTOM')
+
+ params = {
+ 'pageSize': 50,
+ 'pageNumber': 1,
+ 'triggeredTimeRange': time_range
+ }
+
+ if VERSION.startswith('11.'):
+ cmd = 'alarmGetTriggeredAlarms'
+ else:
+ cmd = 'alarmGetTriggeredAlarmsPaged'
+ if time_range == 'CUSTOM':
+ params.update({
+ 'customStart': custom_start,
+ 'customEnd': custom_end,
+ })
+
+ query = ''
+ if assigned_user == 'ME':
+ user = self.get_user_obj(self.user)
+ query = json.dumps({'assignedUser': user})
+ elif assigned_user:
+ query = json.dumps({'assignedUser': self.get_user_obj(assigned_user)})
+
+ res = self.cmdquery(cmd, query, params=params)
+
+ for alarm in res:
+ alarm['ID'] = alarm['id']['value']
+ del alarm["id"]
+
+ return res
+
+ @logger
+ def update_alarms_status(self, action, alarm_ids):
+ cmd = 'alarm%(action)sTriggeredAlarm' % {'action': action}
+ query = json.dumps({'triggeredIds': [{'value': int(i)} for i in alarm_ids]})
+
+        # the command returns an error for a list of alarm IDs; however, it executes the update successfully.
+ self.cmdquery(cmd, query, no_validation=True)
+        return 'Alarms have been %sd.' % (action,)
+
+ @logger
+ def acknowledge_alarms(self, alarm_ids):
+ return self.update_alarms_status('Acknowledge', alarm_ids)
+
+ @logger
+ def unacknowledge_alarms(self, alarm_ids):
+ return self.update_alarms_status('Unacknowledge', alarm_ids)
+
+ @logger
+ def delete_alarms(self, alarm_ids):
+ return self.update_alarms_status('Delete', alarm_ids)
+
+ @logger
+ def get_alarm_event_details(self, event_id):
+ cmd = ('%sipsGetAlertData' % ('v2/' if IS_V2_API else '',))
+ query = json.dumps({'id': event_id})
+
+ res = self.cmdquery(cmd, query)
+
+ return res
+
+ @logger
+ def list_alarm_events(self, alarm_id):
+ cmd = 'notifyGetTriggeredNotificationDetail'
+ query = json.dumps({'id': alarm_id})
+
+ res = self.cmdquery(cmd, query)
+
+ return res
+
+ # case statuses commands
+ @logger
+ def add_case_status(self, name, should_show_in_case_pane):
+ """add a new type of case status with given parameters"""
+
+ status_details = {
+ 'name': name,
+ 'default': False,
+ 'showInCasePane': should_show_in_case_pane
+ }
+
+ cmd = 'caseAddCaseStatus'
+ query = json.dumps({'status': status_details})
+ self.cmdquery(cmd, query)
+ return 'Added case status : %s' % (name,)
+
+ @logger
+ def edit_case_status(self, original_name, new_name, show_in_case_pane):
+ """edit a case status with given id"""
+
+ status_id = self.case_status_name_to_id(original_name)
+ status_details = {
+ 'id': status_id,
+ 'name': new_name
+ }
+
+ if show_in_case_pane is not None:
+ status_details['showInCasePane'] = show_in_case_pane
+
+ cmd = 'caseEditCaseStatus'
+ query = json.dumps({'status': status_details})
+ self.cmdquery(cmd, query, no_answer=True)
+
+ return 'Edit case status with ID: %d' % (status_id,)
+
+ @logger
+ def delete_case_status(self, name):
+ """delete a new type of case status with given name"""
+
+ status_id = self.case_status_name_to_id(name)
+ status_id = {'value': status_id}
+
+ cmd = 'caseDeleteCaseStatus'
+ query = json.dumps({'statusId': status_id})
+ self.cmdquery(cmd, query, no_answer=True)
+
+ return 'Deleted case status with ID: %d' % (status_id['value'],)
+
+ @logger
+ def get_case_statuses(self):
+ """get all case statuses"""
+
+ cmd = 'caseGetCaseStatusList'
+ query = json.dumps({"authPW": {"value": self.passwd}})
+
+ return self.cmdquery(cmd, query)
+
+ @logger
+ def case_status_id_to_name(self, status_id, use_cache=True):
+ """convert case status id to name"""
+ if self._case_statuses is None:
+ self._case_statuses = demisto.getIntegrationContext().get('case_statuses', None)
+
+ if self._case_statuses is None or not use_cache or not any([s['id'] == status_id for s in self._case_statuses]):
+ self._case_statuses = self.get_case_statuses()
+ demisto.setIntegrationContext({
+ 'case_statuses': self._case_statuses
+ })
+
+ matches = [status['name'] for status in self._case_statuses if status['id'] == status_id]
+
+ return matches[0] if matches else 'Unknown - %d' % (status_id,)
+
+ @logger
+ def case_status_name_to_id(self, status_name, use_cache=True):
+ """convert case status name to id"""
+ if self._case_statuses is None or not use_cache or not any(
+ [s['name'].lower() == status_name.lower() for s in self._case_statuses]):
+ self._case_statuses = self.get_case_statuses()
+
+ matches = [status['id'] for status in self._case_statuses if status['name'].lower() == status_name.lower()]
+
+ return matches[0] if matches else 0 # 0 is not a valid value
+
+ # user commands
+ @logger
+ def get_users(self):
+ """get all user's names"""
+
+ cmd = 'userGetUserList'
+ query = json.dumps({"authPW": {"value": self.passwd}})
+
+ return self.cmdquery(cmd, query)
+
+ @logger
+ def get_user_obj(self, user_name):
+ """get user object"""
+ if user_name.lower() == 'me':
+ user_name = self.user
+
+ res = self.get_users()
+ matches = [user for user in res if user['username'] == user_name]
+ self_matches = [user['id']['value'] for user in res if user['username'] == self.user]
+
+ # the login user must appear in the user list
+ return matches[0] if matches else self_matches[0]
+
+ @logger
+ def user_name_to_id(self, user_name):
+ """convert user name to id"""
+ if user_name is None or user_name.lower() == 'me':
+ user_name = self.user
+ res = self.get_users()
+ matches = [user['id']['value'] for user in res if user['username'] == user_name]
+ self_matches = [user['id']['value'] for user in res if user['username'] == self.user]
+
+ # the login user must appear in the user list
+ return matches[0] if matches else self_matches[0]
+
+ @logger
+ def user_id_to_name(self, user_id):
+ """convert user id to name"""
+ res = self.get_users()
+ matches = [user['username'] for user in res if user['id']['value'] == user_id]
+
+ return matches[0] if matches else self.user
+
+ # organization commands
+ @logger
+ def get_organizations(self):
+ """get all organization names"""
+
+ cmd = 'caseGetOrganizationList'
+ return self.cmdquery(cmd, '')
+
+ @logger
+ def organization_name_to_id(self, organization_name):
+ """convert organization name to id"""
+ if organization_name is None:
+ organization_name = ''
+ res = self.get_organizations()
+ matches = [org['id'] for org in res if org['name'].lower() == organization_name.lower()]
+
+ return matches[0] if matches else 1
+
+ @logger
+ def organization_id_to_name(self, organization_id):
+ """convert organization name to id"""
+ res = self.get_organizations()
+ matches = [org['name'] for org in res if org['id'] == organization_id]
+
+ return matches[0] if matches else 'None'
+
+ # cases commands
+ @logger
+ def get_cases(self, since_date_range=None):
+ """get all cases associated with current user"""
+ cmd = 'caseGetCaseList'
+ res = self.cmdquery(cmd)
+ cases = []
+ if since_date_range:
+ start_time, _ = parse_date_range(since_date_range, '%Y/%m/%d %H:%M:%S')
+ for case in res:
+ if case.get('openTime') > start_time:
+ cases.append(case)
+
+ else:
+ cases = res
+
+ return cases
+
+ def get_case_detail(self, case_id):
+ cmd = 'caseGetCaseDetail'
+ case_id = {'id': {'value': case_id}}
+ query = json.dumps(case_id)
+ res = self.cmdquery(cmd, query)
+
+ return res
+
+ def add_case(self, summary, severity, status, assignee, organization):
+ if severity < 1:
+ severity = 1
+ elif severity > 100:
+ severity = 100
+
+ if status is None:
+ status = 'Open'
+
+ assignee = self.user_name_to_id(assignee)
+ org_id = self.organization_name_to_id(organization)
+
+ cmd = 'caseAddCase'
+ case_details = {
+ 'summary': summary,
+ 'assignedTo': assignee,
+ 'severity': severity,
+ 'orgId': org_id,
+ 'statusId': {'value': self.case_status_name_to_id(status)},
+ }
+
+ query = json.dumps({'caseDetail': case_details})
+ res = self.cmdquery(cmd, query)
+ return res['value']
+
+ def edit_case(self, case_id, summary, severity, status, assignee, organization):
+
+ case = self.get_case_detail(case_id)
+
+ if summary is not None:
+ case['summary'] = summary
+
+ if severity is not None:
+ if severity < 1:
+ severity = 1
+ elif severity > 100:
+ severity = 100
+ case['severity'] = severity
+
+ if status is not None:
+ case['statusId'] = self.case_status_name_to_id(status)
+
+ if assignee is not None:
+ case['assignedTo'] = self.user_name_to_id(assignee)
+
+ if organization is not None:
+ case['orgId'] = self.organization_name_to_id(organization)
+
+ cmd = 'caseEditCase'
+ query = json.dumps({'caseDetail': case})
+ self.cmdquery(cmd, query, no_answer=True)
+
+ return
+
+ def get_case_event_list(self, event_ids):
+
+ event_ids = {'list': event_ids}
+
+ cmd = 'caseGetCaseEventsDetail'
+ query = json.dumps({'eventIds': event_ids})
+ res = self.cmdquery(cmd, query)
+
+ return res
+
+
+@logger
+def alarms_to_entry(alarms):
+ if not alarms:
+ return "No alarms were found"
+
+ context = {'Alarm(val.ID && val.ID == obj.ID)': alarms}
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': alarms,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tblToMd('Result:', alarms),
+ 'EntryContext': context
+ }
+
+
+@logger
+def alarms_to_incidents(alarms):
+ incidents = []
+ for alarm in alarms:
+ incidents.append({
+ 'name': alarm['summary'].encode('utf-8'),
+ 'details': 'Alarm {} , ID : {} , was triggered by condition type: {}'.format(
+ alarm['alarmName'].encode('utf-8'),
+ alarm['ID'],
+ alarm['conditionType']),
+ 'severity': severity_to_level(alarm['severity']),
+ 'rawJSON': json.dumps(alarm)
+ })
+ return incidents
+
+
+@logger
+def cases_to_entry(esm, title, cases):
+ if not cases:
+ return 'No cases were found'
+
+ headers = ['ID', 'Summary', 'Status', 'Severity', 'OpenTime']
+ fixed_cases = []
+ context_cases = []
+
+ for case in cases:
+ fixed_case = {
+ 'ID': case['id']['value'],
+ 'Summary': case['summary'],
+ 'Status': esm.case_status_id_to_name(case['statusId']['value']),
+ 'OpenTime': case['openTime'],
+ 'Severity': case['severity']
+ }
+
+ if 'assignedTo' in case:
+ fixed_case['Assignee'] = esm.user_id_to_name(case['assignedTo'])
+ headers.append('Assignee')
+
+ if 'orgId' in case:
+ fixed_case['Organization'] = esm.organization_id_to_name(case['orgId'])
+ headers.append('Organization')
+
+ context_case = fixed_case.copy()
+ if 'eventList' in case:
+ fixed_case['Event List'] = json.dumps(case['eventList'])
+ context_case['EventList'] = case['eventList']
+ headers.append('Event List')
+
+ if 'notes' in case:
+ fixed_case['Notes'] = json.dumps(case['notes'])
+ context_case['Notes'] = case['notes']
+ headers.append('Notes')
+
+ fixed_cases.append(fixed_case)
+ context_cases.append(context_case)
+
+ context = {'Case(val.ID && val.ID == obj.ID)': context_cases}
+
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': cases,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tblToMd(title, fixed_cases, headers),
+ 'EntryContext': context
+ }
+
+
+@logger
+def cases_to_incidents(cases):
+ incidents = []
+ for case in cases:
+ incident = {
+ 'name': case['summary'].encode('utf-8'),
+            'details': 'Case %s with ID %s was triggered' % (case['summary'].encode('utf-8'), case['id']['value']),
+ 'severity': severity_to_level(case['severity']),
+ 'rawJSON': json.dumps(case),
+ }
+ incidents.append(incident)
+
+ return incidents
+
+
+@logger
+def case_statuses_to_entry(case_statuses):
+ if not case_statuses:
+ return 'No case statuses were found'
+
+ headers = ['ID', 'Name', 'Is Default', 'Show In Case Pane']
+ fixed_statuses = []
+ for status in case_statuses:
+ fixed_statuses.append({
+ 'ID': status['id'],
+ 'Name': status['name'],
+ 'Is Default': status['default'],
+ 'Show In Case Pane': status['showInCasePane']
+ })
+
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': fixed_statuses,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tblToMd('Result:', fixed_statuses, headers),
+ 'EntryContext': {}
+ }
+
+
+def organizations_to_entry(organizations):
+ if not organizations:
+ return 'No organizations were found'
+
+ headers = ['ID', 'Name']
+ fixed_organizations = []
+ for organization in organizations:
+ fixed_organizations.append({
+ 'ID': organization['id'],
+ 'Name': organization['name'],
+ })
+
+ context = {'Organizations(val.ID && val.ID == obj.ID)': fixed_organizations}
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': fixed_organizations,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tblToMd('Organizations:', fixed_organizations, headers),
+ 'EntryContext': context
+ }
+
+
+def case_events_to_entry(events):
+ if not events:
+ return 'No events were found'
+
+ headers = ['ID', 'LastTime', 'Message']
+ fixed_events = []
+ for event in events:
+ fixed_events.append({
+ 'ID': event['id']['value'],
+ 'LastTime': event['lastTime'],
+ 'Message': event['message'],
+ })
+
+ context = {'CaseEvents(val.ID && val.ID == obj.ID)': fixed_events}
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': fixed_events,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tblToMd('Case Events:', fixed_events, headers),
+ 'EntryContext': context
+ }
+
+
+def alarm_events_to_entry(esm, events):
+ headers = ['ID', 'SubType', 'Severity', 'Message', 'LastTime', 'SrcIP', 'SrcPort', 'DstIP', 'DstPort', ]
+ fixed_events = []
+ for raw_event in events:
+        # There are two types of event object representations:
+        # 1) the result of esm-list-alarm-events
+        # 2) the result of esm-get-alarm-event-details
+        # Therefore, first try to get the field from the first option and fall back to the second option.
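+        # (e.g. representation (1) exposes 'eventId' and 'sourceIp', while (2) exposes 'ipsId'/'alertId' and 'srcIp')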
+ cases = [{
+ 'ID': case['id'],
+ 'OpenTime': case['openTime'],
+ 'Severity': case['severity'],
+ 'Summary': case['summary'],
+ 'Status': esm.case_status_id_to_name(case['statusId']['value'])
+ } for case in raw_event.get('cases', [])]
+ event = {
+ 'ID': raw_event.get('eventId', '%s|%s' % (raw_event.get('ipsId', ''), raw_event.get('alertId', ''))),
+ 'SubType': raw_event.get('eventSubType', raw_event.get('subtype')),
+ 'Severity': raw_event['severity'],
+ 'Cases': cases,
+ 'Message': raw_event.get('ruleMessage', raw_event.get('ruleName')),
+ 'NormalizedDescription': raw_event.get('normDesc'),
+ 'FirstTime': raw_event.get('firstTime'),
+ 'LastTime': raw_event['lastTime'],
+
+ 'SrcMac': raw_event.get('srcMac'),
+ 'SrcIP': raw_event.get('sourceIp', raw_event.get('srcIp')),
+ 'SrcPort': raw_event.get('srcPort'),
+ 'DstMac': raw_event.get('destMac'),
+ 'DstIP': raw_event['destIp'],
+ 'DstPort': raw_event.get('destPort'),
+
+ 'Raw': raw_event,
+ }
+
+ fixed_events.append(event)
+
+ context = {'EsmAlarmEvent(val.ID && val.ID == obj.ID)': createContext(fixed_events, removeNull=True)}
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': fixed_events,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tblToMd('Alarm Events:', fixed_events, headers=headers),
+ 'EntryContext': context
+ }
+
+
+@logger
+def users_to_entry(users):
+    # unreachable in practice - in order to send a command, one must be logged in, so there is at least one user.
+ if not users:
+ return 'No users were found'
+
+ headers = ['ID', 'Name', 'Email', 'SMS', 'IsMaster', 'IsAdmin']
+ fixed_users = []
+ for user in users:
+ fixed_users.append({
+ 'ID': user['id']['value'],
+ 'Name': user['username'],
+ 'Email': user['email'],
+ 'SMS': user['sms'],
+ 'IsMaster': user['master'],
+ 'IsAdmin': user['admin'],
+ })
+
+ context = {'EsmUser(val.ID && val.ID == obj.ID)': fixed_users}
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': fixed_users,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tblToMd('Users:', fixed_users, headers),
+ 'EntryContext': context
+ }
+
+
+def main():
+ esm = NitroESM(ESM_URL, USERNAME, PASSWORD)
+ try:
+ esm.login()
+ final_result = 'No result set'
+
+ if demisto.command() == 'fetch-incidents':
+ last_run = demisto.getLastRun()
+ demisto.debug('\n\nlast run:\n{}\n'.format(last_run))
+            # for backward compatibility
+ if 'value' in last_run and 'alarms' not in last_run:
+ last_run['alarms'] = last_run['value']
+ configuration_last_case = int(demisto.params().get('startingCaseID', 0))
+
+ start_alarms = last_run.get('alarms')
+ if start_alarms is None:
+ start_alarms, _ = parse_date_range(demisto.params()['alarm_fetch_time'],
+ date_format='%Y-%m-%dT%H:%M:%S.%f', timezone=TIMEZONE)
+
+ last_case = last_run.get('cases', 0)
+            last_case = max(last_case, configuration_last_case)
+
+ incidents = [] # type: list
+ mode = demisto.params().get('fetchTypes', 'alarms').lower() # alarms is default for backward compatibility
+
+ next_run = None
+ if mode in ('alarms', 'both'):
+ end = (datetime.now() + timedelta(hours=TIMEZONE)).isoformat()
+
+ demisto.debug("alarms: start - {} , end - {}".format(start_alarms, end))
+
+ alarms = esm.fetch_alarms(
+ 'CUSTOM',
+ start_alarms,
+ end,
+ ''
+ )
+ demisto.debug('alarms found:\n{}\n'.format(alarms))
+
+ incidents = []
+ for alarm in alarms:
+ triggered_date = alarm['triggeredDate']
+ if next_run is None or next_run < triggered_date:
+ next_run = triggered_date
+ alarm['events'] = esm.list_alarm_events(alarm['ID'])
+ incidents.append({
+ 'name': alarm['summary'],
+ 'details': 'Alarm {} , ID : {} , was triggered by condition type: {}'.format(
+ alarm['alarmName'],
+ alarm['ID'],
+ alarm['conditionType']),
+ 'severity': severity_to_level(alarm['severity']),
+ 'rawJSON': json.dumps(alarm)
+ })
+
+ if mode in ('cases', 'both'):
+ # get new cases
+ cases = [case for case in esm.get_cases() if case['id']['value'] > last_case]
+ cases.sort(key=lambda c: c['id']['value'])
+ cases = cases[:MAX_CASES_PER_FETCH]
+
+ if cases:
+ last_case = cases[-1]['id']['value']
+
+ # update last run info
+ last_run['cases'] = last_case
+
+ demisto.debug('adding %d more cases, last id is: %d' % (len(cases), last_run['cases'],))
+ if cases:
+ incidents.extend(cases_to_incidents(cases))
+
+ if next_run is not None:
+ next_run_datetime = datetime.strptime(next_run, parse_time(next_run))
+ next_run = (next_run_datetime + timedelta(seconds=1)).isoformat()
+ else:
+ next_run = start_alarms
+
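+            # Persist the cursor so the next fetch starts 1 second after the newest
+            # triggered alarm; 'value' is kept in sync for backward compatibility.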
+ last_run['value'] = next_run
+ last_run['alarms'] = next_run
+
+ demisto.incidents(incidents)
+ demisto.setLastRun(last_run)
+ sys.exit(0)
+
+ elif demisto.command() == 'test-module':
+ if VERSION not in ['10.0', '10.1', '10.2', '10.3', '11.1']:
+            final_result = 'version must be one of 10.0, 10.1, 10.2, 10.3 or 11.1, got %s' % (VERSION,)
+ else:
+ esm.fetch_all_fields()
+ final_result = 'ok'
+
+ elif demisto.command() == 'esm-fetch-fields':
+ res = esm.fetch_all_fields()
+ final_result = res
+
+ elif demisto.command() == 'esm-search':
+ args = demisto.args()
+ res = esm.search(
+ demisto.get(args, 'timeRange'),
+ demisto.get(args, 'customStart'),
+ demisto.get(args, 'customEnd'),
+ json.loads(args.get('filters')),
+ args.get('fields'),
+ demisto.get(args, 'queryType') or 'EVENT',
+ demisto.get(args, 'maxWait') or 30
+ )
+ final_result = res
+
+ elif demisto.command() == 'esm-get-case-list':
+ since_date_range = demisto.args().get('since')
+ res = esm.get_cases(since_date_range)
+ final_result = cases_to_entry(esm, 'All cases:', res)
+
+ elif demisto.command() == 'esm-get-case-detail':
+ args = demisto.args()
+ case_id = int(demisto.get(args, 'id'))
+ res = esm.get_case_detail(case_id)
+ final_result = cases_to_entry(esm, 'Case %d:' % (case_id,), [res])
+
+ elif demisto.command() == 'esm-add-case':
+ args = demisto.args()
+ res = esm.add_case(
+ demisto.get(args, 'summary'),
+ int(demisto.get(args, 'severity')),
+ demisto.get(args, 'status'),
+ demisto.get(args, 'assignee'),
+ demisto.get(args, 'organization'),
+ )
+ case = esm.get_case_detail(res)
+ final_result = cases_to_entry(esm, 'New Case:', [case])
+
+ elif demisto.command() == 'esm-edit-case':
+ args = demisto.args()
+ case_id = int(demisto.get(args, 'id'))
+ severity = demisto.get(args, 'severity')
+ esm.edit_case(
+ case_id,
+ demisto.get(args, 'summary'),
+ int(severity) if severity else None,
+ demisto.get(args, 'status'),
+ demisto.get(args, 'assignee'),
+ demisto.get(args, 'organization'),
+ )
+ case = esm.get_case_detail(case_id)
+ final_result = cases_to_entry(esm, 'Edited Case:', [case])
+
+ elif demisto.command() == 'esm-get-case-statuses':
+ res = esm.get_case_statuses()
+ final_result = case_statuses_to_entry(res)
+
+ elif demisto.command() == 'esm-add-case-status':
+ args = demisto.args()
+ res = esm.add_case_status(
+ demisto.get(args, 'name'),
+ bool(strtobool(demisto.get(args, 'show_in_case_pane'))),
+ )
+ final_result = res
+
+ elif demisto.command() == 'esm-edit-case-status':
+ args = demisto.args()
+ should_show = demisto.get(args, 'show_in_case_pane')
+ res = esm.edit_case_status(
+ demisto.get(args, 'original_name'),
+ demisto.get(args, 'new_name'),
+ bool(strtobool(should_show)) if should_show else None,
+ )
+ final_result = res
+
+ elif demisto.command() == 'esm-delete-case-status':
+ args = demisto.args()
+ res = esm.delete_case_status(
+ demisto.get(args, 'name')
+ )
+ final_result = res
+
+ elif demisto.command() == 'esm-get-case-event-list':
+ args = demisto.args()
+ event_ids = demisto.get(args, 'ids').split(',')
+ res = esm.get_case_event_list(event_ids)
+ final_result = case_events_to_entry(res)
+
+ elif demisto.command() == 'esm-get-organization-list':
+ res = esm.get_organizations()
+ final_result = organizations_to_entry(res)
+
+ elif demisto.command() == 'esm-get-user-list':
+ res = esm.get_users()
+ final_result = users_to_entry(res)
+
+ elif demisto.command() == 'esm-fetch-alarms':
+ args = demisto.args()
+ res = esm.fetch_alarms(
+ demisto.get(args, 'timeRange'),
+ demisto.get(args, 'customStart'),
+ demisto.get(args, 'customEnd'),
+ demisto.get(args, 'assignedUser')
+ )
+ final_result = alarms_to_entry(res)
+
+ elif demisto.command() == 'esm-acknowledge-alarms':
+ args = demisto.args()
+ res = esm.acknowledge_alarms(argToList(demisto.get(args, 'alarmIds')))
+ final_result = res
+
+ elif demisto.command() == 'esm-unacknowledge-alarms':
+ args = demisto.args()
+ res = esm.unacknowledge_alarms(argToList(demisto.get(args, 'alarmIds')))
+ final_result = res
+
+ elif demisto.command() == 'esm-delete-alarms':
+ args = demisto.args()
+ res = esm.delete_alarms(argToList(demisto.get(args, 'alarmIds')))
+ final_result = res
+
+ elif demisto.command() == 'esm-get-alarm-event-details':
+ args = demisto.args()
+ res = esm.get_alarm_event_details(demisto.get(args, 'eventId'))
+ final_result = alarm_events_to_entry(esm, [res])
+
+ elif demisto.command() == 'esm-list-alarm-events':
+ args = demisto.args()
+ res = esm.list_alarm_events(demisto.get(args, 'alarmId'))
+ final_result = alarm_events_to_entry(esm, res['events'])
+ demisto.results(final_result)
+
+ except Exception as ex:
+ demisto.error('#### error in McAfee ESM v10: ' + str(ex))
+ if demisto.command() == 'fetch-incidents':
+ LOG(traceback.format_exc())
+ LOG.print_log()
+ raise
+ else:
+ return_error(str(ex), error=traceback.format_exc())
+ finally:
+ esm.logout()
+
+
+# python2 uses __builtin__, python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/McAfee_ESM-v10/McAfee_ESM-v10.yml b/Integrations/McAfee_ESM-v10/McAfee_ESM-v10.yml
new file mode 100644
index 000000000000..4ca729e646b8
--- /dev/null
+++ b/Integrations/McAfee_ESM-v10/McAfee_ESM-v10.yml
@@ -0,0 +1,726 @@
+category: Analytics & SIEM
+commonfields:
+ id: McAfee ESM-v10
+ version: -1
+configuration:
+- display: ESM IP (e.g. 78.125.0.209)
+ name: ip
+ required: true
+ type: 0
+- defaultvalue: '443'
+ display: Port
+ name: port
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- defaultvalue: '10.2'
+ display: 'Version: (one of 10.0, 10.1, 10.2, 10.3, 11.1)'
+ name: version
+ required: true
+ type: 0
+- display: Trust any certificate (insecure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: auto-discovery
+ display: ESM time format, e.g., %Y/%m/%d %H:%M:%S. Select "auto-discovery" to attempt
+ to determine the format automatically.
+ name: time_format
+ required: false
+ type: 0
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: alarms
+  display: 'Fetch Types: cases, alarms, both (relevant only for fetch-incidents mode)'
+ name: fetchTypes
+ required: false
+ type: 0
+- defaultvalue: '0'
+  display: 'Start fetch after Case ID: (relevant only for fetch-incidents mode)'
+ name: startingCaseID
+ required: false
+ type: 0
+- defaultvalue: 3 days
+  display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days, 3
+    months, 1 year)
+ name: alarm_fetch_time
+ type: 0
+ required: false
+- defaultvalue: '0'
+  display: McAfee ESM timezone in hours (e.g., if the ESM timezone is +0300, insert
+    3)
+ name: timezone
+ required: false
+ type: 0
+description: Run queries and receive alarms from Intel Security ESM. Supports version 10 and above.
+display: McAfee ESM v10 and v11
+name: McAfee ESM-v10
+script:
+ commands:
+ - deprecated: false
+ description: Get all fields that can be used in query filters, with type information
+ for each field
+ execution: false
+ name: esm-fetch-fields
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The time period for the search
+ isArray: false
+ name: timeRange
+ predefined:
+ - LAST_3_DAYS
+ - LAST_2_DAYS
+ - LAST_24_HOURS
+ - PREVIOUS_DAY
+ - CURRENT_DAY
+ - LAST_HOUR
+ - LAST_30_MINUTES
+ - LAST_10_MINUTES
+ - LAST_MINUTE
+ - CUSTOM
+ - PREVIOUS_YEAR
+ - CURRENT_YEAR
+ - PREVIOUS_QUARTER
+ - CURRENT_QUARTER
+ - PREVIOUS_MONTH
+ - CURRENT_MONTH
+ - PREVIOUS_WEEK
+ - CURRENT_WEEK
+ required: true
+ secret: false
+ - default: false
+    description: Filter on the query results; should be a JSON string in the EsmFilter
+      format (read more at https://<esm-ip>:<port>/rs/esm/help/types/EsmFilter). See
+      the commented example below this command.
+ isArray: false
+ name: filters
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: 'Query type to perform. The default is EVENT (other possible values:
+      FLOW, ASSETS)'
+ isArray: false
+ name: queryType
+ predefined:
+ - EVENT
+ - FLOW
+ - ASSETS
+ required: false
+ secret: false
+ - default: false
+ description: Maximum time to wait (in minutes), default is 30
+ isArray: false
+ name: maxWait
+ required: false
+ secret: false
+ - default: false
+ description: if timeRange is CUSTOM, start time for the time range (e.g. 2017-06-01T12:48:16.734Z)
+ isArray: false
+ name: customStart
+ required: false
+ secret: false
+ - default: false
+ description: if timeRange is CUSTOM, end time for the time range (e.g. 2017-06-01T12:48:16.734Z)
+ isArray: false
+ name: customEnd
+ required: false
+ secret: false
+ - default: false
+ description: The fields that will be selected when this query is executed.
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ deprecated: false
+    description: Perform a query against the McAfee ESM SIEM
+ execution: false
+ name: esm-search
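+  # An illustrative `filters` value for esm-search (assumed shape, modeled on the
+  # EsmFilter type; the field name and value here are examples, not a fixed schema):
+  # [{"type": "EsmFieldFilter", "field": {"name": "SrcIP"}, "operator": "IN",
+  #   "values": [{"type": "EsmBasicValue", "value": "10.0.0.1"}]}]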
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The time period for the fetch
+ isArray: false
+ name: timeRange
+ predefined:
+ - LAST_3_DAYS
+ - LAST_2_DAYS
+ - LAST_24_HOURS
+ - PREVIOUS_DAY
+ - CURRENT_DAY
+ - LAST_HOUR
+ - LAST_30_MINUTES
+ - LAST_10_MINUTES
+ - LAST_MINUTE
+ - CUSTOM
+ - PREVIOUS_YEAR
+ - CURRENT_YEAR
+ - PREVIOUS_QUARTER
+ - CURRENT_QUARTER
+ - PREVIOUS_MONTH
+ - CURRENT_MONTH
+ - PREVIOUS_WEEK
+ - CURRENT_WEEK
+ required: true
+ secret: false
+ - default: false
+ description: if timeRange is CUSTOM, start time for the time range (e.g. 2017-06-01T12:48:16.734Z)
+ isArray: false
+ name: customStart
+ required: false
+ secret: false
+ - default: false
+ description: if timeRange is CUSTOM, end time for the time range (e.g. 2017-06-01T12:48:16.734Z)
+ isArray: false
+ name: customEnd
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: 'user assigned to handle this triggered alarm (use the ''ME'' option
+      to use the instance user, or use the EsmUser format - read more at https://<esm-ip>:<port>/rs/esm/help/types/EsmUser)'
+ isArray: false
+ name: assignedUser
+ predefined:
+ - ME
+ - ''
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of alarms that have been triggered
+ execution: false
+ name: esm-fetch-alarms
+ outputs:
+ - contextPath: Alarm.ID
+ description: Alarm ID
+ type: number
+ - contextPath: Alarm.summary
+ description: Alarm summary
+ type: string
+ - contextPath: Alarm.assignee
+ description: Alarm assignee
+ type: string
+ - contextPath: Alarm.severity
+ description: Alarm severity
+ type: number
+ - contextPath: Alarm.triggeredDate
+ description: Alarm triggered date
+ type: date
+ - contextPath: Alarm.acknowledgedDate
+ description: Alarm acknowledged date
+ type: date
+ - contextPath: Alarm.acknowledgedUsername
+ description: Alarm acknowledged username
+ type: string
+ - contextPath: Alarm.alarmName
+ description: Alarm name
+ type: string
+ - contextPath: Alarm.conditionType
+ description: Alarm condition type
+ type: number
+ - arguments:
+ - default: false
+    description: Filter for cases opened within this period. Given in the format "<number> <time unit>", e.g., 1 day, 30 minutes, 2 weeks, 6 months, 1 year
+ isArray: false
+ name: since
+ required: false
+ secret: false
+ deprecated: false
+ description: Get a list of cases from the system
+ execution: false
+ name: esm-get-case-list
+ outputs:
+ - contextPath: Case.ID
+ description: The ID of the case
+ type: number
+ - contextPath: Case.Summary
+ description: The summary of the case
+ type: string
+ - contextPath: Case.Status
+ description: The status of the case
+ type: string
+ - contextPath: Case.OpenTime
+ description: The open time of the case
+ type: date
+ - contextPath: Case.Severity
+ description: The severity of the case
+ type: number
+ - contextPath: Case.Assignee
+ description: The Assignee of the case
+ type: string
+ - contextPath: Case.Organization
+ description: The organization of the case
+ type: string
+ - contextPath: Case.EventList
+ description: List of case's events
+ type: Unknown
+ - contextPath: Case.Notes
+ description: List of case's notes
+ type: unknown
+ - arguments:
+ - default: true
+ description: the name of the case
+ isArray: false
+ name: summary
+ required: true
+ secret: false
+ - default: false
+ defaultValue: Open
+ description: the status of the case (use `esm-get-case-statuses` to view all
+ statuses)
+ isArray: false
+ name: status
+ required: false
+ secret: false
+ - default: false
+ defaultValue: me
+ description: who the case is assigned to
+ isArray: false
+ name: assignee
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '1'
+ description: the severity of the case (1 - 100)
+ isArray: false
+ name: severity
+ required: false
+ secret: false
+ - default: false
+ description: the organization assigned to the case (use `esm-get-organization-list`
+      to view all organizations)
+ isArray: false
+ name: organization
+ required: false
+ secret: false
+ deprecated: false
+ description: Add a case to the system
+ execution: false
+ name: esm-add-case
+ outputs:
+ - contextPath: Case.ID
+ description: The ID of the case
+ type: number
+ - contextPath: Case.Summary
+ description: The summary of the case
+ type: string
+ - contextPath: Case.Status
+ description: The status of the case
+ type: string
+ - contextPath: Case.OpenTime
+ description: The open time of the case
+ type: date
+ - contextPath: Case.Severity
+ description: The severity of the case
+ type: number
+ - contextPath: Case.Assignee
+ description: The Assignee of the case
+ type: string
+ - contextPath: Case.Organization
+ description: The organization of the case
+ type: string
+ - contextPath: Case.EventList
+ description: List of case's events
+ type: Unknown
+ - contextPath: Case.Notes
+ description: List of case's notes
+ type: Unknown
+ - arguments:
+ - default: true
+ description: the id of the case
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ description: the name of the case
+ isArray: false
+ name: summary
+ required: false
+ secret: false
+ - default: false
+ description: the new severity of the case (1 - 100)
+ isArray: false
+ name: severity
+ required: false
+ secret: false
+ - default: false
+ description: who the case should be assigned to
+ isArray: false
+ name: assignee
+ required: false
+ secret: false
+ - default: false
+ description: the new status of the case (use `esm-get-case-statuses` to view
+ all statuses)
+ isArray: false
+ name: status
+ required: false
+ secret: false
+ - default: false
+ description: the organization assigned to the case (use `esm-get-organization-list`
+      to view all organizations)
+ isArray: false
+ name: organization
+ required: false
+ secret: false
+ deprecated: false
+ description: Edit an existing case
+ execution: false
+ name: esm-edit-case
+ outputs:
+ - contextPath: Case.ID
+ description: The ID of the case
+ type: number
+ - contextPath: Case.Summary
+ description: The summary of the case
+ type: string
+ - contextPath: Case.Status
+ description: The status of the case
+ type: string
+ - contextPath: Case.OpenTime
+ description: The open time of the case
+ type: date
+ - contextPath: Case.Severity
+ description: The severity of the case
+ type: number
+ - contextPath: Case.Assignee
+ description: The Assignee of the case
+ type: string
+ - contextPath: Case.Organization
+ description: The organization of the case
+ type: string
+ - contextPath: Case.EventList
+ description: List of case's events
+ type: Unknown
+ - contextPath: Case.Notes
+ description: List of case's notes
+ type: Unknown
+ - deprecated: false
+ description: Get a list of valid case statuses from the system
+ execution: false
+ name: esm-get-case-statuses
+ - arguments:
+ - default: true
+ description: the name of the case status to edit
+ isArray: false
+ name: original_name
+ required: true
+ secret: false
+ - default: false
+ description: the new name for the case status
+ isArray: false
+ name: new_name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: whether the status should be shown in the case pane
+ isArray: false
+ name: show_in_case_pane
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Edit a case status
+ execution: false
+ name: esm-edit-case-status
+ - arguments:
+ - default: true
+ description: the ID of the case
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Get detail on an existing case
+ execution: false
+ name: esm-get-case-detail
+ outputs:
+ - contextPath: Case.ID
+ description: The ID of the case
+ type: number
+ - contextPath: Case.Summary
+ description: The summary of the case
+ type: string
+ - contextPath: Case.Status
+ description: The status of the case
+ type: string
+ - contextPath: Case.OpenTime
+ description: The open time of the case
+ type: date
+ - contextPath: Case.Severity
+ description: The severity of the case
+ type: number
+ - contextPath: Case.Assignee
+ description: The Assignee of the case
+ type: string
+ - contextPath: Case.Organization
+ description: The organization of the case
+ type: string
+ - contextPath: Case.EventList
+ description: List of case's events
+ type: Unknown
+ - contextPath: Case.Notes
+ description: List of case's notes
+ type: Unknown
+ - arguments:
+ - default: false
+ description: Comma separated list of event IDs
+ isArray: false
+ name: ids
+ required: true
+ secret: false
+ deprecated: false
+ description: Get case events details
+ execution: false
+ name: esm-get-case-event-list
+ outputs:
+ - contextPath: CaseEvents.ID
+ description: The ID of the event
+ type: string
+ - contextPath: CaseEvents.LastTime
+ description: The last updated time of the event
+ type: date
+ - contextPath: CaseEvents.Message
+      description: The message of the event
+ type: string
+ - arguments:
+ - default: true
+ description: the name of the case status
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'True'
+    description: whether the status should be shown in the case pane
+ isArray: false
+ name: show_in_case_pane
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Add a case status
+ execution: false
+ name: esm-add-case-status
+ - arguments:
+ - default: true
+ description: the name of the case status to delete
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Delete a case status
+ execution: false
+ name: esm-delete-case-status
+ - deprecated: false
+ description: Get case organization
+ execution: false
+ name: esm-get-organization-list
+ outputs:
+ - contextPath: Organizations.ID
+ description: Organization ID
+ type: number
+ - contextPath: Organizations.Name
+ description: Organization Name
+ type: string
+ - deprecated: false
+ description: Get a list of all users.
+ execution: false
+ name: esm-get-user-list
+ outputs:
+ - contextPath: EsmUser.ID
+ description: the ID of the user
+ type: number
+ - contextPath: EsmUser.Name
+      description: the ESM username
+ type: string
+ - contextPath: EsmUser.Email
+ description: the e-mail of the user
+ type: string
+ - contextPath: EsmUser.SMS
+ description: the SMS details of the user
+ type: string
+ - contextPath: EsmUser.IsMaster
+ description: whether the user is a master user
+ type: boolean
+ - contextPath: EsmUser.IsAdmin
+ description: whether the user is an admin
+ type: boolean
+ - arguments:
+ - default: true
+    description: comma-separated list of triggered alarm IDs to be marked as acknowledged
+ isArray: true
+ name: alarmIds
+ required: true
+ secret: false
+ deprecated: false
+ description: Mark triggered alarms as acknowledged
+ execution: false
+ name: esm-acknowledge-alarms
+ - arguments:
+ - default: true
+ description: Comma-separated list of triggered alarm IDs to mark as unacknowledged
+ isArray: true
+ name: alarmIds
+ required: true
+ secret: false
+ deprecated: false
+ description: Mark triggered alarms as unacknowledged
+ execution: false
+ name: esm-unacknowledge-alarms
+ - arguments:
+ - default: true
+ description: Comma-separated list of triggered alarm IDs to delete
+ isArray: true
+ name: alarmIds
+ required: true
+ secret: false
+ deprecated: false
+ description: Delete triggered alarms
+ execution: false
+ name: esm-delete-alarms
+ - arguments:
+ - default: true
+ description: The event to get the details for. The ID can be retrieved using the esm-list-alarm-events command.
+ isArray: false
+ name: eventId
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets the details for the triggered alarm
+ execution: false
+ name: esm-get-alarm-event-details
+ outputs:
+ - contextPath: EsmAlarmEvent.ID
+ description: Event ID
+ type: string
+ - contextPath: EsmAlarmEvent.SubType
+ description: Event Type
+ type: string
+ - contextPath: EsmAlarmEvent.Severity
+ description: Event Severity
+ type: number
+ - contextPath: EsmAlarmEvent.Message
+ description: Event Message
+ type: string
+ - contextPath: EsmAlarmEvent.LastTime
+ description: Event Time
+ type: date
+ - contextPath: EsmAlarmEvent.SrcIP
+ description: Source IP of the event
+ type: string
+ - contextPath: EsmAlarmEvent.DstIP
+ description: Destination IP of the event
+ type: string
+ - contextPath: EsmAlarmEvent.Cases
+ description: A list of cases related to the event
+ type: Unknown
+ - contextPath: EsmAlarmEvent.Cases.ID
+ description: Case ID
+ type: string
+ - contextPath: EsmAlarmEvent.Cases.OpenTime
+ description: Case creation time
+ type: date
+ - contextPath: EsmAlarmEvent.Cases.Severity
+ description: Case severity
+ type: number
+ - contextPath: EsmAlarmEvent.Cases.Status
+ description: Case status
+ type: string
+ - contextPath: EsmAlarmEvent.Cases.Summary
+ description: Case summary
+ type: string
+ - contextPath: EsmAlarmEvent.DstMac
+ description: Destination MAC of the event
+ type: string
+ - contextPath: EsmAlarmEvent.SrcMac
+ description: Source MAC of the event
+ type: string
+ - contextPath: EsmAlarmEvent.DstPort
+ description: Destination port of the event
+ type: string
+ - contextPath: EsmAlarmEvent.SrcPort
+ description: Source port of the event
+ type: string
+ - contextPath: EsmAlarmEvent.FirstTime
+ description: The first time for the event
+ type: date
+ - contextPath: EsmAlarmEvent.NormalizedDescription
+ description: Normalized description of the event
+ type: string
+ - arguments:
+ - default: true
+ description: The alarm to get the details for. The ID can be retrieved using the esm-fetch-alarms command.
+ isArray: false
+ name: alarmId
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets an event list related to the alarm
+ execution: false
+ name: esm-list-alarm-events
+ outputs:
+ - contextPath: EsmAlarmEvent.ID
+ description: Event ID
+ type: string
+ - contextPath: EsmAlarmEvent.SubType
+ description: Event Type
+ type: string
+ - contextPath: EsmAlarmEvent.Severity
+ description: Event Severity
+ type: number
+ - contextPath: EsmAlarmEvent.Message
+ description: Event Message
+ type: string
+ - contextPath: EsmAlarmEvent.LastTime
+ description: Event Time
+ type: date
+ - contextPath: EsmAlarmEvent.SrcIP
+ description: Source IP of the event
+ type: string
+ - contextPath: EsmAlarmEvent.DstIP
+ description: Destination IP of the event
+ type: string
+ - contextPath: EsmAlarmEvent.Cases
+ description: A list of cases related to the event
+ type: Unknown
+ - contextPath: EsmAlarmEvent.Cases.ID
+ description: Case ID
+ type: string
+ - contextPath: EsmAlarmEvent.Cases.OpenTime
+ description: Case creation time
+ type: date
+ - contextPath: EsmAlarmEvent.Cases.Severity
+ description: Case severity
+ type: number
+ - contextPath: EsmAlarmEvent.Cases.Status
+ description: Case status
+ type: string
+ - contextPath: EsmAlarmEvent.Cases.Summary
+ description: Case summary
+ type: string
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
diff --git a/Integrations/McAfee_ESM-v10/McAfee_ESM-v10_description.md b/Integrations/McAfee_ESM-v10/McAfee_ESM-v10_description.md
new file mode 100644
index 000000000000..dfe82645274e
--- /dev/null
+++ b/Integrations/McAfee_ESM-v10/McAfee_ESM-v10_description.md
@@ -0,0 +1 @@
+The Timezone parameter should be set according to the timezone the McAfee ESM uses. For example, if the alarms in the ESM are at -0600, the timezone should be set to -6.
\ No newline at end of file
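The offset-to-hours rule described above is simple enough to express in code. A minimal sketch, assuming the offset arrives as an RFC 2822-style string such as `-0600` (the helper name is illustrative, not part of the integration):

```python
# Hypothetical helper, for illustration only: converts a UTC offset string
# such as "-0600" into the integer hours expected by the Timezone parameter.
def utc_offset_to_hours(offset: str) -> int:
    sign = -1 if offset.startswith('-') else 1
    return sign * int(offset.lstrip('+-')[:2])

assert utc_offset_to_hours('-0600') == -6
assert utc_offset_to_hours('+0300') == 3
```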
diff --git a/Integrations/McAfee_ESM-v10/McAfee_ESM-v10_image.png b/Integrations/McAfee_ESM-v10/McAfee_ESM-v10_image.png
new file mode 100644
index 000000000000..5bf83f1bfbec
Binary files /dev/null and b/Integrations/McAfee_ESM-v10/McAfee_ESM-v10_image.png differ
diff --git a/Integrations/MicrosoftGraphMail/CHANGELOG.md b/Integrations/MicrosoftGraphMail/CHANGELOG.md
new file mode 100644
index 000000000000..6958b7c3f0c1
--- /dev/null
+++ b/Integrations/MicrosoftGraphMail/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+ - Improved the description of the *search* argument in ***msgraph-mail-list-emails*** command.
+ - Fixed an issue where the ***msgraph-mail-delete-email*** command always returned an error.
diff --git a/Integrations/MicrosoftGraphMail/MicrosoftGraphMail.py b/Integrations/MicrosoftGraphMail/MicrosoftGraphMail.py
new file mode 100644
index 000000000000..38330da9ce2b
--- /dev/null
+++ b/Integrations/MicrosoftGraphMail/MicrosoftGraphMail.py
@@ -0,0 +1,635 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+from typing import Union, Optional
+
+
+''' IMPORTS '''
+import requests
+import base64
+import os
+import binascii
+from cryptography.hazmat.primitives.ciphers.aead import AESGCM
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+PARAMS = demisto.params()
+TENANT_ID = PARAMS.get('tenant_id')
+AUTH_AND_TOKEN_URL = PARAMS.get('auth_id', '').split('@')
+AUTH_ID = AUTH_AND_TOKEN_URL[0]
+ENC_KEY = PARAMS.get('enc_key')
+if len(AUTH_AND_TOKEN_URL) != 2:
+ TOKEN_RETRIEVAL_URL = 'https://oproxy.demisto.ninja/obtain-token' # disable-secrets-detection
+else:
+ TOKEN_RETRIEVAL_URL = AUTH_AND_TOKEN_URL[1]
+# Remove trailing slash to prevent wrong URL path to service
+URL = PARAMS.get('url', '')
+SERVER = URL[:-1] if (URL and URL.endswith('/')) else URL
+# Service base URL
+BASE_URL = SERVER + '/v1.0'
+APP_NAME = 'ms-graph-mail'
+
+USE_SSL = not PARAMS.get('insecure', False)
+# Remove proxy if not set to true in params
+if not PARAMS.get('proxy'):
+ os.environ.pop('HTTP_PROXY', '')
+ os.environ.pop('HTTPS_PROXY', '')
+ os.environ.pop('http_proxy', '')
+ os.environ.pop('https_proxy', '')
+
+''' HELPER FUNCTIONS '''
+
+
+def epoch_seconds(d: datetime = None) -> int:
+ """
+ Return the number of seconds for given date. If no date, return current.
+
+ Args:
+ d (datetime): timestamp
+ Returns:
+ int: timestamp in epoch
+ """
+ if not d:
+ d = datetime.utcnow()
+ return int((d - datetime.utcfromtimestamp(0)).total_seconds())
+
+
+def get_encrypted(content: str, key: str) -> str:
+ """
+
+ Args:
+ content (str): content to encrypt. For a request to Demistobot for a new access token, content should be
+ the tenant id
+ key (str): encryption key from Demistobot
+
+ Returns:
+ encrypted timestamp:content
+ """
+ def create_nonce() -> bytes:
+ return os.urandom(12)
+
+ def encrypt(string: str, enc_key: str) -> bytes:
+ """
+
+ Args:
+ enc_key (str):
+ string (str):
+
+ Returns:
+ bytes:
+ """
+ # String to bytes
+ enc_key = base64.b64decode(enc_key)
+ # Create key
+ aes_gcm = AESGCM(enc_key)
+ # Create nonce
+ nonce = create_nonce()
+ # Create ciphered data
+ data = string.encode()
+ ct = aes_gcm.encrypt(nonce, data, None)
+ return base64.b64encode(nonce + ct)
+ now = epoch_seconds()
+ encrypted = encrypt(f'{now}:{content}', key).decode('utf-8')
+ return encrypted
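Since `get_encrypted` prepends a 12-byte nonce to the AES-GCM ciphertext before base64-encoding, the receiving side can recover the `timestamp:content` string by reversing those steps. A minimal sketch of the inverse, for illustration only (the real decryption happens server-side):

```python
import base64
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

def get_decrypted(payload: str, key: str) -> str:
    # Illustrative inverse of get_encrypted: the first 12 bytes of the
    # decoded payload are the nonce, the remainder is the ciphertext.
    raw = base64.b64decode(payload)
    nonce, ct = raw[:12], raw[12:]
    aes_gcm = AESGCM(base64.b64decode(key))
    return aes_gcm.decrypt(nonce, ct, None).decode('utf-8')
```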
+
+
+def get_access_token():
+ integration_context = demisto.getIntegrationContext()
+ access_token = integration_context.get('access_token')
+ valid_until = integration_context.get('valid_until')
+ if access_token and valid_until:
+ if epoch_seconds() < valid_until:
+ return access_token
+ headers = {'Accept': 'application/json'}
+
+ dbot_response = requests.post(
+ TOKEN_RETRIEVAL_URL,
+ headers=headers,
+ data=json.dumps({
+ 'app_name': APP_NAME,
+ 'registration_id': AUTH_ID,
+ 'encrypted_token': get_encrypted(TENANT_ID, ENC_KEY)
+ }),
+ verify=USE_SSL
+ )
+ if dbot_response.status_code not in {200, 201}:
+ msg = 'Error in authentication. Try checking the credentials you entered.'
+ try:
+ demisto.info('Authentication failure from server: {} {} {}'.format(
+ dbot_response.status_code, dbot_response.reason, dbot_response.text))
+ err_response = dbot_response.json()
+ server_msg = err_response.get('message')
+ if not server_msg:
+ title = err_response.get('title')
+ detail = err_response.get('detail')
+ if title:
+ server_msg = f'{title}. {detail}'
+ if server_msg:
+ msg += ' Server message: {}'.format(server_msg)
+ except Exception as ex:
+ demisto.error('Failed parsing error response - Exception: {}'.format(ex))
+ raise Exception(msg)
+ try:
+ gcloud_function_exec_id = dbot_response.headers.get('Function-Execution-Id')
+ demisto.info(f'Google Cloud Function Execution ID: {gcloud_function_exec_id}')
+ parsed_response = dbot_response.json()
+ except ValueError:
+ raise Exception(
+ 'There was a problem in retrieving an updated access token.\n'
+ 'The response from the Demistobot server did not contain the expected content.'
+ )
+ access_token = parsed_response.get('access_token')
+ expires_in = parsed_response.get('expires_in', 3595)
+ time_now = epoch_seconds()
+ time_buffer = 5 # seconds by which to shorten the validity period
+ if expires_in - time_buffer > 0:
+ # err on the side of caution with a slightly shorter access token validity period
+ expires_in = expires_in - time_buffer
+
+ demisto.setIntegrationContext({
+ 'access_token': access_token,
+ 'valid_until': time_now + expires_in
+ })
+ return access_token
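The caching logic above boils down to stamping `valid_until` slightly early and reusing the token while the clock has not passed it. A minimal sketch of the same bookkeeping, with a plain dict standing in for the integration context (names are illustrative):

```python
_context: dict = {}  # stand-in for demisto.getIntegrationContext()

def cache_token(token: str, expires_in: int, now: int) -> None:
    buffer = 5  # shave a few seconds so a token is never used at the edge of expiry
    if expires_in - buffer > 0:
        expires_in -= buffer
    _context['access_token'] = token
    _context['valid_until'] = now + expires_in

def cached_token(now: int):
    if _context.get('access_token') and now < _context.get('valid_until', 0):
        return _context['access_token']
    return None  # caller must request a fresh token
```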
+
+
+def error_parser(resp_err: requests.Response) -> str:
+ """
+
+ Args:
+        resp_err (requests.Response): response containing the error
+
+ Returns:
+ str: string of error
+
+ """
+ try:
+ response = resp_err.json()
+        error = response.get('error', {})
+        if error:
+            return f"{error.get('code')}: {error.get('message')}"
+        # No error message in the response body
+        raise ValueError
+ except ValueError:
+ return resp_err.text
+
+
+def http_request(method: str, url_suffix: str = '', params: dict = None, data: dict = None, odata: str = None,
+ url: str = None) -> dict:
+ """
+    A wrapper around the requests lib that attaches the access token and the
+    standard headers, and handles error responses.
+
+    Args:
+        method (str): any RESTful method
+        url_suffix (str): suffix to append to BASE_URL
+        params (dict): http params
+        data (dict): http body
+        odata (str): OData query to append to the URL
+        url (str): full URL that, when provided, is used instead of BASE_URL + url_suffix (e.g. an @odata.nextLink)
+
+ Returns:
+ dict: requests.json()
+ """
+ token = get_access_token()
+ headers = {
+ 'Authorization': f'Bearer {token}',
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+ }
+
+ if odata:
+ url_suffix += odata
+ res = requests.request(
+ method,
+ url if url else BASE_URL + url_suffix,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ headers=headers
+ )
+ # Handle error responses gracefully
+ if not (199 < res.status_code < 299):
+ error = error_parser(res)
+ return_error(f'Error in API call to Microsoft Graph Mail Integration [{res.status_code}] - {error}')
+ try:
+ if method.lower() != 'delete': # the DELETE request returns nothing in response
+ return res.json()
+ return {}
+ except ValueError:
+ return_error('Could not decode response from API')
+ return {} # return_error will exit
+
+
+def assert_pages(pages: Union[str, int]) -> int:
+ """
+
+    Args:
+        pages (str or int): the number of pages to pull
+
+    Returns:
+        int: the page count, defaulting to 1 when the input is not numeric
+
+ """
+ if isinstance(pages, str) and pages.isdigit():
+ return int(pages)
+ elif isinstance(pages, int):
+ return pages
+ return 1
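For reference, the argument shapes `assert_pages` accepts:

```python
assert assert_pages(3) == 3       # already an int
assert assert_pages('4') == 4     # numeric string
assert assert_pages('many') == 1  # non-numeric string falls back to 1
assert assert_pages(None) == 1    # missing value falls back to 1
```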
+
+
+def build_folders_path(folder_string: str) -> Optional[str]:
+ """
+
+ Args:
+        folder_string (str): comma-delimited folder IDs; the first is the mailFolders root, the rest nest under childFolders
+
+ Returns:
+ str or None: string with path to the folder and child folders
+ """
+ if isinstance(folder_string, str):
+ path = 'mailFolders/'
+ folders_list = argToList(folder_string, ',')
+ first = True
+ for folder in folders_list:
+ if first:
+ path += folder
+ first = False
+ else:
+ path += f'/childFolders/{folder}'
+ return path
+ return None
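The path layout `build_folders_path` produces: the first ID becomes the `mailFolders` root and every subsequent ID nests under `childFolders`:

```python
assert build_folders_path('inbox_id') == 'mailFolders/inbox_id'
assert build_folders_path('a,b,c') == 'mailFolders/a/childFolders/b/childFolders/c'
```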
+
+
+def pages_puller(response: dict, page_count: int) -> list:
+    """ Gets the first response from the API and pulls additional pages
+
+    Args:
+        response (dict): first page returned by the API
+        page_count (int): maximum number of pages to pull
+
+    Returns:
+        list: list of all pages
+    """
+    responses = [response]
+    i = page_count
+    while i != 0:
+        next_link = response.get('@odata.nextLink')
+        if next_link:
+            # Advance to the newly fetched page so the next iteration follows
+            # its nextLink instead of re-fetching the same page
+            response = http_request('GET', url=next_link)
+            responses.append(response)
+        else:
+            return responses
+        i -= 1
+    return responses
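The pages this function walks have the following shape (values are illustrative): results live under `value`, and a fully qualified `@odata.nextLink` URL is present only while more pages exist:

```python
first_page = {
    'value': [{'id': 'AAA'}, {'id': 'BBB'}],
    '@odata.nextLink': 'https://graph.microsoft.com/v1.0/users/me/messages?$skip=10',  # illustrative
}
last_page = {'value': [{'id': 'CCC'}]}  # no nextLink, so pulling stops here
```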
+
+
+def build_mail_object(raw_response: Union[dict, list], user_id: str, get_body: bool = False) -> Union[dict, list]:
+ """Building mail entry context
+ Getting a list from build_mail_object
+
+ Args:
+ user_id (str): user id of the mail
+ get_body (bool): should get body
+ raw_response (dict or list): list of pages
+
+ Returns:
+ dict or list: output context
+ """
+
+ def build_mail(given_mail: dict) -> dict:
+ """
+
+ Args:
+ given_mail (dict):
+
+ Returns:
+ dict:
+ """
+ # Dicts
+ mail_properties = {
+ 'ID': 'id',
+ 'Created': 'createdDateTime',
+ 'LastModifiedTime': 'lastModifiedDateTime',
+ 'ReceivedTime': 'receivedDateTime',
+ 'SendTime': 'sentDateTime',
+ 'Categories': 'categories',
+ 'HasAttachments': 'hasAttachments',
+ 'Subject': 'subject',
+ 'IsDraft': 'isDraft'
+ }
+
+ contact_properties = {
+ 'Sender': 'sender',
+ 'From': 'from',
+ 'CCRecipients': 'ccRecipients',
+ 'BCCRecipients': 'bccRecipients',
+ 'ReplyTo': 'replyTo'
+ }
+
+ # Create entry properties
+ entry = {k: given_mail.get(v) for k, v in mail_properties.items()}
+
+ # Create contacts properties
+ entry.update(
+ {k: build_contact(given_mail.get(v)) for k, v in contact_properties.items()} # type: ignore
+ )
+
+ if get_body:
+ entry['Body'] = given_mail.get('body', {}).get('content')
+ entry['UserID'] = user_id
+ return entry
+
+ def build_contact(contacts: Union[dict, list, str]) -> object:
+ """Building contact object
+
+ Args:
+ contacts (list or dict or str):
+
+ Returns:
+ dict or list[dict] or str or None: describing contact
+ """
+ if contacts:
+ if isinstance(contacts, list):
+ return [build_contact(contact) for contact in contacts]
+ elif isinstance(contacts, dict):
+ email = contacts.get('emailAddress')
+ if email and isinstance(email, dict):
+ return {
+ 'Name': email.get('name'),
+ 'Address': email.get('address')
+ }
+ return None
+
+ mails_list = list()
+ if isinstance(raw_response, list):
+ for page in raw_response:
+ # raw_response can be a list containing multiple pages or one response
+            # if 'value' is present, the page holds a list of mails
+ value = page.get('value')
+ if value:
+ for mail in value:
+ mails_list.append(build_mail(mail))
+ else:
+ mails_list.append(build_mail(page))
+ elif isinstance(raw_response, dict):
+ return build_mail(raw_response)
+ return mails_list
+
+
+def file_result_creator(raw_response: dict) -> dict:
+ """
+
+ Args:
+ raw_response (dict):
+
+ Returns:
+ dict:
+
+ """
+ name = raw_response.get('name')
+ data = raw_response.get('contentBytes')
+ try:
+ data = base64.b64decode(data) # type: ignore
+ return fileResult(name, data)
+ except binascii.Error:
+ return_error('Attachment could not be decoded')
+ return {} # return_error will exit
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def list_mails(user_id: str, folder_id: str = '', search: str = None, odata: str = None) -> Union[dict, list]:
+ """Returning all mails from given user
+
+ Args:
+ user_id (str):
+ folder_id (str):
+ search (str):
+ odata (str):
+
+ Returns:
+ dict or list:
+ """
+ no_folder = f'/users/{user_id}/messages/'
+    with_folder = f'/users/{user_id}/{build_folders_path(folder_id)}/messages/'
+ pages_to_pull = demisto.args().get('pages_to_pull', 1)
+
+ if search:
+ odata = f'?{odata}$search={search}' if odata else f'?$search={search}'
+ suffix = with_folder if folder_id else no_folder
+ response = http_request('GET', suffix, odata=odata)
+ return pages_puller(response, assert_pages(pages_to_pull))
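Concretely, for a search with no extra OData the suffix is assembled like this (values are illustrative):

```python
search, odata = 'invoice', None
odata = f'?{odata}$search={search}' if odata else f'?$search={search}'
assert odata == '?$search=invoice'
# The request then goes to /users/<user_id>/messages/?$search=invoice
```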
+
+
+def list_mails_command():
+ search = demisto.args().get('search')
+ user_id = demisto.args().get('user_id')
+ folder_id = demisto.args().get('folder_id')
+ odata = demisto.args().get('odata')
+
+ raw_response = list_mails(user_id, folder_id=folder_id, search=search, odata=odata)
+ mail_context = build_mail_object(raw_response, user_id)
+    entry_context = {'MSGraphMail(val.ID === obj.ID)': mail_context}
+
+ # human_readable builder
+ human_readable = tableToMarkdown(
+        f'Total of {len(mail_context)} mails received',
+ mail_context,
+ headers=['Subject', 'From', 'SendTime']
+ )
+ return_outputs(human_readable, entry_context, raw_response)
+
+
+def delete_mail(user_id: str, message_id: str, folder_id: str = None) -> bool:
+ """
+
+ Args:
+ user_id (str):
+ message_id (str):
+ folder_id (str):
+
+ Returns:
+ bool
+ """
+ with_folder = f'/users/{user_id}/{build_folders_path(folder_id)}/messages/{message_id}' # type: ignore
+ no_folder = f'/users/{user_id}/messages/{message_id}'
+ suffix = with_folder if folder_id else no_folder
+ http_request('DELETE', suffix)
+ return True
+
+
+def delete_mail_command():
+ user_id = demisto.args().get('user_id')
+ folder_id = demisto.args().get('folder_id')
+ message_id = demisto.args().get('message_id')
+ delete_mail(user_id, message_id, folder_id)
+
+ human_readable = tableToMarkdown(
+ 'Message has been deleted successfully',
+ {
+ 'Message ID': message_id,
+ 'User ID': user_id,
+ 'Folder ID': folder_id
+ },
+ headers=['Message ID', 'User ID', 'Folder ID'],
+ removeNull=True
+ )
+
+ entry_context = {} # type: ignore
+
+ return_outputs(human_readable, entry_context)
+
+
+def get_attachment(message_id: str, user_id: str, attachment_id: str, folder_id: str = None) -> dict:
+ """
+
+ Args:
+ message_id (str):
+        user_id (str):
+ attachment_id (str):
+ folder_id (str):
+
+ Returns:
+ dict:
+ """
+ no_folder = f'/users/{user_id}/messages/{message_id}/attachments/{attachment_id}'
+ with_folder = (f'/users/{user_id}/{build_folders_path(folder_id)}/' # type: ignore
+ f'messages/{message_id}/attachments/{attachment_id}')
+ suffix = with_folder if folder_id else no_folder
+ response = http_request('GET', suffix)
+ return response
+
+
+def get_attachment_command():
+ message_id = demisto.args().get('message_id')
+ user_id = demisto.args().get('user_id')
+ folder_id = demisto.args().get('folder_id')
+ attachment_id = demisto.args().get('attachment_id')
+ raw_response = get_attachment(message_id, user_id, folder_id=folder_id, attachment_id=attachment_id)
+ entry_context = file_result_creator(raw_response)
+ demisto.results(entry_context)
+
+
+def get_message(user_id: str, message_id: str, folder_id: str = None, odata: str = None) -> dict:
+ """
+
+ Args:
+ user_id (str): User ID to pull message from
+ message_id (str): Message ID to pull
+        folder_id (str): Folder ID to pull from
+        odata (str): OData query
+
+    Returns:
+ dict: request json
+ """
+ no_folder = f'/users/{user_id}/messages/{message_id}/'
+ with_folder = (f'/users/{user_id}/{build_folders_path(folder_id)}' # type: ignore
+ f'/messages/{message_id}/')
+
+ suffix = with_folder if folder_id else no_folder
+ response = http_request('GET', suffix, odata=odata)
+
+ # Add user ID
+ response['userId'] = user_id
+ return response
+
+
+def get_message_command():
+ user_id = demisto.args().get('user_id')
+ folder_id = demisto.args().get('folder_id')
+ message_id = demisto.args().get('message_id')
+ get_body = demisto.args().get('get_body') == 'true'
+ odata = demisto.args().get('odata')
+ raw_response = get_message(user_id, message_id, folder_id, odata=odata)
+ mail_context = build_mail_object(raw_response, user_id=user_id, get_body=get_body)
+ entry_context = {'MSGraphMail(val.ID === obj.ID)': mail_context}
+ human_readable = tableToMarkdown(
+ f'Results for message ID {message_id}',
+ mail_context,
+ headers=['ID', 'Subject', 'SendTime', 'Sender', 'From', 'HasAttachments', 'Body']
+ )
+ return_outputs(
+ human_readable,
+ entry_context,
+ raw_response=raw_response
+ )
+
+
+def list_attachments(user_id: str, message_id: str, folder_id: str) -> dict:
+ """Listing all the attachments
+
+ Args:
+ user_id (str):
+ message_id (str):
+ folder_id (str):
+
+ Returns:
+ dict:
+ """
+ no_folder = f'/users/{user_id}/messages/{message_id}/attachments/'
+ with_folder = f'/users/{user_id}/{build_folders_path(folder_id)}/messages/{message_id}/attachments/'
+ suffix = with_folder if folder_id else no_folder
+ return http_request('GET', suffix)
+
+
+def list_attachments_command():
+ user_id = demisto.args().get('user_id')
+ message_id = demisto.args().get('message_id')
+ folder_id = demisto.args().get('folder_id')
+ raw_response = list_attachments(user_id, message_id, folder_id)
+ attachments = raw_response.get('value')
+ if attachments:
+ attachment_list = [{
+ 'ID': attachment.get('id'),
+ 'Name': attachment.get('name'),
+ 'Type': attachment.get('contentType')
+ } for attachment in attachments]
+ attachment_entry = {'ID': message_id, 'Attachment': attachment_list, 'UserID': user_id}
+ entry_context = {'MSGraphMailAttachment(val.ID === obj.ID)': attachment_entry}
+
+ # Build human readable
+ file_names = [attachment.get('Name') for attachment in attachment_list if isinstance(
+ attachment, dict) and attachment.get('Name')]
+ human_readable = tableToMarkdown(
+ f'Total of {len(attachment_list)} attachments found in message {message_id} from user {user_id}',
+ {'File names': file_names}
+ )
+ return_outputs(human_readable, entry_context, raw_response)
+ else:
+ human_readable = f'### No attachments found in message {message_id}'
+ return_outputs(human_readable, dict(), raw_response)
+
+
+def main():
+ """ COMMANDS MANAGER / SWITCH PANEL """
+ command = demisto.command()
+ LOG(f'Command being called is {command}')
+
+ try:
+ if command == 'test-module':
+ get_access_token()
+ demisto.results('ok')
+ elif command in ('msgraph-mail-list-emails', 'msgraph-mail-search-email'):
+ list_mails_command()
+ elif command == 'msgraph-mail-get-email':
+ get_message_command()
+ elif command == 'msgraph-mail-delete-email':
+ delete_mail_command()
+ elif command == 'msgraph-mail-list-attachments':
+ list_attachments_command()
+ elif command == 'msgraph-mail-get-attachment':
+ get_attachment_command()
+ # Log exceptions
+ except Exception as e:
+ return_error(str(e))
+
+
+if __name__ == "builtins":
+ main()
diff --git a/Integrations/MicrosoftGraphMail/MicrosoftGraphMail.yml b/Integrations/MicrosoftGraphMail/MicrosoftGraphMail.yml
new file mode 100644
index 000000000000..d899566741fc
--- /dev/null
+++ b/Integrations/MicrosoftGraphMail/MicrosoftGraphMail.yml
@@ -0,0 +1,372 @@
+category: Email Gateway
+commonfields:
+ id: MicrosoftGraphMail
+ version: -1
+configuration:
+- defaultvalue: https://graph.microsoft.com
+ display: Server URL
+ name: url
+ required: true
+ type: 0
+- display: ID (received from the admin consent - see Detailed Instructions (?) section)
+ name: auth_id
+ required: true
+ type: 4
+- display: Token (received from the admin consent - see Detailed Instructions (?)
+ section)
+ name: tenant_id
+ required: true
+ type: 4
+- display: Key (received from the admin consent - see Detailed Instructions (?) section)
+ name: enc_key
+ required: true
+ type: 4
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Microsoft Graph lets your app get authorized access to a user's Outlook
+ mail data in a personal or organization account.
+display: Microsoft Graph Mail
+name: MicrosoftGraphMail
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: User ID or principal ID (usually an email address) from which to pull mails.
+ isArray: true
+ name: user_id
+ required: true
+ secret: false
+ - default: false
+ description: 'A CSV list of folder IDs, in the format: (mail_box,child_mail_box,child_mail_box).'
+ isArray: false
+ name: folder_id
+ required: false
+ secret: false
+ - default: false
+ description: Add an OData query.
+ isArray: false
+ name: odata
+ required: false
+ secret: false
+ - default: false
+ description: 'The term for which to search. This argument cannot contain reserved characters such as !, $, #, @, etc.
+ For further information, see https://tools.ietf.org/html/rfc3986#section-2.2'
+ isArray: false
+ name: search
+ required: false
+ secret: false
+ - default: true
+ defaultValue: '1'
+ description: The number of pages of emails to pull (maximum is 10 emails per
+ page).
+ isArray: false
+ name: pages_to_pull
+ required: false
+ secret: false
+ deprecated: false
+ description: Gets the properties of emails.
+ execution: false
+ name: msgraph-mail-list-emails
+ outputs:
+ - contextPath: MSGraphMail.ID
+ description: ID of email.
+ type: String
+ - contextPath: MSGraphMail.Created
+ description: Time of email creation.
+ type: Date
+ - contextPath: MSGraphMail.LastModifiedTime
+ description: Time the email was last modified.
+ type: Date
+ - contextPath: MSGraphMail.ReceivedTime
+ description: Time the email was received.
+ type: Date
+ - contextPath: MSGraphMail.SendTime
+ description: Time the email was sent.
+ type: Date
+ - contextPath: MSGraphMail.Categories
+ description: Categories of the email.
+ type: String
+ - contextPath: MSGraphMail.HasAttachments
+ description: Whether the email has attachments.
+ type: Boolean
+ - contextPath: MSGraphMail.Subject
+ description: Subject of the email.
+ type: String
+ - contextPath: MSGraphMail.IsDraft
+ description: Whether the email is a draft.
+ type: Boolean
+ - contextPath: MSGraphMail.Body
+ description: Body of the email.
+ type: String
+ - contextPath: MSGraphMail.Sender.Name
+ description: Name of the sender.
+ type: String
+ - contextPath: MSGraphMail.Sender.Address
+ description: Email address of the sender.
+ type: String
+ - contextPath: MSGraphMail.From.Name
+ description: Name in the 'from' field.
+ type: String
+ - contextPath: MSGraphMail.From.Address
+ description: Email address in the 'from' field.
+ type: String
+ - contextPath: MSGraphMail.CCRecipients.Name
+ description: Names of the CC recipients.
+ type: String
+ - contextPath: MSGraphMail.CCRecipients.Address
+ description: Email addresses of the CC recipients.
+ type: String
+ - contextPath: MSGraphMail.BCCRecipients.Name
+ description: Names of the BCC recipients.
+ type: String
+ - contextPath: MSGraphMail.BCCRecipients.Address
+ description: Email addresses of the BCC recipients.
+ type: String
+ - contextPath: MSGraphMail.ReplyTo.Name
+ description: Names of the 'replyTo' recipients.
+ type: String
+ - contextPath: MSGraphMail.ReplyTo.Address
+ description: Email addresses of the 'replyTo' recipients.
+ type: String
+ - contextPath: MSGraphMail.UserID
+ description: ID of the user.
+ type: String
+ - arguments:
+ - default: false
+ description: User ID or principal ID (usually an email address).
+ isArray: false
+ name: user_id
+ required: true
+ secret: false
+ - default: false
+ description: Message ID.
+ isArray: false
+ name: message_id
+ required: true
+ secret: false
+ - default: false
+ description: Folder ID.
+ isArray: false
+ name: folder_id
+ required: false
+ secret: false
+ - default: false
+ description: OData query.
+ isArray: false
+ name: odata
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether the message body should be returned.
+ isArray: false
+ name: get_body
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Gets the properties of an email.
+ execution: false
+ name: msgraph-mail-get-email
+ outputs:
+ - contextPath: MSGraphMail.ID
+ description: ID of email.
+ type: String
+ - contextPath: MSGraphMail.Created
+ description: Time of email creation.
+ type: Date
+ - contextPath: MSGraphMail.LastModifiedTime
+ description: Time the email was last modified.
+ type: Date
+ - contextPath: MSGraphMail.ReceivedTime
+ description: Time the email was received.
+ type: Date
+ - contextPath: MSGraphMail.SendTime
+ description: Time the email was sent.
+ type: Date
+ - contextPath: MSGraphMail.Categories
+ description: Categories of the email.
+ type: String
+ - contextPath: MSGraphMail.HasAttachments
+ description: Whether the email has attachments.
+ type: Boolean
+ - contextPath: MSGraphMail.Subject
+ description: Subject of the email.
+ type: String
+ - contextPath: MSGraphMail.IsDraft
+ description: Whether the email is a draft.
+ type: Boolean
+ - contextPath: MSGraphMail.Body
+ description: Body of the email.
+ type: String
+ - contextPath: MSGraphMail.Sender.Name
+ description: Name of the sender.
+ type: String
+ - contextPath: MSGraphMail.Sender.Address
+ description: Email address of the sender.
+ type: String
+ - contextPath: MSGraphMail.From.Name
+ description: Name in the 'from' field.
+ type: String
+ - contextPath: MSGraphMail.From.Address
+ description: Email address in the 'from' field.
+ type: String
+ - contextPath: MSGraphMail.CCRecipients.Name
+ description: Names of the CC recipients.
+ type: String
+ - contextPath: MSGraphMail.CCRecipients.Address
+ description: Email addresses of the CC recipients.
+ type: String
+ - contextPath: MSGraphMail.BCCRecipients.Name
+ description: Names of the BCC recipients.
+ type: String
+ - contextPath: MSGraphMail.BCCRecipients.Address
+ description: Email addresses of the BCC recipients.
+ type: String
+ - contextPath: MSGraphMail.ReplyTo.Name
+ description: Names of the 'replyTo' recipients.
+ type: String
+ - contextPath: MSGraphMail.ReplyTo.Address
+ description: Email addresses of the 'replyTo' recipients.
+ type: String
+ - contextPath: MSGraphMail.UserID
+ description: ID of the user.
+ type: String
+ - arguments:
+ - default: false
+ description: User ID or principal ID (usually an email address).
+ isArray: false
+ name: user_id
+ required: true
+ secret: false
+ - default: false
+ description: Message ID.
+ isArray: false
+ name: message_id
+ required: true
+ secret: false
+ - default: false
+ description: Folder ID (comma separated, mailFolders,childFolders,childFolders...).
+ isArray: false
+ name: folder_id
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes an email.
+ execution: false
+ name: msgraph-mail-delete-email
+ - arguments:
+ - default: false
+ description: User ID or principal ID (usually an email address).
+ isArray: false
+ name: user_id
+ required: true
+ secret: false
+ - default: false
+ description: Message ID.
+ isArray: false
+ name: message_id
+ required: true
+ secret: false
+ - default: false
+ description: Folder ID (comma separated, mailFolders,childFolders,childFolders...).
+ isArray: false
+ name: folder_id
+ required: false
+ secret: false
+ deprecated: false
+ description: Lists all the attachments of a given email.
+ execution: false
+ name: msgraph-mail-list-attachments
+ outputs:
+ - contextPath: MSGraphMailAttachment.ID
+ description: Email ID.
+ type: String
+ - contextPath: MSGraphMailAttachment.Attachment.ID
+ description: ID of attachment.
+ type: String
+ - contextPath: MSGraphMailAttachment.Attachment.Name
+ description: Name of attachment.
+ type: String
+ - contextPath: MSGraphMailAttachment.Attachment.Type
+ description: Type of attachment.
+ type: String
+ - contextPath: MSGraphMailAttachment.UserID
+ description: ID of the user.
+ type: String
+ - arguments:
+ - default: false
+ description: User ID or principal ID (usually an email address).
+ isArray: false
+ name: user_id
+ required: true
+ secret: false
+ - default: false
+ description: Message ID.
+ isArray: false
+ name: message_id
+ required: true
+ secret: false
+ - default: false
+ description: Folder ID (comma separated, mailFolders,childFolders,childFolders...).
+ isArray: false
+ name: folder_id
+ required: false
+ secret: false
+ - default: false
+ description: ID of the attachment.
+ isArray: false
+ name: attachment_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets an attachment from the email.
+ execution: false
+ name: msgraph-mail-get-attachment
+ outputs:
+ - contextPath: File.Size
+ description: Size of file
+ type: Number
+ - contextPath: File.SHA1
+ description: File's SHA1
+ type: String
+ - contextPath: File.SHA256
+ description: File's SHA256
+ type: String
+ - contextPath: File.Name
+ description: File's name
+ type: String
+ - contextPath: File.SSDeep
+ description: File's SSDeep
+ type: String
+ - contextPath: File.EntryID
+ description: File's entry id
+ type: String
+ - contextPath: File.Info
+ description: File's info
+ type: String
+ - contextPath: File.Type
+ description: File's type
+ type: String
+ - contextPath: File.MD5
+ description: File's MD5
+ type: String
+ - contextPath: File.Extension
+ description: File's Extension
+ type: String
+ dockerimage: demisto/crypto:1.0.0.303
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- MicrosoftGraphMail-Test
diff --git a/Integrations/MicrosoftGraphMail/MicrosoftGraphMail_description.md b/Integrations/MicrosoftGraphMail/MicrosoftGraphMail_description.md
new file mode 100644
index 000000000000..a1d41a1367d7
--- /dev/null
+++ b/Integrations/MicrosoftGraphMail/MicrosoftGraphMail_description.md
@@ -0,0 +1,2 @@
+To allow us access to Microsoft Graph Mail, an admin has to approve our app using an admin consent flow, by clicking on the following [link](https://oproxy.demisto.ninja/ms-graph-mail).
+After authorizing the Demisto app, you will receive an ID, Token, and Key, which should be entered in the corresponding fields of the integration instance configuration.
\ No newline at end of file
diff --git a/Integrations/MicrosoftGraphMail/MicrosoftGraphMail_image.png b/Integrations/MicrosoftGraphMail/MicrosoftGraphMail_image.png
new file mode 100644
index 000000000000..08f94ff881d9
Binary files /dev/null and b/Integrations/MicrosoftGraphMail/MicrosoftGraphMail_image.png differ
diff --git a/Integrations/MicrosoftGraphMail/MicrosoftGraphMail_test.py b/Integrations/MicrosoftGraphMail/MicrosoftGraphMail_test.py
new file mode 100644
index 000000000000..880574b89ece
--- /dev/null
+++ b/Integrations/MicrosoftGraphMail/MicrosoftGraphMail_test.py
@@ -0,0 +1,46 @@
+from CommonServerPython import *
+from MicrosoftGraphMail import build_mail_object, assert_pages, build_folders_path, error_parser, epoch_seconds
+from requests.models import Response
+
+
+def test_build_mail_object():
+ # Testing list of mails
+ user_id = 'ex@example.com'
+ with open('test_data/mails') as mail_json:
+ mail = json.load(mail_json)
+ res = build_mail_object(mail, user_id=user_id, get_body=True)
+ assert isinstance(res, list)
+ assert len(mail) == len(res)
+ assert res[0]['Created'] == '2019-04-16T19:40:00Z'
+ assert res[0]['UserID'] == user_id
+ assert res[0]['Body']
+
+ with open('test_data/mail') as mail_json:
+ mail = json.load(mail_json)
+ res = build_mail_object(mail, user_id=user_id, get_body=True)
+ assert isinstance(res, dict)
+ assert res['UserID'] == user_id
+ assert res['Body']
+
+
+def test_assert_pages():
+ assert assert_pages(3) == 3 and assert_pages(None) == 1 and assert_pages('4') == 4
+
+
+def test_build_folders_path():
+ inp = 'i,s,f,q'
+ response = build_folders_path(inp)
+ assert response == 'mailFolders/i/childFolders/s/childFolders/f/childFolders/q'
+
+
+def test_error_parser():
+ err = Response()
+ err.status_code = 401
+ err._content = b'{"error":{"code":"code","message":"message"}}'
+ response = error_parser(err)
+ assert response == 'code: message'
+
+
+def test_epoch_seconds():
+ integer = epoch_seconds()
+ assert isinstance(integer, int)
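One more test that could accompany the above, exercising `pages_puller` with the pytest-mock dev dependency. A sketch only; the page fixtures here are invented for illustration:

```python
def test_pages_puller(mocker):
    from MicrosoftGraphMail import pages_puller
    second_page = {'value': [{'id': '2'}]}  # invented fixture
    # Stub http_request so no real HTTP call is made when the nextLink is followed.
    mocker.patch('MicrosoftGraphMail.http_request', return_value=second_page)
    first_page = {'value': [{'id': '1'}],
                  '@odata.nextLink': 'https://example.com/next'}  # invented fixture
    responses = pages_puller(first_page, 2)
    assert responses == [first_page, second_page]
```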
diff --git a/Integrations/MicrosoftGraphMail/pipfile b/Integrations/MicrosoftGraphMail/pipfile
new file mode 100644
index 000000000000..735bc94d8669
--- /dev/null
+++ b/Integrations/MicrosoftGraphMail/pipfile
@@ -0,0 +1,15 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+
+[packages]
+requests = "*"
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/MicrosoftGraphMail/pipfile.lock b/Integrations/MicrosoftGraphMail/pipfile.lock
new file mode 100644
index 000000000000..c5aaf8eb651e
--- /dev/null
+++ b/Integrations/MicrosoftGraphMail/pipfile.lock
@@ -0,0 +1,263 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "bc91471581e911c5c57ceb8933db0b3b01c5e0d9cd138a868b6bb76623e8b318"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5",
+ "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"
+ ],
+ "version": "==2019.3.9"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:2112d2ca570bb7c3e53ea1a35cd5df42bb0fd10c45f0fb97178679c3c03d64c7",
+ "sha256:c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a"
+ ],
+ "markers": "python_version > '2.7'",
+ "version": "==7.0.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f",
+ "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746"
+ ],
+ "version": "==0.9.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:3773f4c235918987d51daf1db66d51c99fac654c81d6f2f709a046ab446d5e5d",
+ "sha256:b7802283b70ca24d7119b32915efa7c409982f59913c1a6c0640aacf118b95f5"
+ ],
+ "index": "pypi",
+ "version": "==4.4.1"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
+ "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
+ ],
+ "index": "pypi",
+ "version": "==2.21.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0",
+ "sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3"
+ ],
+ "version": "==1.24.2"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:01cb7e1ca5e6c5b3f235f0385057f70558b70d2f00320208825fa62887292f43",
+ "sha256:268067462aed7eb2a1e237fcb287852f22077de3fb07964e87e00f829eea2d1a"
+ ],
+ "version": "==4.3.17"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33",
+ "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39",
+ "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019",
+ "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088",
+ "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b",
+ "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e",
+ "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6",
+ "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b",
+ "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5",
+ "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff",
+ "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd",
+ "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7",
+ "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff",
+ "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d",
+ "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2",
+ "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35",
+ "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4",
+ "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514",
+ "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252",
+ "sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109",
+ "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f",
+ "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c",
+ "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92",
+ "sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577",
+ "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d",
+ "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d",
+ "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f",
+ "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a",
+ "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"
+ ],
+ "version": "==1.3.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:2112d2ca570bb7c3e53ea1a35cd5df42bb0fd10c45f0fb97178679c3c03d64c7",
+ "sha256:c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a"
+ ],
+ "markers": "python_version > '2.7'",
+ "version": "==7.0.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f",
+ "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746"
+ ],
+ "version": "==0.9.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:3773f4c235918987d51daf1db66d51c99fac654c81d6f2f709a046ab446d5e5d",
+ "sha256:b7802283b70ca24d7119b32915efa7c409982f59913c1a6c0640aacf118b95f5"
+ ],
+ "index": "pypi",
+ "version": "==4.4.1"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:04894d268ba6eab7e093d43107869ad49e7b5ef40d1a94243ea49b352061b200",
+ "sha256:16616ece19daddc586e499a3d2f560302c11f122b9c692bc216e821ae32aa0d0",
+ "sha256:252fdae740964b2d3cdfb3f84dcb4d6247a48a6abe2579e8029ab3be3cdc026c",
+ "sha256:2af80a373af123d0b9f44941a46df67ef0ff7a60f95872412a145f4500a7fc99",
+ "sha256:2c88d0a913229a06282b285f42a31e063c3bf9071ff65c5ea4c12acb6977c6a7",
+ "sha256:2ea99c029ebd4b5a308d915cc7fb95b8e1201d60b065450d5d26deb65d3f2bc1",
+ "sha256:3d2e3ab175fc097d2a51c7a0d3fda442f35ebcc93bb1d7bd9b95ad893e44c04d",
+ "sha256:4766dd695548a15ee766927bf883fb90c6ac8321be5a60c141f18628fb7f8da8",
+ "sha256:56b6978798502ef66625a2e0f80cf923da64e328da8bbe16c1ff928c70c873de",
+ "sha256:5cddb6f8bce14325b2863f9d5ac5c51e07b71b462361fd815d1d7706d3a9d682",
+ "sha256:644ee788222d81555af543b70a1098f2025db38eaa99226f3a75a6854924d4db",
+ "sha256:64cf762049fc4775efe6b27161467e76d0ba145862802a65eefc8879086fc6f8",
+ "sha256:68c362848d9fb71d3c3e5f43c09974a0ae319144634e7a47db62f0f2a54a7fa7",
+ "sha256:6c1f3c6f6635e611d58e467bf4371883568f0de9ccc4606f17048142dec14a1f",
+ "sha256:b213d4a02eec4ddf622f4d2fbc539f062af3788d1f332f028a2e19c42da53f15",
+ "sha256:bb27d4e7805a7de0e35bd0cb1411bc85f807968b2b0539597a49a23b00a622ae",
+ "sha256:c9d414512eaa417aadae7758bc118868cd2396b0e6138c1dd4fda96679c079d3",
+ "sha256:f0937165d1e25477b01081c4763d2d9cdc3b18af69cb259dd4f640c9b900fe5e",
+ "sha256:fb96a6e2c11059ecf84e6741a319f93f683e440e341d4489c9b161eca251cf2a",
+ "sha256:fc71d2d6ae56a091a8d94f33ec9d0f2001d1cb1db423d8b4355debfe9ce689b7"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.3.4"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/MicrosoftGraphMail/test_data/mail b/Integrations/MicrosoftGraphMail/test_data/mail
new file mode 100644
index 000000000000..7fdd5300a667
--- /dev/null
+++ b/Integrations/MicrosoftGraphMail/test_data/mail
@@ -0,0 +1,53 @@
+{
+ "@odata.context": "resource.com",
+ "@odata.etag": "W/\"CQ23124512ny\"",
+ "createdDateTime": "2019-04-16T19:40:00Z",
+ "lastModifiedDateTime": "2019-04-16T19:40:02Z",
+ "changeKey": "change_key",
+ "categories": [],
+ "receivedDateTime": "2019-04-16T19:40:01Z",
+ "sentDateTime": "2019-04-16T19:40:01Z",
+ "hasAttachments": false,
+ "internetMessageId": "",
+ "subject": "Playbook has stopped on error for Legal Hold #` (#2231)",
+ "bodyPreview": "Incident playbook task \"Parse Legal Hold CSV File\" stopped on error.\nIncident Id: #2231\nIncident Name: Legal Hold #`\nIncident SLA: 26 Apr 19, 19:13 UTC\nIncident Severity: Unknown\n\nTask: #1\nTask Name: Parse Legal Hold CSV File\nTask SLA: N\\A\n\nView it on htt",
+ "importance": "normal",
+ "parentFolderId": "parent_folder",
+ "conversationId": "conversation_id",
+ "isDeliveryReceiptRequested": false,
+ "isReadReceiptRequested": false,
+ "isRead": true,
+ "isDraft": false,
+ "webLink": "link!!!",
+ "inferenceClassification": "focused",
+ "body": {
+ "contentType": "text",
+ "content": "Incident playbook task \"Parse Legal Hold CSV File\" stopped on error.\nIncident Id: #2231\nIncident Name: Legal Hold #`\nIncident SLA: 26 Apr 19, 19:13 UTC\nIncident Severity: Unknown\n\nTask: #1\nTask Name: Parse Legal Hold CSV File\nTask SLA: N\\A\n/WorkPlan/2231/1"
+ },
+ "sender": {
+ "emailAddress": {
+ "name": "Test Test",
+ "address": "test@test.com"
+ }
+ },
+ "from": {
+ "emailAddress": {
+ "name": "Test Test",
+ "address": "test@example.com"
+ }
+ },
+ "toRecipients": [
+ {
+ "emailAddress": {
+ "name": "Hatul Kaze",
+ "address": "KHatul@example.com"
+ }
+ }
+ ],
+ "ccRecipients": [],
+ "bccRecipients": [],
+ "replyTo": [],
+ "flag": {
+ "flagStatus": "notFlagged"
+ }
+}
\ No newline at end of file
diff --git a/Integrations/MicrosoftGraphMail/test_data/mails b/Integrations/MicrosoftGraphMail/test_data/mails
new file mode 100644
index 000000000000..c94c03d37f9b
--- /dev/null
+++ b/Integrations/MicrosoftGraphMail/test_data/mails
@@ -0,0 +1,108 @@
+[
+ {
+ "@odata.etag": "W/\"\"",
+ "id": "-qAAA=",
+ "createdDateTime": "2019-04-16T19:40:00Z",
+ "lastModifiedDateTime": "2019-04-16T19:40:02Z",
+ "changeKey": "",
+ "categories": [],
+ "receivedDateTime": "2019-04-16T19:40:01Z",
+ "sentDateTime": "2019-04-16T19:40:01Z",
+ "hasAttachments": false,
+ "internetMessageId": "",
+ "subject": "Playbook has stopped on error for Legal Hold #` (#2231)",
+ "bodyPreview": "Incident playbook task \"Parse Legal Hold CSV File\" stopped on error.\nIncident Id: #2231\nIncident Name: Legal Hold #`\nIncident SLA: 26 Apr 19, 19:13 UTC\nIncident Severity: Unknown\n\nTask: #1\nTask Name: Parse Legal Hold CSV File\nTask SLA: N\\A\n\nView it on htt",
+ "importance": "normal",
+ "parentFolderId": "",
+ "conversationId": "",
+ "isDeliveryReceiptRequested": false,
+ "isReadReceiptRequested": false,
+ "isRead": true,
+ "isDraft": false,
+ "webLink": "",
+ "inferenceClassification": "focused",
+ "body": {
+ "contentType": "text",
+ "content": "Incident playbook task \"Parse Legal Hold CSV File\" stopped on error.\nIncident Id: #2231\nIncident Name: Legal Hold #`\nIncident SLA: 26 Apr 19, 19:13 UTC\nIncident Severity: Unknown\n\nTask: #1\nTask Name: Parse Legal Hold CSV File\nTask SLA: N\\A\n\nView it on Plan/2231/1"
+ },
+ "sender": {
+ "emailAddress": {
+ "name": "User Test",
+ "address": "user@example.com"
+ }
+ },
+ "from": {
+ "emailAddress": {
+ "name": "User Test",
+ "address": "user@example.com"
+ }
+ },
+ "toRecipients": [
+ {
+ "emailAddress": {
+ "name": "mayb@queen.com",
+ "address": "mayb@queen.com"
+ }
+ }
+ ],
+ "ccRecipients": [],
+ "bccRecipients": [],
+ "replyTo": [],
+ "flag": {
+ "flagStatus": "notFlagged"
+ }
+ },
+ {
+ "@odata.etag": "W/\"\"",
+ "id": "id_number",
+ "createdDateTime": "2019-04-16T19:36:06Z",
+ "lastModifiedDateTime": "2019-04-16T19:36:08Z",
+ "changeKey": "",
+ "categories": [],
+ "receivedDateTime": "2019-04-16T19:36:07Z",
+ "sentDateTime": "2019-04-16T19:36:07Z",
+ "hasAttachments": false,
+ "internetMessageId": "",
+ "subject": "Message from Demisto Security Operations Server",
+ "bodyPreview": "DBot has updated an incident Legal Hold List.\nView it on233",
+ "importance": "normal",
+ "parentFolderId": "",
+ "conversationId": "",
+ "isDeliveryReceiptRequested": false,
+ "isReadReceiptRequested": false,
+ "isRead": true,
+ "isDraft": false,
+ "webLink": "",
+ "inferenceClassification": "focused",
+ "body": {
+ "contentType": "text",
+ "content": "DBot has updated an incident Legal Hold List.\nVie"
+ },
+ "sender": {
+ "emailAddress": {
+ "name": "User Test",
+ "address": "user@example.com"
+ }
+ },
+ "from": {
+ "emailAddress": {
+ "name": "User Test",
+ "address": "user@example.com"
+ }
+ },
+ "toRecipients": [
+ {
+ "emailAddress": {
+ "name": "Fredy Mecrury",
+ "address": "freddy@queen.com"
+ }
+ }
+ ],
+ "ccRecipients": [],
+ "bccRecipients": [],
+ "replyTo": [],
+ "flag": {
+ "flagStatus": "notFlagged"
+ }
+ }
+]
\ No newline at end of file
diff --git a/Integrations/MicrosoftGraphSecurity/MicrosoftGraphSecurity.py b/Integrations/MicrosoftGraphSecurity/MicrosoftGraphSecurity.py
new file mode 100644
index 000000000000..3d2faab30f74
--- /dev/null
+++ b/Integrations/MicrosoftGraphSecurity/MicrosoftGraphSecurity.py
@@ -0,0 +1,642 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import requests
+from datetime import datetime, timedelta
+from typing import Dict, Any
+import base64
+import os
+from cryptography.hazmat.primitives.ciphers.aead import AESGCM
+
+if not demisto.params()['proxy']:
+    # pop() instead of del so a missing variable does not raise KeyError
+    os.environ.pop('HTTP_PROXY', '')
+    os.environ.pop('HTTPS_PROXY', '')
+    os.environ.pop('http_proxy', '')
+    os.environ.pop('https_proxy', '')
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+PARAMS = demisto.params()
+SERVER = PARAMS['host'][:-1] if PARAMS['host'].endswith('/') else PARAMS['host']
+BASE_URL = SERVER + '/v1.0/'
+TENANT = PARAMS['tenant_id']
+AUTH_AND_TOKEN_URL = PARAMS['auth_id'].split('@')
+AUTH_ID = AUTH_AND_TOKEN_URL[0]
+ENC_KEY = PARAMS.get('enc_key')
+USE_SSL = not PARAMS.get('insecure', False)
+if len(AUTH_AND_TOKEN_URL) != 2:
+ TOKEN_RETRIEVAL_URL = 'https://oproxy.demisto.ninja/obtain-token' # disable-secrets-detection
+else:
+ TOKEN_RETRIEVAL_URL = AUTH_AND_TOKEN_URL[1]
+APP_NAME = 'ms-graph-security'
+
+''' HELPER FUNCTIONS '''
+
+
+def epoch_seconds(d=None):
+ """
+ Return the number of seconds for given date. If no date, return current.
+ """
+ if not d:
+ d = datetime.utcnow()
+ return int((d - datetime.utcfromtimestamp(0)).total_seconds())
+
+
+def get_encrypted(content: str, key: str) -> str:
+ """
+
+ Args:
+ content (str): content to encrypt. For a request to Demistobot for a new access token, content should be
+ the tenant id
+ key (str): encryption key from Demistobot
+
+ Returns:
+ encrypted timestamp:content
+ """
+ def create_nonce() -> bytes:
+ return os.urandom(12)
+
+ def encrypt(string: str, enc_key: str) -> bytes:
+ """
+
+ Args:
+ enc_key (str):
+ string (str):
+
+ Returns:
+ bytes:
+ """
+ # String to bytes
+ enc_key = base64.b64decode(enc_key)
+ # Create key
+ aes_gcm = AESGCM(enc_key)
+ # Create nonce
+ nonce = create_nonce()
+ # Create ciphered data
+ data = string.encode()
+ ct = aes_gcm.encrypt(nonce, data, None)
+ return base64.b64encode(nonce + ct)
+ now = epoch_seconds()
+ encrypted = encrypt(f'{now}:{content}', key).decode('utf-8')
+ return encrypted
+
+
+def get_access_token():
+ integration_context = demisto.getIntegrationContext()
+ access_token = integration_context.get('access_token')
+ valid_until = integration_context.get('valid_until')
+ if access_token and valid_until:
+ if epoch_seconds() < valid_until:
+ return access_token
+ headers = {'Accept': 'application/json'}
+
+ dbot_response = requests.post(
+ TOKEN_RETRIEVAL_URL,
+ headers=headers,
+ data=json.dumps({
+ 'app_name': APP_NAME,
+ 'registration_id': AUTH_ID,
+ 'encrypted_token': get_encrypted(TENANT, ENC_KEY)
+ }),
+ verify=USE_SSL
+ )
+ if dbot_response.status_code not in {200, 201}:
+ msg = 'Error in authentication. Try checking the credentials you entered.'
+ try:
+ demisto.info('Authentication failure from server: {} {} {}'.format(
+ dbot_response.status_code, dbot_response.reason, dbot_response.text))
+ err_response = dbot_response.json()
+ server_msg = err_response.get('message')
+ if not server_msg:
+ title = err_response.get('title')
+ detail = err_response.get('detail')
+ if title:
+ server_msg = f'{title}. {detail}'
+ if server_msg:
+ msg += ' Server message: {}'.format(server_msg)
+ except Exception as ex:
+ demisto.error('Failed parsing error response - Exception: {}'.format(ex))
+ raise Exception(msg)
+ try:
+ gcloud_function_exec_id = dbot_response.headers.get('Function-Execution-Id')
+ demisto.info(f'Google Cloud Function Execution ID: {gcloud_function_exec_id}')
+ parsed_response = dbot_response.json()
+ except ValueError:
+ raise Exception(
+ 'There was a problem in retrieving an updated access token.\n'
+ 'The response from the Demistobot server did not contain the expected content.'
+ )
+ access_token = parsed_response.get('access_token')
+ expires_in = parsed_response.get('expires_in', 3595)
+ time_now = epoch_seconds()
+ time_buffer = 5 # seconds by which to shorten the validity period
+ if expires_in - time_buffer > 0:
+ # err on the side of caution with a slightly shorter access token validity period
+ expires_in = expires_in - time_buffer
+
+ demisto.setIntegrationContext({
+ 'access_token': access_token,
+ 'valid_until': time_now + expires_in
+ })
+ return access_token
+
+
+def get_timestamp(time_description):
+ if time_description == 'Last24Hours':
+ time_delta = 1
+ elif time_description == 'Last48Hours':
+ time_delta = 2
+ else:
+ time_delta = 7
+ return datetime.strftime(datetime.now() - timedelta(time_delta), '%Y-%m-%d')
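For example, assuming this runs in the module's scope, `Last24Hours` maps to yesterday's date in `YYYY-MM-DD` form:

```python
from datetime import datetime, timedelta

expected = datetime.strftime(datetime.now() - timedelta(1), '%Y-%m-%d')
assert get_timestamp('Last24Hours') == expected
```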
+
+
+def http_request(method, url_suffix, json=None, params=None):
+ """
+ Generic request to the graph
+ """
+ token = get_access_token()
+ r = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ json=json,
+ params=params,
+ headers={
+ 'Authorization': 'Bearer ' + token,
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+ },
+ # Honor the 'Trust any certificate' parameter, consistent with the integration's other requests
+ verify=USE_SSL
+ )
+ if r.status_code not in {200, 204, 206}:
+ return_error('Error in API call to Microsoft Graph [%d] - %s' % (r.status_code, r.reason))
+ elif r.status_code == 206: # 206 indicates Partial Content, and the reason for that will be in the Warning header
+ demisto.debug(str(r.headers))
+ if not r.text:
+ return {}
+ return r.json()
+
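+# Usage sketch (hypothetical query): fetch the single most recent alert through
+# the generic wrapper:
+#   latest = http_request('GET', 'security/alerts/?$top=1')
+# The wrapper injects the Bearer token and returns {} for empty response bodies.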
+
+def capitalize_first_letter(string):
+ return string[:1].upper() + string[1:]
+
+
+''' FUNCTIONS '''
+
+
+def search_alerts_command(args):
+ last_modified = args.get('last_modified')
+ severity = args.get('severity')
+ category = args.get('category')
+ vendor = args.get('vendor')
+ time_from = args.get('time_from')
+ time_to = args.get('time_to')
+ filter_query = args.get('filter')
+ alerts = search_alerts(last_modified, severity, category, vendor, time_from, time_to, filter_query)['value']
+ outputs = []
+ for alert in alerts:
+ outputs.append({
+ 'ID': alert['id'],
+ 'Title': alert['title'],
+ 'Category': alert['category'],
+ 'Severity': alert['severity'],
+ 'CreatedDate': alert['createdDateTime'],
+ 'EventDate': alert['eventDateTime'],
+ 'Status': alert['status'],
+ 'Vendor': alert['vendorInformation']['vendor'],
+ 'Provider': alert['vendorInformation']['provider']
+ })
+ ec = {
+ 'MsGraph.Alert(val.ID && val.ID === obj.ID)': outputs
+ }
+ table_headers = ['ID', 'Vendor', 'Provider', 'Title', 'Category', 'Severity', 'CreatedDate', 'EventDate', 'Status']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': alerts,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Microsoft Security Graph Alerts', outputs, table_headers, removeNull=True),
+ 'EntryContext': ec
+ }
+ demisto.results(entry)
+
+
+def search_alerts(last_modified, severity, category, vendor, time_from, time_to, filter_query):
+ # Build the OData $filter expression; individual conditions must be combined with 'and'
+ # ('vendor' is received for interface compatibility but is not currently applied as a filter)
+ filters = []
+ if last_modified:
+ filters.append("modifiedDate gt '{}'".format(get_timestamp(last_modified)))
+ if category:
+ filters.append("category eq '{}'".format(category))
+ if severity:
+ filters.append("severity eq '{}'".format(severity))
+ if time_from:
+ filters.append("createdDate gt '{}'".format(time_from))
+ if time_to:
+ filters.append("createdDate lt '{}'".format(time_to))
+ if filter_query:
+ filters.append(filter_query)
+ cmd_url = 'security/alerts/?$filter=' + ' and '.join(filters)
+ response = http_request('GET', cmd_url)
+ return response
+
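+# Example of the resulting OData filter (illustrative date): calling
+#   search_alerts('Last24Hours', 'high', 'ransomware', None, None, None, None)
+# yields a cmd_url like:
+#   security/alerts/?$filter=modifiedDate gt '2019-10-19' and category eq 'ransomware' and severity eq 'high'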
+
+def get_alert_details_command(args):
+ alert_id = args.get('alert_id')
+ fields_to_include = args.get('fields_to_include')
+ if fields_to_include:
+ fields_list = fields_to_include.split(',')
+ else:
+ fields_list = []
+
+ show_all_fields = 'All' in fields_list
+
+ alert_details = get_alert_details(alert_id)
+
+ hr = '## Microsoft Security Graph Alert Details - {}\n'.format(alert_id)
+
+ basic_properties_title = 'Basic Properties'
+ basic_properties = {
+ 'ActivityGroupName': alert_details['activityGroupName'],
+ 'AssignedTo': alert_details['assignedTo'],
+ 'AzureTenantID': alert_details['azureTenantId'],
+ 'Category': alert_details['category'],
+ 'ClosedDate': alert_details['closedDateTime'],
+ 'Confidence': alert_details['confidence'],
+ 'CreatedDate': alert_details['createdDateTime'],
+ 'Description': alert_details['description'],
+ 'EventDate': alert_details['eventDateTime'],
+ 'LastModifiedDate': alert_details['lastModifiedDateTime'],
+ 'Severity': alert_details['severity'],
+ 'Status': alert_details['status'],
+ 'Title': alert_details['title']
+ }
+ hr += tableToMarkdown(basic_properties_title, basic_properties, removeNull=True)
+
+ if 'CloudAppStates' in fields_list or show_all_fields:
+ cloud_apps_states = alert_details['cloudAppStates']
+ if cloud_apps_states:
+ cloud_apps_hr = []
+ for state in cloud_apps_states:
+ cloud_apps_hr.append({
+ 'DestinationServiceIP': state['destinationServiceIp'],
+ 'DestinationServiceName': state['destinationServiceName'],
+ 'RiskScore': state['riskScore']
+ })
+ cloud_apps_title = 'Cloud Application States for Alert'
+ hr += tableToMarkdown(cloud_apps_title, cloud_apps_hr, removeNull=True)
+
+ if 'CustomerComments' in fields_list or show_all_fields:
+ comments = alert_details['comments']
+ if comments:
+ comments_hr = '### Customer Provided Comments for Alert\n'
+ for comment in comments:
+ comments_hr += '- {}\n'.format(comment)
+ hr += comments_hr
+
+ if 'FileStates' in fields_list or show_all_fields:
+ file_states = alert_details['fileStates']
+ if file_states:
+ file_states_hr = []
+ for state in file_states:
+ file_state = {
+ 'Name': state['name'],
+ 'Path': state['path'],
+ 'RiskScore': state['riskScore']
+ }
+ file_hash = state.get('fileHash')
+ if file_hash:
+ file_state['FileHash'] = file_hash['hashValue']
+ file_states_hr.append(file_state)
+ file_states_title = 'File Security States for Alert'
+ hr += tableToMarkdown(file_states_title, file_states_hr, removeNull=True)
+
+ if 'HostStates' in fields_list or show_all_fields:
+ host_states = alert_details['hostStates']
+ if host_states:
+ host_states_hr = []
+ for state in host_states:
+ host_state = {
+ 'Fqdn': state['fqdn'],
+ 'NetBiosName': state['netBiosName'],
+ 'OS': state['os'],
+ 'PrivateIPAddress': state['privateIpAddress'],
+ 'PublicIPAddress': state['publicIpAddress']
+ }
+ aad_joined = state.get('isAzureAadJoined')
+ if aad_joined:
+ host_state['IsAzureAadJoined'] = aad_joined
+ aad_registered = state.get('isAzureAadRegistered')
+ if aad_registered:
+ host_state['IsAzureAadRegistered'] = aad_registered
+ risk_score = state.get('riskScore')
+ if risk_score:
+ host_state['RiskScore'] = risk_score
+ host_states_hr.append(host_state)
+ host_states_title = 'Host Security States for Alert'
+ hr += tableToMarkdown(host_states_title, host_states_hr, removeNull=True)
+
+ if 'MalwareStates' in fields_list or show_all_fields:
+ malware_states = alert_details['malwareStates']
+ if malware_states:
+ malware_states_hr = []
+ for state in malware_states:
+ malware_states_hr.append({
+ 'Category': state['category'],
+ 'Family': state['family'],
+ 'Name': state['name'],
+ 'Severity': state['severity'],
+ 'WasRunning': state['wasRunning']
+ })
+ malware_states_title = 'Malware States for Alert'
+ hr += tableToMarkdown(malware_states_title, malware_states_hr, removeNull=True)
+
+ if 'NetworkConnections' in fields_list or show_all_fields:
+ network_connections = alert_details['networkConnections']
+ if network_connections:
+ network_connections_hr = []
+ for connection in network_connections:
+ connection_hr = {}
+ for key, value in connection.items():
+ if value or value is False:
+ connection_hr[capitalize_first_letter(key)] = value
+ network_connections_hr.append(connection_hr)
+ network_connections_title = 'Network Connections for Alert'
+ hr += tableToMarkdown(network_connections_title, network_connections_hr, removeNull=True)
+
+ if 'Processes' in fields_list or show_all_fields:
+ processes = alert_details['processes']
+ if processes:
+ processes_hr = []
+ for process in processes:
+ process_hr = {}
+ for key, value in process.items():
+ if value or value is False:
+ process_hr[capitalize_first_letter(key)] = value
+ processes_hr.append(process_hr)
+ processes_title = 'Processes for Alert'
+ hr += tableToMarkdown(processes_title, processes_hr, removeNull=True)
+
+ if 'Triggers' in fields_list or show_all_fields:
+ triggers = alert_details['triggers']
+ if triggers:
+ triggers_hr = []
+ for trigger in triggers:
+ triggers_hr.append({
+ 'Name': trigger['name'],
+ 'Type': trigger['type'],
+ 'Value': trigger['value']
+ })
+ triggers_title = 'Triggers for Alert'
+ hr += tableToMarkdown(triggers_title, triggers_hr, removeNull=True)
+
+ if 'UserStates' in fields_list or show_all_fields:
+ user_states = alert_details['userStates']
+ if user_states:
+ user_states_hr = []
+ for state in user_states:
+ state_hr = {}
+ for key, value in state.items():
+ if value or value is False:
+ state_hr[capitalize_first_letter(key)] = value
+ user_states_hr.append(state_hr)
+ user_states_title = 'User Security States for Alert'
+ hr += tableToMarkdown(user_states_title, user_states_hr, removeNull=True)
+
+ if 'VendorInformation' in fields_list or show_all_fields:
+ vendor_information = alert_details['vendorInformation']
+ if vendor_information:
+ vendor_info_hr = {
+ 'Provider': vendor_information['provider'],
+ 'ProviderVersion': vendor_information['providerVersion'],
+ 'SubProvider': vendor_information['subProvider'],
+ 'Vendor': vendor_information['vendor']
+ }
+ vendor_info_title = 'Vendor Information for Alert'
+ hr += tableToMarkdown(vendor_info_title, vendor_info_hr, removeNull=True)
+
+ if 'VulnerabilityStates' in fields_list or show_all_fields:
+ vulnerability_states = alert_details['vulnerabilityStates']
+ if vulnerability_states:
+ vulnerability_states_hr = []
+ for state in vulnerability_states:
+ vulnerability_states_hr.append({
+ 'CVE': state['cve'],
+ 'Severity': state['severity'],
+ 'WasRunning': state['wasRunning']
+ })
+ vulnerability_states_title = 'Vulnerability States for Alert'
+ hr += tableToMarkdown(vulnerability_states_title, vulnerability_states_hr, removeNull=True)
+
+ if 'RegistryKeys' in fields_list or show_all_fields:
+ registry_keys = alert_details['registryKeyStates']
+ if registry_keys:
+ registry_keys_hr = []
+ for r_key in registry_keys:
+ r_key_hr = {}
+ for key, value in r_key.items():
+ if value or value is False:
+ r_key_hr[capitalize_first_letter(key)] = value
+ registry_keys_hr.append(r_key_hr)
+ registry_keys_title = 'Registry Keys for Alert'
+ hr += tableToMarkdown(registry_keys_title, registry_keys_hr, removeNull=True)
+
+ context = {
+ 'ID': alert_details['id'],
+ 'Title': alert_details['title'],
+ 'Category': alert_details['category'],
+ 'Severity': alert_details['severity'],
+ 'CreatedDate': alert_details['createdDateTime'],
+ 'EventDate': alert_details['eventDateTime'],
+ 'Status': alert_details['status'],
+ 'Vendor': alert_details['vendorInformation']['vendor'],
+ 'Provider': alert_details['vendorInformation']['provider']
+ }
+ ec = {
+ 'MsGraph.Alert(val.ID && val.ID === obj.ID)': context
+ }
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': alert_details,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ }
+ demisto.results(entry)
+
+
+def get_alert_details(alert_id):
+ cmd_url = 'security/alerts/' + alert_id
+ response = http_request('GET', cmd_url)
+ return response
+
+
+def update_alert_command(args):
+ alert_id = args.get('alert_id')
+ vendor_information = args.get('vendor_information')
+ provider_information = args.get('provider_information')
+ assigned_to = args.get('assigned_to')
+ closed_date_time = args.get('closed_date_time')
+ comments = args.get('comments')
+ feedback = args.get('feedback')
+ status = args.get('status')
+ tags = args.get('tags')
+ if all(v is None for v in [assigned_to, closed_date_time, comments, feedback, status, tags]):
+ return_error('No data to update was provided')
+ update_alert(alert_id, vendor_information, provider_information,
+ assigned_to, closed_date_time, comments, feedback, status, tags)
+ context = {
+ 'ID': alert_id
+ }
+ if status:
+ context['Status'] = status
+ ec = {
+ 'MsGraph.Alert(val.ID && val.ID === obj.ID)': context
+ }
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'Alert {} has been successfully updated.'.format(alert_id),
+ 'EntryContext': ec
+ }
+ demisto.results(entry)
+
+
+def update_alert(alert_id, vendor_information, provider_information,
+ assigned_to, closed_date_time, comments, feedback, status, tags):
+ cmd_url = '/security/alerts/' + alert_id
+ data: Dict[str, Any] = {
+ 'vendorInformation': {
+ 'provider': provider_information,
+ 'vendor': vendor_information
+ }
+ }
+ if assigned_to:
+ data['assignedTo'] = assigned_to
+ if closed_date_time:
+ data['closedDateTime'] = closed_date_time
+ if comments:
+ data['comments'] = [comments]
+ if feedback:
+ data['feedback'] = feedback
+ if status:
+ data['status'] = status
+ if tags:
+ data['tags'] = [tags]
+ http_request('PATCH', cmd_url, json=data)
+
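+# Illustrative PATCH body produced above when, e.g., closing an alert:
+#   {"vendorInformation": {"provider": "Windows Defender ATP", "vendor": "Microsoft"},
+#    "status": "resolved", "feedback": "truePositive"}
+# vendorInformation is required by Graph alert updates, which is why it is always sent.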
+
+def get_users_command():
+ users = get_users()['value']
+ outputs = []
+ for user in users:
+ outputs.append({
+ 'Name': user['displayName'],
+ 'Title': user['jobTitle'],
+ 'Email': user['mail'],
+ 'ID': user['id']
+ })
+ ec = {
+ 'MsGraph.User(val.ID && val.ID === obj.ID)': outputs
+ }
+ table_headers = ['Name', 'Title', 'Email', 'ID']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': users,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Microsoft Graph Users', outputs, table_headers, removeNull=True),
+ 'EntryContext': ec
+ }
+ demisto.results(entry)
+
+
+def get_users():
+ cmd_url = 'users'
+ response = http_request('GET', cmd_url)
+ return response
+
+
+def get_user_command():
+ user_id = demisto.args().get('user_id')
+ raw_user = get_user(user_id)
+ user = {
+ 'Name': raw_user['displayName'],
+ 'Title': raw_user['jobTitle'],
+ 'Email': raw_user['mail'],
+ 'ID': raw_user['id']
+ }
+ ec = {
+ 'MsGraph.User(val.ID && val.ID === obj.ID)': user
+ }
+ table_headers = ['Name', 'Title', 'Email', 'ID']
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': raw_user,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Microsoft Graph User ' + user_id, user, table_headers, removeNull=True),
+ 'EntryContext': ec
+ }
+ demisto.results(entry)
+
+
+def get_user(user_id):
+ cmd_url = 'users/' + user_id
+ response = http_request('GET', cmd_url)
+ return response
+
+
+def test_function():
+ token = get_access_token()
+ response = requests.get(
+ BASE_URL + 'users',
+ headers={
+ 'Authorization': 'Bearer ' + token,
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+ },
+ params={'$select': 'displayName'},
+ verify=USE_SSL
+ )
+ try:
+ data = response.json() if response.text else {}
+ if not response.ok:
+ return_error(f'API call to MS Graph Security failed. Please check authentication related parameters.'
+ f' [{response.status_code}] - {demisto.get(data, "error.message")}')
+
+ demisto.results('ok')
+
+ except TypeError as ex:
+ demisto.debug(str(ex))
+ return_error(f'API call to MS Graph Security failed, could not parse result. '
+ f'Please check authentication related parameters. [{response.status_code}]')
+
+
+''' EXECUTION CODE '''
+
+LOG('command is %s' % (demisto.command(), ))
+
+try:
+ if demisto.command() == 'test-module':
+ test_function()
+
+ elif demisto.command() == 'msg-search-alerts':
+ search_alerts_command(demisto.args())
+
+ elif demisto.command() == 'msg-get-alert-details':
+ get_alert_details_command(demisto.args())
+
+ elif demisto.command() == 'msg-update-alert':
+ update_alert_command(demisto.args())
+
+ elif demisto.command() == 'msg-get-users':
+ get_users_command()
+
+ elif demisto.command() == 'msg-get-user':
+ get_user_command()
+
+except Exception as e:
+ return_error(str(e))
diff --git a/Integrations/MicrosoftGraphSecurity/MicrosoftGraphSecurity.yml b/Integrations/MicrosoftGraphSecurity/MicrosoftGraphSecurity.yml
new file mode 100644
index 000000000000..1a96ad3e6bc6
--- /dev/null
+++ b/Integrations/MicrosoftGraphSecurity/MicrosoftGraphSecurity.yml
@@ -0,0 +1,313 @@
+category: Analytics & SIEM
+commonfields:
+ id: Microsoft Graph
+ version: -1
+configuration:
+- defaultvalue: https://graph.microsoft.com
+ display: Host URL (e.g. https://graph.microsoft.com)
+ name: host
+ required: true
+ type: 0
+- display: ID (received from the admin consent - see Detailed Instructions (?) section)
+ name: auth_id
+ required: true
+ type: 4
+- display: Token (received from the admin consent - see Detailed Instructions (?)
+ section)
+ name: tenant_id
+ required: true
+ type: 4
+- display: Key (received from the admin consent - see Detailed Instructions (?) section)
+ name: enc_key
+ required: true
+ type: 4
+- defaultvalue: ""
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: ""
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Unified gateway to security insights - all from a unified Microsoft Graph
+ Security API.
+display: Microsoft Graph Security
+name: Microsoft Graph
+script:
+ commands:
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Time frame in which the alert was last modified (one of the predefined
+ values)
+ isArray: false
+ name: last_modified
+ predefined:
+ - Last24Hours
+ - Last48Hours
+ - LastWeek
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Alert severity - set by vendor/provider.
+ isArray: false
+ name: severity
+ predefined:
+ - unknown
+ - informational
+ - low
+ - medium
+ - high
+ required: false
+ secret: false
+ - default: false
+ description: Category of the alert, e.g. credentialTheft, ransomware (Categories
+ can be added or removed by vendors.)
+ isArray: false
+ name: category
+ required: false
+ secret: false
+ - default: false
+ description: The start time (creation time of alert) for the search in the following
+ string format - YYYY-MM-DD
+ isArray: false
+ name: time_from
+ required: false
+ secret: false
+ - default: false
+ description: The end time (creation time of alert) for the search in the following
+ string format - YYYY-MM-DD
+ isArray: false
+ name: time_to
+ required: false
+ secret: false
+ - default: false
+ description: Use this field to filter on any of the alert properties in the
+ format "{property} eq '{property-value}'", e.g. "category eq 'ransomware'"
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ deprecated: false
+ description: List alerts (security issues) within a customer's tenant that Microsoft
+ or partner security solutions have identified.
+ execution: false
+ name: msg-search-alerts
+ outputs:
+ - contextPath: MsGraph.Alert.ID
+ description: Alert ID
+ type: string
+ - contextPath: MsGraph.Alert.Title
+ description: Alert title
+ type: string
+ - contextPath: MsGraph.Alert.Category
+ description: Alert category
+ type: string
+ - contextPath: MsGraph.Alert.Severity
+ description: Alert severity
+ type: string
+ - contextPath: MsGraph.Alert.CreatedDate
+ description: Alert created date
+ type: date
+ - contextPath: MsGraph.Alert.EventDate
+ description: Alert event time
+ type: date
+ - contextPath: MsGraph.Alert.Status
+ description: Alert status
+ type: string
+ - contextPath: MsGraph.Alert.MalwareStates
+ description: Alert malware states
+ type: string
+ - contextPath: MsGraph.Alert.Vendor
+ description: Alert vendor
+ type: string
+ - contextPath: MsGraph.Alert.Provider
+ description: Alert provider
+ type: string
+ - arguments:
+ - default: true
+ description: The Alert ID - Provider-generated GUID/unique identifier.
+ isArray: false
+ name: alert_id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: All
+ description: 'Fields to fetch for specified Alert apart from the basic properties,
+ given as comma separated values, e.g. NetworkConnections,Processes. Optional
+ values: All,NetworkConnections,Processes,RegistryKeys,UserStates,HostStates,FileStates,CloudAppStates,MalwareStates,CustomerComments,Triggers,VendorInformation,VulnerabilityStates'
+ isArray: false
+ name: fields_to_include
+ required: false
+ secret: false
+ deprecated: false
+ description: Get details for a specific alert.
+ execution: false
+ name: msg-get-alert-details
+ outputs:
+ - contextPath: MsGraph.Alert.ID
+ description: Alert ID
+ type: string
+ - contextPath: MsGraph.Alert.Title
+ description: Alert title
+ type: string
+ - contextPath: MsGraph.Alert.Category
+ description: Alert category
+ type: string
+ - contextPath: MsGraph.Alert.Severity
+ description: Alert severity
+ type: string
+ - contextPath: MsGraph.Alert.CreatedDate
+ description: Alert created date
+ type: date
+ - contextPath: MsGraph.Alert.EventDate
+ description: Alert event date
+ type: date
+ - contextPath: MsGraph.Alert.Status
+ description: Alert status
+ type: string
+ - contextPath: MsGraph.Alert.Vendor
+ description: Alert vendor
+ type: string
+ - contextPath: MsGraph.Alert.Provider
+ description: Alert provider
+ type: string
+ - arguments:
+ - default: false
+ description: The Alert ID. Provider-generated GUID/unique identifier.
+ isArray: false
+ name: alert_id
+ required: true
+ secret: false
+ - default: false
+ description: Name of the analyst the alert is assigned to for triage, investigation,
+ or remediation.
+ isArray: false
+ name: assigned_to
+ required: false
+ secret: false
+ - default: false
+ description: Time at which the alert was closed in the following string format
+ - MM/DD/YYYY
+ isArray: false
+ name: closed_date_time
+ required: false
+ secret: false
+ - default: false
+ description: Analyst comments on the alert (for customer alert management).
+ isArray: false
+ name: comments
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Analyst feedback on the alert.
+ isArray: false
+ name: feedback
+ predefined:
+ - unknown
+ - truePositive
+ - falsePositive
+ - benignPositive
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Alert lifecycle status (stage).
+ isArray: false
+ name: status
+ predefined:
+ - unknown
+ - newAlert
+ - inProgress
+ - resolved
+ required: false
+ secret: false
+ - default: false
+ description: User-definable labels that can be applied to an alert and can serve
+ as filter conditions (e.g. "HVA", "SAW").
+ isArray: false
+ name: tags
+ required: false
+ secret: false
+ - default: false
+ description: Details about the security service vendor, e.g. Microsoft
+ isArray: false
+ name: vendor_information
+ required: true
+ secret: false
+ - default: false
+ description: Details about the security service provider, e.g. Windows Defender
+ ATP
+ isArray: false
+ name: provider_information
+ required: true
+ secret: false
+ deprecated: false
+ description: Update an editable alert property within any integrated solution
+ to keep alert status and assignments in sync across solutions using its reference
+ ID.
+ execution: false
+ name: msg-update-alert
+ outputs:
+ - contextPath: MsGraph.Alert.ID
+ description: Alert ID
+ type: string
+ - contextPath: MsGraph.Alert.Status
+ description: Alert status
+ type: string
+ - deprecated: false
+ description: Retrieve a list of user objects.
+ execution: false
+ name: msg-get-users
+ outputs:
+ - contextPath: MsGraph.User.Email
+ description: User email address
+ type: string
+ - contextPath: MsGraph.User.ID
+ description: User ID
+ type: string
+ - contextPath: MsGraph.User.Title
+ description: User job title
+ type: string
+ - contextPath: MsGraph.User.Name
+ description: User name
+ type: string
+ - arguments:
+ - default: true
+ description: User ID of the user to retrieve
+ isArray: false
+ name: user_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieve the properties and relationships of a user object.
+ execution: false
+ name: msg-get-user
+ outputs:
+ - contextPath: MsGraph.User.Email
+ description: User email address
+ type: string
+ - contextPath: MsGraph.User.ID
+ description: User ID
+ type: string
+ - contextPath: MsGraph.User.Title
+ description: User job title
+ type: string
+ - contextPath: MsGraph.User.Name
+ description: User name
+ type: string
+ dockerimage: demisto/crypto:1.0.0.303
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- Microsoft Graph Test
diff --git a/Integrations/MicrosoftGraphSecurity/MicrosoftGraphSecurity_description.md b/Integrations/MicrosoftGraphSecurity/MicrosoftGraphSecurity_description.md
new file mode 100644
index 000000000000..d25ad4f9a6e7
--- /dev/null
+++ b/Integrations/MicrosoftGraphSecurity/MicrosoftGraphSecurity_description.md
@@ -0,0 +1,2 @@
+To allow us access to Microsoft Graph Security, an admin has to approve our app using an admin consent flow, by clicking on the following [link](https://oproxy.demisto.ninja/ms-graph-security).
+After authorizing the Demisto app, you will get an ID, Token, and Key, which should be inserted in the integration instance configuration's corresponding fields.
\ No newline at end of file
diff --git a/Integrations/MicrosoftGraphSecurity/MicrosoftGraphSecurity_image.png b/Integrations/MicrosoftGraphSecurity/MicrosoftGraphSecurity_image.png
new file mode 100644
index 000000000000..08f94ff881d9
Binary files /dev/null and b/Integrations/MicrosoftGraphSecurity/MicrosoftGraphSecurity_image.png differ
diff --git a/Integrations/MicrosoftGraphUser/MicrosoftGraphUser.py b/Integrations/MicrosoftGraphUser/MicrosoftGraphUser.py
new file mode 100644
index 000000000000..c8d61d565da4
--- /dev/null
+++ b/Integrations/MicrosoftGraphUser/MicrosoftGraphUser.py
@@ -0,0 +1,398 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+'''IMPORTS'''
+import requests
+from datetime import datetime
+import base64
+from cryptography.hazmat.primitives.ciphers.aead import AESGCM
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+BASE_URL = demisto.getParam('host').rstrip('/') + '/v1.0/'
+TENANT = demisto.getParam('tenant_id')
+AUTH_AND_TOKEN_URL = demisto.getParam('auth_id').split('@')
+AUTH_ID = AUTH_AND_TOKEN_URL[0]
+ENC_KEY = demisto.getParam('enc_key')
+USE_SSL = not demisto.params().get('insecure', False)
+
+''' CONSTANTS '''
+if len(AUTH_AND_TOKEN_URL) != 2:
+ TOKEN_RETRIEVAL_URL = 'https://oproxy.demisto.ninja/obtain-token' # disable-secrets-detection
+else:
+ TOKEN_RETRIEVAL_URL = AUTH_AND_TOKEN_URL[1]
+BLOCK_ACCOUNT_JSON = '{"accountEnabled": false}'
+UNBLOCK_ACCOUNT_JSON = '{"accountEnabled": true}'
+NO_OUTPUTS: dict = {}
+APP_NAME = 'ms-graph-user'
+
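+# Illustrative parameter parsing (hypothetical values): an auth_id parameter of
+# 'abcd1234@https://my-oproxy.example.com/token' yields AUTH_ID == 'abcd1234'
+# and TOKEN_RETRIEVAL_URL == 'https://my-oproxy.example.com/token'; a plain
+# 'abcd1234' keeps the default Demistobot URL above.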
+
+def camel_case_to_readable(text):
+ """
+ 'camelCase' -> 'Camel Case'
+ """
+ if text == 'id':
+ return 'ID'
+ return ''.join(' ' + char if char.isupper() else char.strip() for char in text).strip().title()
+
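+# Examples (illustrative): camel_case_to_readable('userPrincipalName') returns
+# 'User Principal Name'; the special case 'id' returns 'ID'.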
+
+def parse_outputs(users_data):
+ """
+ Parse user data as received from Microsoft Graph API into Demisto's conventions
+ """
+ if isinstance(users_data, list):
+ users_readable, users_outputs = [], []
+ for user_data in users_data:
+ user_readable = {camel_case_to_readable(k): v for k, v in user_data.items() if k != '@removed'}
+ if '@removed' in user_data:
+ user_readable['Status'] = 'deleted'
+ users_readable.append(user_readable)
+ users_outputs.append({k.replace(' ', ''): v for k, v in user_readable.copy().items()})
+
+ return users_readable, users_outputs
+
+ else:
+ user_readable = {camel_case_to_readable(k): v for k, v in users_data.items() if k != '@removed'}
+ if '@removed' in users_data:
+ user_readable['Status'] = 'deleted'
+ user_outputs = {k.replace(' ', ''): v for k, v in user_readable.copy().items()}
+
+ return user_readable, user_outputs
+
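+# Illustrative transformation: {'id': '123', 'displayName': 'John Doe'} becomes
+# the readable dict {'ID': '123', 'Display Name': 'John Doe'} and the context
+# output {'ID': '123', 'DisplayName': 'John Doe'}; '@removed' markers map to
+# Status: deleted.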
+
+def epoch_seconds():
+ """
+ Return the number of seconds since the epoch for the current UTC date.
+ """
+ return int((datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds())
+
+
+def get_encrypted(content: str, key: str) -> str:
+ """
+
+ Args:
+ content (str): content to encrypt. For a request to Demistobot for a new access token, content should be
+ the tenant id
+ key (str): encryption key from Demistobot
+
+ Returns:
+ encrypted timestamp:content
+ """
+ def create_nonce() -> bytes:
+ return os.urandom(12)
+
+ def encrypt(string: str, enc_key: str) -> bytes:
+ """
+
+ Args:
+ enc_key (str):
+ string (str):
+
+ Returns:
+ bytes:
+ """
+ # String to bytes
+ enc_key = base64.b64decode(enc_key)
+ # Create key
+ aes_gcm = AESGCM(enc_key)
+ # Create nonce
+ nonce = create_nonce()
+ # Create ciphered data
+ data = string.encode()
+ ct = aes_gcm.encrypt(nonce, data, None)
+ return base64.b64encode(nonce + ct)
+ now = epoch_seconds()
+ encrypted = encrypt(f'{now}:{content}', key).decode('utf-8')
+ return encrypted
+
+
+def get_access_token():
+ integration_context = demisto.getIntegrationContext()
+ access_token = integration_context.get('access_token')
+ valid_until = integration_context.get('valid_until')
+ if access_token and valid_until:
+ if epoch_seconds() < valid_until:
+ return access_token
+ headers = {'Accept': 'application/json'}
+
+ dbot_response = requests.post(
+ TOKEN_RETRIEVAL_URL,
+ headers=headers,
+ data=json.dumps({
+ 'app_name': APP_NAME,
+ 'registration_id': AUTH_ID,
+ 'encrypted_token': get_encrypted(TENANT, ENC_KEY)
+ }),
+ verify=USE_SSL
+ )
+ if dbot_response.status_code not in {200, 201}:
+ msg = 'Error in authentication. Try checking the credentials you entered.'
+ try:
+ demisto.info('Authentication failure from server: {} {} {}'.format(
+ dbot_response.status_code, dbot_response.reason, dbot_response.text))
+ err_response = dbot_response.json()
+ server_msg = err_response.get('message')
+ if not server_msg:
+ title = err_response.get('title')
+ detail = err_response.get('detail')
+ if title:
+ server_msg = f'{title}. {detail}'
+ if server_msg:
+ msg += ' Server message: {}'.format(server_msg)
+ except Exception as ex:
+ demisto.error('Failed parsing error response - Exception: {}'.format(ex))
+ raise Exception(msg)
+ try:
+ gcloud_function_exec_id = dbot_response.headers.get('Function-Execution-Id')
+ demisto.info(f'Google Cloud Function Execution ID: {gcloud_function_exec_id}')
+ parsed_response = dbot_response.json()
+ except ValueError:
+ raise Exception(
+ 'There was a problem in retrieving an updated access token.\n'
+ 'The response from the Demistobot server did not contain the expected content.'
+ )
+ access_token = parsed_response.get('access_token')
+ expires_in = parsed_response.get('expires_in', 3595)
+ time_now = epoch_seconds()
+ time_buffer = 5 # seconds by which to shorten the validity period
+ if expires_in - time_buffer > 0:
+ # err on the side of caution with a slightly shorter access token validity period
+ expires_in = expires_in - time_buffer
+
+ demisto.setIntegrationContext({
+ 'access_token': access_token,
+ 'valid_until': time_now + expires_in
+ })
+ return access_token
+
+
+def http_request(method, url_suffix, params=None, body=None):
+ """
+ Generic request to Microsoft Graph
+ """
+ token = get_access_token()
+ response = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ headers={
+ 'Authorization': 'Bearer ' + token,
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+ },
+ params=params,
+ data=body,
+ verify=USE_SSL,
+ )
+ try:
+ data = response.json() if response.text else {}
+ if not response.ok:
+ return_error(f'API call to MS Graph failed [{response.status_code}] - {demisto.get(data, "error.message")}')
+ elif response.status_code == 206: # 206 indicates Partial Content, reason will be in the warning header
+ demisto.debug(str(response.headers))
+
+ return data
+
+ except TypeError as ex:
+ demisto.debug(str(ex))
+ return_error(f'Error in API call to Microsoft Graph, could not parse result [{response.status_code}]')
+
+
+def test_function():
+ token = get_access_token()
+ response = requests.get(
+ BASE_URL + 'users',
+ headers={
+ 'Authorization': 'Bearer ' + token,
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+ },
+ params={'$select': 'displayName'},
+ verify=USE_SSL
+ )
+ try:
+ data = response.json() if response.text else {}
+ if not response.ok:
+ return_error(f'API call to MS Graph failed. Please check authentication related parameters.'
+ f' [{response.status_code}] - {demisto.get(data, "error.message")}')
+
+ demisto.results('ok')
+
+ except TypeError as ex:
+ demisto.debug(str(ex))
+ return_error(f'API call to MS Graph failed, could not parse result. '
+ f'Please check authentication related parameters. [{response.status_code}]')
+
+
+def terminate_user_session_command():
+ user = demisto.getArg('user')
+ terminate_user_session(user)
+
+ return_outputs(readable_output=f'user: "{user}" session has been terminated successfully', outputs=NO_OUTPUTS)
+
+
+def terminate_user_session(user):
+ http_request('PATCH', f'users/{user}', body=BLOCK_ACCOUNT_JSON)
+
+
+def unblock_user_command():
+ user = demisto.getArg('user')
+ unblock_user(user)
+
+ return_outputs(
+ readable_output=f'"{user}" unblocked. It might take several minutes for the changes to take affect across '
+ 'all applications.',
+ outputs=NO_OUTPUTS
+ )
+
+
+def unblock_user(user):
+ http_request('PATCH', f'users/{user}', body=UNBLOCK_ACCOUNT_JSON)
+
+
+def delete_user_command():
+ user = demisto.getArg('user')
+ delete_user(user)
+
+ return_outputs(readable_output=f'user: "{user}" was deleted successfully', outputs=NO_OUTPUTS)
+
+
+def delete_user(user):
+ http_request('DELETE', f'users/{user}')
+
+
+def create_user_command():
+ required_properties = {
+ 'accountEnabled': demisto.getArg('account_enabled'),
+ 'displayName': demisto.getArg('display_name'),
+ 'onPremisesImmutableId': demisto.getArg('on_premises_immutable_id'),
+ 'mailNickname': demisto.getArg('mail_nickname'),
+ 'passwordProfile': {
+ "forceChangePasswordNextSignIn": 'true',
+ "password": demisto.getArg('password')
+ },
+ 'userPrincipalName': demisto.getArg('user_principal_name')
+ }
+ other_properties = {}
+ if demisto.getArg('other_properties'):
+ # other_properties is optional; parse comma-separated 'key=value' pairs, splitting on the first '=' only
+ for key_value in demisto.getArg('other_properties').split(','):
+ key, value = key_value.split('=', 1)
+ other_properties[key] = value
+
+ # create the user
+ required_properties.update(other_properties)
+ create_user(required_properties)
+
+ # display the new user and its properties
+ user = required_properties.get('userPrincipalName')
+ user_data = get_user(user, '*')
+ user_readable, user_outputs = parse_outputs(user_data)
+ human_readable = tableToMarkdown(name=f"{user} was created successfully:", t=user_readable, removeNull=True)
+ outputs = {'MSGraphUser(val.ID == obj.ID)': user_outputs}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=user_data)
+
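+# Illustrative argument format (hypothetical values): other_properties may be
+# passed as 'givenName=John,surname=Doe', which is parsed into
+# {'givenName': 'John', 'surname': 'Doe'} and merged into the request body.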
+
+def create_user(properties):
+ http_request('POST', 'users', body=json.dumps(properties))
+
+
+def update_user_command():
+ user = demisto.getArg('user')
+ updated_fields = demisto.getArg('updated_fields')
+
+ update_user(user, updated_fields)
+ get_user_command()
+
+
+def update_user(user, updated_fields):
+ body = {}
+ for key_value in updated_fields.split(','):
+ field, value = key_value.split('=', 1)
+ body[field] = value
+ http_request('PATCH', f'users/{user}', body=json.dumps(body))
+
+
+def get_delta_command():
+ properties = demisto.getArg('properties') + ',userPrincipalName'
+ users_data = get_delta(properties)
+ headers = list(set([camel_case_to_readable(p) for p in argToList(properties)] + ['ID', 'User Principal Name']))
+
+ users_readable, users_outputs = parse_outputs(users_data)
+ hr = tableToMarkdown(name='All Graph Users', headers=headers, t=users_readable, removeNull=True)
+ outputs = {'MSGraphUser(val.ID == obj.ID)': users_outputs}
+ return_outputs(readable_output=hr, outputs=outputs, raw_response=users_data)
+
+
+def get_delta(properties):
+ users = http_request('GET', 'users/delta', params={'$select': properties}).get('value')
+ return users
+
+
+def get_user_command():
+ user = demisto.getArg('user')
+ properties = demisto.args().get('properties', '*')
+ user_data = get_user(user, properties)
+
+ user_readable, user_outputs = parse_outputs(user_data)
+ human_readable = tableToMarkdown(name=f"{user} data", t=user_readable, removeNull=True)
+ outputs = {'MSGraphUser(val.ID == obj.ID)': user_outputs}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=user_data)
+
+
+def get_user(user, properties):
+ user_data = http_request('GET', f'users/{user}', params={'$select': properties})
+ user_data.pop('@odata.context', None)
+
+ return user_data
+
+
+def list_users_command():
+ properties = demisto.args().get('properties', 'id,displayName,jobTitle,mobilePhone,mail')
+ users_data = list_users(properties)
+
+ users_readable, users_outputs = parse_outputs(users_data)
+ human_readable = tableToMarkdown(name='All Graph Users', t=users_readable, removeNull=True)
+ outputs = {'MSGraphUser(val.ID == obj.ID)': users_outputs}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=users_data)
+
+
+def list_users(properties):
+ users = http_request('GET', 'users', params={'$select': properties}).get('value')
+ return users
+
+
+try:
+ handle_proxy()
+
+ # COMMANDS
+ if demisto.command() == 'test-module':
+ test_function()
+
+ elif demisto.command() == 'msgraph-user-terminate-session':
+ terminate_user_session_command()
+
+ elif demisto.command() == 'msgraph-user-unblock':
+ unblock_user_command()
+
+ elif demisto.command() == 'msgraph-user-update':
+ update_user_command()
+
+ elif demisto.command() == 'msgraph-user-delete':
+ delete_user_command()
+
+ elif demisto.command() == 'msgraph-user-create':
+ create_user_command()
+
+ elif demisto.command() == 'msgraph-user-get-delta':
+ get_delta_command()
+
+ elif demisto.command() == 'msgraph-user-get':
+ get_user_command()
+
+ elif demisto.command() == 'msgraph-user-list':
+ list_users_command()
+
+
+except Exception as ex:
+ return_error(str(ex))
diff --git a/Integrations/MicrosoftGraphUser/MicrosoftGraphUser.yml b/Integrations/MicrosoftGraphUser/MicrosoftGraphUser.yml
new file mode 100644
index 000000000000..b3c1a2fd1223
--- /dev/null
+++ b/Integrations/MicrosoftGraphUser/MicrosoftGraphUser.yml
@@ -0,0 +1,365 @@
+category: Analytics & SIEM
+commonfields:
+ id: Microsoft Graph User
+ version: -1
+configuration:
+- defaultvalue: https://graph.microsoft.com
+ display: Host URL (e.g., https://graph.microsoft.com)
+ name: host
+ required: true
+ type: 0
+- display: ID (received from the admin consent - see Detailed Instructions (?) section)
+ name: auth_id
+ required: true
+ type: 4
+- display: Token (received from the admin consent - see Detailed Instructions (?)
+ section)
+ name: tenant_id
+ required: true
+ type: 4
+- display: Key (received from the admin consent - see Detailed Instructions (?) section)
+ name: enc_key
+ required: true
+ type: 4
+- defaultvalue: 'false'
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: 'false'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Unified gateway to user management - all from a unified Microsoft Graph
+ User API.
+display: Microsoft Graph User
+name: Microsoft Graph User
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: User ID or userPrincipalName
+ isArray: false
+ name: user
+ required: true
+ secret: false
+ deprecated: false
+ description: Terminates a user's session from all Office 365 applications, and
+ prevents sign-in.
+ execution: false
+ name: msgraph-user-terminate-session
+ - arguments:
+ - default: false
+ description: User ID or userPrincipalName
+ isArray: false
+ name: user
+ required: true
+ secret: false
+ deprecated: false
+ description: Unblock a user.
+ execution: false
+ name: msgraph-user-unblock
+ - arguments:
+ - default: false
+ description: User ID or userPrincipalName to update properties for.
+ isArray: false
+ name: user
+ required: true
+ secret: false
+ - default: false
+ description: User fields to update, as comma-separated key=value pairs (e.g. displayName=John).
+ isArray: false
+ name: updated_fields
+ required: true
+ secret: false
+ deprecated: false
+ description: Updates the properties of a user object.
+ execution: false
+ name: msgraph-user-update
+ outputs:
+ - contextPath: MSGraphUser.ID
+ description: User's ID.
+ type: Unknown
+ - contextPath: MSGraphUser.DisplayName
+ description: User's display name.
+ type: Unknown
+ - contextPath: MSGraphUser.GivenName
+ description: User's given name.
+ type: Unknown
+ - contextPath: MSGraphUser.BusinessPhones
+ description: User's business phone numbers.
+ type: Unknown
+ - contextPath: MSGraphUser.JobTitle
+ description: User's job title.
+ type: Unknown
+ - contextPath: MSGraphUser.Mail
+ description: User's mail address.
+ type: Unknown
+ - contextPath: MSGraphUser.MobilePhone
+ description: User's mobile phone number.
+ type: Unknown
+ - contextPath: MSGraphUser.OfficeLocation
+ description: User's office location.
+ type: Unknown
+ - contextPath: MSGraphUser.PreferredLanguage
+ description: User's preferred language.
+ type: Unknown
+ - contextPath: MSGraphUser.Surname
+ description: User's surname.
+ type: Unknown
+ - contextPath: MSGraphUser.UserPrincipalName
+ description: User's principal name.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: User ID or userPrincipalName to delete.
+ isArray: false
+ name: user
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes an existing user.
+ execution: true
+ name: msgraph-user-delete
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: true if the account is enabled; otherwise, false.
+ isArray: false
+ name: account_enabled
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: The name to display in the address book.
+ isArray: false
+ name: display_name
+ required: true
+ secret: false
+ - default: false
+ description: Only needs to be specified when creating a new user account if
+ you are using a federated domain for the user's userPrincipalName (UPN) property.
+ isArray: false
+ name: on_premises_immutable_id
+ required: false
+ secret: false
+ - default: false
+ description: The mail alias for the user.
+ isArray: false
+ name: mail_nickname
+ required: true
+ secret: false
+ - default: false
+ description: The password profile for the user.
+ isArray: false
+ name: password
+ required: true
+ secret: false
+ - default: false
+ description: ' The user principal name, for example: someuser@contoso.com. '
+ isArray: false
+ name: user_principal_name
+ required: true
+ secret: false
+ - default: false
+ description: ' Optional properties for the user, for example: "displayName=name,mobilePhone=phone-num" '
+ isArray: false
+ name: other_properties
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new user.
+ execution: false
+ name: msgraph-user-create
+ outputs:
+ - contextPath: MSGraphUser.ID
+ description: User's ID.
+ type: Unknown
+ - contextPath: MSGraphUser.DisplayName
+ description: User's display name.
+ type: Unknown
+ - contextPath: MSGraphUser.GivenName
+ description: User's given name.
+ type: Unknown
+ - contextPath: MSGraphUser.BusinessPhones
+ description: User's business phone numbers.
+ type: Unknown
+ - contextPath: MSGraphUser.JobTitle
+ description: User's job title.
+ type: Unknown
+ - contextPath: MSGraphUser.Mail
+ description: User's mail address.
+ type: Unknown
+ - contextPath: MSGraphUser.MobilePhone
+ description: User's mobile phone number.
+ type: Unknown
+ - contextPath: MSGraphUser.OfficeLocation
+ description: User's office location.
+ type: Unknown
+ - contextPath: MSGraphUser.PreferredLanguage
+ description: User's preferred language.
+ type: Unknown
+ - contextPath: MSGraphUser.Surname
+ description: User's surname.
+ type: Unknown
+ - contextPath: MSGraphUser.UserPrincipalName
+ description: User's principal name.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: ' A CSV list of properties by which to filter the results, for
+ example: "displayName,jobTitle,mobilePhone" '
+ isArray: false
+ name: properties
+ required: false
+ secret: false
+ deprecated: false
+ description: Gets newly created, updated, or deleted users without performing
+ a full read of the entire user collection.
+ execution: false
+ name: msgraph-user-get-delta
+ outputs:
+ - contextPath: MSGraphUser.ID
+ description: User's ID.
+ type: Unknown
+ - contextPath: MSGraphUser.DisplayName
+ description: User's display name.
+ type: Unknown
+ - contextPath: MSGraphUser.GivenName
+ description: User's given name.
+ type: Unknown
+ - contextPath: MSGraphUser.BusinessPhones
+ description: User's business phone numbers.
+ type: Unknown
+ - contextPath: MSGraphUser.JobTitle
+ description: User's job title.
+ type: Unknown
+ - contextPath: MSGraphUser.Mail
+ description: User's mail address.
+ type: Unknown
+ - contextPath: MSGraphUser.MobilePhone
+ description: User's mobile phone.
+ type: Unknown
+ - contextPath: MSGraphUser.OfficeLocation
+ description: User's office location.
+ type: Unknown
+ - contextPath: MSGraphUser.PreferredLanguage
+ description: User's preferred language.
+ type: Unknown
+ - contextPath: MSGraphUser.Surname
+ description: User's surname.
+ type: Unknown
+ - contextPath: MSGraphUser.UserPrincipalName
+ description: User's principal name.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: User ID or userPrincipalName
+ isArray: false
+ name: user
+ required: true
+ secret: false
+ - default: false
+ description: ' A CSV list of properties by which to filter the results, for
+ example: "displayName,jobTitle,mobilePhone" '
+ isArray: false
+ name: properties
+ required: false
+ secret: false
+ deprecated: false
+ description: 'Retrieves the properties and relationships of a user object. For more
+ information, visit: https://docs.microsoft.com/en-us/graph/api/user-update?view=graph-rest-1.0'
+ execution: false
+ name: msgraph-user-get
+ outputs:
+ - contextPath: MSGraphUser.ID
+ description: User's ID.
+ type: Unknown
+ - contextPath: MSGraphUser.DisplayName
+ description: User's display name.
+ type: Unknown
+ - contextPath: MSGraphUser.GivenName
+ description: User's given name.
+ type: Unknown
+ - contextPath: MSGraphUser.BusinessPhones
+ description: User's business phone numbers.
+ type: Unknown
+ - contextPath: MSGraphUser.JobTitle
+ description: User's job title.
+ type: Unknown
+ - contextPath: MSGraphUser.Mail
+ description: User's mail address.
+ type: Unknown
+ - contextPath: MSGraphUser.MobilePhone
+ description: User's mobile phone number.
+ type: Unknown
+ - contextPath: MSGraphUser.OfficeLocation
+ description: User's office location.
+ type: Unknown
+ - contextPath: MSGraphUser.PreferredLanguage
+ description: User's preferred language.
+ type: Unknown
+ - contextPath: MSGraphUser.Surname
+ description: User's surname.
+ type: Unknown
+ - contextPath: MSGraphUser.UserPrincipalName
+ description: User's principal name.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: ' A CSV list of properties by which to filter the results, for
+ example: "displayName,jobTitle,mobilePhone" '
+ isArray: false
+ name: properties
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of user objects.
+ execution: false
+ name: msgraph-user-list
+ outputs:
+ - contextPath: MSGraphUser.ID
+ description: User's ID.
+ type: Unknown
+ - contextPath: MSGraphUser.DisplayName
+ description: User's display name.
+ type: Unknown
+ - contextPath: MSGraphUser.GivenName
+ description: User's given name.
+ type: Unknown
+ - contextPath: MSGraphUser.BusinessPhones
+ description: User's business phone numbers.
+ type: Unknown
+ - contextPath: MSGraphUser.JobTitle
+ description: User's job title.
+ type: Unknown
+ - contextPath: MSGraphUser.Mail
+ description: User's mail address.
+ type: Unknown
+ - contextPath: MSGraphUser.MobilePhone
+ description: User's mobile phone number.
+ type: Unknown
+ - contextPath: MSGraphUser.OfficeLocation
+ description: User's office location.
+ type: Unknown
+ - contextPath: MSGraphUser.PreferredLanguage
+ description: User's preferred language.
+ type: Unknown
+ - contextPath: MSGraphUser.Surname
+ description: User's surname.
+ type: Unknown
+ - contextPath: MSGraphUser.UserPrincipalName
+ description: User's principal name.
+ type: Unknown
+ dockerimage: demisto/crypto:1.0.0.303
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- Microsoft Graph - Test
diff --git a/Integrations/MicrosoftGraphUser/MicrosoftGraphUser_description.md b/Integrations/MicrosoftGraphUser/MicrosoftGraphUser_description.md
new file mode 100644
index 000000000000..580de051fbf8
--- /dev/null
+++ b/Integrations/MicrosoftGraphUser/MicrosoftGraphUser_description.md
@@ -0,0 +1,2 @@
+To allow us access to Microsoft Graph User, an admin has to approve our app using an admin consent flow, by clicking on the following [link](https://oproxy.demisto.ninja/ms-graph-user).
+After authorizing the Demisto app, you will get an ID, Token, and Key, which should be inserted in the integration instance configuration's corresponding fields.
\ No newline at end of file
diff --git a/Integrations/MicrosoftGraphUser/MicrosoftGraphUser_image.png b/Integrations/MicrosoftGraphUser/MicrosoftGraphUser_image.png
new file mode 100644
index 000000000000..08f94ff881d9
Binary files /dev/null and b/Integrations/MicrosoftGraphUser/MicrosoftGraphUser_image.png differ
diff --git a/Integrations/MicrosoftTeams/CHANGELOG.md b/Integrations/MicrosoftTeams/CHANGELOG.md
new file mode 100644
index 000000000000..7aa16b474d44
--- /dev/null
+++ b/Integrations/MicrosoftTeams/CHANGELOG.md
@@ -0,0 +1,20 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+ - Added support for single port mapping.
+ - Added the ***microsoft-teams-integration-health*** command.
+
+## [19.9.1] - 2019-09-18
+ - Added verification for the authorization header signature.
+ - Added support for HTTPS.
+
+## [19.9.0] - 2019-09-04
+ - Added the *channel_name* argument to the ***mirror-investigation*** command, which enables mirroring to a channel with a custom channel name.
+ - Added a message that is sent to a channel that is opened as part of the mirror investigation process.
+ - Improved messages returned from the bot in direct messages.
+ - Improved error handling for HTTP errors returned from Microsoft Bot Framework API.
+
+## [19.8.2] - 2019-08-22
+#### New Integration
+Send messages and notifications to your team members.
diff --git a/Integrations/MicrosoftTeams/MicrosoftTeams.py b/Integrations/MicrosoftTeams/MicrosoftTeams.py
new file mode 100644
index 000000000000..e37f735a74a2
--- /dev/null
+++ b/Integrations/MicrosoftTeams/MicrosoftTeams.py
@@ -0,0 +1,1513 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+import requests
+from distutils.util import strtobool
+from flask import Flask, request, Response
+from gevent.pywsgi import WSGIServer
+import jwt
+import time
+from threading import Thread
+from typing import Match, Union, Optional, cast, Dict, Any, List
+import re
+from jwt.algorithms import RSAAlgorithm
+from tempfile import NamedTemporaryFile
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARIABLES'''
+PARAMS: dict = demisto.params()
+BOT_ID: str = PARAMS.get('bot_id', '')
+BOT_PASSWORD: str = PARAMS.get('bot_password', '')
+USE_SSL: bool = not PARAMS.get('insecure', False)
+APP: Flask = Flask('demisto-teams')
+PLAYGROUND_INVESTIGATION_TYPE: int = 9
+GRAPH_BASE_URL: str = 'https://graph.microsoft.com'
+
+INCIDENT_TYPE: str = PARAMS.get('incidentType', '')
+
+URL_REGEX: str = r'http[s]?://(?:[a-zA-Z]|[0-9]|[:/$_@.&+#-]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
+ENTITLEMENT_REGEX: str = \
+ r'(\{){0,1}[0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12}(\}){0,1}'
+ENTRY_FOOTER: str = 'From Microsoft Teams'
+
+MESSAGE_TYPES: dict = {
+ 'mirror_entry': 'mirrorEntry',
+ 'incident_opened': 'incidentOpened',
+ 'status_changed': 'incidentStatusChanged'
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def epoch_seconds(d: datetime = None) -> int:
+ """
+ Return the number of seconds since the epoch for the given date. If no date is given, use the current UTC time.
+ :param d: timestamp datetime object
+ :return: timestamp in epoch
+ """
+ if not d:
+ d = datetime.utcnow()
+ return int((d - datetime.utcfromtimestamp(0)).total_seconds())
+
+
+def error_parser(resp_err: requests.Response, api: str = 'graph') -> str:
+ """
+ Parses Microsoft API error message from Requests response
+ :param resp_err: response with error
+ :param api: API to query (graph/bot)
+ :return: string of error
+ """
+ try:
+ response: dict = resp_err.json()
+ if api == 'graph':
+ error: dict = response.get('error', {})
+ # Only build an error string if the response actually contains an error object
+ if error:
+ err_str: str = f"{error.get('code', '')}: {error.get('message', '')}"
+ return err_str
+ elif api == 'bot':
+ error_description: str = response.get('error_description', '')
+ if error_description:
+ return error_description
+ # If no error message
+ raise ValueError()
+ except ValueError:
+ return resp_err.text
+
+
+def translate_severity(severity: str) -> int:
+ """
+ Translates Demisto text severity to int severity
+ :param severity: Demisto text severity
+ :return: Demisto integer severity
+ """
+ severity_dictionary = {
+ 'Low': 1,
+ 'Medium': 2,
+ 'High': 3,
+ 'Critical': 4
+ }
+ return severity_dictionary.get(severity, 0)
+
+
+def create_incidents(demisto_user: dict, incidents: list) -> dict:
+ """
+ Creates incidents according to a provided JSON object
+ :param demisto_user: The demisto user associated with the request (if exists)
+ :param incidents: The incidents JSON
+ :return: The creation result
+ """
+ if demisto_user:
+ data = demisto.createIncidents(incidents, userID=demisto_user.get('id', ''))
+ else:
+ data = demisto.createIncidents(incidents)
+ return data
+
+
+def process_incident_create_message(demisto_user: dict, message: str) -> str:
+ """
+ Processes an incident creation message
+ :param demisto_user: The Demisto user associated with the message (if exists)
+ :param message: The creation message
+ :return: Creation result
+ """
+ json_pattern: str = r'(?<=json=).*'
+ name_pattern: str = r'(?<=name=).*'
+ type_pattern: str = r'(?<=type=).*'
+ json_match: Optional[Match[str]] = re.search(json_pattern, message)
+ created_incident: Union[dict, list]
+ data: str = str()
+ if json_match:
+ if re.search(name_pattern, message) or re.search(type_pattern, message):
+ data = 'No properties other than json should be specified.'
+ else:
+ incidents_json: str = json_match.group()
+ incidents: Union[dict, list] = json.loads(incidents_json.replace('“', '"').replace('”', '"'))  # normalize smart quotes
+ if not isinstance(incidents, list):
+ incidents = [incidents]
+ created_incident = create_incidents(demisto_user, incidents)
+ if not created_incident:
+ data = 'Failed creating incidents.'
+ else:
+ name_match: Optional[Match[str]] = re.search(name_pattern, message)
+ if not name_match:
+ data = 'Please specify arguments in the following manner: name=<name> type=[type] or json=<json>.'
+ else:
+ incident_name: str = re.sub('type=.*', '', name_match.group()).strip()
+ incident_type: str = str()
+
+ type_match: Optional[Match[str]] = re.search(type_pattern, message)
+ if type_match:
+ incident_type = re.sub('name=.*', '', type_match.group()).strip()
+
+ incident: dict = {'name': incident_name}
+
+ incident_type = incident_type or INCIDENT_TYPE
+ if incident_type:
+ incident['type'] = incident_type
+
+ created_incident = create_incidents(demisto_user, [incident])
+ if not created_incident:
+ data = 'Failed creating incidents.'
+
+ if created_incident:
+ if isinstance(created_incident, list):
+ created_incident = created_incident[0]
+ created_incident = cast(Dict[Any, Any], created_incident)
+ server_links: dict = demisto.demistoUrls()
+ server_link: str = server_links.get('server', '')
+ data = f"Successfully created incident {created_incident.get('name', '')}.\n" \
+ f"View it on: {server_link}#/WarRoom/{created_incident.get('id', '')}"
+
+ return data
+
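+# Illustrative messages (hypothetical values): a message containing
+#   json={"name": "incident1", "type": "Phishing"}
+# creates incidents from raw JSON, while a message containing
+#   name=My Incident type=Phishing
+# creates a single incident by name with an optional type.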
+
+def is_investigation_mirrored(investigation_id: str, mirrored_channels: list) -> int:
+ """
+ Checks if investigation is already mirrored
+ :param investigation_id: Investigation ID to check if mirrored
+ :param mirrored_channels: List of mirrored channels to check if investigation is mirrored in
+ :return: Index in mirrored channels list if mirrored, else -1
+ """
+ for index, channel in enumerate(mirrored_channels):
+ if channel.get('investigation_id') == investigation_id:
+ return index
+ return -1
+
+
+def urlify_hyperlinks(message: str) -> str:
+ """
+ Turns URL to markdown hyper-link
+ e.g. https://www.demisto.com -> [https://www.demisto.com](https://www.demisto.com)
+ :param message: Message to look for URLs in
+ :return: Formatted message with hyper-links
+ """
+ formatted_message: str = message
+ # URLify markdown hyperlinks
+ urls = re.findall(URL_REGEX, message)
+ for url in urls:
+ formatted_message = formatted_message.replace(url, f'[{url}]({url})')
+ return formatted_message
+
+
+def get_team_member(integration_context: dict, team_member_id: str) -> dict:
+ """
+ Searches for a team member
+ :param integration_context: Cached object to search for team member in
+ :param team_member_id: Team member ID to search for
+ :return: Found team member object
+ """
+ team_member: dict = dict()
+ teams: list = json.loads(integration_context.get('teams', '[]'))
+
+ for team in teams:
+ team_members: list = team.get('team_members', [])
+ for member in team_members:
+ if member.get('id') == team_member_id:
+ team_member['username'] = member.get('name', '')
+ team_member['user_email'] = member.get('userPrincipalName', '')
+ return team_member
+
+ raise ValueError('Team member was not found')
+
+
+def get_team_member_id(requested_team_member: str, integration_context: dict) -> str:
+ """
+ Gets team member ID based on name, email or principal name
+ :param requested_team_member: Team member name / principal name / email to look for
+ :param integration_context: Cached object to search for team member in
+ :return: Team member ID
+ """
+ teams: list = json.loads(integration_context.get('teams', '[]'))
+
+ for team in teams:
+ team_members: list = team.get('team_members', [])
+ for team_member in team_members:
+ if requested_team_member in {team_member.get('name', ''), team_member.get('userPrincipalName', '')}:
+ return team_member.get('id')
+
+ raise ValueError('Team member was not found')
+
+
+def create_adaptive_card(body: list, actions: list = None) -> dict:
+ """
+ Creates Microsoft Teams adaptive card object given body and actions
+ :param body: Adaptive card data
+ :param actions: Adaptive card actions
+ :return: Adaptive card object
+ """
+ adaptive_card: dict = {
+ 'contentType': 'application/vnd.microsoft.card.adaptive',
+ 'content': {
+ '$schema': 'http://adaptivecards.io/schemas/adaptive-card.json',
+ 'version': '1.0',
+ 'type': 'AdaptiveCard',
+ 'body': body
+ }
+ }
+ if actions:
+ adaptive_card['content']['actions'] = actions
+ return adaptive_card
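+
+ # Minimal usage sketch (illustrative body):
+ # create_adaptive_card([{'type': 'TextBlock', 'text': 'Hello'}]) returns the
+ # structure above with 'body' set to the given list and no 'actions' key.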
+
+
+def process_tasks_list(data_by_line: list) -> dict:
+ """
+ Processes the list of tasks assigned to the user, as returned from the Demisto server, and creates an adaptive card
+ :param data_by_line: List of tasks to process
+ :return: Adaptive card of assigned tasks
+ """
+ body: list = list()
+ for line in data_by_line[2:]:
+ split_data: list = [stat.strip() for stat in line.split('|')]
+ body.append({
+ 'type': 'FactSet',
+ 'facts': [
+ {
+ 'title': 'Task:',
+ 'value': split_data[0]
+ },
+ {
+ 'title': 'Incident:',
+ 'value': split_data[1]
+ },
+ {
+ 'title': 'Due:',
+ 'value': split_data[2]
+ },
+ {
+ 'title': 'Link:',
+ 'value': f'[{split_data[3]}]({split_data[3]})'
+ }
+ ]
+ })
+ return create_adaptive_card(body)
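+
+ # The input is the server's markdown table split into lines: the first two lines
+ # (header and separator) are skipped, and each row is split on '|' into task
+ # name, incident, due date and link. Illustrative row:
+ #   'Review indicators | incident-1 | 2019-10-01 | https://server/#/WarRoom/1'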
+
+
+def process_incidents_list(data_by_line: list) -> dict:
+ """
+ Processes the list of incidents assigned to the user, as returned from the Demisto server, and creates an adaptive card
+ :param data_by_line: List of incidents to process
+ :return: Adaptive card of assigned incidents
+ """
+ body: list = list()
+ for line in data_by_line[2:]:
+ split_data: list = [stat.strip() for stat in line.split('|')]
+ body.append({
+ 'type': 'FactSet',
+ 'facts': [
+ {
+ 'title': 'ID:',
+ 'value': split_data[0]
+ },
+ {
+ 'title': 'Name:',
+ 'value': split_data[1]
+ },
+ {
+ 'title': 'Status:',
+ 'value': split_data[2]
+ },
+ {
+ 'title': 'Type:',
+ 'value': split_data[3]
+ },
+ {
+ 'title': 'Owner:',
+ 'value': split_data[4]
+ },
+ {
+ 'title': 'Created:',
+ 'value': split_data[5]
+ },
+ {
+ 'title': 'Link:',
+ 'value': f'[{split_data[6]}]({split_data[6]})'
+ }
+ ]
+ })
+ return create_adaptive_card(body)
+
+
+def process_mirror_or_unknown_message(message: str) -> dict:
+ """
+ Processes mirror investigation command or unknown direct message and creates adaptive card
+ :param message: The direct message to process
+ :return: Adaptive card of mirror response / unknown message
+ """
+ body: list = [{
+ 'type': 'TextBlock',
+ 'text': message.replace('\n', '\n\n'),
+ 'wrap': True
+ }]
+ return create_adaptive_card(body)
+
+
+def process_ask_user(message: str) -> dict:
+ """
+ Processes ask user message and creates adaptive card
+ :param message: The question object
+ :return: Adaptive card of the question to send
+ """
+ message_object: dict = json.loads(message)
+ text: str = message_object.get('message_text', '')
+ entitlement: str = message_object.get('entitlement', '')
+ options: list = message_object.get('options', [])
+ investigation_id: str = message_object.get('investigation_id', '')
+ task_id: str = message_object.get('task_id', '')
+ body = [
+ {
+ 'type': 'TextBlock',
+ 'text': text
+ }
+ ]
+ actions: list = list()
+ for option in options:
+ actions.append({
+ 'type': 'Action.Submit',
+ 'title': option,
+ 'data': {
+ 'response': option,
+ 'entitlement': entitlement,
+ 'investigation_id': investigation_id,
+ 'task_id': task_id
+ }
+ })
+ return create_adaptive_card(body, actions)
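+
+ # The message is expected to be a JSON string of roughly this shape
+ # (illustrative values):
+ # {"message_text": "Approve blocking the IP?", "entitlement": "<guid>",
+ #  "options": ["Yes", "No"], "investigation_id": "1", "task_id": "2"}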
+
+
+def get_bot_access_token() -> str:
+ """
+ Retrieves Bot Framework API access token, either from cache or from Microsoft
+ :return: The Bot Framework API access token
+ """
+ integration_context: dict = demisto.getIntegrationContext()
+ access_token: str = integration_context.get('bot_access_token', '')
+ valid_until: int = integration_context.get('bot_valid_until', 0)
+ if access_token and valid_until:
+ if epoch_seconds() < valid_until:
+ return access_token
+ url: str = 'https://login.microsoftonline.com/botframework.com/oauth2/v2.0/token'
+ data: dict = {
+ 'grant_type': 'client_credentials',
+ 'client_id': BOT_ID,
+ 'client_secret': BOT_PASSWORD,
+ 'scope': 'https://api.botframework.com/.default'
+ }
+ response: requests.Response = requests.post(
+ url,
+ data=data,
+ verify=USE_SSL
+ )
+ if not response.ok:
+ error = error_parser(response, 'bot')
+ raise ValueError(f'Failed to get bot access token [{response.status_code}] - {error}')
+ try:
+ response_json: dict = response.json()
+ access_token = response_json.get('access_token', '')
+ expires_in: int = response_json.get('expires_in', 3595)
+ time_now: int = epoch_seconds()
+ time_buffer = 5 # seconds by which to shorten the validity period
+ if expires_in - time_buffer > 0:
+ expires_in -= time_buffer
+ integration_context['bot_access_token'] = access_token
+ integration_context['bot_valid_until'] = time_now + expires_in
+ demisto.setIntegrationContext(integration_context)
+ return access_token
+ except ValueError:
+ raise ValueError('Failed to get bot access token')
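+
+ # For reference, a successful client-credentials token response typically looks
+ # like: {"token_type": "Bearer", "expires_in": 3600, "access_token": "eyJ..."}
+ # The token is cached in the integration context until shortly before expiry.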
+
+
+def get_graph_access_token() -> str:
+ """
+ Retrieves Microsoft Graph API access token, either from cache or from Microsoft
+ :return: The Microsoft Graph API access token
+ """
+ integration_context: dict = demisto.getIntegrationContext()
+ access_token: str = integration_context.get('graph_access_token', '')
+ valid_until: int = integration_context.get('graph_valid_until', 0)
+ if access_token and valid_until:
+ if epoch_seconds() < valid_until:
+ return access_token
+ tenant_id: str = integration_context.get('tenant_id', '')
+ if not tenant_id:
+ raise ValueError(
+ 'Did not receive tenant ID from Microsoft Teams, verify the messaging endpoint is configured correctly.'
+ )
+ url: str = f'https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/token'
+ data: dict = {
+ 'grant_type': 'client_credentials',
+ 'client_id': BOT_ID,
+ 'scope': 'https://graph.microsoft.com/.default',
+ 'client_secret': BOT_PASSWORD
+ }
+
+ response: requests.Response = requests.post(
+ url,
+ data=data,
+ verify=USE_SSL
+ )
+
+ if not response.ok:
+ error = error_parser(response)
+ raise ValueError(f'Failed to get Graph access token [{response.status_code}] - {error}')
+ try:
+ response_json: dict = response.json()
+ access_token = response_json.get('access_token', '')
+ expires_in: int = response_json.get('expires_in', 3595)
+ time_now: int = epoch_seconds()
+ time_buffer = 5 # seconds by which to shorten the validity period
+ if expires_in - time_buffer > 0:
+ expires_in -= time_buffer
+ integration_context['graph_access_token'] = access_token
+ integration_context['graph_valid_until'] = time_now + expires_in
+ demisto.setIntegrationContext(integration_context)
+ return access_token
+ except ValueError:
+ raise ValueError('Failed to get Graph access token')
+
+
+def http_request(
+ method: str, url: str = '', json_: dict = None, api: str = 'graph'
+) -> Union[dict, list]:
+ """
+ A wrapper for the requests lib that sends API requests with the required auth headers and handles responses
+ :param method: any restful method
+ :param url: URL to query
+ :param json_: HTTP JSON body
+ :param api: API to query (graph/bot)
+ :return: requests.json()
+ """
+ if api == 'graph':
+ access_token = get_graph_access_token()
+ else: # Bot Framework API
+ access_token = get_bot_access_token()
+
+ headers: dict = {
+ 'Authorization': f'Bearer {access_token}',
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+ }
+ try:
+ response: requests.Response = requests.request(
+ method,
+ url,
+ headers=headers,
+ json=json_,
+ verify=USE_SSL
+ )
+
+ if not response.ok:
+ error: str = error_parser(response, api)
+ raise ValueError(f'Error in API call to Microsoft Teams: [{response.status_code}] - {error}')
+
+ if response.status_code in {202, 204}:
+ # Delete channel returns 204 if successful
+ # Update message returns 202 if the request has been accepted for processing
+ return {}
+ if response.status_code == 201:
+ # For channel creation query, we get a body in the response, otherwise we should just return
+ if not response.content:
+ return {}
+ try:
+ return response.json()
+ except ValueError:
+ raise ValueError(f'Error in API call to Microsoft Teams: {response.text}')
+ except requests.exceptions.ConnectTimeout:
+ error_message = 'Connection Timeout Error - a potential reason is that Microsoft Teams is not ' \
+ 'accessible from your host.'
+ raise ConnectionError(error_message)
+ except requests.exceptions.SSLError:
+ error_message = 'SSL Certificate Verification Failed - try selecting \'Trust any certificate\' in ' \
+ 'the integration configuration.'
+ raise ConnectionError(error_message)
+ except requests.exceptions.ProxyError:
+ error_message = 'Proxy Error - if \'Use system proxy settings\' in the integration configuration has been ' \
+ 'selected, try deselecting it.'
+ raise ConnectionError(error_message)
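+
+ # Usage sketch, mirroring calls made elsewhere in this file:
+ # channels = http_request('GET', f'{GRAPH_BASE_URL}/v1.0/teams/{team_aad_id}/channels')
+ # members = http_request('GET', f'{service_url}/v3/conversations/{team_id}/members', api='bot')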
+
+
+def integration_health():
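+ """
+ Checks Bot Framework and Graph API health and reports mirrored channels to the war room
+ """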
+
+ bot_framework_api_health = 'Operational'
+ graph_api_health = 'Operational'
+
+ try:
+ get_bot_access_token()
+ except ValueError as e:
+ bot_framework_api_health = f'Non operational - {str(e)}'
+
+ try:
+ get_graph_access_token()
+ except ValueError as e:
+ graph_api_health = f'Non operational - {str(e)}'
+
+ api_health_output: list = [{
+ 'Bot Framework API Health': bot_framework_api_health,
+ 'Graph API Health': graph_api_health
+ }]
+
+ api_health_human_readable: str = tableToMarkdown('Microsoft API Health', api_health_output)
+
+ mirrored_channels_output = list()
+ integration_context: dict = demisto.getIntegrationContext()
+ teams: list = json.loads(integration_context.get('teams', '[]'))
+ for team in teams:
+ mirrored_channels: list = team.get('mirrored_channels', [])
+ for channel in mirrored_channels:
+ mirrored_channels_output.append({
+ 'Team': team.get('team_name'),
+ 'Channel': channel.get('channel_name'),
+ 'Investigation ID': channel.get('investigation_id')
+ })
+
+ mirrored_channels_human_readable: str
+
+ if mirrored_channels_output:
+ mirrored_channels_human_readable = tableToMarkdown(
+ 'Microsoft Teams Mirrored Channels', mirrored_channels_output
+ )
+ else:
+ mirrored_channels_human_readable = 'No mirrored channels.'
+
+ demisto.results(api_health_human_readable + mirrored_channels_human_readable)
+
+
+def validate_auth_header(headers: dict) -> bool:
+ """
+ Validates the authorization header provided in the bot activity object
+ :param headers: Bot activity headers
+ :return: True if authorized, else False
+ """
+ parts: list = headers.get('Authorization', '').split(' ')
+ if len(parts) != 2:
+ return False
+ schema: str = parts[0]
+ jwt_token: str = parts[1]
+ if schema != 'Bearer' or not jwt_token:
+ demisto.info('Authorization header validation - failed to verify schema')
+ return False
+
+ decoded_payload: dict = jwt.decode(jwt_token, verify=False)
+ issuer: str = decoded_payload.get('iss', '')
+ if issuer != 'https://api.botframework.com':
+ demisto.info('Authorization header validation - failed to verify issuer')
+ return False
+
+ integration_context: dict = demisto.getIntegrationContext()
+ open_id_metadata: dict = json.loads(integration_context.get('open_id_metadata', '{}'))
+ keys: list = open_id_metadata.get('keys', [])
+
+ unverified_headers: dict = jwt.get_unverified_header(jwt_token)
+ key_id: str = unverified_headers.get('kid', '')
+ key_object: dict = dict()
+
+ # Check if we got the requested key in cache
+ for key in keys:
+ if key.get('kid') == key_id:
+ key_object = key
+ break
+
+ if not key_object:
+ # Didn't find requested key in cache, getting new keys
+ try:
+ open_id_url: str = 'https://login.botframework.com/v1/.well-known/openidconfiguration'
+ response: requests.Response = requests.get(open_id_url, verify=USE_SSL)
+ if not response.ok:
+ demisto.info(f'Authorization header validation failed to fetch open ID config - {response.reason}')
+ return False
+ response_json: dict = response.json()
+ jwks_uri: str = response_json.get('jwks_uri', '')
+ keys_response: requests.Response = requests.get(jwks_uri, verify=USE_SSL)
+ if not keys_response.ok:
+ demisto.info(f'Authorization header validation failed to fetch keys - {keys_response.reason}')
+ return False
+ keys_response_json: dict = keys_response.json()
+ keys = keys_response_json.get('keys', [])
+ open_id_metadata['keys'] = keys
+ except ValueError:
+ demisto.info('Authorization header validation - failed to parse keys response')
+ return False
+
+ if not keys:
+ # Didn't get new keys
+ demisto.info('Authorization header validation - failed to get keys')
+ return False
+
+ # Find requested key in new keys
+ for key in keys:
+ if key.get('kid') == key_id:
+ key_object = key
+ break
+
+ if not key_object:
+ # Didn't find requested key in new keys
+ demisto.info('Authorization header validation - failed to find relevant key')
+ return False
+
+ endorsements: list = key_object.get('endorsements', [])
+ if not endorsements or 'msteams' not in endorsements:
+ demisto.info('Authorization header validation - failed to verify endorsements')
+ return False
+
+ public_key: str = RSAAlgorithm.from_jwk(json.dumps(key_object))
+ options = {
+ 'verify_aud': False,
+ 'verify_exp': True
+ }
+ decoded_payload = jwt.decode(jwt_token, public_key, options=options)
+
+ audience_claim: str = decoded_payload.get('aud', '')
+ if audience_claim != demisto.params().get('bot_id'):
+ demisto.info('Authorization header validation - failed to verify audience_claim')
+ return False
+
+ integration_context['open_id_metadata'] = json.dumps(open_id_metadata)
+ demisto.setIntegrationContext(integration_context)
+
+ return True
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def get_team_aad_id(team_name: str) -> str:
+ """
+ Gets Team AAD ID
+ :param team_name: Team name to get AAD ID of
+ :return: team AAD ID
+ """
+ integration_context: dict = demisto.getIntegrationContext()
+ if integration_context.get('teams'):
+ teams: list = json.loads(integration_context['teams'])
+ for team in teams:
+ if team_name == team.get('team_name', ''):
+ return team.get('team_aad_id', '')
+ url: str = f"{GRAPH_BASE_URL}/beta/groups?$filter=resourceProvisioningOptions/Any(x:x eq 'Team')"
+ response: dict = cast(Dict[Any, Any], http_request('GET', url))
+ teams = response.get('value', [])
+ for team in teams:
+ if team.get('displayName', '') == team_name:
+ return team.get('id', '')
+ raise ValueError('Could not find requested team.')
+
+
+# def add_member_to_team(user_principal_name: str, team_id: str):
+# url: str = f'{GRAPH_BASE_URL}/v1.0/groups/{team_id}/members/$ref'
+# requestjson_: dict = {
+# '@odata.id': f'{GRAPH_BASE_URL}/v1.0/directoryObjects/{user_principal_name}'
+# }
+# http_request('POST', url, json_=requestjson_)
+
+
+def get_users() -> list:
+ """
+ Retrieves list of AAD users
+ :return: List of AAD users
+ """
+ url: str = f'{GRAPH_BASE_URL}/v1.0/users'
+ users: dict = cast(Dict[Any, Any], http_request('GET', url))
+ return users.get('value', [])
+
+
+# def create_group_request(
+# display_name: str, mail_enabled: bool, mail_nickname: str, security_enabled: bool,
+# owners_ids: list, members_ids: list = None
+# ) -> str:
+# url = f'{GRAPH_BASE_URL}/v1.0/groups'
+# data: dict = {
+# 'displayName': display_name,
+# 'groupTypes': ['Unified'],
+# 'mailEnabled': mail_enabled,
+# 'mailNickname': mail_nickname,
+# 'securityEnabled': security_enabled,
+# 'owners@odata.bind': owners_ids,
+# 'members@odata.bind': members_ids or owners_ids
+# }
+# group_creation_response: dict = cast(Dict[Any, Any], http_request('POST', url, json_=data))
+# group_id: str = group_creation_response.get('id', '')
+# return group_id
+#
+#
+# def create_team_request(group_id: str) -> str:
+# url = f'{GRAPH_BASE_URL}/v1.0/groups/{group_id}/team'
+# team_creation_response: dict = cast(Dict[Any, Any], http_request('PUT', url, json_={}))
+# team_id: str = team_creation_response.get('id', '')
+# return team_id
+#
+#
+# def add_bot_to_team(team_id: str):
+# url: str = f'{GRAPH_BASE_URL}/v1.0/teams/{team_id}/installedApps'
+# bot_app_id: str = ''
+# data: dict = {
+# 'teamsApp@odata.bind': f'https://graph.microsoft.com/v1.0/appCatalogs/teamsApps/{bot_app_id}'
+# }
+# print(http_request('POST', url, json_=data))
+#
+#
+# def create_team():
+# display_name: str = demisto.args().get('display_name', '')
+# mail_enabled: bool = bool(strtobool(demisto.args().get('mail_enabled', True)))
+# mail_nickname: str = demisto.args().get('mail_nickname', '')
+# security_enabled: bool = bool(strtobool(demisto.args().get('security_enabled', True)))
+# owners = argToList(demisto.args().get('owner', ''))
+# members = argToList(demisto.args().get('members', ''))
+# owners_ids: list = list()
+# members_ids: list = list()
+# users: list = get_users()
+# user_id: str = str()
+# for member in members:
+# found_member: bool = False
+# for user in users:
+# if member in {user.get('displayName', ''), user.get('mail'), user.get('userPrincipalName')}:
+# found_member = True
+# user_id = user.get('id', '')
+# members_ids.append(f'https://graph.microsoft.com/v1.0/users/{user_id}')
+# break
+# if not found_member:
+# demisto.results({
+# 'Type': entryTypes['warning'],
+# 'Contents': f'User {member} was not found',
+# 'ContentsFormat': formats['text']
+# })
+# for owner in owners:
+# found_owner: bool = False
+# for user in users:
+# if owner in {user.get('displayName', ''), user.get('mail'), user.get('userPrincipalName')}:
+# found_owner = True
+# user_id = user.get('id', '')
+# owners_ids.append(f'https://graph.microsoft.com/v1.0/users/{user_id}')
+# break
+# if not found_owner:
+# demisto.results({
+# 'Type': entryTypes['warning'],
+# 'Contents': f'User {owner} was not found',
+# 'ContentsFormat': formats['text']
+# })
+# if not owners_ids:
+# raise ValueError('Could not find given users to be Team owners.')
+# group_id: str = create_group_request(
+# display_name, mail_enabled, mail_nickname, security_enabled, owners_ids, members_ids
+# )
+# team_id: str = create_team_request(group_id)
+# add_bot_to_team(team_id)
+# demisto.results(f'Team {display_name} was created successfully')
+
+
+def create_channel(team_aad_id: str, channel_name: str, channel_description: str = '') -> str:
+ """
+ Creates a Microsoft Teams channel
+ :param team_aad_id: Team AAD ID to create channel in
+ :param channel_name: Name of channel to create
+ :param channel_description: Description of channel to create
+ :return: ID of created channel
+ """
+ url: str = f'{GRAPH_BASE_URL}/v1.0/teams/{team_aad_id}/channels'
+ request_json: dict = {
+ 'displayName': channel_name,
+ 'description': channel_description
+ }
+ channel_data: dict = cast(Dict[Any, Any], http_request('POST', url, json_=request_json))
+ channel_id: str = channel_data.get('id', '')
+ return channel_id
+
+
+def get_channel_id(channel_name: str, team_aad_id: str, investigation_id: str = None) -> str:
+ """
+ Retrieves Microsoft Teams channel ID
+ :param channel_name: Name of channel to get ID of
+ :param team_aad_id: AAD ID of team to search channel in
+ :param investigation_id: Demisto investigation ID to search mirrored channel of
+ :return: Requested channel ID
+ """
+ investigation_id = investigation_id or str()
+ integration_context: dict = demisto.getIntegrationContext()
+ teams: list = json.loads(integration_context.get('teams', '[]'))
+ for team in teams:
+ mirrored_channels: list = team.get('mirrored_channels', [])
+ for channel in mirrored_channels:
+ if channel.get('channel_name') == channel_name or channel.get('investigation_id') == investigation_id:
+ return channel.get('channel_id')
+ url: str = f'{GRAPH_BASE_URL}/v1.0/teams/{team_aad_id}/channels'
+ response: dict = cast(Dict[Any, Any], http_request('GET', url))
+ channel_id: str = ''
+ channels: list = response.get('value', [])
+ for channel in channels:
+ channel_display_name: str = channel.get('displayName', '')
+ if channel_display_name == channel_name:
+ channel_id = channel.get('id', '')
+ break
+ if not channel_id:
+ raise ValueError(f'Could not find channel: {channel_name}')
+ return channel_id
+
+
+def get_team_members(service_url: str, team_id: str) -> list:
+ """
+ Retrieves team members given a team
+ :param team_id: ID of team to get team members of
+ :param service_url: Bot service URL to query
+ :return: List of team members
+ """
+ url: str = f'{service_url}/v3/conversations/{team_id}/members'
+ response: list = cast(List[Any], http_request('GET', url, api='bot'))
+ return response
+
+
+def update_message(service_url: str, conversation_id: str, activity_id: str, text: str):
+ """
+ Updates a message in Microsoft Teams channel
+ :param service_url: Bot service URL to query
+ :param conversation_id: Conversation ID of message to update
+ :param activity_id: Activity ID of message to update
+ :param text: Text to update in the message
+ :return: None
+ """
+ body = [{
+ 'type': 'TextBlock',
+ 'text': text
+ }]
+ adaptive_card: dict = create_adaptive_card(body=body)
+ conversation = {
+ 'type': 'message',
+ 'attachments': [adaptive_card]
+ }
+ url: str = f'{service_url}/v3/conversations/{conversation_id}/activities/{activity_id}'
+ http_request('PUT', url, json_=conversation, api='bot')
+
+
+def close_channel_request(team_aad_id: str, channel_id: str):
+ """
+ Sends an HTTP request to close a Microsoft Teams channel
+ :param team_aad_id: AAD ID of team to close the channel in
+ :param channel_id: ID of channel to close
+ :return: None
+ """
+ url: str = f'{GRAPH_BASE_URL}/v1.0/teams/{team_aad_id}/channels/{channel_id}'
+ http_request('DELETE', url)
+
+
+def close_channel():
+ """
+ Deletes a mirrored Microsoft Teams channel
+ """
+ integration_context: dict = demisto.getIntegrationContext()
+ channel_name: str = demisto.args().get('channel', '')
+ investigation: dict = demisto.investigation()
+ investigation_id: str = investigation.get('id', '')
+ channel_id: str = str()
+ team_aad_id: str
+ mirrored_channels: list
+ if not channel_name:
+ # Closing channel as part of autoclose in mirroring process
+ teams: list = json.loads(integration_context.get('teams', '[]'))
+ for team in teams:
+ team_aad_id = team.get('team_aad_id', '')
+ mirrored_channels = team.get('mirrored_channels', [])
+ for channel_index, channel in enumerate(mirrored_channels):
+ if channel.get('investigation_id') == investigation_id:
+ channel_id = channel.get('channel_id', '')
+ close_channel_request(team_aad_id, channel_id)
+ mirrored_channels.pop(channel_index)
+ team['mirrored_channels'] = mirrored_channels
+ break
+ if not channel_id:
+ raise ValueError('Could not find Microsoft Teams channel to close.')
+ integration_context['teams'] = json.dumps(teams)
+ demisto.setIntegrationContext(integration_context)
+ else:
+ team_name: str = demisto.args().get('team') or demisto.params().get('team')
+ team_aad_id = get_team_aad_id(team_name)
+ channel_id = get_channel_id(channel_name, team_aad_id, investigation_id)
+ close_channel_request(team_aad_id, channel_id)
+ demisto.results('Channel was successfully closed.')
+
+
+def create_personal_conversation(integration_context: dict, team_member_id: str) -> str:
+ """
+ Create a personal conversation with a team member
+ :param integration_context: Cached object to retrieve relevant data for the conversation creation
+ :param team_member_id: ID of team member to create a conversation with
+ :return: ID of created conversation
+ """
+ bot_id: str = demisto.params().get('bot_id', '')
+ bot_name: str = integration_context.get('bot_name', '')
+ tenant_id: str = integration_context.get('tenant_id', '')
+ conversation: dict = {
+ 'bot': {
+ 'id': f'28:{bot_id}',
+ 'name': bot_name
+ },
+ 'members': [{
+ 'id': team_member_id
+ }],
+ 'channelData': {
+ 'tenant': {
+ 'id': tenant_id
+ }
+ }
+ }
+ service_url: str = integration_context.get('service_url', '')
+ if not service_url:
+ raise ValueError('Did not find service URL. Try messaging the bot on Microsoft Teams')
+ url: str = f'{service_url}/v3/conversations'
+ response: dict = cast(Dict[Any, Any], http_request('POST', url, json_=conversation, api='bot'))
+ return response.get('id', '')
+
+
+def send_message_request(service_url: str, channel_id: str, conversation: dict):
+ """
+ Sends an HTTP request to send message to Microsoft Teams
+ :param channel_id: ID of channel to send message in
+ :param conversation: Conversation message object to send
+ :param service_url: Bot service URL to query
+ :return: None
+ """
+ url: str = f'{service_url}/v3/conversations/{channel_id}/activities'
+ http_request('POST', url, json_=conversation, api='bot')
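+
+ # Example conversation payloads sent through this helper (both forms are built
+ # elsewhere in this file):
+ # {'type': 'message', 'text': 'Hello from Demisto'}
+ # {'type': 'message', 'attachments': [adaptive_card]}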
+
+
+def send_message():
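+ """
+ Sends a message or adaptive card to a Microsoft Teams channel or team member
+ """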
+ message_type: str = demisto.args().get('messageType', '')
+ original_message: str = demisto.args().get('originalMessage', '')
+ message: str = demisto.args().get('message', '')
+ try:
+ adaptive_card: dict = json.loads(demisto.args().get('adaptive_card', '{}'))
+ except ValueError:
+ raise ValueError('Given adaptive card is not in valid JSON format.')
+
+ if message_type == MESSAGE_TYPES['mirror_entry'] and ENTRY_FOOTER in original_message:
+ # Got a message which was already mirrored - skipping it
+ return
+ channel_name: str = demisto.args().get('channel', '')
+
+ if not channel_name and message_type in {MESSAGE_TYPES['status_changed'], MESSAGE_TYPES['incident_opened']}:
+ # Got a notification from server
+ channel_name = demisto.params().get('incident_notifications_channel', 'General')
+ severity: int = int(demisto.args().get('severity'))
+ severity_threshold: int = translate_severity(demisto.params().get('min_incident_severity', 'Low'))
+ if severity < severity_threshold:
+ return
+
+ team_member: str = demisto.args().get('team_member', '')
+
+ if not (team_member or channel_name):
+ raise ValueError('No channel or team member to send message were provided.')
+
+ if team_member and channel_name:
+ raise ValueError('Provide either channel or team member to send message to, not both.')
+
+ if not (message or adaptive_card):
+ raise ValueError('No message or adaptive card to send were provided.')
+
+ if message and adaptive_card:
+ raise ValueError('Provide either message or adaptive card to send, not both.')
+
+ integration_context: dict = demisto.getIntegrationContext()
+ channel_id: str = str()
+ personal_conversation_id: str = str()
+ if channel_name:
+ team_name: str = demisto.args().get('team', '') or demisto.params().get('team', '')
+ team_aad_id: str = get_team_aad_id(team_name)
+ investigation_id: str = str()
+ if message_type == MESSAGE_TYPES['mirror_entry']:
+ # Got an entry from the War Room to mirror to Teams
+ # Getting investigation ID in case channel name is custom and not the default
+ investigation: dict = demisto.investigation()
+ investigation_id = investigation.get('id', '')
+ channel_id = get_channel_id(channel_name, team_aad_id, investigation_id)
+ elif team_member:
+ team_member_id: str = get_team_member_id(team_member, integration_context)
+ personal_conversation_id = create_personal_conversation(integration_context, team_member_id)
+
+ recipient: str = channel_id or personal_conversation_id
+
+ conversation: dict
+
+ if message:
+ entitlement_match: Optional[Match[str]] = re.search(ENTITLEMENT_REGEX, message)
+ if entitlement_match:
+ # In TeamsAsk process
+ adaptive_card = process_ask_user(message)
+ conversation = {
+ 'type': 'message',
+ 'attachments': [adaptive_card]
+ }
+ else:
+ # Sending regular message
+ formatted_message: str = urlify_hyperlinks(message)
+ conversation = {
+ 'type': 'message',
+ 'text': formatted_message
+ }
+ else: # Adaptive card
+ conversation = {
+ 'type': 'message',
+ 'attachments': [adaptive_card]
+ }
+
+ service_url: str = integration_context.get('service_url', '')
+ if not service_url:
+ raise ValueError('Did not find service URL. Try messaging the bot on Microsoft Teams')
+
+ send_message_request(service_url, recipient, conversation)
+ demisto.results('Message was sent successfully.')
+
+
+def mirror_investigation():
+ """
+ Updates the integration context with a new or existing mirror.
+ """
+ investigation: dict = demisto.investigation()
+
+ if investigation.get('type') == PLAYGROUND_INVESTIGATION_TYPE:
+ raise ValueError('Can not perform this action in playground.')
+
+ integration_context: dict = demisto.getIntegrationContext()
+
+ mirror_type: str = demisto.args().get('mirror_type', 'all')
+ auto_close: str = demisto.args().get('autoclose', 'true')
+ mirror_direction: str = demisto.args().get('direction', 'both').lower()
+ team_name: str = demisto.args().get('team', '')
+ if not team_name:
+ team_name = demisto.params().get('team', '')
+ team_aad_id: str = get_team_aad_id(team_name)
+ mirrored_channels: list = list()
+ teams: list = json.loads(integration_context.get('teams', '[]'))
+ team: dict = dict()
+ for team in teams:
+ if team.get('team_aad_id', '') == team_aad_id:
+ if team.get('mirrored_channels'):
+ mirrored_channels = team['mirrored_channels']
+ break
+ if mirror_direction != 'both':
+ mirror_type = f'{mirror_type}:{mirror_direction}'
+
+ investigation_id: str = investigation.get('id', '')
+ investigation_mirrored_index: int = is_investigation_mirrored(investigation_id, mirrored_channels)
+
+ if investigation_mirrored_index > -1:
+ # Updating channel mirror configuration
+ mirrored_channels[investigation_mirrored_index]['mirror_type'] = mirror_type
+ mirrored_channels[investigation_mirrored_index]['mirror_direction'] = mirror_direction
+ mirrored_channels[investigation_mirrored_index]['auto_close'] = auto_close
+ mirrored_channels[investigation_mirrored_index]['mirrored'] = False
+ demisto.results('Investigation mirror was updated successfully.')
+ else:
+ channel_name: str = demisto.args().get('channel_name', '') or f'incident-{investigation_id}'
+ channel_description: str = f'Channel to mirror incident {investigation_id}'
+ channel_id: str = create_channel(team_aad_id, channel_name, channel_description)
+ service_url: str = integration_context.get('service_url', '')
+ server_links: dict = demisto.demistoUrls()
+ server_link: str = server_links.get('server', '')
+ warroom_link: str = f'{server_link}#/WarRoom/{investigation_id}'
+ conversation: dict = {
+ 'type': 'message',
+ 'text': f'This channel was created to mirror [incident {investigation_id}]({warroom_link}) '
+ f'between Teams and Demisto. In order for your Teams messages to be mirrored in Demisto, '
+ f'you need to mention the Demisto Bot in the message.'
+ }
+ send_message_request(service_url, channel_id, conversation)
+ mirrored_channels.append({
+ 'channel_id': channel_id,
+ 'investigation_id': investigation_id,
+ 'mirror_type': mirror_type,
+ 'mirror_direction': mirror_direction,
+ 'auto_close': auto_close,
+ 'mirrored': False,
+ 'channel_name': channel_name
+ })
+ demisto.results(f'Investigation mirrored successfully in channel {channel_name}.')
+ team['mirrored_channels'] = mirrored_channels
+ integration_context['teams'] = json.dumps(teams)
+ demisto.setIntegrationContext(integration_context)
+
+
+def channel_mirror_loop():
+ """
+ Runs in a long running container - checking for newly mirrored investigations.
+ """
+ while True:
+ found_channel_to_mirror: bool = False
+ try:
+ integration_context = demisto.getIntegrationContext()
+ teams: list = json.loads(integration_context.get('teams', '[]'))
+ for team in teams:
+ mirrored_channels = team.get('mirrored_channels', [])
+ channel: dict
+ for channel in mirrored_channels:
+ investigation_id = channel.get('investigation_id', '')
+ if not channel['mirrored']:
+ demisto.info(f'Mirroring incident: {investigation_id} in Microsoft Teams')
+ channel_to_update: dict = channel
+ if channel_to_update['mirror_direction'] and channel_to_update['mirror_type']:
+ demisto.mirrorInvestigation(
+ channel_to_update['investigation_id'],
+ channel_to_update['mirror_type'],
+ bool(strtobool(channel_to_update['auto_close']))
+ )
+ channel_to_update['mirrored'] = True
+ demisto.info(f'Mirrored incident: {investigation_id} to Microsoft Teams successfully')
+ else:
+ demisto.info(f'Could not mirror {investigation_id}')
+ team['mirrored_channels'] = mirrored_channels
+ integration_context['teams'] = json.dumps(teams)
+ demisto.setIntegrationContext(integration_context)
+ found_channel_to_mirror = True
+ break
+ if found_channel_to_mirror:
+ break
+ except Exception as e:
+ demisto.error(f'An error occurred in channel mirror loop: {str(e)}')
+ demisto.updateModuleHealth(f'An error occurred: {str(e)}')
+ finally:
+ time.sleep(5)
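+
+ # Note: each pass mirrors at most one newly-added channel (the loops break after
+ # the first match), and the loop sleeps for 5 seconds between iterations.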
+
+
+def member_added_handler(integration_context: dict, request_body: dict, channel_data: dict):
+ """
+ Handles member added activity
+ :param integration_context: Cached object to retrieve relevant data from
+ :param request_body: Activity payload
+ :param channel_data: Microsoft Teams tenant, team and channel details
+ :return: None
+ """
+ bot_id = demisto.params().get('bot_id')
+
+ team: dict = channel_data.get('team', {})
+ team_id: str = team.get('id', '')
+ team_aad_id: str = team.get('aadGroupId', '')
+ team_name: str = team.get('name', '')
+
+ tenant: dict = channel_data.get('tenant', {})
+ tenant_id: str = tenant.get('id', '')
+
+ recipient: dict = request_body.get('recipient', {})
+ recipient_name: str = recipient.get('name', '')
+
+ members_added: list = request_body.get('membersAdded', [])
+
+ teams: list = json.loads(integration_context.get('teams', '[]'))
+
+ service_url: str = integration_context.get('service_url', '')
+ if not service_url:
+ raise ValueError('Did not find service URL. Try messaging the bot on Microsoft Teams')
+
+ for member in members_added:
+ member_id = member.get('id', '')
+ if bot_id in member_id:
+ # The bot was added to a team, caching team ID and team members
+ demisto.info(f'The bot was added to team {team_name}')
+ integration_context['tenant_id'] = tenant_id
+ integration_context['bot_name'] = recipient_name
+ break
+ team_members: list = get_team_members(service_url, team_id)
+
+ found_team: bool = False
+ for team in teams:
+ if team.get('team_aad_id', '') == team_aad_id:
+ team['team_members'] = team_members
+ found_team = True
+ break
+ if not found_team:
+ # Didn't find an existing team, adding a new team object
+ teams.append({
+ 'team_aad_id': team_aad_id,
+ 'team_id': team_id,
+ 'team_name': team_name,
+ 'team_members': team_members
+ })
+ integration_context['teams'] = json.dumps(teams)
+ demisto.setIntegrationContext(integration_context)
+
+
+def direct_message_handler(integration_context: dict, request_body: dict, conversation: dict, message: str):
+ """
+ Handles a direct message sent to the bot
+ :param integration_context: Cached object to retrieve relevant data from
+ :param request_body: Activity payload
+ :param conversation: Conversation object sent
+ :param message: The direct message sent
+ :return: None
+ """
+ conversation_id: str = conversation.get('id', '')
+
+ from_property: dict = request_body.get('from', {})
+ user_id: str = from_property.get('id', '')
+
+ team_member: dict = get_team_member(integration_context, user_id)
+
+ username: str = team_member.get('username', '')
+ user_email: str = team_member.get('user_email', '')
+
+ formatted_message: str = str()
+
+ attachment: dict = dict()
+
+ return_card: bool = False
+
+ allow_external_incidents_creation: bool = demisto.params().get('allow_external_incidents_creation', False)
+
+ lowered_message = message.lower()
+ if lowered_message.find('incident') != -1 and (lowered_message.find('create') != -1
+ or lowered_message.find('open') != -1
+ or lowered_message.find('new') != -1):
+ if user_email:
+ demisto_user = demisto.findUser(email=user_email)
+ else:
+ demisto_user = demisto.findUser(username=username)
+
+ if not demisto_user and not allow_external_incidents_creation:
+ data = 'You are not allowed to create incidents.'
+ else:
+ data = process_incident_create_message(demisto_user, message)
+ formatted_message = urlify_hyperlinks(data)
+ else:
+ try:
+ data = demisto.directMessage(message, username, user_email, allow_external_incidents_creation)
+ return_card = True
+ if data.startswith('`'): # We got a list of incidents/tasks:
+ data_by_line: list = data.replace('```', '').strip().split('\n')
+ if data_by_line[0].startswith('Task'):
+ attachment = process_tasks_list(data_by_line)
+ else:
+ attachment = process_incidents_list(data_by_line)
+ else: # Mirror investigation command / unknown direct message
+ attachment = process_mirror_or_unknown_message(data)
+ except Exception as e:
+ data = str(e)
+ if return_card:
+ conversation = {
+ 'type': 'message',
+ 'attachments': [attachment]
+ }
+ else:
+ formatted_message = formatted_message or data
+ conversation = {
+ 'type': 'message',
+ 'text': formatted_message
+ }
+
+ service_url: str = integration_context.get('service_url', '')
+ if not service_url:
+ raise ValueError('Did not find service URL. Try messaging the bot on Microsoft Teams')
+
+ send_message_request(service_url, conversation_id, conversation)
+
+
+def entitlement_handler(integration_context: dict, request_body: dict, value: dict, conversation_id: str):
+ """
+ Handles activity the bot received as part of TeamsAsk flow, which includes entitlement
+ :param integration_context: Cached object to retrieve relevant data from
+ :param request_body: Activity payload
+ :param value: Object which includes the user response and entitlement metadata
+ :param conversation_id: Message conversation ID
+ :return: None
+ """
+ response: str = value.get('response', '')
+ entitlement_guid: str = value.get('entitlement', '')
+ investigation_id: str = value.get('investigation_id', '')
+ task_id: str = value.get('task_id', '')
+ from_property: dict = request_body.get('from', {})
+ team_members_id: str = from_property.get('id', '')
+ team_member: dict = get_team_member(integration_context, team_members_id)
+ demisto.handleEntitlementForUser(
+ incidentID=investigation_id,
+ guid=entitlement_guid,
+ taskID=task_id,
+ email=team_member.get('user_email', ''),
+ content=response
+ )
+ activity_id: str = request_body.get('replyToId', '')
+ service_url: str = integration_context.get('service_url', '')
+ if not service_url:
+ raise ValueError('Did not find service URL. Try messaging the bot on Microsoft Teams')
+ update_message(service_url, conversation_id, activity_id, 'Your response was submitted successfully.')
+
+
+def message_handler(integration_context: dict, request_body: dict, channel_data: dict, message: str):
+ """
+ Handles a message in which the bot was mentioned
+ :param integration_context: Cached object to retrieve relevant data from
+ :param request_body: Activity payload
+ :param channel_data: Microsoft Teams tenant, team and channel details
+ :param message: The message which was sent mentioning the bot
+ :return: None
+ """
+ channel: dict = channel_data.get('channel', {})
+ channel_id: str = channel.get('id', '')
+ team_id: str = channel_data.get('team', {}).get('id', '')
+
+ from_property: dict = request_body.get('from', {})
+ team_member_id: str = from_property.get('id', '')
+
+ if integration_context.get('teams'):
+ teams: list = json.loads(integration_context['teams'])
+ for team in teams:
+ if team.get('team_id', '') == team_id:
+ mirrored_channels: list = team.get('mirrored_channels', [])
+ for mirrored_channel in mirrored_channels:
+ if mirrored_channel.get('channel_id') == channel_id:
+ if mirrored_channel.get('mirror_direction', '') != 'FromDemisto' \
+ and 'none' not in mirrored_channel.get('mirror_type', ''):
+ investigation_id: str = mirrored_channel.get('investigation_id', '')
+ username: str = from_property.get('name', '')
+ user_email: str = get_team_member(integration_context, team_member_id).get('user_email', '')
+ demisto.addEntry(
+ id=investigation_id,
+ entry=message,
+ username=username,
+ email=user_email,
+ footer=f'\n**{ENTRY_FOOTER}**'
+ )
+ return
+
+
+@APP.route('/', methods=['POST'])
+def messages() -> Response:
+ """
+ Main handler for messages sent to the bot
+ """
+ headers: dict = cast(Dict[Any, Any], request.headers)
+ if validate_auth_header(headers) is False:
+ demisto.info(f'Authorization header failed: {str(headers)}')
+ else:
+ request_body: dict = request.json
+ integration_context: dict = demisto.getIntegrationContext()
+ service_url: str = request_body.get('serviceUrl', '')
+ if service_url:
+ service_url = service_url[:-1] if service_url.endswith('/') else service_url
+ integration_context['service_url'] = service_url
+ demisto.setIntegrationContext(integration_context)
+
+ channel_data: dict = request_body.get('channelData', {})
+ event_type: str = channel_data.get('eventType', '')
+
+ conversation: dict = request_body.get('conversation', {})
+ conversation_type: str = conversation.get('conversationType', '')
+ conversation_id: str = conversation.get('id', '')
+
+ message_text: str = request_body.get('text', '')
+
+ # Remove bot mention
+ bot_name = integration_context.get('bot_name', '')
+ formatted_message: str = message_text.replace(f'{bot_name} ', '')
+
+ value: dict = request_body.get('value', {})
+
+ if event_type == 'teamMemberAdded':
+ demisto.info('New Microsoft Teams team member was added')
+ member_added_handler(integration_context, request_body, channel_data)
+ elif value:
+ # In TeamsAsk process
+ demisto.info('Got response from user in MicrosoftTeamsAsk process')
+ entitlement_handler(integration_context, request_body, value, conversation_id)
+ elif conversation_type == 'personal':
+ demisto.info('Got direct message to the bot')
+ direct_message_handler(integration_context, request_body, conversation, formatted_message)
+ else:
+ demisto.info('Got message mentioning the bot')
+ message_handler(integration_context, request_body, channel_data, formatted_message)
+ demisto.info('Finished processing Microsoft Teams activity successfully')
+ demisto.updateModuleHealth('')
+ return Response(status=200)
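+
+ # For orientation, an incoming activity carries the fields read above
+ # (illustrative subset):
+ # {"serviceUrl": "https://smba.trafficmanager.net/emea",
+ #  "conversation": {"conversationType": "personal", "id": "..."},
+ #  "channelData": {"eventType": "teamMemberAdded"},
+ #  "text": "message text", "value": {}}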
+
+
+def long_running_loop():
+ """
+ The infinite loop which runs the mirror loop and the bot app in two different threads
+ """
+
+ certificate: str = demisto.params().get('certificate', '')
+ private_key: str = demisto.params().get('key', '')
+
+ certificate_path = str()
+ private_key_path = str()
+
+ try:
+ port_mapping: str = PARAMS.get('longRunningPort', '')
+ port: int
+ if port_mapping:
+ if ':' in port_mapping:
+ port = int(port_mapping.split(':')[1])
+ else:
+ port = int(port_mapping)
+ else:
+ raise ValueError('No port mapping was provided')
+ Thread(target=channel_mirror_loop, daemon=True).start()
+ demisto.info('Started channel mirror loop thread')
+
+ ssl_args = dict()
+
+ if certificate and private_key:
+ certificate_file = NamedTemporaryFile(delete=False)
+ certificate_path = certificate_file.name
+ certificate_file.write(bytes(certificate, 'utf-8'))
+ certificate_file.close()
+ ssl_args['certfile'] = certificate_path
+
+ private_key_file = NamedTemporaryFile(delete=False)
+ private_key_path = private_key_file.name
+ private_key_file.write(bytes(private_key, 'utf-8'))
+ private_key_file.close()
+ ssl_args['keyfile'] = private_key_path
+
+ demisto.info('Starting HTTPS Server')
+ else:
+ demisto.info('Starting HTTP Server')
+
+ server = WSGIServer(('', port), APP, **ssl_args)
+ server.serve_forever()
+ except Exception as e:
+ if certificate_path:
+ os.unlink(certificate_path)
+ if private_key_path:
+ os.unlink(private_key_path)
+ demisto.error(f'An error occurred in long running loop: {str(e)}')
+ raise ValueError(str(e))
+
+
+def test_module():
+ """
+ Tests token retrieval for Bot Framework API
+ """
+ get_bot_access_token()
+ demisto.results('ok')
+
+
+def main():
+ """ COMMANDS MANAGER / SWITCH PANEL """
+
+ commands: dict = {
+ 'test-module': test_module,
+ 'long-running-execution': long_running_loop,
+ 'send-notification': send_message,
+ 'mirror-investigation': mirror_investigation,
+ 'close-channel': close_channel,
+ 'microsoft-teams-integration-health': integration_health
+ # 'microsoft-teams-create-team': create_team,
+ # 'microsoft-teams-send-file': send_file,
+ }
+
+ ''' EXECUTION '''
+ command: str = demisto.command()
+ try:
+ handle_proxy()
+ LOG(f'Command being called is {command}')
+ if command in commands:
+ commands[command]()
+ # Log exceptions
+ except Exception as e:
+ if command == 'long-running-execution':
+ LOG(str(e))
+ LOG.print_log()
+ demisto.updateModuleHealth(str(e))
+ else:
+ return_error(str(e))
+
+
+if __name__ == 'builtins':
+ main()
diff --git a/Integrations/MicrosoftTeams/MicrosoftTeams.yml b/Integrations/MicrosoftTeams/MicrosoftTeams.yml
new file mode 100644
index 000000000000..335b334679dd
--- /dev/null
+++ b/Integrations/MicrosoftTeams/MicrosoftTeams.yml
@@ -0,0 +1,190 @@
+category: Messaging
+commonfields:
+ id: Microsoft Teams
+ version: -1
+configuration:
+- display: Bot ID
+ name: bot_id
+ required: true
+ type: 0
+- display: Bot Password
+ name: bot_password
+ required: true
+ type: 4
+- display: Default team
+ name: team
+ required: true
+ type: 0
+- defaultvalue: General
+ display: Notifications channel
+ name: incident_notifications_channel
+ required: true
+ type: 0
+- display: Certificate (Required for HTTPS)
+ name: certificate
+ required: false
+ type: 12
+- display: Private Key (Required for HTTPS)
+ name: key
+ required: false
+ type: 14
+- defaultvalue: Low
+ display: Minimum incident severity to send notifications to Teams by
+ name: min_incident_severity
+ options:
+ - Low
+ - Medium
+ - High
+ - Critical
+ required: false
+ type: 15
+- display: Allow external users to create incidents via direct message
+ name: allow_external_incidents_creation
+ required: false
+ type: 8
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Long running instance
+ name: longRunning
+ required: false
+ type: 8
+- display: Listen port, e.g. 7000 (Required for investigation
+ mirroring and direct messages)
+ name: longRunningPort
+ required: false
+ type: 0
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+description: Send messages and notifications to your team members.
+display: Microsoft Teams
+name: Microsoft Teams
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The channel to which to send messages.
+ isArray: false
+ name: channel
+ required: false
+ secret: false
+ - default: false
+ description: The message to send to the channel or team member.
+ isArray: false
+ name: message
+ required: false
+ secret: false
+ - default: false
+ description: The team member to which to send the message.
+ isArray: false
+ name: team_member
+ required: false
+ secret: false
+ - default: false
+ description: The team in which the specified channel exists. The team must already
+ exist, and this value will override the default channel configured in the
+ integration parameters.
+ isArray: false
+ name: team
+ required: false
+ secret: false
+ - default: false
+ description: The Microsoft Teams adaptive card to send.
+ isArray: false
+ name: adaptive_card
+ required: false
+ secret: false
+ deprecated: false
+ description: Sends a message to the specified teams.
+ execution: false
+ name: send-notification
+ - arguments:
+ - auto: PREDEFINED
+ default: true
+ defaultValue: all
+ description: The mirroring type. Can be "all", which mirrors everything, "chat",
+ which mirrors only chats (not commands), or "none", which stops all mirroring.
+ isArray: false
+ name: mirror_type
+ predefined:
+ - all
+ - chat
+ - none
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: Whether to auto-close the channel when the incident is closed in
+ Demisto. If "true", the channel will be auto-closed. Default is "true".
+ isArray: false
+ name: autoclose
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: both
+ description: The mirroring direction. Can be "FromDemisto", "ToDemisto", or
+ "Both".
+ isArray: false
+ name: direction
+ predefined:
+ - Both
+ - FromDemisto
+ - ToDemisto
+ required: false
+ secret: false
+ - default: false
+ description: The team in which to mirror the Demisto investigation. If not specified,
+ the default team configured in the integration parameters will be used.
+ isArray: false
+ name: team
+ required: false
+ secret: false
+ - default: false
+ description: The name of the channel. The default is "incident-" followed by the incident ID.
+ isArray: false
+ name: channel_name
+ required: false
+ secret: false
+ deprecated: false
+ description: Mirrors the Demisto investigation to the specified Microsoft Teams
+ channel.
+ execution: false
+ name: mirror-investigation
+ - arguments:
+ - default: false
+ description: The name of the channel to close.
+ isArray: false
+ name: channel
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes the specified Microsoft Teams channel.
+ execution: false
+ name: close-channel
+ - deprecated: false
+ description: Returns real-time and historical data on the integration status.
+ execution: false
+ name: microsoft-teams-integration-health
+ dockerimage: demisto/teams:1.0.0.1701
+ isfetch: false
+ longRunning: true
+ longRunningPort: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- No test
+fromversion: 5.0.0
diff --git a/Integrations/MicrosoftTeams/MicrosoftTeams_description.md b/Integrations/MicrosoftTeams/MicrosoftTeams_description.md
new file mode 100644
index 000000000000..6da3a3fde61c
--- /dev/null
+++ b/Integrations/MicrosoftTeams/MicrosoftTeams_description.md
@@ -0,0 +1,3 @@
+There are several steps required to configure this integration.
+
+For more information, see the [Microsoft Teams](https://support.demisto.com/hc/en-us/articles/360034095994-Microsoft-Teams) integration documentation.
\ No newline at end of file
diff --git a/Integrations/MicrosoftTeams/MicrosoftTeams_image.png b/Integrations/MicrosoftTeams/MicrosoftTeams_image.png
new file mode 100644
index 000000000000..08f94ff881d9
Binary files /dev/null and b/Integrations/MicrosoftTeams/MicrosoftTeams_image.png differ
diff --git a/Integrations/MicrosoftTeams/MicrosoftTeams_test.py b/Integrations/MicrosoftTeams/MicrosoftTeams_test.py
new file mode 100644
index 000000000000..f4a0b685ad22
--- /dev/null
+++ b/Integrations/MicrosoftTeams/MicrosoftTeams_test.py
@@ -0,0 +1,1274 @@
+import demistomock as demisto
+import json
+import pytest
+from CommonServerPython import entryTypes
+
+entryTypes['warning'] = 11
+
+bot_id: str = '9bi5353b-md6a-4458-8321-e924af433amb'
+
+tenant_id: str = 'pbae9ao6-01ql-249o-5me3-4738p3e1m941'
+
+team_id: str = '19:21f27jk08d1a487fa0f5467779619827@thread.skype'
+
+team_aad_id: str = '7d8efdf8-0c5a-42e3-a489-5ef5c3fc7a2b'
+
+team_name: str = 'The-A-Team'
+
+service_url: str = 'https://smba.trafficmanager.net/emea'
+
+mirrored_channels: list = [
+ {
+ 'channel_id': '19:2cbad0d78c624400ef83a5750539998g@thread.skype',
+ 'investigation_id': '1',
+ 'mirror_type': 'all',
+ 'mirror_direction': 'both',
+ 'auto_close': 'true',
+ 'mirrored': True,
+ 'channel_name': 'incident-1'
+ },
+ {
+ 'channel_id': '19:2cbad0d78c624400ef83a5750534448g@thread.skype',
+ 'investigation_id': '10',
+ 'mirror_type': 'all',
+ 'mirror_direction': 'both',
+ 'auto_close': 'true',
+ 'mirrored': True,
+ 'channel_name': 'incident-10'
+ }
+]
+
+team_members: list = [
+ {
+ 'id': '29:1KZccCJRTxlPdHnwcKfxHAtYvPLIyHgkSLhFSnGXLGVFlnltovdZPmZAduPKQP6NrGqOcde7FXAF7uTZ_8FQOqg',
+ 'objectId': '359d2c3c-162b-414c-b2eq-386461e5l050',
+ 'name': 'Bruce Willis',
+ 'givenName': 'Bruce',
+ 'surname': 'Willis',
+ 'userPrincipalName': 'bwillis@email.com',
+ 'tenantId': tenant_id
+ },
+ {
+ 'id': '29:1pBMMC85IyjM3tr_MCZi7KW4pw4EULxLN4C7R_xoi3Wva_lOn3VTf7xJlCLK-r-pMumrmoz9agZxsSrCf7__u9R',
+ 'objectId': '2826c1p7-bdb6-4529-b57d-2598me968631',
+ 'name': 'Denzel Washington',
+ 'givenName': 'Denzel',
+ 'surname': 'Washington',
+ 'email': 'dwashinton@email.com',
+ 'userPrincipalName': 'dwashinton@email.com',
+ 'tenantId': tenant_id
+ }
+]
+
+integration_context: dict = {
+ 'bot_name': 'DemistoBot',
+ 'service_url': service_url,
+ 'tenant_id': tenant_id,
+ 'teams': json.dumps([{
+ 'mirrored_channels': mirrored_channels,
+ 'team_id': team_id,
+ 'team_aad_id': team_aad_id,
+ 'team_members': team_members,
+ 'team_name': team_name
+ }])
+}
+
+
+@pytest.fixture(autouse=True)
+def get_integration_context(mocker):
+ mocker.patch.object(demisto, 'getIntegrationContext', return_value=integration_context)
+
+
+@pytest.fixture(autouse=True)
+def get_graph_access_token(requests_mock):
+ requests_mock.post(
+ f'https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/token',
+ json={
+ 'access_token': 'token'
+ },
+ status_code=200
+ )
+
+
+@pytest.fixture(autouse=True)
+def get_bot_access_token(requests_mock):
+ requests_mock.post(
+ 'https://login.microsoftonline.com/botframework.com/oauth2/v2.0/token',
+ json={
+ 'access_token': 'token'
+ }
+ )
+
+
+def test_message_handler(mocker):
+ from MicrosoftTeams import message_handler
+ mocker.patch.object(demisto, 'addEntry')
+ request_body: dict = {
+ 'from': {
+ 'id': '29:1KZccCJRTxlPdHnwcKfxHAtYvPLIyHgkSLhFSnGXLGVFlnltovdZPmZAduPKQP6NrGqOcde7FXAF7uTZ_8FQOqg',
+ 'aadObjectId': '359d2c3c-162b-414c-b2eq-386461e5l050',
+ 'name': 'Bruce Willis'
+ }
+ }
+ channel_data: dict = {
+ 'channel': {
+ 'id': '19:2cbad0d78c624400ef83a5750539998g@thread.skype'
+ },
+ 'team': {
+ 'id': team_id
+ }
+ }
+ message_handler(integration_context, request_body, channel_data, 'waz up')
+ assert demisto.addEntry.call_count == 1
+ add_entry_args = demisto.addEntry.call_args[1]
+ assert add_entry_args == {
+ 'id': '1',
+ 'entry': 'waz up',
+ 'username': 'Bruce Willis',
+ 'email': 'bwillis@email.com',
+ 'footer': '\n**From Microsoft Teams**'
+ }
+
+
+def test_member_added_handler(mocker, requests_mock):
+ from MicrosoftTeams import member_added_handler
+ mocker.patch.object(demisto, 'getIntegrationContext', return_value={})
+ mocker.patch.object(demisto, 'setIntegrationContext')
+ mocker.patch.object(demisto, 'params', return_value={'bot_id': bot_id})
+ requests_mock.get(
+ f'{service_url}/v3/conversations/{team_id}/members',
+ json=team_members
+ )
+ request_body: dict = {
+ 'recipient': {
+ 'id': f'28:{bot_id}',
+ 'name': 'DemistoBot'
+ },
+ 'membersAdded': [{
+ 'id': f'28:{bot_id}'
+ }]
+ }
+ channel_data: dict = {
+ 'team': {
+ 'id': team_id,
+ 'name': team_name,
+ 'aadGroupId': team_aad_id
+ },
+ 'eventType': 'teamMemberAdded',
+ 'tenant': {
+ 'id': tenant_id
+ }
+ }
+ member_added_handler(integration_context, request_body, channel_data)
+ expected_integration_context: dict = {
+ 'bot_name': 'DemistoBot',
+ 'teams': json.dumps([{
+ 'mirrored_channels': mirrored_channels,
+ 'team_id': team_id,
+ 'team_aad_id': team_aad_id,
+ 'team_members': team_members,
+ 'team_name': team_name
+ }]),
+ 'tenant_id': tenant_id,
+ 'service_url': service_url
+ }
+ assert demisto.setIntegrationContext.call_count == 2
+ set_integration_context = demisto.setIntegrationContext.call_args[0]
+ assert len(set_integration_context) == 1
+ assert set_integration_context[0] == expected_integration_context
+
+
+def test_mirror_investigation(mocker, requests_mock):
+ from MicrosoftTeams import mirror_investigation
+
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(demisto, 'setIntegrationContext')
+ mocker.patch.object(
+ demisto,
+ 'params',
+ return_value={
+ 'team': 'The-A-Team'
+ }
+ )
+
+ # verify command cannot be executed in the war room
+ mocker.patch.object(
+ demisto,
+ 'investigation',
+ return_value={
+ 'type': 9
+ }
+ )
+ with pytest.raises(ValueError) as e:
+ mirror_investigation()
+ assert str(e.value) == 'Can not perform this action in playground.'
+
+ # verify channel is mirrored successfully and a message is sent to it
+ mocker.patch.object(
+ demisto,
+ 'investigation',
+ return_value={
+ 'id': '2'
+ }
+ )
+ channel_id: str = 'channel-id'
+ # create channel mock request
+ requests_mock.post(
+ f'https://graph.microsoft.com/v1.0/teams/{team_aad_id}/channels',
+ json={
+ 'id': channel_id
+ }
+ )
+ # send message mock request
+ requests_mock.post(
+ f'{service_url}/v3/conversations/{channel_id}/activities',
+ json={}
+ )
+ mirror_investigation()
+ updated_mirrored_channels: list = mirrored_channels[:]
+ updated_mirrored_channels.append({
+ 'channel_id': 'channel-id',
+ 'investigation_id': '2',
+ 'mirror_type': 'all',
+ 'mirror_direction': 'both',
+ 'auto_close': 'true',
+ 'mirrored': False,
+ 'channel_name': 'incident-2'
+ })
+ expected_integration_context: dict = {
+ 'bot_name': 'DemistoBot',
+ 'tenant_id': tenant_id,
+ 'service_url': service_url,
+ 'teams': json.dumps([{
+ 'mirrored_channels': updated_mirrored_channels,
+ 'team_id': team_id,
+ 'team_aad_id': team_aad_id,
+ 'team_members': team_members,
+ 'team_name': 'The-A-Team'
+ }])
+ }
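+ # Token requests to Graph and Bot Framework (mocked by the autouse fixtures) occupy
+ # indices 0 and 2, so the channel creation is request 1 and the mirror message is request 3.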
+ assert requests_mock.request_history[1].json() == {
+ 'displayName': 'incident-2',
+ 'description': 'Channel to mirror incident 2'
+ }
+ assert requests_mock.request_history[3].json() == {
+ 'text': 'This channel was created to mirror [incident 2](https://test-address:8443#/WarRoom/2) between '
+ 'Teams and Demisto. In order for your Teams messages to be mirrored in Demisto, you need to'
+ ' mention the Demisto Bot in the message.',
+ 'type': 'message'
+ }
+
+ assert demisto.setIntegrationContext.call_count == 3
+ set_integration_context = demisto.setIntegrationContext.call_args[0]
+ assert len(set_integration_context) == 1
+ set_integration_context[0].pop('graph_access_token')
+ set_integration_context[0].pop('graph_valid_until')
+ set_integration_context[0].pop('bot_access_token')
+ set_integration_context[0].pop('bot_valid_until')
+ assert set_integration_context[0] == expected_integration_context
+ results = demisto.results.call_args[0]
+ assert len(results) == 1
+ assert results[0] == 'Investigation mirrored successfully in channel incident-2.'
+
+ # verify channel mirror is updated successfully
+ mocker.patch.object(demisto, 'setIntegrationContext')
+ mocker.patch.object(
+ demisto,
+ 'args',
+ return_value={
+ 'mirror_type': 'chat',
+ 'direction': 'FromDemisto',
+ 'autoclose': 'false'
+ }
+ )
+ mocker.patch.object(
+ demisto,
+ 'investigation',
+ return_value={
+ 'id': '1'
+ }
+ )
+ mirror_investigation()
+ assert demisto.setIntegrationContext.call_count == 1
+ set_integration_context = demisto.setIntegrationContext.call_args[0]
+ assert len(set_integration_context) == 1
+ results = demisto.results.call_args[0]
+ assert len(results) == 1
+ assert results[0] == 'Investigation mirror was updated successfully.'
+
+ # verify channel with custom channel name is mirrored successfully
+ mocker.patch.object(
+ demisto,
+ 'investigation',
+ return_value={
+ 'id': '14'
+ }
+ )
+ mocker.patch.object(
+ demisto,
+ 'args',
+ return_value={
+ 'channel_name': 'booya'
+ }
+ )
+
+ mirror_investigation()
+ assert requests_mock.request_history[5].json() == {
+ 'displayName': 'booya',
+ 'description': 'Channel to mirror incident 14'
+ }
+ results = demisto.results.call_args[0]
+ assert len(results) == 1
+ assert results[0] == 'Investigation mirrored successfully in channel booya.'
+
+
+def test_send_message(mocker, requests_mock):
+ from MicrosoftTeams import send_message
+ mocker.patch.object(demisto, 'results')
+
+ # verify that a mirrored message is skipped
+ mocker.patch.object(
+ demisto,
+ 'args',
+ return_value={
+ 'messageType': 'mirrorEntry',
+ 'originalMessage': 'a mirrored message\n**From Microsoft Teams**'
+ }
+ )
+ assert send_message() is None
+
+ # verify notification from server with severity below threshold is not sent
+ mocker.patch.object(
+ demisto,
+ 'params',
+ return_value={
+ 'min_incident_severity': 'Medium',
+ 'team': 'The-A-Team'
+ }
+ )
+ mocker.patch.object(
+ demisto,
+ 'args',
+ return_value={
+ 'messageType': 'incidentOpened',
+ 'severity': 1
+ }
+ )
+ assert send_message() is None
+
+ # verify error is raised if no user or channel was provided
+ mocker.patch.object(
+ demisto,
+ 'args',
+ return_value={}
+ )
+ with pytest.raises(ValueError) as e:
+ send_message()
+ assert str(e.value) == 'No channel or team member to send message were provided.'
+
+ # verify error is raised if both user and channel were provided
+ mocker.patch.object(
+ demisto,
+ 'args',
+ return_value={
+ 'channel': 'somechannel',
+ 'team_member': 'someuser'
+ }
+ )
+ with pytest.raises(ValueError) as e:
+ send_message()
+ assert str(e.value) == 'Provide either channel or team member to send message to, not both.'
+
+ # verify message is sent properly given user to send to
+ mocker.patch.object(
+ demisto,
+ 'params',
+ return_value={
+ 'bot_id': bot_id
+ }
+ )
+ mocker.patch.object(
+ demisto,
+ 'args',
+ return_value={
+ 'team_member': 'Denzel Washington',
+ 'message': 'MESSAGE'
+ }
+ )
+ requests_mock.post(
+ f'{service_url}/v3/conversations',
+ json={
+ 'id': 'conversation-id'
+ }
+ )
+ requests_mock.post(
+ f'{service_url}/v3/conversations/conversation-id/activities',
+ json={}
+ )
+ expected_create_personal_conversation_data: dict = {
+ 'bot': {
+ 'id': f'28:{bot_id}',
+ 'name': 'DemistoBot'
+ },
+ 'members': [{
+ 'id': '29:1pBMMC85IyjM3tr_MCZi7KW4pw4EULxLN4C7R_xoi3Wva_lOn3VTf7xJlCLK-r-pMumrmoz9agZxsSrCf7__u9R'
+ }],
+ 'channelData': {
+ 'tenant': {
+ 'id': tenant_id
+ }
+ }
+ }
+ send_message()
+ assert requests_mock.request_history[0].json() == expected_create_personal_conversation_data
+ results = demisto.results.call_args[0]
+ assert len(results) == 1
+ assert results[0] == 'Message was sent successfully.'
+
+ # verify message is sent properly given channel
+ mocker.patch.object(
+ demisto,
+ 'params',
+ return_value={
+ 'team': 'The-A-Team'
+ }
+ )
+ mocker.patch.object(
+ demisto,
+ 'args',
+ return_value={
+ 'channel': 'incident-1',
+ 'message': 'MESSAGE'
+ }
+ )
+ requests_mock.post(
+ f"{service_url}/v3/conversations/{mirrored_channels[0]['channel_id']}/activities",
+ json={}
+ )
+ send_message()
+ results = demisto.results.call_args[0]
+ assert len(results) == 1
+ assert results[0] == 'Message was sent successfully.'
+
+ # verify message is sent properly given entitlement
+ message: dict = {
+ 'message_text': 'is this really working?',
+ 'options': ['yes', 'no', 'maybe'],
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8',
+ 'investigation_id': '72',
+ 'task_id': '23'
+ }
+ mocker.patch.object(
+ demisto,
+ 'args',
+ return_value={
+ 'team_member': 'dwashinton@email.com',
+ 'message': json.dumps(message)
+ }
+ )
+ expected_ask_user_message: dict = {
+ 'attachments': [{
+ 'content': {
+ '$schema': 'http://adaptivecards.io/schemas/adaptive-card.json',
+ 'actions': [
+ {
+ 'data': {
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8',
+ 'investigation_id': '72',
+ 'response': 'yes',
+ 'task_id': '23'
+ },
+ 'title': 'yes',
+ 'type': 'Action.Submit'
+ },
+ {
+ 'data': {
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8',
+ 'investigation_id': '72',
+ 'response': 'no',
+ 'task_id': '23'
+ },
+ 'title': 'no',
+ 'type': 'Action.Submit'
+ },
+ {
+ 'data': {
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8',
+ 'investigation_id': '72',
+ 'response': 'maybe',
+ 'task_id': '23'
+ },
+ 'title': 'maybe',
+ 'type': 'Action.Submit'
+ }
+ ],
+ 'body': [{
+ 'text': 'is this really working?',
+ 'type': 'TextBlock'
+ }],
+ 'type': 'AdaptiveCard',
+ 'version': '1.0'
+ },
+ 'contentType': 'application/vnd.microsoft.card.adaptive'
+ }],
+ 'type': 'message'
+ }
+
+ send_message()
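+ # Request 3 creates the personal conversation for the team member; request 4 carries the adaptive card.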
+ assert requests_mock.request_history[4].json() == expected_ask_user_message
+ results = demisto.results.call_args[0]
+ assert len(results) == 1
+ assert results[0] == 'Message was sent successfully.'
+
+ # verify proper error is raised if invalid JSON provided as adaptive card
+ mocker.patch.object(
+ demisto,
+ 'args',
+ return_value={
+ 'channel': 'channel',
+ 'adaptive_card': 'THISisSTRINGnotJSON'
+ }
+ )
+ with pytest.raises(ValueError) as e:
+ send_message()
+ assert str(e.value) == 'Given adaptive card is not in valid JSON format.'
+
+ # verify proper error is raised if both message and adaptive card were provided
+ mocker.patch.object(
+ demisto,
+ 'args',
+ return_value={
+ 'channel': 'channel',
+ 'message': 'message',
+ 'adaptive_card': '{"a":"b"}'
+ }
+ )
+ with pytest.raises(ValueError) as e:
+ send_message()
+ assert str(e.value) == 'Provide either message or adaptive to send, not both.'
+
+ # verify proper error is raised if neither message nor adaptive card was provided
+ mocker.patch.object(
+ demisto,
+ 'args',
+ return_value={
+ 'channel': 'channel'
+ }
+ )
+ with pytest.raises(ValueError) as e:
+ send_message()
+ assert str(e.value) == 'No message or adaptive card to send were provided.'
+
+ # verify adaptive card sent successfully
+
+ adaptive_card: dict = {
+ "contentType": "application/vnd.microsoft.card.adaptive",
+ "content": {
+ "$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
+ "type": "AdaptiveCard",
+ "version": "1.0",
+ "body": [
+ {
+ "type": "Container",
+ "items": [{
+ "type": "TextBlock",
+ "text": "What a pretty adaptive card"
+ }]
+ }
+ ]
+ }
+ }
+ mocker.patch.object(
+ demisto,
+ 'args',
+ return_value={
+ 'team_member': 'bwillis@email.com',
+ 'adaptive_card': json.dumps(adaptive_card)
+ }
+ )
+ expected_conversation: dict = {
+ 'type': 'message',
+ 'attachments': [adaptive_card]
+ }
+ send_message()
+ assert requests_mock.request_history[6].json() == expected_conversation
+ results = demisto.results.call_args[0]
+ assert len(results) == 1
+ assert results[0] == 'Message was sent successfully.'
+
+
+def test_get_channel_id(requests_mock):
+ from MicrosoftTeams import get_channel_id
+ # get channel which is in the integration context
+ assert get_channel_id('incident-1', team_aad_id) == '19:2cbad0d78c624400ef83a5750539998g@thread.skype'
+ # get channel which is not in the integration context
+ requests_mock.get(
+ f'https://graph.microsoft.com/v1.0/teams/{team_aad_id}/channels',
+ json={
+ 'value': [
+ {
+ 'description': 'channel for incident 1',
+ 'displayName': 'incident-1',
+ 'id': '19:67pd3967e74g45f28d0c65f1689132bb@thread.skype'
+ },
+ {
+ 'description': 'channel for incident 3',
+ 'displayName': 'incident-3',
+ 'id': '19:67pd3967e74g45f28d0c65f1689132bo@thread.skype'
+ }
+ ]
+ }
+ )
+ assert get_channel_id('incident-3', team_aad_id) == '19:67pd3967e74g45f28d0c65f1689132bo@thread.skype'
+ # try a channel which does not exist
+ with pytest.raises(ValueError) as e:
+ get_channel_id('incident-4', team_aad_id)
+ assert str(e.value) == 'Could not find channel: incident-4'
+
+
+def test_close_channel(mocker, requests_mock):
+ from MicrosoftTeams import close_channel
+ requests_mock.delete(
+ f'https://graph.microsoft.com/v1.0/teams/{team_aad_id}/channels/19:2cbad0d78c624400ef83a5750539998g@thread.skype',
+ status_code=204
+ )
+ requests_mock.delete(
+ f'https://graph.microsoft.com/v1.0/teams/{team_aad_id}/channels/19:2cbad0d78c624400ef83a5750534448g@thread.skype',
+ status_code=204
+ )
+ mocker.patch.object(demisto, 'results')
+
+ # close channel without a given channel name
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '1'})
+ mocker.patch.object(demisto, 'getIntegrationContext', return_value=integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext')
+ close_channel()
+ assert requests_mock.request_history[0].method == 'DELETE'
+ assert demisto.setIntegrationContext.call_count == 1
+ results = demisto.results.call_args[0]
+ assert len(results) == 1
+ assert results[0] == 'Channel was successfully closed.'
+
+ # try to close a channel, without a given channel name, for an investigation which is not in the integration context
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '5'})
+ with pytest.raises(ValueError) as e:
+ close_channel()
+ assert str(e.value) == 'Could not find Microsoft Teams channel to close.'
+
+ # close channel given a channel name
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(demisto, 'setIntegrationContext')
+ requests_mock.get(
+ f'https://graph.microsoft.com/v1.0/teams/{team_aad_id}/channels',
+ json={
+ 'value': [
+ {
+ 'description': 'channel for incident 1',
+ 'displayName': 'incident-1',
+ 'id': '19:67pd3967e74g45f28d0c65f1689132bb@thread.skype'
+ },
+ {
+ 'description': 'channel for incident 6',
+ 'displayName': 'incident-6',
+ 'id': '19:67pd3967e74g45f28d0c65f1689132bo@thread.skype'
+ }
+ ]
+ }
+ )
+ requests_mock.delete(
+ f'https://graph.microsoft.com/v1.0/teams/{team_aad_id}/channels/19:67pd3967e74g45f28d0c65f1689132bb@thread.skype',
+ status_code=204
+ )
+ mocker.patch.object(demisto, 'params', return_value={'team': 'The-A-Team'})
+ mocker.patch.object(demisto, 'args', return_value={'channel': 'incident-1'})
+
+ close_channel()
+ assert requests_mock.request_history[0].method == 'DELETE'
+ assert demisto.setIntegrationContext.call_count == 0
+ assert demisto.results.call_count == 1
+ results = demisto.results.call_args[0]
+ assert len(results) == 1
+ assert results[0] == 'Channel was successfully closed.'
+
+
+def test_entitlement_handler(mocker, requests_mock):
+ from MicrosoftTeams import entitlement_handler
+ mocker.patch.object(demisto, 'handleEntitlementForUser')
+ conversation_id: str = 'f:3005393407786078157'
+ activity_id: str = '1:1vW2mx4iDZf05lk18yskL64Wkfwraa76YTGNgDiIi-_5'
+ requests_mock.put(
+ f'{service_url}/v3/conversations/{conversation_id}/activities/{activity_id}',
+ json={'id': 'updateid'}
+ )
+ request_body: dict = {
+ 'from': {
+ 'id': '29:1KZccCJRTxlPdHnwcKfxHAtYvPLIyHgkSLhFSnGXLGVFlnltovdZPmZAduPKQP6NrGqOcde7FXAF7uTZ_8FQOqg',
+ 'aadObjectId': '359d2c3c-162b-414c-b2eq-386461e5l050',
+ 'name': 'Bruce Willis'
+ },
+ 'replyToId': activity_id
+ }
+ value: dict = {
+ 'response': 'Approve!',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8',
+ 'investigation_id': '100',
+ 'task_id': '4'
+ }
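+ # Simulates a user clicking an Adaptive Card action that carries an entitlement.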
+ entitlement_handler(integration_context, request_body, value, conversation_id)
+ assert demisto.handleEntitlementForUser.call_count == 1
+ handle_entitlement_args = demisto.handleEntitlementForUser.call_args[1]
+ assert handle_entitlement_args == {
+ 'incidentID': '100',
+ 'guid': '4404dae8-2d45-46bd-85fa-64779c12abe8',
+ 'taskID': '4',
+ 'email': 'bwillis@email.com',
+ 'content': 'Approve!'
+ }
+
+
+def test_translate_severity():
+ from MicrosoftTeams import translate_severity
+ assert translate_severity('Low') == 1
+ assert translate_severity('NotRealSeverity') == 0
+
+
+def test_is_investigation_mirrored():
+ from MicrosoftTeams import is_investigation_mirrored
+ existing_investigation_id: str = '1'
+ non_existing_investigation_id: str = '2'
+
+ assert is_investigation_mirrored(existing_investigation_id, mirrored_channels) == 0
+ assert is_investigation_mirrored(non_existing_investigation_id, mirrored_channels) == -1
+
+
+def test_urlify_hyperlinks():
+ from MicrosoftTeams import urlify_hyperlinks
+ message: str = 'Visit https://www.demisto.com and http://www.demisto.com'
+ formatted_message: str = 'Visit [https://www.demisto.com](https://www.demisto.com) ' \
+ 'and [http://www.demisto.com](http://www.demisto.com)'
+ assert urlify_hyperlinks(message) == formatted_message
+
+
+def test_get_team_aad_id(mocker, requests_mock):
+ from MicrosoftTeams import get_team_aad_id
+
+ # verify team ID for team which is in integration context
+ mocker.patch.object(
+ demisto,
+ 'params',
+ return_value={
+ 'team': 'The-A-Team'
+ }
+ )
+ assert get_team_aad_id('The-A-Team') == '7d8efdf8-0c5a-42e3-a489-5ef5c3fc7a2b'
+
+ # verify non existing team raises value error
+ requests_mock.get(
+ "https://graph.microsoft.com/beta/groups?$filter=resourceProvisioningOptions/Any(x:x eq 'Team')",
+ json={
+ '@odata.context': 'https://graph.microsoft.com/beta/$metadata#groups',
+ 'value': [
+ {
+ 'id': '02bd9fd6-8f93-4758-87c3-1fb73740a315',
+ 'displayName': 'MyGreatTeam',
+ 'groupTypes': [
+ 'Unified'
+ ],
+ 'mailEnabled': True,
+ 'resourceBehaviorOptions': [],
+ 'resourceProvisioningOptions': [
+ 'Team'
+ ],
+ 'securityEnabled': False,
+ 'visibility': 'Private'
+ },
+ {
+ 'id': '8090c93e-ba7c-433e-9f39-08c7ba07c0b3',
+ 'displayName': 'WooahTeam',
+ 'groupTypes': [
+ 'Unified'
+ ],
+ 'mailEnabled': True,
+ 'mailNickname': 'X1050LaunchTeam',
+ 'resourceBehaviorOptions': [],
+ 'resourceProvisioningOptions': [
+ 'Team'
+ ],
+ 'securityEnabled': False,
+ 'visibility': 'Private'
+ }
+ ]
+ }
+ )
+ with pytest.raises(ValueError) as e:
+ get_team_aad_id('The-B-Team')
+ assert str(e.value) == 'Could not find requested team.'
+
+ # verify team ID for team which is not in integration context
+ assert get_team_aad_id('MyGreatTeam') == '02bd9fd6-8f93-4758-87c3-1fb73740a315'
+
+
+def test_get_team_member():
+ from MicrosoftTeams import get_team_member
+ user_id: str = '29:1KZccCJRTxlPdHnwcKfxHAtYvPLIyHgkSLhFSnGXLGVFlnltovdZPmZAduPKQP6NrGqOcde7FXAF7uTZ_8FQOqg'
+ team_member: dict = {
+ 'username': 'Bruce Willis',
+ 'user_email': 'bwillis@email.com'
+ }
+ assert get_team_member(integration_context, user_id) == team_member
+ with pytest.raises(ValueError) as e:
+ get_team_member(integration_context, 'NotRealUser')
+ assert str(e.value) == 'Team member was not found'
+
+
+def test_get_team_member_id():
+ from MicrosoftTeams import get_team_member_id
+ requested_team_member: str = 'Denzel Washington'
+ expected_user_id: str = '29:1pBMMC85IyjM3tr_MCZi7KW4pw4EULxLN4C7R_xoi3Wva_lOn3VTf7xJlCLK-r-pMumrmoz9agZxsSrCf7__u9R'
+ assert get_team_member_id(requested_team_member, integration_context) == expected_user_id
+
+ requested_team_member = 'dwashinton@email.com'
+ assert get_team_member_id(requested_team_member, integration_context) == expected_user_id
+ requested_team_member = 'TheRock'
+ with pytest.raises(ValueError) as e:
+ get_team_member_id(requested_team_member, integration_context)
+ assert str(e.value) == 'Team member was not found'
+
+
+def test_create_adaptive_card():
+ from MicrosoftTeams import create_adaptive_card
+ body: list = [{
+ 'type': 'TextBlock',
+ 'size': 'Medium',
+ 'weight': 'Bolder',
+ 'text': 'What a beautiful text'
+ }]
+ expected_adaptive_card: dict = {
+ 'contentType': 'application/vnd.microsoft.card.adaptive',
+ 'content': {
+ '$schema': 'http://adaptivecards.io/schemas/adaptive-card.json',
+ 'version': '1.0',
+ 'type': 'AdaptiveCard',
+ 'body': body
+ }
+ }
+ assert create_adaptive_card(body) == expected_adaptive_card
+ actions: list = [{
+ 'type': 'Action.OpenUrl',
+ 'title': 'DEMISTO',
+ 'url': 'https://www.demisto.com'
+ }]
+ expected_adaptive_card['content']['actions'] = actions
+ assert create_adaptive_card(body, actions) == expected_adaptive_card
+
+
+def test_process_tasks_list():
+ from MicrosoftTeams import process_tasks_list
+ data_by_line: list = [
+ 'Task | Incident | Due | Link ',
+ '=========================================|================================|=====================|=====',
+ 'Manually review the incident | 21 - nnn | 0001-01-01 00:00:00 | '
+ 'https://demisto.com/#/WorkPlan/21'
+ ]
+ expected_adaptive_card: dict = {
+ 'contentType': 'application/vnd.microsoft.card.adaptive',
+ 'content': {
+ '$schema': 'http://adaptivecards.io/schemas/adaptive-card.json',
+ 'version': '1.0',
+ 'type': 'AdaptiveCard',
+ 'body': [{
+ 'type': 'FactSet',
+ 'facts': [
+ {
+ 'title': 'Task:',
+ 'value': 'Manually review the incident'
+ },
+ {
+ 'title': 'Incident:',
+ 'value': '21 - nnn'
+ },
+ {
+ 'title': 'Due:',
+ 'value': '0001-01-01 00:00:00'
+ },
+ {
+ 'title': 'Link:',
+ 'value': '[https://demisto.com/#/WorkPlan/21](https://demisto.com/#/WorkPlan/21)'
+ }
+ ]
+ }]
+ }
+ }
+ assert process_tasks_list(data_by_line) == expected_adaptive_card
+
+
+def test_process_incidents_list():
+ from MicrosoftTeams import process_incidents_list
+ data_by_line: list = [
+ 'ID | Name | Status | Type | Owner | Created | Link ',
+ '===========|======================|=============|=============|=============|=====================|=====',
+ '257 | w | Active | Unclassifie | god | 2019-07-28 16:42:40 | '
+ 'https://demisto.com/#/WarRoom/257',
+ '250 | gosa | Active | Unclassifie | mozes | 2019-07-28 16:16:49 | '
+ 'https://demisto.com/#/WarRoom/250 '
+ ]
+ expected_adaptive_card: dict = {
+ 'contentType': 'application/vnd.microsoft.card.adaptive',
+ 'content': {
+ '$schema': 'http://adaptivecards.io/schemas/adaptive-card.json',
+ 'version': '1.0',
+ 'type': 'AdaptiveCard',
+ 'body': [
+ {
+ 'type': 'FactSet',
+ 'facts': [
+ {
+ 'title': 'ID:',
+ 'value': '257'
+ },
+ {
+ 'title': 'Name:',
+ 'value': 'w'
+ },
+ {
+ 'title': 'Status:',
+ 'value': 'Active'
+ },
+ {
+ 'title': 'Type:',
+ 'value': 'Unclassifie'
+ },
+ {
+ 'title': 'Owner:',
+ 'value': 'god'
+ },
+ {
+ 'title': 'Created:',
+ 'value': '2019-07-28 16:42:40'
+ },
+ {
+ 'title': 'Link:',
+ 'value': '[https://demisto.com/#/WarRoom/257](https://demisto.com/#/WarRoom/257)'
+ }
+ ]
+ },
+ {
+ 'type': 'FactSet',
+ 'facts': [
+ {
+ 'title': 'ID:',
+ 'value': '250'
+ },
+ {
+ 'title': 'Name:',
+ 'value': 'gosa'
+ },
+ {
+ 'title': 'Status:',
+ 'value': 'Active'
+ },
+ {
+ 'title': 'Type:',
+ 'value': 'Unclassifie'
+ },
+ {
+ 'title': 'Owner:',
+ 'value': 'mozes'
+ },
+ {
+ 'title': 'Created:',
+ 'value': '2019-07-28 16:16:49'
+ },
+ {
+ 'title': 'Link:',
+ 'value': '[https://demisto.com/#/WarRoom/250](https://demisto.com/#/WarRoom/250)'
+ }
+ ]
+ }
+ ]
+ }
+ }
+ assert process_incidents_list(data_by_line) == expected_adaptive_card
+
+
+def test_process_mirror_or_unknown_message():
+ from MicrosoftTeams import process_mirror_or_unknown_message
+ message: str = 'I can understand the following commands:\nlist incidents [page x]\nlist my incidents [page x]\n' \
+ 'list my tasks\nlist closed incidents\nnew incident [details]\nmirror incident-id'
+ expected_adaptive_card: dict = {
+ 'contentType': 'application/vnd.microsoft.card.adaptive',
+ 'content': {
+ '$schema': 'http://adaptivecards.io/schemas/adaptive-card.json',
+ 'version': '1.0',
+ 'type': 'AdaptiveCard',
+ 'body': [{
+ 'type': 'TextBlock',
+ 'text': 'I can understand the following commands:\n\nlist incidents [page x]\n\nlist my incidents [page'
+ ' x]\n\nlist my tasks\n\nlist closed incidents\n\nnew incident [details]\n\nmirror incident-id',
+ 'wrap': True
+ }]
+ }
+ }
+ assert process_mirror_or_unknown_message(message) == expected_adaptive_card
+
+
+def test_create_channel(requests_mock):
+ from MicrosoftTeams import create_channel
+ requests_mock.post(
+ f'https://graph.microsoft.com/v1.0/teams/{team_aad_id}/channels',
+ json={
+ 'id': '19:67pd3967e74g45f28d0c65f1689132bb@thread.skype'
+ }
+ )
+ channel_name: str = 'CrazyChannel'
+ response = create_channel(team_aad_id, channel_name)
+ assert response == '19:67pd3967e74g45f28d0c65f1689132bb@thread.skype'
+
+
+def test_get_team_members(requests_mock):
+ from MicrosoftTeams import get_team_members
+ requests_mock.get(
+ f'{service_url}/v3/conversations/{team_aad_id}/members',
+ json=team_members
+ )
+ assert get_team_members(service_url, team_aad_id) == team_members
+
+
+def test_update_message(requests_mock):
+ from MicrosoftTeams import update_message
+ activity_id: str = '1:1vW2mx4iDZf05lk18yskL64Wkfwraa76YTGNgDiIi-_5'
+ conversation_id: str = 'f:3005393407786078157'
+ requests_mock.put(
+ f'{service_url}/v3/conversations/{conversation_id}/activities/{activity_id}',
+ json={'id': 'updateid'}
+ )
+ expected_conversation: dict = {
+ 'type': 'message',
+ 'attachments': [{
+ 'contentType': 'application/vnd.microsoft.card.adaptive',
+ 'content': {
+ '$schema': 'http://adaptivecards.io/schemas/adaptive-card.json',
+ 'version': '1.0', 'type': 'AdaptiveCard',
+ 'body': [{
+ 'type': 'TextBlock', 'text': 'OMG!'
+ }]
+ }
+ }]
+ }
+ update_message(service_url, conversation_id, activity_id, 'OMG!')
+ assert requests_mock.request_history[0].method == 'PUT'
+ assert json.loads(requests_mock.request_history[0].body) == expected_conversation
+
+
+# def test_create_team(mocker, requests_mock):
+# from MicrosoftTeams import create_team
+# mocker.patch.object(
+# demisto,
+# 'args',
+# return_value={
+# 'display_name': 'OhMyTeam',
+# 'mail_nickname': 'NoNicknamesPlease',
+# 'owner': 'nonexistingmmember@demisto.com',
+# 'mail_enabled': 'true',
+# 'security_enabled': 'false'
+# }
+# )
+# requests_mock.get(
+# f'https://graph.microsoft.com/v1.0/users',
+# json={
+# 'value': team_members
+# }
+# )
+# with pytest.raises(ValueError) as e:
+# create_team()
+# assert str(e.value) == 'Could not find given users to be Team owners.'
+# mocker.patch.object(
+# demisto,
+# 'args',
+# return_value={
+# 'display_name': 'OhMyTeam',
+# 'mail_nickname': 'NoNicknamesPlease',
+# 'owner': 'dwashinton@email.com'
+# }
+# )
+
+
+def test_direct_message_handler(mocker, requests_mock):
+ from MicrosoftTeams import direct_message_handler
+ mocker.patch.object(
+ demisto,
+ 'createIncidents',
+ return_value={
+ 'id': '4',
+ 'name': 'incidentnumberfour'
+ }
+ )
+ requests_mock.post(
+ f'{service_url}/v3/conversations/conversation-id/activities',
+ json={}
+ )
+ request_body: dict = {
+ 'from': {
+ 'id': '29:1KZccCJRTxlPdHnwcKfxHAtYvPLIyHgkSLhFSnGXLGVFlnltovdZPmZAduPKQP6NrGqOcde7FXAF7uTZ_8FQOqg'
+ }
+ }
+ conversation: dict = {
+ 'id': 'conversation-id'
+ }
+
+ # verify incident creation fails when external incident creation is not allowed and the user is not found
+ message: str = 'create incident name=GoFish type=Phishing'
+ mocker.patch.object(demisto, 'findUser', return_value=None)
+ direct_message_handler(integration_context, request_body, conversation, message)
+ assert requests_mock.request_history[0].json() == {
+ 'text': 'You are not allowed to create incidents.', 'type': 'message'
+ }
+
+ # verify incident is created successfully
+ mocker.patch.object(demisto, 'findUser', return_value={'id': 'nice-demisto-id'})
+ direct_message_handler(integration_context, request_body, conversation, message)
+ assert requests_mock.request_history[1].json() == {
+ 'text': "Successfully created incident incidentnumberfour.\n"
+ "View it on: [https://test-address:8443#/WarRoom/4](https://test-address:8443#/WarRoom/4)",
+ 'type': 'message'
+ }
+
+ # verify get my incidents
+ my_incidents: str = "```ID | Name | Status | Type | Owner | Created" \
+ " | Link\n ===========|======================|=============|=============|====" \
+ "=========|=====================|=====\n257 | w | Active | " \
+ "Unclassifie | god | 2019-07-28 16:42:40 | https://demisto.com/#/WarRoom/257```"
+ mocker.patch.object(demisto, 'directMessage', return_value=my_incidents)
+ message = 'list my incidents'
+ direct_message_handler(integration_context, request_body, conversation, message)
+ assert requests_mock.request_history[2].json() == {
+ 'attachments': [{
+ 'content': {
+ '$schema': 'http://adaptivecards.io/schemas/adaptive-card.json',
+ 'body': [{
+ 'facts': [
+ {
+ 'title': 'ID:',
+ 'value': '257'
+ },
+ {
+ 'title': 'Name:',
+ 'value': 'w'
+ },
+ {
+ 'title': 'Status:',
+ 'value': 'Active'
+ },
+ {
+ 'title': 'Type:',
+ 'value': 'Unclassifie'
+ },
+ {
+ 'title': 'Owner:',
+ 'value': 'god'
+ },
+ {
+ 'title': 'Created:',
+ 'value': '2019-07-28 16:42:40'
+ },
+ {
+ 'title': 'Link:',
+ 'value': '[https://demisto.com/#/WarRoom/257](https://demisto.com/#/WarRoom/257)'
+ }
+ ],
+ 'type': 'FactSet'
+ }],
+ 'type': 'AdaptiveCard',
+ 'version': '1.0'
+ },
+ 'contentType': 'application/vnd.microsoft.card.adaptive'
+ }],
+ 'type': 'message'
+ }
+
+ # verify error message raised by Demisto server is sent as a message, as expected
+ mocker.patch.object(
+ demisto,
+ 'directMessage',
+ side_effect=ValueError(
+ 'I\'m sorry but I was unable to find you as a Demisto user for email [johnnydepp@gmail.com]'
+ )
+ )
+ direct_message_handler(integration_context, request_body, conversation, message)
+ assert requests_mock.request_history[3].json() == {
+ 'type': 'message',
+ 'text': 'I\'m sorry but I was unable to find you as a Demisto user for email [johnnydepp@gmail.com]'
+ }
+
+
+def test_error_parser():
+ from MicrosoftTeams import error_parser
+
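+ # Minimal stand-in for requests.Response, exposing only what error_parser reads.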
+ class MockResponse:
+ def __init__(self, json_data, status_code, text=''):
+ self.json_data = json_data
+ self.status_code = status_code
+ self.text = text
+
+ def json(self):
+ return self.json_data
+
+ # verify bot framework error parsed as expected
+ error_description: str = "AADSTS700016: Application with identifier '2bc5202b-ad6a-4458-8821-e104af433bbb' " \
+ "was not found in the directory 'botframework.com'. This can happen if the application " \
+ "has not been installed by the administrator of the tenant or consented to by any user " \
+ "in the tenant. You may have sent your authentication request to the wrong tenant.\r\n" \
+ "Trace ID: 9eaeeec8-7f9e-4fb8-b319-5413581f0a00\r\nCorrelation ID: " \
+ "138cb511-2484-410e-b9c1-14b15accbeba\r\nTimestamp: 2019-08-28 13:18:44Z"
+
+ bot_error_json_response: dict = {
+ 'error': 'unauthorized_client',
+ 'error_description': error_description,
+ 'error_codes': [
+ 700016
+ ],
+ 'timestamp': '2019-08-28 13:18:44Z',
+ 'trace_id': '9eaeeec8-7f9e-4fb8-b319-5413581f0a11',
+ 'correlation_id': '138cb111-2484-410e-b9c1-14b15accbeba',
+ 'error_uri': 'https://login.microsoftonline.com/error?code=700016'
+ }
+
+ bot_error_json_response = MockResponse(bot_error_json_response, 400)
+ assert error_parser(bot_error_json_response, 'bot') == error_description
+
+ # verify graph error parsed as expected
+ error_code: str = 'InvalidAuthenticationToken'
+ error_message: str = 'Access token validation failure.'
+ graph_error_json_response: dict = {
+ 'error': {
+ 'code': error_code,
+ 'message': error_message,
+ 'innerError': {
+ 'request-id': 'c240ab22-4463-4a1f-82bc-8509d8190a77',
+ 'date': '2019-08-28T13:37:14'
+ }
+ }
+ }
+ graph_error_json_response = MockResponse(graph_error_json_response, 401)
+ assert error_parser(graph_error_json_response) == f'{error_code}: {error_message}'
+
+
+def test_integration_health(mocker):
+ from MicrosoftTeams import integration_health
+ mocker.patch.object(demisto, 'results')
+ expected_results = """### Microsoft API Health
+|Bot Framework API Health|Graph API Health|
+|---|---|
+| Operational | Operational |
+### Microsoft Teams Mirrored Channels
+|Channel|Investigation ID|Team|
+|---|---|---|
+| incident-10 | 10 | The-A-Team |
+| incident-2 | 2 | The-A-Team |
+| booya | 14 | The-A-Team |
+"""
+ integration_health()
+
+ results = demisto.results.call_args[0]
+ assert len(results) == 1
+ assert results[0] == expected_results
diff --git a/Integrations/MicrosoftTeams/Pipfile b/Integrations/MicrosoftTeams/Pipfile
new file mode 100644
index 000000000000..990c8baa2c10
--- /dev/null
+++ b/Integrations/MicrosoftTeams/Pipfile
@@ -0,0 +1,15 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/MicrosoftTeams/Pipfile.lock b/Integrations/MicrosoftTeams/Pipfile.lock
new file mode 100644
index 000000000000..81d5f0d1b0d6
--- /dev/null
+++ b/Integrations/MicrosoftTeams/Pipfile.lock
@@ -0,0 +1,236 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "32272467b7bec2f5e423288cd9c77c4715ccc25a4a29f7bee65ee83fcd8e0eb3"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8",
+ "sha256:80d2de76188eabfbfcf27e6a37342c2827801e59c4cc14b0371c56fed43820e3"
+ ],
+ "version": "==0.19"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832",
+ "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"
+ ],
+ "version": "==7.2.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
+ "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
+ ],
+ "version": "==19.1"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6ef6d06de77ce2961156013e9dff62f1b2688aa04d0dc244299fe7d67e09370d",
+ "sha256:a736fed91c12681a7b34617c8fcefe39ea04599ca72c608751c31d89579a3f77"
+ ],
+ "index": "pypi",
+ "version": "==5.0.1"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "version": "==2.22.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
+ "sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e",
+ "sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0",
+ "sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c",
+ "sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631",
+ "sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4",
+ "sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34",
+ "sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b",
+ "sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a",
+ "sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233",
+ "sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1",
+ "sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36",
+ "sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d",
+ "sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a",
+ "sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.4.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/MimecastV2/CHANGELOG.md b/Integrations/MimecastV2/CHANGELOG.md
new file mode 100644
index 000000000000..16311e1a1656
--- /dev/null
+++ b/Integrations/MimecastV2/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+-
\ No newline at end of file
diff --git a/Integrations/MimecastV2/MimecastV2.py b/Integrations/MimecastV2/MimecastV2.py
new file mode 100644
index 000000000000..b3a361c47cf1
--- /dev/null
+++ b/Integrations/MimecastV2/MimecastV2.py
@@ -0,0 +1,1538 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import os
+import hmac
+import uuid
+import json
+import base64
+import hashlib
+import requests
+
+from datetime import timedelta
+from requests.exceptions import HTTPError
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+BASE_URL = demisto.params()['baseUrl']
+ACCESS_KEY = demisto.params()['accessKey']
+SECRET_KEY = demisto.params()['secretKey']
+APP_ID = demisto.params()['appId']
+APP_KEY = demisto.params()['appKey']
+USE_SSL = not demisto.params().get('insecure', False)
+PROXY = True if demisto.params().get('proxy') else False
+# Flags to control which type of incidents are being fetched
+FETCH_URL = demisto.params().get('fetchURL')
+FETCH_ATTACHMENTS = demisto.params().get('fetchAttachments')
+FETCH_IMPERSONATIONS = demisto.params().get('fetchImpersonations')
+# Used to refresh token / discover available auth types / login
+EMAIL_ADDRESS = demisto.params().get('email')
+PASSWORD = demisto.params().get('password')
+FETCH_DELTA = int(demisto.params().get('fetchDelta', 24))
+
+# remove proxy if not set to true in params
+if not demisto.params().get('proxy'):
+ os.environ.pop('HTTP_PROXY', None)
+ os.environ.pop('HTTPS_PROXY', None)
+ os.environ.pop('http_proxy', None)
+ os.environ.pop('https_proxy', None)
+
+# default query xml template for test module
+default_query_xml = " \n\
+ \n\
+ \n\
+ \n\
+ \n\
+ attachmentcount \n\
+ status \n\
+ subject \n\
+ size \n\
+ receiveddate \n\
+ displayfrom \n\
+ id \n\
+ displayto \n\
+ smash \n\
+ \n\
+ \n\
+ \n\
+ \n\
+ \n\
+ \n\
+ \n\
+ \n\
+ \n\
+ "
+
+''' HELPER FUNCTIONS '''
+
+
+def epoch_seconds(d=None):
+ """
+ Return the number of seconds since the epoch for the given date; if no date is given, use the current time.
+ """
+ if not d:
+ d = datetime.utcnow()
+ return int((d - datetime.utcfromtimestamp(0)).total_seconds())
+
+
+def auto_refresh_token():
+ """
+ Check if we have a valid token; if not, automatically renew it for 3 days, provided the necessary params are available.
+ """
+ if APP_ID and EMAIL_ADDRESS and PASSWORD:
+ integration_context = demisto.getIntegrationContext()
+ last_update_ts = integration_context.get('token_last_update')
+ current_ts = epoch_seconds()
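+ # Tokens are valid for 3 days; renew once we are within 30 minutes of expiry.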
+ if (last_update_ts and current_ts - last_update_ts > 60 * 60 * 24 * 3 - 1800) or last_update_ts is None:
+ refresh_token_request()
+ current_ts = epoch_seconds()
+ demisto.setIntegrationContext({'token_last_update': current_ts})
+
+
+def http_request(method, api_endpoint, payload=None, params=None, user_auth=True, is_file=False):
+ is_user_auth = True
+ url = BASE_URL + api_endpoint
+ # Two types of auth are supported, user and non-user; user auth is needed for most endpoints
+ if user_auth:
+ # Generate request header values
+ request_id = str(uuid.uuid4())
+ hdr_date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S") + " UTC"
+
+ # Create the HMAC SHA1 of the Base64 decoded secret key for the Authorization header
+ hmac_sha1 = hmac.new(SECRET_KEY.decode("base64"), ':'.join([hdr_date, request_id, api_endpoint, APP_KEY]),
+ digestmod=hashlib.sha1).digest()
+
+ # Use the HMAC SHA1 value to sign hdrDate + ":" + requestId + ":" + URI + ":" + appKey
+ signature = base64.encodestring(hmac_sha1).rstrip()
+
+ # Create request headers
+ headers = {
+ 'Authorization': 'MC ' + ACCESS_KEY + ':' + signature,
+ 'x-mc-app-id': APP_ID,
+ 'x-mc-date': hdr_date,
+ 'x-mc-req-id': request_id,
+ 'Content-Type': 'application/json'
+ }
+
+ else:
+ # This type of auth is only supported for basic commands: login/discover/refresh-token
+ is_user_auth = False
+ auth = base64.b64encode(EMAIL_ADDRESS + ':' + PASSWORD)
+ auth_type = 'Basic-Cloud'
+ auth_header = auth_type + ' ' + auth
+ headers = {
+ 'x-mc-app-id': APP_ID,
+ 'Content-Type': 'application/json',
+ 'Authorization': auth_header
+ }
+
+ LOG('running %s request with url=%s\tparams=%s\tdata=%s\tis user auth=%s' % (
+ method, url, json.dumps(params), json.dumps(payload), is_user_auth))
+ try:
+ res = requests.request(
+ method,
+ url,
+ verify=USE_SSL,
+ params=params,
+ headers=headers,
+ data=payload
+ )
+
+ res.raise_for_status()
+ if is_file:
+ return res
+ return res.json()
+
+ except HTTPError as e:
+ LOG(e)
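+ # Mimecast returns HTTP 418 when the provided access/secret keys have expired.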
+ if e.response.status_code == 418: # type: ignore # pylint: disable=no-member
+ if not APP_ID or not EMAIL_ADDRESS or not PASSWORD:
+ return_error(
+ 'Credentials provided are expired, could not automatically refresh tokens. App ID + Email Address '
+ '+ Password are required.')
+ else:
+ raise
+
+ except Exception as e:
+ LOG(e)
+ raise
+
+
+def parse_query_args(args):
+ query_xml = default_query_xml
+ if args.get('pageSize'):
+ query_xml = query_xml.replace('page-size=\"25\"', 'page-size=\"' + args.get('pageSize') + '\"')
+ if args.get('startRow'):
+ query_xml = query_xml.replace('startrow=\"0\"', 'startrow=\"' + args.get('startRow') + '\"')
+ if args.get('active') == 'true':
+ query_xml = query_xml.replace('active=\"false\"', 'active=\"true\"')
+ if args.get('body'):
+ query_xml = query_xml.replace('<text></text>', '<text>(body: ' + args.get('body') + ')</text>')
+ if args.get('subject'):
+ query_xml = query_xml.replace('<text></text>', '<text>(subject: ' + args.get('subject') + ')</text>')
+ if args.get('text'):
+ query_xml = query_xml.replace('<text></text>', '<text>' + args.get('text') + '</text>')
+ if args.get('date'):
+ query_xml = query_xml.replace('<dates select=\"last_year\"/>', '<dates select=\"' + args.get('date') + '\"/>')
+ if args.get('dateTo') or args.get('dateFrom'):
+ return_error('Cannot use both date and dateFrom/dateTo arguments')
+
+ date_to = ""
+ date_from = ""
+
+ if args.get('dateTo'):
+ date_to = args.get('dateTo')
+ if args.get('dateFrom'):
+ date_from = args.get('dateFrom')
+ if date_to and date_from:
+ query_xml = query_xml.replace('<dates select=\"last_year\"/>',
+ '<dates from=\"' + date_from + '\" to=\"' + date_to + '\"/>')
+ elif date_from:
+ query_xml = query_xml.replace('<dates select=\"last_year\"/>',
+ '<dates from=\"' + date_from + '\"/>')
+ elif date_to:
+ query_xml = query_xml.replace('<dates select=\"last_year\"/>',
+ '<dates to=\"' + date_to + '\"/>')
+
+ if args.get('sentFrom'):
+ query_xml = query_xml.replace('<sent></sent>', '<sent select=\"from\">' + args.get('sentFrom') + '</sent>')
+ if args.get('sentTo'):
+ query_xml = query_xml.replace('<sent></sent>', '<sent select=\"to\">' + args.get('sentTo') + '</sent>')
+ query_xml = query_xml.replace('<sent></sent>', '') # no empty tag
+ if args.get('attachmentText'):
+ query_xml = query_xml.replace('<docs select=\"optional\"></docs>',
+ '<docs select=\"optional\">' + args.get('attachmentText') + '</docs>')
+ if args.get('attachmentType'):
+ query_xml = query_xml.replace('<docs select=\"optional\">',
+ '<docs select=\"' + args.get('attachmentType') + '\">')
+
+ return query_xml
+
+
+''' COMMANDS '''
+
+
+def test_module():
+ if not ACCESS_KEY:
+ return_error('Cannot test valid connection without the Access Key parameter.')
+ list_managed_url()
+
+
+def query():
+ headers = ['Subject', 'Display From', 'Display To', 'Received Date', 'Size', 'Attachment Count', 'Status', 'ID']
+ contents = []
+ context = {}
+ messages_context = []
+ query_xml = ''
+
+ if demisto.args().get('queryXml'):
+ query_xml = demisto.args().get('queryXml')
+ else:
+ query_xml = parse_query_args(demisto.args())
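+ # dryRun returns the generated query XML without running the search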
+ if demisto.args().get('dryRun') == 'true':
+ return query_xml
+
+ messages = query_request(query_xml)
+ for message in messages:
+ contents.append({
+ 'Subject': message.get('subject'),
+ 'From': message.get('displayfrom'),
+ 'To': message.get('displayto'),
+ 'Received Date': message.get('receiveddate'),
+ 'Size': message.get('size'),
+ 'Attachment Count': message.get('attachmentcount'),
+ 'Status': message.get('status'),
+ 'ID': message.get('id')
+ })
+ messages_context.append({
+ 'Subject': message.get('subject'),
+ 'Sender': message.get('displayfrom'),
+ 'Recipient': message.get('displayto'),
+ 'ReceivedDate': message.get('receiveddate'),
+ 'Size': message.get('size'),
+ 'AttachmentCount': message.get('attachmentcount'),
+ 'Status': message.get('status'),
+ 'ID': message.get('id')
+ })
+
+ context['Mimecast.Message(val.ID && val.ID == obj.ID)'] = messages_context
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Mimecast archived emails', contents, headers),
+ 'EntryContext': context
+ }
+
+ return results
+
+
+def query_request(query_xml):
+ api_endpoint = '/api/archive/search'
+ # The API request demands an admin boolean; since only admin search is supported, we simply pass true.
+ data = [{
+ 'admin': True,
+ 'query': query_xml
+ }]
+ payload = {
+ 'data': data
+ }
+ response = http_request('POST', api_endpoint, json.dumps(payload))
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')[0].get('items')
+
+
+def url_decode():
+ headers = [] # type: List[str]
+ contents = {}
+ context = {}
+ protected_url = demisto.args().get('url').encode('utf-8')
+ decoded_url = url_decode_request(protected_url)
+ contents['Decoded URL'] = decoded_url
+ context[outputPaths['url']] = {
+ 'Data': protected_url,
+ 'Mimecast': {
+ 'DecodedURL': decoded_url
+ }
+ }
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Mimecast Decoded URL:', contents, headers),
+ 'EntryContext': context
+ }
+
+ return results
+
+
+def url_decode_request(url):
+ # Setup required variables
+ api_endpoint = '/api/ttp/url/decode-url'
+ payload = {
+ 'data': [
+ {
+ 'url': url
+ }
+ ]
+ }
+ response = http_request('POST', api_endpoint, str(payload))
+ if not response.get('data')[0].get('url'):
+ return_error('No URL has been returned from the service')
+ return response.get('data')[0].get('url')
+
+
+def get_policy():
+ headers = ['Policy ID', 'Sender', 'Receiver', 'Bidirectional', 'Start', 'End']
+ contents = []
+ context = {}
+ title = 'Mimecast list blocked sender policies: \n These are the existing Blocked Sender Policies:'
+ policy_id = demisto.args().get('policyID')
+ if policy_id:
+ policy_id = policy_id.encode('utf-8')
+ title = 'Mimecast Get Policy'
+
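+ # With no policyID the request body is empty and the API returns all blocked sender policies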
+ policies_list = get_policy_request(policy_id)
+ policies_context = []
+ for policy_list in policies_list:
+ policy = policy_list.get('policy')
+ sender = policy.get('from')
+ receiver = policy.get('to')
+ contents.append({
+ 'Policy ID': policy_list['id'],
+ 'Sender': {
+ 'Group': sender.get('groupId'),
+ 'Email Address': sender.get('emailAddress'),
+ 'Domain': sender.get('emailDomain'),
+ 'Type': sender.get('type')
+ },
+ 'Receiver': {
+ 'Group': receiver.get('groupId'),
+ 'Email Address': receiver.get('emailAddress'),
+ 'Domain': receiver.get('domain'),
+ 'Type': receiver.get('type')
+ },
+ 'Bidirectional': policy.get('bidirectional'),
+ 'Start': policy.get('fromDate'),
+ 'End': policy.get('toDate')
+ })
+ policies_context.append({
+ 'ID': policy_list['id'],
+ 'Sender': {
+ 'Group': sender.get('groupId'),
+ 'Address': sender.get('emailAddress'),
+ 'Domain': sender.get('domain'),
+ 'Type': sender.get('type')
+ },
+ 'Receiver': {
+ 'Group': receiver.get('groupId'),
+ 'Address': receiver.get('emailAddress'),
+ 'Domain': receiver.get('domain'),
+ 'Type': receiver.get('type')
+ },
+ 'Bidirectional': policy.get('bidirectional'),
+ 'FromDate': policy.get('fromDate'),
+ 'ToDate': policy.get('toDate')
+ })
+
+ context['Mimecast.Policy(val.ID && val.ID == obj.ID)'] = policies_context
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, contents, headers),
+ 'EntryContext': context
+ }
+
+ return results
+
+
+def get_policy_request(policy_id=None):
+ # Setup required variables
+ api_endpoint = '/api/policy/blockedsenders/get-policy'
+ data = []
+ if policy_id:
+ data.append({
+ 'id': policy_id
+ })
+ payload = {
+ 'data': data
+ }
+
+ response = http_request('POST', api_endpoint, str(payload))
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')
+
+
+def create_policy():
+ headers = ['Policy ID', 'Sender', 'Receiver', 'Bidirectional', 'Start', 'End']
+ contents = {} # type: Dict[Any, Any]
+ context = {}
+ policies_context = {} # type: Dict[Any, Any]
+ description = demisto.args().get('description').encode('utf-8')
+ from_part = demisto.args().get('fromPart').encode('utf-8')
+ from_type = demisto.args().get('fromType').encode('utf-8')
+ from_value = demisto.args().get('fromValue').encode('utf-8')
+ to_type = demisto.args().get('toType').encode('utf-8')
+ to_value = demisto.args().get('toValue').encode('utf-8')
+ option = demisto.args().get('option').encode('utf-8')
+
+ policy_obj = {
+ 'description': description,
+ 'fromPart': from_part,
+ 'fromType': from_type,
+ 'fromValue': from_value,
+ 'toType': to_type,
+ 'toValue': to_value
+ }
+
+ policy_list = create_policy_request(policy_obj, option)
+ policy = policy_list.get('policy')
+ policy_id = policy_list.get('id')
+ title = 'Mimecast Create Policy: \n Policy {} Was Created Successfully!'.format(policy_id)
+ sender = policy.get('from')
+ receiver = policy.get('to')
+ contents = {
+ 'Policy ID': policy_id,
+ 'Sender': {
+ 'Group': sender.get('groupId'),
+ 'Email Address': sender.get('emailAddress'),
+ 'Domain': sender.get('emailDomain'),
+ 'Type': sender.get('type')
+ },
+ 'Receiver': {
+ 'Group': receiver.get('groupId'),
+ 'Email Address': receiver.get('emailAddress'),
+ 'Domain': receiver.get('domain'),
+ 'Type': receiver.get('type')
+ },
+ 'Bidirectional': policy.get('bidirectional'),
+ 'Start': policy.get('fromDate'),
+ 'End': policy.get('toDate')
+ }
+ policies_context = {
+ 'ID': policy_id,
+ 'Sender': {
+ 'Group': sender.get('groupId'),
+ 'Address': sender.get('emailAddress'),
+ 'Domain': sender.get('domain'),
+ 'Type': sender.get('type')
+ },
+ 'Receiver': {
+ 'Group': receiver.get('groupId'),
+ 'Address': receiver.get('emailAddress'),
+ 'Domain': receiver.get('domain'),
+ 'Type': receiver.get('type')
+ },
+ 'Bidirectional': policy.get('bidirectional'),
+ 'FromDate': policy.get('fromDate'),
+ 'ToDate': policy.get('toDate')
+ }
+
+ context['Mimecast.Policy(val.ID && val.ID == obj.ID)'] = policies_context
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, contents, headers),
+ 'EntryContext': context
+ }
+
+ return results
+
+
+def create_policy_request(policy, option):
+ # Setup required variables
+ api_endpoint = '/api/policy/blockedsenders/create-policy'
+ payload = {
+ 'data': [{
+ 'policy': policy,
+ 'option': option
+ }]
+ }
+
+ response = http_request('POST', api_endpoint, str(payload))
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')[0]
+
+
+def delete_policy():
+ contents = [] # type: List[Any]
+ context = {}
+ policy_id = demisto.args().get('policyID').encode('utf-8')
+
+ delete_policy_request(policy_id)
+
+ context['Mimecast.Policy(val.ID && val.ID == obj.ID)'] = {
+ 'ID': policy_id,
+ 'Deleted': True
+ }
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'Mimecast Policy {} deleted successfully!'.format(policy_id),
+ 'EntryContext': context
+ }
+
+ return results
+
+
+def delete_policy_request(policy_id=None):
+ # Setup required variables
+ api_endpoint = '/api/policy/blockedsenders/delete-policy'
+ data = [{
+ 'id': policy_id
+ }]
+ payload = {
+ 'data': data
+ }
+
+ response = http_request('POST', api_endpoint, str(payload))
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ if response.get('data')[0].get('id') != policy_id:
+ return_error('Policy was not deleted.')
+ return response.get('data')[0]
+
+
+def manage_sender():
+ headers = [] # type: List[str]
+ context = {}
+ sender = demisto.args().get('sender').encode('utf-8')
+ recipient = demisto.args().get('recipient').encode('utf-8')
+ action = demisto.args().get('action').encode('utf-8')
+ title_action = 'permitted' if action == 'permit' else 'blocked'
+ title = 'Mimecast messages from {} to {} will now be {}!'.format(sender, recipient, title_action)
+
+ req_obj = {
+ 'sender': sender,
+ 'to': recipient,
+ 'action': action
+ }
+
+ managed_sender = manage_sender_request(req_obj)
+
+ contents = {
+ 'Sender': managed_sender.get('sender'),
+ 'Recipient': managed_sender.get('to'),
+ 'Action': managed_sender.get('type'),
+ 'ID': managed_sender.get('id')
+ }
+
+ context['Mimecast.Managed(val.ID && val.ID == obj.ID)'] = {
+ 'Sender': managed_sender.get('sender'),
+ 'Recipient': managed_sender.get('to'),
+ 'Action': managed_sender.get('type'),
+ 'ID': managed_sender.get('id')
+ }
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, contents, headers),
+ 'EntryContext': context
+ }
+
+ return results
+
+
+def manage_sender_request(req_obj):
+ # Setup required variables
+ api_endpoint = '/api/managedsender/permit-or-block-sender'
+ data = []
+ data.append(req_obj)
+ payload = {
+ 'data': data
+ }
+
+ response = http_request('POST', api_endpoint, str(payload))
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')[0]
+
+
+def list_managed_url():
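+ # Rebuilds each managed URL from its scheme/domain/path/queryString parts and,
+ # when the optional 'url' argument is given, keeps only entries containing it.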
+ headers = ['URL', 'Action', 'Match Type', 'User Awareness', 'URL Rewriting', 'Comment']
+ contents = []
+ context = {}
+ managed_urls_context = []
+ full_url_response = ''
+ url = demisto.args().get('url')
+ if url:
+ url = url.encode('utf-8')
+
+ managed_urls = list_managed_url_request()
+ for managed_url in managed_urls:
+ query_string = ''
+ scheme = ''
+ if managed_url.get('queryString'):
+ query_string = '?' + managed_url.get('queryString')
+ if managed_url.get('scheme'):
+ scheme = managed_url.get('scheme') + '://'
+ full_url_response = scheme + managed_url.get('domain', '') + managed_url.get('path', '') + query_string
+ if (url and url in full_url_response) or not url:
+ contents.append({
+ 'URL': full_url_response,
+ 'Match Type': managed_url.get('matchType'),
+ 'Comment': managed_url.get('comment'),
+ 'Action': managed_url.get('action'),
+ 'URL Rewriting': managed_url.get('disableRewrite'),
+ 'User Awareness': managed_url.get('disableUserAwareness')
+ })
+ managed_urls_context.append({
+ 'Domain': managed_url.get('domain'),
+ 'disableLogClick': managed_url.get('disableLogClick'),
+ 'Action': managed_url.get('action'),
+ 'Path': managed_url.get('path'),
+ 'matchType': managed_url.get('matchType'),
+ 'ID': managed_url.get('id'),
+ 'disableRewrite': managed_url.get('disableRewrite')
+ })
+
+ context['Mimecast.URL(val.ID && val.ID == obj.ID)'] = managed_urls_context
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Mimecast Managed URLs: ', contents, headers),
+ 'EntryContext': context
+ }
+
+ return results
+
+
+def list_managed_url_request():
+ # Setup required variables
+ api_endpoint = '/api/ttp/url/get-all-managed-urls'
+ data = [] # type: List[Any]
+ payload = {
+ 'data': data
+ }
+
+ response = http_request('POST', api_endpoint, str(payload))
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')
+
+
+def create_managed_url():
+ context = {}
+ contents = {} # type: Dict[Any, Any]
+ managed_urls_context = []
+ url = demisto.args().get('url').encode('utf-8')
+ action = demisto.args().get('action').encode('utf-8')
+ match_type = demisto.args().get('matchType').encode('utf-8')
+ disable_rewrite = demisto.args().get('disableRewrite').encode('utf-8')
+ disable_user_awareness = demisto.args().get('disableUserAwareness').encode('utf-8')
+ disable_log_click = demisto.args().get('disableLogClick').encode('utf-8')
+ comment = demisto.args().get('comment')
+ if comment:
+ comment = comment.encode('utf-8')
+
+ url_req_obj = {
+ 'comment': comment,
+ 'disableRewrite': disable_rewrite,
+ 'url': url,
+ 'disableUserAwareness': disable_user_awareness,
+ 'disableLogClick': disable_log_click,
+ 'action': action,
+ 'matchType': match_type
+ }
+
+ managed_url = create_managed_url_request(url_req_obj)
+ managed_urls_context.append({
+ 'Domain': managed_url.get('domain'),
+ 'disableLogClick': managed_url.get('disableLogClick'),
+ 'Action': managed_url.get('action'),
+ 'Path': managed_url.get('path'),
+ 'matchType': managed_url.get('matchType'),
+ 'ID': managed_url.get('id'),
+ 'disableRewrite': managed_url.get('disableRewrite')
+ })
+
+ context['Mimecast.URL(val.ID && val.ID == obj.ID)'] = managed_urls_context
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'Managed URL {} created successfully!'.format(url),
+ 'EntryContext': context
+ }
+
+ return results
+
+
+def create_managed_url_request(url_obj):
+ # Setup required variables
+ api_endpoint = '/api/ttp/url/create-managed-url'
+ data = []
+ data.append(url_obj)
+ payload = {
+ 'data': data
+ }
+
+ response = http_request('POST', api_endpoint, str(payload))
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')[0]
+
+
+def list_messages():
+ headers = ['Subject', 'Size', 'Received Date', 'From', 'Attachment Count', 'Message ID']
+ context = {}
+ contents = []
+ messages_context = []
+ search_params = {}
+
+ # Can't send null values for keys, so if an optional value was not sent by the user, do not add it to the request.
+ mailbox = demisto.args().get('mailbox', '').encode('utf-8')
+ if mailbox:
+ search_params['mailbox'] = mailbox
+ view = demisto.args().get('view', '').encode('utf-8')
+ if view:
+ search_params['view'] = view
+ end_time = demisto.args().get('endTime', '').encode('utf-8')
+ if end_time:
+ search_params['end'] = end_time
+ start_time = demisto.args().get('startTime', '').encode('utf-8')
+ if start_time:
+ search_params['start'] = start_time
+ subject = demisto.args().get('subject')
+
+ messages_list = list_messages_request(search_params)
+
+ for message in messages_list:
+ if subject == message.get('subject') or not subject:
+ contents.append({
+ 'Message ID': message.get('id'),
+ 'Subject': message.get('subject'),
+ 'Size': message.get('size'),
+ 'Received Date': message.get('received'),
+ 'From': message.get('from').get('emailAddress'),
+ 'Attachment Count': message.get('attachmentCount')
+ })
+ messages_context.append({
+ 'Subject': message.get('subject'),
+ 'ID': message.get('id'),
+ 'Size': message.get('size'),
+ 'RecievedDate': message.get('received'),
+ 'From': message.get('from').get('emailAddress'),
+ 'AttachmentCount': message.get('attachmentCount')
+ })
+
+ context['Mimecast.Message(val.ID && val.ID == obj.ID)'] = messages_context
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Mimecast messages list', contents, headers),
+ 'EntryContext': context
+ }
+
+ return results
+
+
+def list_messages_request(search_params):
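+ # The empty 'meta.pagination' object leaves the page size at the API default.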
+ # Setup required variables
+ api_endpoint = '/api/archive/get-message-list'
+ data = []
+ data.append(search_params)
+ payload = {
+ 'meta': {
+ 'pagination': {
+ }
+ },
+ 'data': data
+ }
+
+ response = http_request('POST', api_endpoint, str(payload))
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')
+
+
+def get_url_logs():
+ headers = [] # type: List[Any]
+ contents = []
+ context = {}
+ url_logs_context = []
+ search_params = {}
+ result_number = demisto.args().get('resultsNumber', '').encode('utf-8')
+ from_date = demisto.args().get('fromDate', '').encode('utf-8')
+ to_date = demisto.args().get('toDate', '').encode('utf-8')
+ scan_result = demisto.args().get('resultType', '').encode('utf-8')
+ limit = int(demisto.args().get('limit', 100))
+
+ if from_date:
+ search_params['from'] = from_date
+ if to_date:
+ search_params['to'] = to_date
+ if scan_result:
+ search_params['scanResult'] = scan_result
+
+ url_logs = get_url_logs_request(search_params, result_number)
+ if limit:
+ url_logs = url_logs[:limit]
+ for url_log in url_logs:
+ contents.append({
+ 'Action': url_log.get('action'),
+ 'Admin Override': url_log.get('adminOverride'),
+ 'Category': url_log.get('category'),
+ 'Date': url_log.get('date'),
+ 'Route': url_log.get('route'),
+ 'Scan Result': url_log.get('scanResult'),
+ 'URL': url_log.get('url'),
+ 'User Awareness Action': url_log.get('userAwarenessAction'),
+ 'User Email Address': url_log.get('userEmailAddress'),
+ 'User Override': url_log.get('userOverride')
+ })
+ url_logs_context.append({
+ 'Action': url_log.get('action'),
+ 'AdminOverride': url_log.get('adminOverride'),
+ 'Category': url_log.get('category'),
+ 'Date': url_log.get('date'),
+ 'Route': url_log.get('route'),
+ 'Result': url_log.get('scanResult'),
+ 'URL': url_log.get('url'),
+ 'Awareness': url_log.get('userAwarenessAction'),
+ 'Address': url_log.get('userEmailAddress'),
+ 'UserOverride': url_log.get('userOverride')
+ })
+
+ context['Mimecast.UrlLog'] = url_logs_context
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Mimecast URL logs: ', contents, headers),
+ 'EntryContext': context
+ }
+
+ return results
+
+
+def get_url_logs_request(search_params, result_number=None):
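+ # When 'result_number' is provided it is passed as the requested page size;
+ # otherwise pagination is left to the API default.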
+ # Setup required variables
+ api_endpoint = '/api/ttp/url/get-logs'
+ pagination = {} # type: Dict[Any, Any]
+ if result_number:
+ pagination = {'page_size': result_number}
+ payload = {
+ 'meta': {
+ 'pagination': pagination
+ },
+ 'data': [search_params]
+ }
+
+ response = http_request('POST', api_endpoint, str(payload))
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')[0].get('clickLogs')
+
+
+def get_attachment_logs():
+ headers = [] # type: List[Any]
+ contents = []
+ context = {}
+ attachment_logs_context = []
+ search_params = {}
+ result_number = demisto.args().get('resultsNumber', '').encode('utf-8')
+ from_date = demisto.args().get('fromDate', '').encode('utf-8')
+ to_date = demisto.args().get('toDate', '').encode('utf-8')
+ result = demisto.args().get('resultType', '').encode('utf-8')
+ limit = int(demisto.args().get('limit', 100))
+
+ if from_date:
+ search_params['from'] = from_date
+ if to_date:
+ search_params['to'] = to_date
+ if result:
+ search_params['result'] = result
+
+ attachment_logs = get_attachment_logs_request(search_params, result_number)
+ if limit:
+ attachment_logs = attachment_logs[:limit]
+ for attachment_log in attachment_logs:
+ contents.append({
+ 'Result': attachment_log.get('result'),
+ 'Date': attachment_log.get('date'),
+ 'Sender Address': attachment_log.get('senderAddress'),
+ 'File Name': attachment_log.get('fileName'),
+ 'Action': attachment_log.get('actionTriggered'),
+ 'Route': attachment_log.get('route'),
+ 'Details': attachment_log.get('details'),
+ 'Recipient Address': attachment_log.get('recipientAddress'),
+ 'File Type': attachment_log.get('fileType')
+ })
+ attachment_logs_context.append({
+ 'Result': attachment_log.get('result'),
+ 'Date': attachment_log.get('date'),
+ 'Sender': attachment_log.get('senderAddress'),
+ 'FileName': attachment_log.get('fileName'),
+ 'Action': attachment_log.get('actionTriggered'),
+ 'Route': attachment_log.get('route'),
+ 'Details': attachment_log.get('details'),
+ 'Recipient': attachment_log.get('recipientAddress'),
+ 'FileType': attachment_log.get('fileType')
+ })
+
+ context['Mimecast.AttachmentLog'] = attachment_logs_context
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Mimecast attachment logs: ', contents, headers),
+ 'EntryContext': context
+ }
+
+ return results
+
+
+def get_attachment_logs_request(search_params, result_number=None):
+ # Setup required variables
+ api_endpoint = '/api/ttp/attachment/get-logs'
+ pagination = {} # type: Dict[Any, Any]
+ if result_number:
+ pagination = {'page_size': result_number}
+ payload = {
+ 'meta': {
+ 'pagination': pagination
+ },
+ 'data': [search_params]
+ }
+
+ response = http_request('POST', api_endpoint, str(payload))
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')[0].get('attachmentLogs')
+
+
+def get_impersonation_logs():
+ headers = [] # type: List[Any]
+ contents = []
+ context = {}
+ impersonation_logs_context = []
+ search_params = {}
+ result_number = demisto.args().get('resultsNumber', '').encode('utf-8')
+ from_date = demisto.args().get('fromDate', '').encode('utf-8')
+ to_date = demisto.args().get('toDate', '').encode('utf-8')
+ tagged_malicious = demisto.args().get('taggedMalicious', '').encode('utf-8')
+ search_field = demisto.args().get('searchField', '').encode('utf-8')
+ query = demisto.args().get('query', '').encode('utf-8')
+ identifiers = argToList(demisto.args().get('identifiers', '').encode('utf-8'))
+ actions = argToList(demisto.args().get('actions', '').encode('utf-8'))
+ limit = int(demisto.args().get('limit', 100))
+
+ if from_date:
+ search_params['from'] = from_date
+ if to_date:
+ search_params['to'] = to_date
+ if tagged_malicious:
+ search_params['taggedMalicious'] = tagged_malicious
+ if search_field:
+ search_params['searchField'] = search_field
+ if query:
+ search_params['query'] = query
+ if identifiers:
+ search_params['identifiers'] = identifiers
+ if actions:
+ search_params['actions'] = actions
+
+ impersonation_logs, result_count = get_impersonation_logs_request(search_params, result_number)
+ if limit:
+ impersonation_logs = impersonation_logs[:limit]
+ for impersonation_log in impersonation_logs:
+ contents.append({
+ 'Result Count': result_count,
+ 'Hits': impersonation_log.get('hits'),
+ 'Malicious': impersonation_log.get('taggedMalicious'),
+ 'Sender IP': impersonation_log.get('senderIpAddress'),
+ 'Sender Address': impersonation_log.get('senderAddress'),
+ 'Subject': impersonation_log.get('subject'),
+ 'Identifiers': impersonation_log.get('identifiers'),
+ 'Date': impersonation_log.get('eventTime'),
+ 'Action': impersonation_log.get('action'),
+ 'Policy': impersonation_log.get('definition'),
+ 'ID': impersonation_log.get('id'),
+ 'Recipient Address': impersonation_log.get('recipientAddress'),
+ 'External': impersonation_log.get('taggedExternal')
+ })
+ impersonation_logs_context.append({
+ 'ResultCount': result_count,
+ 'Hits': impersonation_log.get('hits'),
+ 'Malicious': impersonation_log.get('taggedMalicious'),
+ 'SenderIP': impersonation_log.get('senderIpAddress'),
+ 'SenderAddress': impersonation_log.get('senderAddress'),
+ 'Subject': impersonation_log.get('subject'),
+ 'Identifiers': impersonation_log.get('identifiers'),
+ 'Date': impersonation_log.get('eventTime'),
+ 'Action': impersonation_log.get('action'),
+ 'Policy': impersonation_log.get('definition'),
+ 'ID': impersonation_log.get('id'),
+ 'RecipientAddress': impersonation_log.get('recipientAddress'),
+ 'External': impersonation_log.get('taggedExternal')
+ })
+
+ context['Mimecast.Impersonation'] = impersonation_logs_context
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Mimecast impersonation logs: ', contents, headers),
+ 'EntryContext': context
+ }
+
+ return results
+
+
+def get_impersonation_logs_request(search_params, result_number=None):
+ # Setup required variables
+ api_endpoint = '/api/ttp/impersonation/get-logs'
+ pagination = {} # type: Dict[Any, Any]
+ if result_number:
+ pagination = {'page_size': result_number}
+ payload = {
+ 'meta': {
+ 'pagination': pagination
+ },
+ 'data': [search_params]
+ }
+
+ response = http_request('POST', api_endpoint, str(payload))
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')[0].get('impersonationLogs'), response.get('data')[0].get('resultCount')
+
+
+def fetch_incidents():
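+ # Deduplication: 'current_fetch' keeps the timestamp of the previous run while
+ # 'last_fetch' is advanced one second past the newest event seen, so events
+ # returned again by the inclusive time query are not re-created as incidents.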
+ last_run = demisto.getLastRun()
+ last_fetch = last_run.get('time')
+
+ # handle first time fetch
+ if last_fetch is None:
+ last_fetch = datetime.now() - timedelta(hours=FETCH_DELTA)
+ last_fetch_date_time = last_fetch.strftime("%Y-%m-%dT%H:%M:%S") + '+0000'
+ else:
+ last_fetch = datetime.strptime(last_fetch, '%Y-%m-%dT%H:%M:%SZ')
+ last_fetch_date_time = last_fetch.strftime("%Y-%m-%dT%H:%M:%S") + '+0000'
+ current_fetch = last_fetch
+
+ incidents = []
+ if FETCH_URL:
+ search_params = {
+ 'from': last_fetch_date_time,
+ 'scanResult': 'malicious'
+ }
+ url_logs = get_url_logs_request(search_params)
+ for url_log in url_logs:
+ incident = url_to_incident(url_log)
+ temp_date = datetime.strptime(incident['occurred'], '%Y-%m-%dT%H:%M:%SZ')
+ # update last run
+ if temp_date > last_fetch:
+ last_fetch = temp_date + timedelta(seconds=1)
+
+ # avoid duplication due to weak time query
+ if temp_date > current_fetch:
+ incidents.append(incident)
+
+ if FETCH_ATTACHMENTS:
+ search_params = {
+ 'from': last_fetch_date_time,
+ 'result': 'malicious'
+ }
+ attachment_logs = get_attachment_logs_request(search_params)
+ for attachment_log in attachment_logs:
+ incident = attachment_to_incident(attachment_log)
+ temp_date = datetime.strptime(incident['occurred'], '%Y-%m-%dT%H:%M:%SZ')
+
+ # update last run
+ if temp_date > last_fetch:
+ last_fetch = temp_date + timedelta(seconds=1)
+
+ # avoid duplication due to weak time query
+ if temp_date > current_fetch:
+ incidents.append(incident)
+
+ if FETCH_IMPERSONATIONS:
+ search_params = {
+ 'from': last_fetch_date_time,
+ 'taggedMalicious': True
+ }
+ impersonation_logs, _ = get_impersonation_logs_request(search_params)
+ for impersonation_log in impersonation_logs:
+ incident = impersonation_to_incident(impersonation_log)
+ temp_date = datetime.strptime(incident['occurred'], '%Y-%m-%dT%H:%M:%SZ')
+
+ # update last run
+ if temp_date > last_fetch:
+ last_fetch = temp_date + timedelta(seconds=1)
+
+ # avoid duplication due to weak time query
+ if temp_date > current_fetch:
+ incidents.append(incident)
+
+ demisto.setLastRun({'time': last_fetch.isoformat().split('.')[0] + 'Z'})
+ demisto.incidents(incidents)
+
+
+def url_to_incident(url_log):
+ incident = {}
+ incident['name'] = 'Mimecast malicious URL: ' + url_log.get('url')
+ incident['occurred'] = url_log.get('date').replace('+0000', 'Z')
+ incident['rawJSON'] = json.dumps(url_log)
+ return incident
+
+
+def attachment_to_incident(attachment_log):
+ incident = {}
+ incident['name'] = 'Mimecast malicious attachment: ' + attachment_log.get('fileName')
+ incident['occurred'] = attachment_log.get('date').replace('+0000', 'Z')
+ incident['rawJSON'] = json.dumps(attachment_log)
+ return incident
+
+
+def impersonation_to_incident(impersonation_log):
+ incident = {}
+ incident['name'] = 'Mimecast malicious impersonation: ' + impersonation_log.get('subject')
+ incident['occurred'] = impersonation_log.get('eventTime').replace('+0000', 'Z')
+ incident['rawJSON'] = json.dumps(impersonation_log)
+ return incident
+
+
+def discover():
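+ # Returns the authentication types supported for the configured account and
+ # the email token, keyed in context by the account's email address.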
+ headers = [] # type: List[Any]
+ context = {}
+ context_obj = {} # type: Dict[Any, Any]
+ contents = []
+
+ response = discover_request()
+
+ contents.append({
+ 'Authentication Types': response.get('authenticate'),
+ 'Email Address': response.get('emailAddress'),
+ 'Email Token': response.get('emailToken')
+ })
+
+ context_obj = {
+ 'AuthenticationTypes': response.get('authenticate'),
+ 'EmailAddress': response.get('emailAddress'),
+ 'EmailToken': response.get('emailToken')
+ }
+
+ context['Mimecast.Authentication(val.EmailAddress && val.EmailAddress === obj.EmailAddress)'] = context_obj
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Mimecast Authentication Information', contents, headers),
+ 'EntryContext': context
+ }
+
+ return results
+
+
+def discover_request():
+ if not EMAIL_ADDRESS:
+ return_error('In order to discover the account\'s auth types, account\'s email address is required.')
+ email = EMAIL_ADDRESS.encode('utf-8')
+ # Setup required variables
+ api_endpoint = '/api/login/discover-authentication'
+ payload = {
+ 'data': [{
+ 'emailAddress': email
+ }]
+ }
+ response = http_request('POST', api_endpoint, str(payload), {}, user_auth=False)
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')[0]
+
+
+def refresh_token():
+ contents = refresh_token_request()
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'Token has been refreshed successfully and is valid for the next 3 days'
+ }
+
+ return results
+
+
+def refresh_token_request():
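+ # Logging in with an existing access key extends that key's validity window
+ # (3 days, per the human-readable message above) instead of issuing new keys.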
+ if not EMAIL_ADDRESS:
+ return_error('In order to refresh a token\'s validity duration, account\'s email address is required.')
+ if not ACCESS_KEY:
+ return_error('In order to refresh a token\'s validity duration, account\'s access key is required.')
+ email = EMAIL_ADDRESS.encode('utf-8')
+ access_key = ACCESS_KEY.encode('utf-8')
+ # Setup required variables
+ api_endpoint = '/api/login/login'
+ payload = {
+ 'data': [{
+ 'userName': email,
+ 'accessKey': access_key
+ }]
+ }
+ response = http_request('POST', api_endpoint, str(payload), {}, user_auth=False)
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')[0]
+
+
+def login():
+ headers = ['Access Key', 'Secret Key']
+ contents = []
+
+ response = login_request()
+
+ contents.append({
+ 'Access Key': response.get('accessKey'),
+ 'Secret Key': response.get('secretKey')
+ })
+
+ results = {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Mimecast authentication details \n Tokens are valid for 3 days', contents,
+ headers)
+ }
+
+ return results
+
+
+def login_request():
+ if not EMAIL_ADDRESS:
+ return_error('In order to log in and generate keys, account\'s email address is required.')
+ email = EMAIL_ADDRESS.encode('utf-8')
+ # Setup required variables
+ api_endpoint = '/api/login/login'
+ payload = {
+ 'data': [{
+ 'userName': email
+ }]
+ }
+ response = http_request('POST', api_endpoint, str(payload), {}, user_auth=False)
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')[0]
+
+
+def get_message():
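+ # The 'part' argument controls the output: 'metadata' returns message details
+ # to context, 'message' downloads the body as a file entry, 'all' does both.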
+ context = {}
+ contents = {} # type: Dict[Any, Any]
+ metadata_context = {} # type: Dict[Any, Any]
+ results = []
+ message_id = demisto.args().get('messageID').encode('utf-8')
+ message_context = demisto.args().get('context').encode('utf-8')
+ message_type = demisto.args().get('type').encode('utf-8')
+ message_part = demisto.args().get('part')
+
+ if message_part == 'all' or message_part == 'metadata':
+ contents, metadata_context = get_message_metadata(message_id)
+
+ context['Mimecast.Message(val.ID && val.ID === obj.ID)'] = metadata_context
+
+ results.append({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Mimecast message details', contents, removeNull=True),
+ 'EntryContext': context
+ })
+
+ if message_part == 'all' or message_part == 'message':
+ email_file = get_message_body_content_request(message_id, message_context, message_type)
+ results.append(fileResult(message_id, email_file))
+
+ return results
+
+
+def get_message_body_content_request(message_id, message_context, message_type):
+ # Setup required variables
+ api_endpoint = '/api/archive/get-message-part'
+
+ data = [{
+ 'id': message_id,
+ 'type': message_type,
+ 'context': message_context
+ }]
+ payload = {
+ 'data': data
+ }
+
+ response = http_request('POST', api_endpoint, str(payload), is_file=True)
+ if isinstance(response, dict) and response.get('fail'):
+ return_error(json.dumps(response.get('fail', [{}])[0].get('errors')))
+ return response.content
+
+
+def get_message_metadata(message_id):
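+ # Builds two parallel views of the message: flattened strings for the markdown
+ # table ('contents') and structured objects for context ('context').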
+ contents = {} # type: Dict[Any, Any]
+ context = {} # type: Dict[Any, Any]
+ message = get_message_metadata_request(message_id)
+
+ receivers = message.get('to', [])
+ to_context = []
+ to_contents = []
+ for receiver in receivers:
+ to_context.append({
+ 'EmailAddress': receiver.get('emailAddress')
+ })
+ to_contents.append(
+ receiver.get('emailAddress')
+ )
+
+ copies = message.get('cc', [])
+ cc_context = []
+ cc_contents = []
+ for copy in copies:
+ cc_context.append({
+ 'EmailAddress': copy.get('emailAddress')
+ })
+ cc_contents.append(
+ copy.get('emailAddress')
+ )
+
+ response_headers = message.get('headers', [])
+ headers_contents = []
+ headers_context = []
+ for header in response_headers:
+ values = header.get('values')
+ values = [value.encode('utf-8') for value in values]
+ headers_context.append({
+ 'Name': header.get('name'),
+ 'Values': values
+ })
+ headers_contents.append(
+ 'Name: {}, Values: {}'.format(str(header.get('name')), str(values))
+ )
+
+ attachments = message.get('attachments', [])
+ attachments_context = []
+ attachments_contents = []
+ for attachment in attachments:
+ attachments_context.append({
+ 'FileName': attachment.get('filename'),
+ 'SHA256': attachment.get('sha256'),
+ 'ID': attachment.get('id'),
+ 'Size': attachment.get('size')
+ })
+ attachments_contents.append(
+ 'FileName: {}, SHA256: {}, ID: {}, Size: {}'.format(str(attachment.get('filename')),
+ str(attachment.get('sha256')),
+ str(attachment.get('id')), str(attachment.get('size')))
+ )
+
+ contents = {
+ 'Subject': message.get('subject'),
+ 'Header Date': message.get('headerDate'),
+ 'Size': message.get('size'),
+ 'From': message.get('from', {}).get('emailAddress'),
+ 'To': to_contents,
+ 'Reply To': message.get('replyTo', {}).get('emailAddress'),
+ 'CC': cc_contents,
+ 'Envelope From': message.get('envelopeFrom', {}).get('emailAddress'),
+ 'Headers': headers_contents,
+ 'Attachments': attachments_contents,
+ 'Processed': message.get('processed'),
+ 'Has Html Body': message.get('hasHtmlBody'),
+ 'ID': message.get('id')
+ }
+
+ context = {
+ 'Subject': message.get('subject'),
+ 'HeaderDate': message.get('headerDate'),
+ 'Size': message.get('size'),
+ 'From': message.get('from', {}).get('emailAddress'),
+ 'To': to_context,
+ 'ReplyTo': message.get('replyTo', {}).get('emailAddress'),
+ 'CC': cc_context,
+ 'EnvelopeFrom': message.get('envelopeFrom', {}).get('emailAddress'),
+ 'Headers': headers_context,
+ 'Attachments': attachments_context,
+ 'Processed': message.get('processed'),
+ 'HasHtmlBody': message.get('hasHtmlBody'),
+ 'ID': message.get('id')
+ }
+
+ return contents, context
+
+
+def get_message_metadata_request(message_id):
+ # Setup required variables
+ api_endpoint = '/api/archive/get-message-detail'
+ data = [{
+ 'id': message_id
+ }]
+ payload = {
+ 'data': data
+ }
+
+ response = http_request('POST', api_endpoint, str(payload))
+ if response.get('fail'):
+ return_error(json.dumps(response.get('fail')[0].get('errors')))
+ return response.get('data')[0]
+
+
+def download_attachment():
+ attachment_id = demisto.args().get('attachmentID').encode('utf-8')
+ attachment_file = download_attachment_request(attachment_id)
+ return fileResult(attachment_id, attachment_file)
+
+
+def download_attachment_request(attachment_id):
+ # Setup required variables
+ api_endpoint = '/api/archive/get-file'
+
+ data = [{
+ 'id': attachment_id
+ }]
+ payload = {
+ 'data': data
+ }
+
+ response = http_request('POST', api_endpoint, str(payload), is_file=True)
+ if isinstance(response, dict) and response.get('fail'):
+ return_error(json.dumps(response.get('fail', [{}])[0].get('errors')))
+ return response.content
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('command is %s' % (demisto.command(),))
+
+# Check if the token needs to be refreshed; if it does and the relevant params are set, refresh it.
+if ACCESS_KEY:
+ auto_refresh_token()
+
+try:
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+ demisto.results('ok')
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+ elif demisto.command() == 'mimecast-query':
+ demisto.results(query())
+ elif demisto.command() == 'mimecast-list-blocked-sender-policies':
+ demisto.results(get_policy())
+ elif demisto.command() == 'mimecast-get-policy':
+ demisto.results(get_policy())
+ elif demisto.command() == 'mimecast-create-policy':
+ demisto.results(create_policy())
+ elif demisto.command() == 'mimecast-delete-policy':
+ demisto.results(delete_policy())
+ elif demisto.command() == 'mimecast-manage-sender':
+ demisto.results(manage_sender())
+ elif demisto.command() == 'mimecast-list-managed-url':
+ demisto.results(list_managed_url())
+ elif demisto.command() == 'mimecast-create-managed-url':
+ demisto.results(create_managed_url())
+ elif demisto.command() == 'mimecast-list-messages':
+ demisto.results(list_messages())
+ elif demisto.command() == 'mimecast-get-attachment-logs':
+ demisto.results(get_attachment_logs())
+ elif demisto.command() == 'mimecast-get-url-logs':
+ demisto.results(get_url_logs())
+ elif demisto.command() == 'mimecast-get-impersonation-logs':
+ demisto.results(get_impersonation_logs())
+ elif demisto.command() == 'mimecast-url-decode':
+ demisto.results(url_decode())
+ elif demisto.command() == 'mimecast-discover':
+ demisto.results(discover())
+ elif demisto.command() == 'mimecast-login':
+ demisto.results(login())
+ elif demisto.command() == 'mimecast-refresh-token':
+ demisto.results(refresh_token())
+ elif demisto.command() == 'mimecast-get-message':
+ demisto.results(get_message())
+ elif demisto.command() == 'mimecast-download-attachments':
+ demisto.results(download_attachment())
+
+
+except Exception as e:
+ LOG(e.message)
+ LOG.print_log()
+ return_error(e.message)
diff --git a/Integrations/MimecastV2/MimecastV2.yml b/Integrations/MimecastV2/MimecastV2.yml
new file mode 100644
index 000000000000..4b95d37ef048
--- /dev/null
+++ b/Integrations/MimecastV2/MimecastV2.yml
@@ -0,0 +1,1194 @@
+category: Email Gateway
+commonfields:
+ id: MimecastV2
+ version: -1
+configuration:
+- defaultvalue: https://api.mimecast.com
+ display: BaseUrl - API URL including region, for example https://eu-api.mimecast.com
+ name: baseUrl
+ required: true
+ type: 0
+- display: App ID
+ name: appId
+ required: true
+ type: 0
+- display: User Email Address (used for auto token refresh)
+ name: email
+ required: false
+ type: 0
+- display: Password
+ name: password
+ required: false
+ type: 4
+- display: App key
+ name: appKey
+ required: false
+ type: 4
+- display: AccessKey
+ name: accessKey
+ required: false
+ type: 0
+- display: SecretKey
+ name: secretKey
+ required: false
+ type: 4
+- defaultvalue: 'true'
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Fetch URL incidents
+ name: fetchURL
+ required: false
+ type: 8
+- display: Fetch attachment incidents
+ name: fetchAttachments
+ required: false
+ type: 8
+- display: Fetch impersonation incidents
+ name: fetchImpersonations
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: '24'
+ display: Hours before first fetch to retrieve incidents
+ name: fetchDelta
+ required: false
+ type: 0
+description: Mimecast unified email management offers cloud email services for email
+ security, continuity, and archiving. Please read the detailed instructions to understand
+ how to set the integration's parameters.
+display: Mimecast
+name: MimecastV2
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: The XML query string for the search, using the Mimecast Unified
+ Search Experience (MUSE) - read more on https://community.mimecast.com/docs/DOC-2262.
+ Using this argument overrides the other query arguments
+ isArray: false
+ name: queryXml
+ required: false
+ secret: false
+ - default: false
+ description: Search for this text in messages
+ isArray: false
+ name: text
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Does not execute the query; instead, returns the query string that was built
+ isArray: false
+ name: dryRun
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Search in specific dates only (default is all dates)
+ isArray: false
+ name: date
+ predefined:
+ - today
+ - yesterday
+ - last_week
+ - last_month
+ - last_year
+ required: false
+ secret: false
+ - default: false
+ description: Search emails from date, format YYYY-MM-DDTHH:MM:SSZ (e.g. 2015-09-21T23:00:00Z)
+ isArray: false
+ name: dateFrom
+ required: false
+ secret: false
+ - default: false
+ description: Search emails to date, format YYYY-MM-DDTHH:MM:SSZ (e.g. 2015-09-21T23:00:00Z)
+ isArray: false
+ name: dateTo
+ required: false
+ secret: false
+ - default: false
+ description: Filter on messages to a specific address
+ isArray: false
+ name: sentTo
+ required: false
+ secret: false
+ - default: false
+ description: Filter on messages from a specific address
+ isArray: false
+ name: sentFrom
+ required: false
+ secret: false
+ - default: false
+ description: Search emails by subject; overrides the text argument
+ isArray: false
+ name: subject
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'These are the attachment types available: optional - messages
+ with and without attachments; any - messages with any attachment; documents
+ - messages with doc, dot, docx, docm, dotx, dotm, pdf, rtf, html attachments;
+ spreadsheets - messages with xls, xlt, xlsx, xlsm, xltx, xltm, xlsb, xlam,
+ csv attachments; presentations - messages with ppt, pptx, pptm, potx, potm,
+ ppam, ppsx, ppsm, sldx, sldm, thms, pps attachments; text - messages with txt,
+ text, html, log attachments; images - messages with jpg, jpeg, png, bmp, gif,
+ psd, tif, tiff attachments; media - messages with mp3, mp4, m4a, mpg, mpeg,
+ avi, wav, aac, wma, mov attachments; zips - messages with zip, rar, cab, gz,
+ gzip, 7z attachments; none - no attachments are to be present in the results'
+ isArray: false
+ name: attachmentType
+ predefined:
+ - optional
+ - any
+ - documents
+ - spreadsheets
+ - presentations
+ - text
+ - images
+ - media
+ - zips
+ - none
+ required: false
+ secret: false
+ - default: false
+ description: Search for text in attachments
+ isArray: false
+ name: attachmentText
+ required: false
+ secret: false
+ - default: false
+ description: Search emails by body text; overrides the text and subject
+ arguments
+ isArray: false
+ name: body
+ required: false
+ secret: false
+ - default: false
+ description: Sets the number of results to return per page (default 25)
+ isArray: false
+ name: pageSize
+ required: false
+ secret: false
+ - default: false
+ description: Sets the row from which to start returning results (default 0)
+ isArray: false
+ name: startRow
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Defines if the search should query recently received messages that
+ are not fully processed yet (default false). You can search by mailbox and
+ date time across active messages
+ isArray: false
+ name: active
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Query Mimecast emails
+ execution: false
+ name: mimecast-query
+ outputs:
+ - contextPath: Mimecast.Message.ID
+ description: Message ID
+ type: string
+ - contextPath: Mimecast.Message.Subject
+ description: Message subject
+ type: string
+ - contextPath: Mimecast.Message.Sender
+ description: Message sender address
+ type: string
+ - contextPath: Mimecast.Message.Recipient
+ description: Message recipient address
+ type: string
+ - contextPath: Mimecast.Message.RecievedDate
+ description: Message received date
+ type: date
+ - contextPath: Mimecast.Message.Size
+ description: The size of the message in bytes
+ type: number
+ - contextPath: Mimecast.Message.AttachmentCount
+ description: Message attachments count
+ type: number
+ - contextPath: Mimecast.Message.Status
+ description: Message status
+ type: string
+ - deprecated: false
+ description: List all existing Mimecast blocked sender policies
+ execution: false
+ name: mimecast-list-blocked-sender-policies
+ outputs:
+ - contextPath: Mimecast.Policy.ID
+ description: Policy ID
+ type: string
+ - contextPath: Mimecast.Policy.Sender.Address
+ description: Block Sender by email address
+ type: string
+ - contextPath: Mimecast.Policy.Sender.Domain
+ description: Block Sender by domain
+ type: string
+ - contextPath: Mimecast.Policy.Sender.Group
+ description: Block Sender by group
+ type: string
+ - contextPath: Mimecast.Policy.Bidirectional
+ description: Blocked policy is Bidirectional or not
+ type: boolean
+ - contextPath: Mimecast.Policy.Receiver.Address
+ description: Block emails to Receiver type address
+ type: string
+ - contextPath: Mimecast.Policy.Receiver.Domain
+ description: Block emails to Receiver type domain
+ type: string
+ - contextPath: Mimecast.Policy.Receiver.Group
+ description: Block emails to Receiver type group
+ type: string
+ - contextPath: Mimecast.Policy.FromDate
+ description: Policy validation start date
+ type: date
+ - contextPath: Mimecast.Policy.ToDate
+ description: Policy expiration date
+ type: date
+ - contextPath: Mimecast.Policy.Sender.Type
+ description: Block emails to Sender type
+ type: string
+ - contextPath: Mimecast.Policy.Receiver.Type
+ description: Block emails to Receiver type
+ type: string
+ - arguments:
+ - default: false
+ description: Filter by policy ID
+ isArray: false
+ name: policyID
+ required: true
+ secret: false
+ deprecated: false
+ description: Get a blocked sender policy by ID
+ execution: false
+ name: mimecast-get-policy
+ outputs:
+ - contextPath: Mimecast.Policy.ID
+ description: Policy ID
+ type: string
+ - contextPath: Mimecast.Policy.Sender.Address
+ description: Block Sender by email address
+ type: string
+ - contextPath: Mimecast.Policy.Sender.Domain
+ description: Block Sender by domain
+ type: string
+ - contextPath: Mimecast.Policy.Sender.Group
+ description: Block Sender by group
+ type: string
+ - contextPath: Mimecast.Policy.Bidirectional
+ description: Blocked policy is Bidirectional or not
+ type: boolean
+ - contextPath: Mimecast.Policy.Receiver.Address
+ description: Block emails to Receiver type address
+ type: string
+ - contextPath: Mimecast.Policy.Receiver.Domain
+ description: Block emails to Receiver type domain
+ type: string
+ - contextPath: Mimecast.Policy.Receiver.Group
+ description: Block emails to Receiver type group
+ type: string
+ - contextPath: Mimecast.Policy.FromDate
+ description: Policy validation start date
+ type: date
+ - contextPath: Mimecast.Policy.ToDate
+ description: Policy expiration date
+ type: date
+ - arguments:
+ - default: false
+ description: Policy description
+ isArray: false
+ name: description
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: envelope_from
+ description: Which address fields the policy is based on
+ isArray: false
+ name: fromPart
+ predefined:
+ - envelope_from
+ - header_from
+ - both
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Blocked Sender type
+ isArray: false
+ name: fromType
+ predefined:
+ - everyone
+ - internal_addresses
+ - external_addresses
+ - email_domain
+ - profile_group
+ - individual_email_address
+ required: true
+ secret: false
+ - default: false
+ description: 'Required if fromType is one of email domain, profile group, individual
+ email address. Expected values: If fromType is email_domain, a domain name
+ without the @ symbol. If fromType is profile_group, the ID of the profile
+ group. If fromType is individual_email_address, an email address.'
+ isArray: false
+ name: fromValue
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Receiver type
+ isArray: false
+ name: toType
+ predefined:
+ - everyone
+ - internal_addresses
+ - external_addresses
+ - email_domain
+ - profile_group
+ - address_attribute_value
+ - individual_email_address
+ - free_mail_domains
+ - header_display_name
+ required: true
+ secret: false
+ - default: false
+ description: 'Required if toType is one of email domain, profile group, individual
+ email address. Expected values: If toType is email_domain, a domain name without
+ the @ symbol. If toType is profile_group, the ID of the profile group. If
+ toType is individual_email_address, an email address.'
+ isArray: false
+ name: toValue
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The block option, must be one of: no_action, block_sender.'
+ isArray: false
+ name: option
+ predefined:
+ - no_action
+ - block_sender
+ required: true
+ secret: false
+ deprecated: false
+ description: Create a Blocked Sender Policy
+ execution: false
+ name: mimecast-create-policy
+ outputs:
+ - contextPath: Mimecast.Policy.ID
+ description: Policy ID
+ type: string
+ - contextPath: Mimecast.Policy.Sender.Address
+ description: Block Sender by email address
+ type: string
+ - contextPath: Mimecast.Policy.Sender.Domain
+ description: Block Sender by domain
+ type: string
+ - contextPath: Mimecast.Policy.Sender.Group
+ description: Block Sender by group
+ type: string
+ - contextPath: Mimecast.Policy.Bidirectional
+ description: Blocked policy is Bidirectional or not
+ type: boolean
+ - contextPath: Mimecast.Policy.Receiver.Address
+ description: Block emails to Receiver type address
+ type: string
+ - contextPath: Mimecast.Policy.Receiver.Domain
+ description: Block emails to Receiver type domain
+ type: string
+ - contextPath: Mimecast.Policy.Receiver.Group
+ description: Block emails to Receiver type group
+ type: string
+ - contextPath: Mimecast.Policy.FromDate
+ description: Policy validation start date
+ type: date
+ - contextPath: Mimecast.Policy.ToDate
+ description: Policy expiration date
+ type: date
+ - arguments:
+ - default: false
+ description: Policy ID
+ isArray: false
+ name: policyID
+ required: true
+ secret: false
+ deprecated: false
+ description: Delete a Blocked Sender Policy
+ execution: false
+ name: mimecast-delete-policy
+ outputs:
+ - contextPath: Mimecast.Policy.ID
+ description: Policy ID
+ type: string
+ - arguments:
+ - default: false
+ description: The email address of sender to permit or block
+ isArray: false
+ name: sender
+ required: true
+ secret: false
+ - default: false
+ description: The email address of recipient to permit or block
+ isArray: false
+ name: recipient
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Choose to either "permit" (to bypass spam checks) or "block" (to
+ reject the email)
+ isArray: false
+ name: action
+ predefined:
+ - permit
+ - block
+ required: true
+ secret: false
+ deprecated: false
+ description: Permit or block a specific sender
+ execution: false
+ name: mimecast-manage-sender
+ outputs:
+ - contextPath: Mimecast.Managed.Sender
+ description: The email address of the sender
+ type: string
+ - contextPath: Mimecast.Managed.Recipient
+ description: The email address of the recipient
+ type: string
+ - contextPath: Mimecast.Managed.Action
+ description: Chosen action
+ type: string
+ - contextPath: Mimecast.Managed.ID
+ description: The Mimecast secure ID of the managed sender object.
+ type: string
+ - arguments:
+ - default: false
+ description: Filter results by specific URL
+ isArray: false
+ name: url
+ required: false
+ secret: false
+ deprecated: false
+ description: Get a list of all managed URLs
+ execution: false
+ name: mimecast-list-managed-url
+ outputs:
+ - contextPath: Mimecast.URL.Domain
+ description: The managed domain
+ type: string
+ - contextPath: Mimecast.URL.disableLogClick
+ description: If logging of user clicks on the URL is disabled
+ type: boolean
+ - contextPath: Mimecast.URL.Action
+ description: Either block or permit
+ type: string
+ - contextPath: Mimecast.URL.Path
+ description: The path of the managed URL
+ type: string
+ - contextPath: Mimecast.URL.matchType
+ description: Either explicit (applies to the full URL) or domain (applies to
+ all URL values in the domain)
+ type: string
+ - contextPath: Mimecast.URL.ID
+ description: The Mimecast secure ID of the managed URL
+ type: string
+ - contextPath: Mimecast.URL.disableRewrite
+ description: If rewriting of this URL in emails is disabled
+ type: boolean
+ - arguments:
+ - default: false
+ description: The URL to block or permit. Do not include a fragment (#).
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Set to "block" to blacklist the URL, "permit" to whitelist it
+ isArray: false
+ name: action
+ predefined:
+ - block
+ - permit
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: explicit
+ description: Set to "explicit" to block or permit only instances of the full
+ URL. Set to "domain" to block or permit any URL with the same domain
+ isArray: false
+ name: matchType
+ predefined:
+ - explicit
+ - domain
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Disable rewriting of this URL in emails. Applies only if action
+ = "permit". Default false
+ isArray: false
+ name: disableRewrite
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Add a comment about the managed URL
+ isArray: false
+ name: comment
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Disable User Awareness challenges for this URL. Applies only if
+ action = "permit". Default false
+ isArray: false
+ name: disableUserAwareness
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Disable logging of user clicks on the URL. Default is false
+ isArray: false
+ name: disableLogClick
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Create a managed URL on Mimecast
+ execution: false
+ name: mimecast-create-managed-url
+ outputs:
+ - contextPath: Mimecast.URL.Domain
+ description: The managed domain
+ type: string
+ - contextPath: Mimecast.URL.Action
+ description: Either block or permit
+ type: string
+ - contextPath: Mimecast.URL.disableLogClick
+ description: If logging of user clicks on the URL is disabled
+ type: string
+ - contextPath: Mimecast.URL.matchType
+ description: Either explicit (applies to the full URL) or domain (applies to
+ all URL values in the domain)
+ type: string
+ - contextPath: Mimecast.URL.ID
+ description: The Mimecast secure ID of the managed URL
+ type: string
+ - contextPath: Mimecast.URL.disableRewrite
+ description: If rewriting of this URL in emails is disabled
+ type: boolean
+ - arguments:
+ - default: false
+ description: The email address to return the message list for
+ isArray: false
+ name: mailbox
+ required: false
+ secret: false
+ - default: false
+ description: The start date of messages to return, in the following format,
+ 2015-11-16T14:49:18+0000. Default is the last calendar month
+ isArray: false
+ name: startTime
+ required: false
+ secret: false
+ - default: false
+ description: The end date of messages to return, in the following format, 2015-11-16T14:49:18+0000.
+ Default is the end of the current day
+ isArray: false
+ name: endTime
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: INBOX
+ description: 'The message list type, must be one of: inbox or sent, default
+ is inbox'
+ isArray: false
+ name: view
+ predefined:
+ - INBOX
+ - SENT
+ required: false
+ secret: false
+ - default: false
+ description: Filter by message subject
+ isArray: false
+ name: subject
+ required: false
+ secret: false
+ deprecated: false
+ description: Get a list of messages for a given user
+ execution: false
+ name: mimecast-list-messages
+ outputs:
+ - contextPath: Mimecast.Message.Subject
+ description: Message Subject
+ type: string
+ - contextPath: Mimecast.Message.ID
+ description: Message ID
+ type: string
+ - contextPath: Mimecast.Message.Size
+ description: The size of the message in bytes
+ type: number
+ - contextPath: Mimecast.Message.RecievedDate
+ description: The date the message was received
+ type: date
+ - contextPath: Mimecast.Message.From
+ description: The mail Sender
+ type: string
+ - contextPath: Mimecast.Message.AttachmentCount
+ description: The number of attachments on the message
+ type: number
+ - arguments:
+ - default: false
+ description: The number of results to request. Default is all
+ isArray: false
+ name: resultsNumber
+ required: false
+ secret: false
+ - default: false
+ description: Start date of logs to return in the following format 2015-11-16T14:49:18+0000.
+ Default is the start of the current day
+ isArray: false
+ name: fromDate
+ required: false
+ secret: false
+ - default: false
+ description: End date of logs to return in the following format 2015-11-16T14:49:18+0000.
+ Default is time of request
+ isArray: false
+ name: toDate
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: malicious
+ description: Filters logs by scan result, default is malicious
+ isArray: false
+ name: resultType
+ predefined:
+ - safe
+ - malicious
+ - timeout
+ - error
+ - unsafe
+ - all
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: Limit number of returned results.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns Attachment Protect logs for a Mimecast customer account
+ execution: false
+ name: mimecast-get-attachment-logs
+ outputs:
+ - contextPath: Mimecast.AttachmentLog.Result
+ description: 'The result of the attachment analysis: clean, malicious, unknown,
+ or timeout'
+ type: string
+ - contextPath: Mimecast.AttachmentLog.Date
+ description: The time at which the attachment was released from the sandbox
+ type: date
+ - contextPath: Mimecast.AttachmentLog.Sender
+ description: The sender of the attachment
+ type: string
+ - contextPath: Mimecast.AttachmentLog.FileName
+ description: The file name of the original attachment
+ type: string
+ - contextPath: Mimecast.AttachmentLog.Action
+ description: The action triggered for the attachment
+ type: string
+ - contextPath: Mimecast.AttachmentLog.Recipient
+ description: The address of the user that received the attachment
+ type: string
+ - contextPath: Mimecast.AttachmentLog.FileType
+ description: The file type of the attachment
+ type: string
+ - contextPath: Mimecast.AttachmentLog.Route
+ description: 'The route of the original email containing the attachment, either:
+ inbound, outbound, internal, or external'
+ type: string
+ - arguments:
+ - default: false
+ description: The number of results to request. Default is all
+ isArray: false
+ name: resultsNumber
+ required: false
+ secret: false
+ - default: false
+ description: Start date of logs to return in the following format 2015-11-16T14:49:18+0000.
+ Default is the start of the current day
+ isArray: false
+ name: fromDate
+ required: false
+ secret: false
+ - default: false
+ description: End date of logs to return in the following format 2015-11-16T14:49:18+0000.
+ Default is time of request
+ isArray: false
+ name: toDate
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: malicious
+ description: Filters logs by scan result, default is malicious
+ isArray: false
+ name: resultType
+ predefined:
+ - clean
+ - malicious
+ - all
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: Limit number of returned results.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns URL protect logs for a Mimecast customer account
+ execution: false
+ name: mimecast-get-url-logs
+ outputs:
+ - contextPath: Mimecast.UrlLog.Category
+ description: The category of the URL clicked
+ type: string
+ - contextPath: Mimecast.UrlLog.Address
+ description: The email address of the user who clicked the link
+ type: string
+ - contextPath: Mimecast.UrlLog.URL
+ description: The URL clicked
+ type: string
+ - contextPath: Mimecast.UrlLog.Awareness
+ description: The action taken by the user if user awareness was applied
+ type: string
+ - contextPath: Mimecast.UrlLog.AdminOverride
+ description: The action defined by the administrator for the URL
+ type: string
+ - contextPath: Mimecast.UrlLog.Date
+ description: The date that the URL was clicked
+ type: date
+ - contextPath: Mimecast.UrlLog.Result
+ description: The result of the URL scan
+ type: string
+ - contextPath: Mimecast.UrlLog.Action
+ description: The action that was taken for the click
+ type: string
+ - contextPath: Mimecast.UrlLog.Route
+ description: 'The route of the original email containing the URL, either:
+ inbound, outbound, internal, or external'
+ type: string
+ - contextPath: Mimecast.UrlLog.UserOverride
+ description: The action requested by the user.
+ type: string
+ - arguments:
+ - default: false
+ description: The number of results to request. Default is all
+ isArray: false
+ name: resultsNumber
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: 'Filters for messages tagged malicious (true) or not tagged malicious
+ (false). Omit for no tag filtering. Default is true'
+ isArray: false
+ name: taggedMalicious
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The field to search. Defaults to all (meaning all of the listed
+ fields)
+ isArray: false
+ name: searchField
+ predefined:
+ - senderAddress
+ - recipientAddress
+ - subject
+ - policy
+ - all
+ required: false
+ secret: false
+ - default: false
+ description: Required if searchField exists. A character string to search for
+ in the logs.
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Filters logs by identifiers. Can include any of newly_observed_domain,
+ internal_user_name, reply_address_mismatch, and targeted_threat_dictionary.
+ You can choose more than one identifier, separated by commas.
+ isArray: true
+ name: identifiers
+ predefined:
+ - newly_observed_domain
+ - internal_user_name
+ - reply_address_mismatch
+ - targeted_threat_dictionary
+ required: false
+ secret: false
+ - default: false
+ description: Start date of logs to return in the following format 2015-11-16T14:49:18+0000.
+ Default is the start of the current day
+ isArray: false
+ name: fromDate
+ required: false
+ secret: false
+ - default: false
+ description: End date of logs to return in the following format 2015-11-16T14:49:18+0000.
+ Default is time of request
+ isArray: false
+ name: toDate
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Filters logs by action. You can choose more than one action, separated
+ by commas.
+ isArray: true
+ name: actions
+ predefined:
+ - delete
+ - hold
+ - bounce
+ - smart_folder
+ - disable_smart_folder
+ - content_expire
+ - meta_expire
+ - stationery
+ - gcc
+ - secure_delivery
+ - delivery_route
+ - document_policy
+ - disable_document_policy
+ - attach_set_policy
+ - remove_email
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: Limit number of returned results.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns Impersonation Protect logs for a Mimecast customer account
+ execution: false
+ name: mimecast-get-impersonation-logs
+ outputs:
+ - contextPath: Mimecast.Impersonation.ResultCount
+ description: The total number of impersonation log lines found for the request
+ type: number
+ - contextPath: Mimecast.Impersonation.Hits
+ description: The number of identifiers that the message triggered
+ type: number
+ - contextPath: Mimecast.Impersonation.Malicious
+ description: Whether the message was tagged as malicious
+ type: boolean
+ - contextPath: Mimecast.Impersonation.SenderIP
+ description: The source IP address of the message
+ type: string
+ - contextPath: Mimecast.Impersonation.SenderAddress
+ description: The email address of the sender of the message
+ type: string
+ - contextPath: Mimecast.Impersonation.Subject
+ description: The subject of the email
+ type: string
+ - contextPath: Mimecast.Impersonation.Identifiers
+ description: 'The properties of the message that triggered the action: similar_internal_domain,
+ newly_observed_domain, internal_user_name, reply_address_mismatch, and/or
+ targeted_threat_dictionary'
+ type: string
+ - contextPath: Mimecast.Impersonation.Date
+ description: The time at which the log was recorded
+ type: date
+ - contextPath: Mimecast.Impersonation.Action
+      description: The action triggered by the email.
+ type: string
+ - contextPath: Mimecast.Impersonation.Policy
+ description: The name of the policy definition that triggered the log
+ type: string
+ - contextPath: Mimecast.Impersonation.ID
+ description: Impersonation Log ID
+ type: string
+ - contextPath: Mimecast.Impersonation.RecipientAddress
+ description: The email address of the recipient of the email
+ type: string
+ - contextPath: Mimecast.Impersonation.External
+ description: Whether the message was tagged as coming from an external address
+ type: boolean
+ - arguments:
+ - default: false
+ description: URL to decode
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ deprecated: false
+  description: Decodes a given URL from Mimecast.
+ execution: false
+ name: mimecast-url-decode
+ outputs:
+    - contextPath: URL.Data
+      description: The encoded URL that was parsed.
+      type: string
+    - contextPath: URL.Mimecast.DecodedURL
+      description: The decoded URL.
+      type: string
+ - deprecated: false
+  description: Discovers the authentication types that are supported for your account
+    and the base URL to use for the requesting user.
+ execution: false
+ name: mimecast-discover
+ outputs:
+ - contextPath: Mimecast.Authentication.AuthenticationTypes
+ description: List of authentication types available to the user
+ type: string
+ - contextPath: Mimecast.Authentication.EmailAddress
+ description: Email address of the request sender
+ type: string
+ - contextPath: Mimecast.Authentication.EmailToken
+ description: Email token of the request sender
+ type: string
+ - deprecated: false
+  description: Refreshes the validity duration of the access key and secret key.
+ execution: false
+ name: mimecast-refresh-token
+ - deprecated: false
+  description: Logs in and generates an access key and secret key.
+ execution: false
+ name: mimecast-login
+ - arguments:
+ - default: false
+ description: Message ID
+ isArray: false
+ name: messageID
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: 'Defines which copy of the message part to return. Must be one
+        of: "delivered" - the copy that has been processed by the Mimecast MTA with
+        policies such as URL rewriting applied, or "received" - the copy of the message
+        that Mimecast originally received. (Only relevant when the part argument is
+        message or all.)'
+ isArray: false
+ name: context
+ predefined:
+ - DELIVERED
+ - RECEIVED
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: PLAIN
+      description: The message type to return. (Only relevant when the part argument
+        is message or all.)
+ isArray: false
+ name: type
+ predefined:
+ - HTML
+ - PLAIN
+ - RFC822
+ - TRANSMISSION_MESSAGE_BODY
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: all
+      description: Defines which message part to return - the message itself, its
+        metadata, or both.
+ isArray: false
+ name: part
+ predefined:
+ - message
+ - metadata
+ - all
+ required: false
+ secret: false
+ deprecated: false
+  description: Gets the contents or metadata of a given message.
+ execution: false
+ name: mimecast-get-message
+ outputs:
+ - contextPath: Mimecast.Message.ID
+ description: Message ID
+ type: string
+ - contextPath: Mimecast.Message.Subject
+ description: The message subject.
+ type: string
+ - contextPath: Mimecast.Message.HeaderDate
+ description: The date of the message as defined in the message headers.
+ type: date
+ - contextPath: Mimecast.Message.Size
+ description: The message size.
+ type: number
+ - contextPath: Mimecast.Message.From
+ description: Sender of the message as defined in the message header.
+ type: string
+ - contextPath: Mimecast.Message.To.EmailAddress
+ description: Recipient of the message.
+ type: string
+ - contextPath: Mimecast.Message.ReplyTo
+ description: The value of the Reply-To header.
+ type: string
+ - contextPath: Mimecast.Message.CC.EmailAddress
+ description: Each CC recipient of the message.
+ type: string
+ - contextPath: Mimecast.Message.EnvelopeFrom
+ description: Sender of the message as defined in the message envelope.
+ type: string
+ - contextPath: Mimecast.Message.Headers.Name
+ description: Header's name.
+ type: string
+ - contextPath: Mimecast.Message.Headers.Values
+ description: Header's value.
+ type: string
+ - contextPath: Mimecast.Message.Attachments.FileName
+ description: Message attachment's file name.
+ type: string
+ - contextPath: Mimecast.Message.Attachments.SHA256
+ description: Message attachment's SHA256.
+ type: string
+ - contextPath: Mimecast.Message.Attachments.ID
+ description: Message attachment's ID.
+ type: string
+ - contextPath: Mimecast.Message.Attachments.Size
+ description: Message attachment's file size.
+ type: number
+ - contextPath: Mimecast.Message.Processed
+ description: The date the message was processed by Mimecast in ISO 8601 format.
+ type: date
+ - contextPath: Mimecast.Message.HasHtmlBody
+    description: Whether the message has an HTML body part.
+ type: boolean
+ - contextPath: File.Size
+ description: File Size
+ type: number
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file
+ type: string
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file
+ type: string
+ - contextPath: File.Name
+ description: The sample name
+ type: string
+ - contextPath: File.SSDeep
+ description: SSDeep hash of the file
+ type: string
+  - contextPath: File.EntryID
+    description: War Room entry ID of the file
+    type: string
+  - contextPath: File.Info
+    description: Basic information of the file
+    type: string
+  - contextPath: File.Type
+    description: File type, e.g. "PE"
+    type: string
+ - contextPath: File.MD5
+ description: MD5 hash of the file
+ type: string
+ - arguments:
+ - default: false
+    description: The Mimecast ID of the message attachment to return (can be retrieved
+      using mimecast-get-message).
+ isArray: false
+ name: attachmentID
+ required: true
+ secret: false
+ deprecated: false
+  description: Downloads attachments from a specified message.
+ execution: false
+ name: mimecast-download-attachments
+ outputs:
+ - contextPath: File.Size
+ description: File Size
+ type: number
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file
+ type: string
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file
+ type: string
+ - contextPath: File.Name
+ description: The sample name
+ type: string
+ - contextPath: File.SSDeep
+ description: SSDeep hash of the file
+ type: string
+  - contextPath: File.EntryID
+    description: War Room entry ID of the file
+    type: string
+  - contextPath: File.Info
+    description: Basic information of the file
+    type: string
+  - contextPath: File.Type
+    description: File type, e.g. "PE"
+    type: string
+ - contextPath: File.MD5
+ description: MD5 hash of the file
+ type: string
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- Mimecast test
diff --git a/Integrations/MimecastV2/MimecastV2_description.md b/Integrations/MimecastV2/MimecastV2_description.md
new file mode 100644
index 000000000000..93c291167579
--- /dev/null
+++ b/Integrations/MimecastV2/MimecastV2_description.md
@@ -0,0 +1,11 @@
+1. In order to refresh the token / discover the auth types of the account / create new access & secret keys,
+you are required to provide: App ID, account email address & password.
+These parameters support the following integration commands:
+mimecast-login -> fetches a new access key & secret key
+mimecast-discover -> lists the supported auth types of the user
+mimecast-refresh-token -> refreshes the validity duration of the access key & secret key (3 days)
+
+2. In order to use the rest of the commands, you are required to provide: App ID, App Key, Access Key & Secret Key.
+
+3. Fetch Incidents - the integration can fetch 3 types of incidents: URL, attachment & impersonation.
+To activate them, first tick the "fetch incidents" box, then tick the relevant box for each fetch type you want.
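+
+For example (illustrative), a typical first-run sequence in the War Room:
+
+    !mimecast-login
+    !mimecast-discover
+    !mimecast-refresh-token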
\ No newline at end of file
diff --git a/Integrations/MimecastV2/MimecastV2_image.png b/Integrations/MimecastV2/MimecastV2_image.png
new file mode 100644
index 000000000000..3d8df91d83f5
Binary files /dev/null and b/Integrations/MimecastV2/MimecastV2_image.png differ
diff --git a/Integrations/Netcraft/CHANGELOG.md b/Integrations/Netcraft/CHANGELOG.md
new file mode 100644
index 000000000000..52057093a865
--- /dev/null
+++ b/Integrations/Netcraft/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.8.0] - 2019-08-06
+-
\ No newline at end of file
diff --git a/Integrations/Netcraft/Netcraft.png b/Integrations/Netcraft/Netcraft.png
new file mode 100644
index 000000000000..506aeee57572
Binary files /dev/null and b/Integrations/Netcraft/Netcraft.png differ
diff --git a/Integrations/Netcraft/Netcraft.py b/Integrations/Netcraft/Netcraft.py
new file mode 100644
index 000000000000..962b3e56218a
--- /dev/null
+++ b/Integrations/Netcraft/Netcraft.py
@@ -0,0 +1,520 @@
+
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+import requests
+from requests.auth import HTTPBasicAuth
+from typing import *
+
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+USERNAME = demisto.params()['credentials']['identifier']
+PASSWORD = demisto.params()['credentials']['password']
+LIMIT = int(demisto.params().get('limit') or 100)  # fall back to 100 if the parameter is left empty
+
+USE_SSL = not demisto.params().get('unsecure', False)
+
+
+# Service base URL
+BASE_URL = "https://takedown.netcraft.com/"
+
+
+# codes for malicious site report
+MALICIOUS_REPORT_SUCCESS = "TD_OK"
+MALICIOUS_REPORT_ALREADY_EXISTS = "TD_EXISTS"
+MALICIOUS_REPORT_URL_IS_WILDCARD = "TD_WILDCARD"
+MALICIOUS_REPORT_ACCESS_DENIED = "TD_DENIED"
+MALICIOUS_REPORT_ERROR = "TD_ERROR"
+
+
+# suffix endpoints
+REPORT_MALICIOUS_SUFFIX = "authorise.php"
+GET_TAKEDOWN_INFO_SUFFIX = "apis/get-info.php"
+ACCESS_TAKEDOWN_NOTES_SUFFIX = "apis/note.php"
+ESCALATE_TAKEDOWN_SUFFIX = "apis/escalate.php"
+TEST_MODULE_SUFFIX = "authorise-test.php"
+
+
+# Table Headers
+TAKEDOWN_INFO_HEADER = ["ID", "Status", "Attack Type", "Date Submitted", "Last Updated", "Reporter", "Group ID",
+ "Region", "Evidence URL", "Attack URL", "IP", "Domain", "Hostname", "Country Code",
+ "Domain Attack", "Targeted URL", "Certificate"]
+TAKEDOWN_NOTE_HEADERS = ["Takedown ID", "Note ID", "Note", "Author", "Time", "Group ID"]
+
+# Titles for human readables
+TAKEDOWN_INFO_TITLE = "Takedowns information found:"
+REPORT_MALICIOUS_SUCCESS_TITLE = "New takedown successfully created"
+
+
+''' HELPER FUNCTIONS '''
+
+
+@logger
+def http_request(method, request_suffix, params=None, data=None, should_convert_to_json=True):
+    # A wrapper around the requests library to send our requests and handle responses.
+    # The Netcraft API takes arguments as query params for GET requests and as form data for POST requests.
+ res = requests.request(
+ method,
+ BASE_URL + request_suffix,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ auth=HTTPBasicAuth(USERNAME, PASSWORD)
+ )
+
+ if should_convert_to_json:
+ return res.json()
+ else:
+ return res.text.splitlines()
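+
+
+# Illustrative usage (the takedown ID value here is hypothetical):
+#   http_request("GET", GET_TAKEDOWN_INFO_SUFFIX, params={"id": 12345})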
+
+
+@logger
+def filter_by_id(result_list_to_filter, filtering_id_field, desired_id):
+ """ Given a list of results, returns only the ones that are tied to a given ID.
+
+ Args:
+ result_list_to_filter (list): list of dictionaries, containing data about entries.
+ filtering_id_field: The name of the field containing the IDs to filter.
+ desired_id: The ID to keep when filtering.
+
+ Returns:
+ list: A copy of the input list, containing only entries with the desired ID.
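+
+    Example (illustrative):
+        >>> filter_by_id([{"id": "1"}, {"id": "2"}], "id", "1")
+        [{'id': '1'}]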
+ """
+
+ new_results_list = [result for result in result_list_to_filter if result[filtering_id_field] == desired_id]
+ return new_results_list
+
+
+@logger
+def generate_report_malicious_site_human_readable(response_lines_array):
+ response_status_code = response_lines_array[0]
+ human_readable = ""
+ if response_status_code == MALICIOUS_REPORT_ALREADY_EXISTS:
+ human_readable = "### Takedown not submitted.\n " \
+ "A takedown for this URL already exists.\n" \
+ "ID number of the existing takedown: {}.".format(response_lines_array[1])
+ elif response_status_code == MALICIOUS_REPORT_URL_IS_WILDCARD:
+ human_readable = "### Takedown not submitted\n " \
+ "This URL is a wildcard sub-domain variation of an existing takedown.\n"
+ elif response_status_code == MALICIOUS_REPORT_ACCESS_DENIED:
+ human_readable = "### Takedown not submitted\n Access is denied."
+ elif response_status_code == MALICIOUS_REPORT_ERROR:
+ human_readable = "### Takedown not submitted\n " \
+ "An error has occurred while submitting your takedown.\n" \
+ "Error is: {}".format(" ".join(response_lines_array))
+ return human_readable
+
+
+@logger
+def return_dict_without_none_values(dict_with_none_values):
+ """ Removes all keys from given dict which have None as a value.
+
+ Args:
+ dict_with_none_values (dict): dict which may include keys with None as their value.
+
+ Returns:
+ dict: A new copy of the input dictionary, from which all keys with None as a value were removed.
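+
+    Example (illustrative):
+        >>> return_dict_without_none_values({"a": 1, "b": None})
+        {'a': 1}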
+ """
+ new_dict = {key: dict_with_none_values[key] for key in dict_with_none_values if
+ dict_with_none_values[key] is not None}
+ return new_dict
+
+
+@logger
+def generate_takedown_info_context(takedown_info):
+ takedown_info_context = {
+ "ID": takedown_info.get("id"),
+ "GroupID": takedown_info.get("group_id"),
+ "Status": takedown_info.get("status"),
+ "AttackType": takedown_info.get("attack_type"),
+ "AttackURL": takedown_info.get("attack_url"),
+ "Region": takedown_info.get("region"),
+ "DateSubmitted": takedown_info.get("date_submitted"),
+ "LastUpdated": takedown_info.get("last_updated"),
+ "EvidenceURL": takedown_info.get("evidence_url"),
+ "Reporter": takedown_info.get("reporter"),
+ "IP": takedown_info.get("ip"),
+ "Domain": takedown_info.get("domain"),
+ "Hostname": takedown_info.get("hostname"),
+ "CountryCode": takedown_info.get("country_code"),
+ "DomainAttack": takedown_info.get("domain_attack"),
+ "TargetedURL": takedown_info.get("targeted_url"),
+ "Certificate": takedown_info.get("certificate")
+ }
+
+ return createContext(takedown_info_context, removeNull=True)
+
+
+@logger
+def gen_takedown_info_human_readable(list_of_takedowns_contexts, title=TAKEDOWN_INFO_TITLE):
+ contexts_in_human_readable_format = []
+ for takedown_info_context in list_of_takedowns_contexts:
+ human_readable_dict = {
+ "ID": takedown_info_context.get("ID"),
+ "Status": takedown_info_context.get("Status"),
+ "Attack Type": takedown_info_context.get("AttackType"),
+ "Date Submitted": takedown_info_context.get("DateSubmitted"),
+ "Last Updated": takedown_info_context.get("LastUpdated"),
+ "Reporter": takedown_info_context.get("Reporter"),
+ "Group ID": takedown_info_context.get("GroupID"),
+ "Region": takedown_info_context.get("Region"),
+ "Evidence URL": takedown_info_context.get("EvidenceURL"),
+ "Attack URL": takedown_info_context.get("AttackURL"),
+ "IP": takedown_info_context.get("IP"),
+ "Domain": takedown_info_context.get("Domain"),
+ "Hostname": takedown_info_context.get("Hostname"),
+ "Country Code": takedown_info_context.get("CountryCode"),
+ "Domain Attack": takedown_info_context.get("DomainAttack"),
+ "Targeted URL": takedown_info_context.get("TargetedURL"),
+ "Certificate": takedown_info_context.get("Certificate")
+ }
+ contexts_in_human_readable_format.append(human_readable_dict)
+
+ human_readable = tableToMarkdown(title, contexts_in_human_readable_format,
+ headers=TAKEDOWN_INFO_HEADER, removeNull=True)
+ return human_readable
+
+
+@logger
+def generate_list_of_takedowns_context(list_of_takedowns_infos):
+ takedowns_contexts_list = []
+ for takedown_info in list_of_takedowns_infos:
+ takedown_context = generate_takedown_info_context(takedown_info)
+ takedowns_contexts_list.append(takedown_context)
+ return takedowns_contexts_list
+
+
+@logger
+def generate_takedown_note_context(takedown_note_json):
+ takedown_note_context = {
+ "TakedownID": takedown_note_json.get("takedown_id"),
+ "NoteID": takedown_note_json.get("note_id"),
+ "GroupID": takedown_note_json.get("group_id"),
+ "Author": takedown_note_json.get("author"),
+ "Note": takedown_note_json.get("note"),
+ "Time": takedown_note_json.get("time")
+ }
+ takedown_note_context = return_dict_without_none_values(takedown_note_context)
+ return takedown_note_context
+
+
+@logger
+def generate_list_of_takedown_notes_contexts(list_of_takedowns_notes):
+ takedown_notes_contexts_list = []
+ for takedown_note in list_of_takedowns_notes:
+ takedown_note_context = generate_takedown_note_context(takedown_note)
+ takedown_notes_contexts_list.append(takedown_note_context)
+ return takedown_notes_contexts_list
+
+
+@logger
+def gen_takedown_notes_human_readable(entry_context):
+ contexts_in_human_readable_format = []
+ for takedown_note_context in entry_context:
+ human_readable_dict = {
+ "Takedown ID": takedown_note_context.get("TakedownID"),
+ "Note ID": takedown_note_context.get("NoteID"),
+ "Group ID": takedown_note_context.get("GroupID"),
+ "Author": takedown_note_context.get("Author"),
+ "Note": takedown_note_context.get("Note"),
+ "Time": takedown_note_context.get("Time")
+ }
+ human_readable_dict = return_dict_without_none_values(human_readable_dict)
+ contexts_in_human_readable_format.append(human_readable_dict)
+
+ human_readable = tableToMarkdown(TAKEDOWN_INFO_TITLE, contexts_in_human_readable_format,
+ headers=TAKEDOWN_NOTE_HEADERS)
+ return human_readable
+
+
+@logger
+def generate_add_note_human_readable(response):
+ # if the request was successful, the response includes the id of the created note
+ if "note_id" in response:
+ human_readable = "### Note added succesfully\n" \
+ "ID of the note created: {0}".format(response["note_id"])
+ else:
+ human_readable = "### Failed to add note\n" \
+ "An error occured while trying to add the note.\n" \
+ "The error code is: {0}.\n" \
+ "The error message is: {1}.".format(response["error_code"], response["error_code"])
+ return human_readable
+
+
+@logger
+def string_to_bool(string_representing_bool):
+ return string_representing_bool.lower() == "true"
+
+
+@logger
+def generate_escalate_takedown_human_readable(response):
+ if "status" in response:
+ human_readable = "### Takedown escalated successfully"
+ else:
+ human_readable = "### Takedown escalation failed\n" \
+ "An error occured on the takedown escalation attempt.\n" \
+ "Error code is: {0}\n" \
+ "Error message from Netcraft is: {1}".format(response["error_code"], response["error_message"])
+ return human_readable
+
+
+def add_or_update_note_context_in_takedown(note_context, cur_notes_in_takedown):
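+    # If the existing Note value is a single dict (one note), replace it with a list holding just the new note;
+    # otherwise update the matching note in place, or append it if it is not in the list yet.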
+ if isinstance(cur_notes_in_takedown, dict):
+ return [note_context]
+ else:
+ note_already_in_context = False
+        for i, cur_note_context in enumerate(cur_notes_in_takedown):
+ if cur_note_context["NoteID"] == note_context["NoteID"]:
+ note_already_in_context = True
+ cur_notes_in_takedown[i] = note_context
+ if not note_already_in_context:
+ cur_notes_in_takedown.append(note_context)
+ return cur_notes_in_takedown
+
+
+def add_note_to_suitable_takedown_in_context(note_context, all_takedowns_entry_context):
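+    # The Netcraft context may hold a single takedown (dict) or a list of takedowns.
+    # Attach the note to the takedown with a matching ID, creating a new takedown entry if none exists.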
+ note_takedown_index = -1
+ if isinstance(all_takedowns_entry_context, dict):
+ new_takedown_entry_context = {
+ "ID": note_context["TakedownID"],
+ "Note": [note_context]
+ }
+ all_takedowns_entry_context = [all_takedowns_entry_context, new_takedown_entry_context] \
+ if all_takedowns_entry_context else [new_takedown_entry_context]
+ else:
+        for i, cur_takedown_context in enumerate(all_takedowns_entry_context):
+ if cur_takedown_context["ID"] == note_context["TakedownID"]:
+ note_takedown_index = i
+ if note_takedown_index == -1:
+ new_takedown_entry_context = {
+ "ID": note_context["TakedownID"],
+ "Note": [note_context]
+ }
+ all_takedowns_entry_context.append(new_takedown_entry_context)
+ else:
+ takedown_context_to_change = all_takedowns_entry_context[note_takedown_index]
+ cur_notes_in_takedown = takedown_context_to_change["Note"]
+ takedown_context_to_change["Note"] = add_or_update_note_context_in_takedown(note_context,
+ cur_notes_in_takedown)
+ all_takedowns_entry_context[note_takedown_index] = takedown_context_to_change
+ return all_takedowns_entry_context
+
+
+def generate_netcraft_context_with_notes(list_of_notes_contexts):
+ all_takedowns_entry_context = demisto.context().get("Netcraft", {}).get("Takedown", {})
+ for note_context in list_of_notes_contexts:
+ all_takedowns_entry_context = add_note_to_suitable_takedown_in_context(note_context,
+ all_takedowns_entry_context)
+ return all_takedowns_entry_context
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+@logger
+def escalate_takedown(takedown_id):
+ data_for_request = {
+ "takedown_id": takedown_id
+ }
+ request_result = http_request("POST", ESCALATE_TAKEDOWN_SUFFIX, data=data_for_request)
+ return request_result
+
+
+def escalate_takedown_command():
+ args = demisto.args()
+ response = escalate_takedown(args["takedown_id"])
+ human_readable = generate_escalate_takedown_human_readable(response)
+ return_outputs(
+ readable_output=human_readable,
+ outputs={},
+ raw_response=response
+ )
+
+
+@logger
+def add_notes_to_takedown(takedown_id, note, notify):
+ data_for_request = {
+ "takedown_id": takedown_id,
+ "note": note,
+ "notify": notify
+ }
+
+ data_for_request = return_dict_without_none_values(data_for_request)
+
+ request_result = http_request("POST", ACCESS_TAKEDOWN_NOTES_SUFFIX, data=data_for_request)
+ return request_result
+
+
+def add_notes_to_takedown_command():
+ args = demisto.args()
+ note = args.get("note")
+ notify = string_to_bool(args.get("notify")) if args.get("notify") else None
+ takedown_id = int(args["takedown_id"])
+ response = add_notes_to_takedown(takedown_id, note, notify)
+ human_readable = generate_add_note_human_readable(response)
+ return_outputs(
+ readable_output=human_readable,
+ outputs=response
+ )
+
+
+def get_takedown_notes(takedown_id, group_id, date_from, date_to, author):
+ params_for_request = {
+ "takedown_id": takedown_id,
+ "group_id": group_id,
+ "date_to": date_to,
+ "date_from": date_from,
+ "author": author
+ }
+
+ params_for_request = return_dict_without_none_values(params_for_request)
+
+ request_result = http_request("GET", ACCESS_TAKEDOWN_NOTES_SUFFIX, params=params_for_request)
+ return request_result
+
+
+def get_takedown_notes_command():
+ args = demisto.args()
+ takedown_id = int(args.get("takedown_id")) if args.get("takedown_id") else None
+ group_id = int(args.get("group_id")) if args.get("group_id") else None
+ date_from = args.get("date_from")
+ date_to = args.get("date_to")
+ author = args.get("author")
+ list_of_takedowns_notes = get_takedown_notes(takedown_id, group_id, date_from, date_to, author)
+ list_of_takedowns_notes = list_of_takedowns_notes[:LIMIT]
+ if takedown_id:
+ list_of_takedowns_notes = filter_by_id(list_of_takedowns_notes, "takedown_id", int(takedown_id))
+ list_of_notes_contexts = generate_list_of_takedown_notes_contexts(list_of_takedowns_notes)
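+    # The "(val.ID == obj.ID)" key suffix tells the platform to merge context entries that share an ID,
+    # so repeated runs update existing takedown objects instead of duplicating them.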
+ entry_context = {
+ "Netcraft.Takedown(val.ID == obj.ID)": generate_netcraft_context_with_notes(list_of_notes_contexts)
+ }
+ human_readable = gen_takedown_notes_human_readable(list_of_notes_contexts)
+ return_outputs(
+ readable_output=human_readable,
+ outputs=entry_context,
+ raw_response=list_of_takedowns_notes
+ )
+
+
+@logger
+def get_takedown_info(takedown_id, ip, url, updated_since, date_from, region):
+ params_for_request = {
+ "id": takedown_id,
+ "ip": ip,
+ "url": url,
+ "updated_since": updated_since,
+ "date_from": date_from,
+ "region": region,
+ }
+
+ params_for_request = return_dict_without_none_values(params_for_request)
+
+ request_result = http_request("GET", GET_TAKEDOWN_INFO_SUFFIX, params=params_for_request)
+ return request_result
+
+
+def get_takedown_info_command():
+ args = demisto.args()
+ takedown_id = int(args.get("id")) if args.get("id") else None
+ ip = args.get("ip")
+ url = args.get("url")
+ updated_since = args.get("updated_since")
+ date_from = args.get("date_from")
+ region = args.get("region")
+ list_of_takedowns_infos = get_takedown_info(takedown_id, ip, url, updated_since, date_from, region)
+ list_of_takedowns_infos = list_of_takedowns_infos[:LIMIT]
+ if takedown_id:
+ list_of_takedowns_infos = filter_by_id(list_of_takedowns_infos, "id", str(takedown_id))
+ list_of_takedowns_contexts = generate_list_of_takedowns_context(list_of_takedowns_infos)
+ human_readable = gen_takedown_info_human_readable(list_of_takedowns_contexts)
+ entry_context = {
+ 'Netcraft.Takedown(val.ID == obj.ID)': list_of_takedowns_contexts
+ }
+ return_outputs(
+ readable_output=human_readable,
+ raw_response=list_of_takedowns_infos,
+ outputs=entry_context,
+ )
+
+
+@logger
+def report_attack(malicious_site_url, comment, is_test_request=False):
+ data_for_request = {
+ "attack": malicious_site_url,
+ "comment": comment
+ }
+ if is_test_request:
+ request_url_suffix = TEST_MODULE_SUFFIX
+ else:
+ request_url_suffix = REPORT_MALICIOUS_SUFFIX
+ request_result = http_request("POST", request_url_suffix, data=data_for_request, should_convert_to_json=False)
+ return request_result
+
+
+def report_attack_command():
+ args = demisto.args()
+ entry_context: dict = {}
+ response_lines_array = report_attack(args["attack"], args["comment"])
+ result_answer = response_lines_array[0]
+ if result_answer == MALICIOUS_REPORT_SUCCESS:
+ new_takedown_id = response_lines_array[1]
+ # Until the API bug is fixed, this list will include info of all takedowns and not just the new one
+ new_takedown_infos = get_takedown_info(new_takedown_id, None, None, None, None, None)
+ new_takedown_infos = new_takedown_infos[:LIMIT]
+ new_takedown_infos = filter_by_id(new_takedown_infos, "id", new_takedown_id)
+ list_of_new_takedown_contexts = generate_list_of_takedowns_context(new_takedown_infos)
+ human_readable = gen_takedown_info_human_readable(list_of_new_takedown_contexts, REPORT_MALICIOUS_SUCCESS_TITLE)
+ entry_context = {
+ 'Netcraft.Takedown(val.ID == obj.ID)': list_of_new_takedown_contexts
+ }
+ else:
+ human_readable = generate_report_malicious_site_human_readable(response_lines_array)
+
+ return_outputs(
+ readable_output=human_readable,
+ outputs=entry_context,
+ raw_response=entry_context
+ )
+
+
+def test_module():
+ """
+    Performs a test takedown report request to verify connectivity and credentials
+ """
+ test_result = report_attack("https://www.test.com", "test", True)
+ if test_result[0] != MALICIOUS_REPORT_SUCCESS:
+ raise Exception("Test request failed.")
+ demisto.results("ok")
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('Command being called is %s' % (demisto.command()))
+
+try:
+ # Remove proxy if not set to true in params
+ handle_proxy()
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() == 'netcraft-report-attack':
+ report_attack_command()
+ elif demisto.command() == 'netcraft-get-takedown-info':
+ get_takedown_info_command()
+ elif demisto.command() == 'netcraft-get-takedown-notes':
+ get_takedown_notes_command()
+ elif demisto.command() == 'netcraft-add-notes-to-takedown':
+ add_notes_to_takedown_command()
+ elif demisto.command() == 'netcraft-escalate-takedown':
+ escalate_takedown_command()
+
+
+# Log exceptions
+except Exception as e:
+ return_error(str(e))
diff --git a/Integrations/Netcraft/Netcraft.yml b/Integrations/Netcraft/Netcraft.yml
new file mode 100644
index 000000000000..3835d3047e71
--- /dev/null
+++ b/Integrations/Netcraft/Netcraft.yml
@@ -0,0 +1,232 @@
+commonfields:
+ id: Netcraft
+ version: -1
+name: Netcraft
+display: Netcraft
+category: Authentication
+description: An integration for Netcraft, allowing you to open and handle takedown requests.
+configuration:
+- display: Credentials
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: true
+- display: The maximum number of entries (takedowns/notes) to return. Default is 100.
+ name: limit
+ defaultvalue: "100"
+ type: 0
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Trust any certificate (not secure)
+ name: unsecure
+ defaultvalue: ""
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ dockerimage: demisto/python3:3.7.3.221
+ commands:
+ - name: netcraft-report-attack
+ arguments:
+ - name: attack
+ required: true
+ description: The attack location you want taken down. For example, a phishing URL or
+ fraudulent email address.
+ - name: comment
+ required: true
+ description: The reason for submitting the attack, such as a description of the attack.
+ outputs:
+ - contextPath: Netcraft.Takedown.DateSubmitted
+ description: The date and time of reporting.
+ type: String
+ - contextPath: Netcraft.Takedown.LastUpdated
+ description: The date and time of the last action taken on the takedown.
+ type: String
+ - contextPath: Netcraft.Takedown.EvidenceURL
+ description: The URL of the evidence page on incident.netcraft.com.
+ type: String
+ - contextPath: Netcraft.Takedown.Reporter
+ description: The person/account that submitted the takedown.
+ type: String
+ - contextPath: Netcraft.Takedown.Domain
+ description: The domain of the URL or email address being taken down. This will
+ be blank for attacks without a domain name.
+ type: String
+ - contextPath: Netcraft.Takedown.Hostname
+ description: The full hostname of the URL or email address being taken down.
+ This will be blank for attacks without a hostname.
+ type: String
+ - contextPath: Netcraft.Takedown.CountryCode
+ description: ISO country code of the hosting country.
+ type: String
+ - contextPath: Netcraft.Takedown.DomainAttack
+ description: Whether the domain is thought to be fraudulent.
+ type: String
+ - contextPath: Netcraft.Takedown.TargetedURL
+      description: The URL that this attack is masquerading as. For example, the URL of the
+ legitimate login form that the attack targets.
+ type: String
+ - contextPath: Netcraft.Takedown.Certificate
+ description: HTTPS certificate details for the hostname, or null if no certificate
+ was found. The value returned is the output of PHP's openssl_x509_parse function.
+ type: Unknown
+ - contextPath: Netcraft.Takedown.ID
+ description: The ID of the takedown.
+ type: Number
+ - contextPath: Netcraft.Takedown.GroupID
+      description: The group ID of the takedown. It can be the same as the ID, or
+        empty if there is no group.
+ type: Number
+ - contextPath: Netcraft.Takedown.Status
+ description: The status of the takedown.
+ type: String
+ - contextPath: Netcraft.Takedown.AttackType
+ description: The type of takedown.
+ type: String
+ - contextPath: Netcraft.Takedown.AttackURL
+ description: The location of the attack being taken down.
+ type: String
+ - contextPath: Netcraft.Takedown.Region
+ description: The customer area in which the attack resides.
+ type: String
+ - contextPath: Netcraft.Takedown.IP
+ description: The IPv4 address of the attack.
+ type: String
+ description: Reports an attack to Netcraft.
+ - name: netcraft-get-takedown-info
+ arguments:
+ - name: id
+ description: The ID of the takedowns for which to get information.
+ - name: date_from
+ description: 'Retrieve information for takedowns submitted after this date. Format: YYYY-MM-DD HH:MM:SS.'
+ - name: updated_since
+ description: 'Retrieve information for takedowns updated after this date. Format: YYYY-MM-DD HH:MM:SS.'
+ - name: url
+ description: The URL by which to filter.
+ - name: ip
+ description: The IP by which to filter.
+ - name: region
+ description: The region by which to filter. If the region is invalid
+ or not specified, all regions are returned.
+ outputs:
+ - contextPath: Netcraft.Takedown.ID
+ description: The ID of the takedown.
+ type: number
+ - contextPath: Netcraft.Takedown.GroupID
+      description: The group ID of the takedown. It can be the same as the ID, or
+        empty if there is no group.
+ type: number
+ - contextPath: Netcraft.Takedown.Status
+ description: The status of the takedown.
+ type: string
+ - contextPath: Netcraft.Takedown.AttackType
+ description: The type of takedown.
+ type: string
+ - contextPath: Netcraft.Takedown.AttackURL
+ description: The location of the attack being taken down.
+ type: string
+ - contextPath: Netcraft.Takedown.Region
+ description: The customer area in which the attack resides.
+ type: string
+ - contextPath: Netcraft.Takedown.DateSubmitted
+ description: The date and time of reporting.
+ type: string
+ - contextPath: Netcraft.Takedown.LastUpdated
+ description: The date and time of the last action taken on the takedown.
+ type: string
+ - contextPath: Netcraft.Takedown.EvidenceURL
+ description: The URL of the evidence page on incident.netcraft.com.
+ type: string
+ - contextPath: Netcraft.Takedown.Reporter
+ description: The person/account that submitted the takedown.
+ type: string
+ - contextPath: Netcraft.Takedown.IP
+ description: The IPv4 address of the attack.
+ type: Unknown
+ - contextPath: Netcraft.Takedown.Domain
+ description: "\tThe domain of the URL or email address being taken down. This
+ will be blank for attacks without a domain name."
+ type: Unknown
+ - contextPath: Netcraft.Takedown.Hostname
+ description: The full hostname of the URL or email address being taken down.
+ This will be blank for attacks without a hostname.
+ type: Unknown
+ - contextPath: Netcraft.Takedown.CountryCode
+ description: ISO country code of the hosting country.
+ type: Unknown
+ - contextPath: Netcraft.Takedown.DomainAttack
+ description: Whether the domain is thought to be fraudulent.
+ type: Unknown
+ - contextPath: Netcraft.Takedown.TargetedURL
+ description: The URL which this attack is masquerading as. For example, the URL of the
+ legitimate login form that the attack targets.
+ type: Unknown
+ - contextPath: Netcraft.Takedown.Certificate
+      description: HTTPS certificate details for the hostname, or null if no certificate
+ was found. The value returned is the output of PHP's openssl_x509_parse function.
+ type: Unknown
+    description: Returns information about existing takedowns. You can retrieve the
+      takedown ID from the output of the netcraft-report-attack command when you report
+      the malicious URL.
+ - name: netcraft-get-takedown-notes
+ arguments:
+ - name: takedown_id
+ description: The takedown to get notes for.
+ - name: group_id
+ description: A takedown group to get notes for.
+ - name: date_from
+ description: Retrieve notes created after this date.
+ - name: date_to
+ description: Retrieve notes created before this date.
+ - name: author
+ description: A specific user to get notes for.
+ outputs:
+ - contextPath: Netcraft.Takedown.Note.TakedownID
+ description: The ID of the takedown to which the note belongs.
+ type: number
+ - contextPath: Netcraft.Takedown.Note.NoteID
+ description: The ID of the note.
+ type: number
+ - contextPath: Netcraft.Takedown.Note.GroupID
+ description: If this note is attached to all takedowns in a group, group_id
+ is the ID of that group. Otherwise, the value 0 means the note is sent
+ to a single takedown.
+ type: number
+ - contextPath: Netcraft.Takedown.Note.Author
+ description: The author of the note. "Netcraft" denotes a Netcraft authored
+ note.
+ type: string
+ - contextPath: Netcraft.Takedown.Note.Note
+ description: The content (text) of the note.
+ type: string
+ - contextPath: Netcraft.Takedown.Note.Time
+ description: 'The date/time the note was created. Format (UTC): YYYY-MM-DD HH:MM:SS.'
+ type: string
+ description: Returns notes for takedowns.
+ - name: netcraft-add-notes-to-takedown
+ arguments:
+ - name: takedown_id
+ required: true
+ description: A valid takedown ID to add the note to.
+ - name: note
+ required: true
+ description: The text to add to the takedown.
+ - name: notify
+ auto: PREDEFINED
+ predefined:
+ - "True"
+ - "False"
+ description: Whether to notify Netcraft. Default is "true".
+ description: Adds notes to an existing takedown.
+ - name: netcraft-escalate-takedown
+ arguments:
+ - name: takedown_id
+ required: true
+ description: The ID of the takedown to escalate.
+ description: Escalates a takedown.
+ runonce: false
+ subtype: python3
diff --git a/Integrations/Netcraft/Netcraft_description.md b/Integrations/Netcraft/Netcraft_description.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Integrations/Nmap/CHANGELOG.md b/Integrations/Nmap/CHANGELOG.md
new file mode 100644
index 000000000000..2bd6a57dfe4b
--- /dev/null
+++ b/Integrations/Nmap/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+#### New Integration
+Run nmap scans with the given parameters
\ No newline at end of file
diff --git a/Integrations/Nmap/Nmap.py b/Integrations/Nmap/Nmap.py
new file mode 100644
index 000000000000..5bf61139229f
--- /dev/null
+++ b/Integrations/Nmap/Nmap.py
@@ -0,0 +1,62 @@
+import demistomock as demisto
+from CommonServerPython import *
+from libnmap.process import NmapProcess
+from libnmap.parser import NmapParser
+from libnmap.reportjson import ReportEncoder
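+
+# Illustrative War Room usage (target and options values are examples only):
+#   !nmap-scan targets="scanme.nmap.org" options="-sV --top-ports 10"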
+
+if demisto.command() == 'test-module':
+ demisto.results('ok')
+ sys.exit(0)
+if demisto.command() == 'nmap-scan':
+ nm = NmapProcess(argToList(demisto.args()['targets']), options=demisto.args()['options'])
+ rc = nm.run()
+ if rc != 0:
+ demisto.results({
+ 'Type': entryTypes['error'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Unable to execute - ' + nm.stderr
+ })
+ sys.exit(0)
+ r = NmapParser.parse(nm.stdout)
+ md = '## ' + r.summary + '\n'
+ hosts = []
+ for host in r.hosts:
+ h = {}
+ if len(host.hostnames):
+ tmp_host = host.hostnames.pop()
+ h['Hostname'] = tmp_host
+ else:
+ tmp_host = host.address
+
+ h['Address'] = host.address
+ h['Status'] = host.status
+ svc = []
+ md += "### Nmap scan report for {0}".format(tmp_host) + \
+ (" ({0})\n".format(host.address) if tmp_host != host.address else "\n")
+ md += "#### Host is {0}.\n".format(host.status)
+ for serv in host.services:
+ svc.append({
+ 'Port': serv.port,
+ 'Protocol': serv.protocol,
+ 'State': serv.state,
+ 'Service': serv.service,
+ 'Banner': serv.banner
+ })
+ md += tableToMarkdown('Services', svc, ['Port', 'Protocol', 'State', 'Service', 'Banner'])
+ h['Services'] = svc
+ hosts.append(h)
+ scan = {
+ 'Summary': r.summary,
+ 'Version': r.version,
+ 'Started': r.started,
+ 'Ended': r.endtime,
+ 'CommandLine': r.commandline,
+ 'ScanType': r.scan_type,
+ 'Hosts': hosts}
+ demisto.results({
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': json.dumps(r, cls=ReportEncoder),
+ 'HumanReadable': md,
+ 'EntryContext': {'NMAP.Scan': scan}
+ })
diff --git a/Integrations/Nmap/Nmap.yml b/Integrations/Nmap/Nmap.yml
new file mode 100644
index 000000000000..0b2dd8633c98
--- /dev/null
+++ b/Integrations/Nmap/Nmap.yml
@@ -0,0 +1,54 @@
+commonfields:
+ id: nmap
+ version: -1
+name: nmap
+display: nmap
+category: Network Security
+description: Run nmap scans with the given parameters
+configuration: []
+fromversion: 5.0.0
+script:
+ script: ''
+ type: python
+ subtype: python3
+ commands:
+ - name: nmap-scan
+ arguments:
+ - name: targets
+ required: true
+      description: The targets to scan. Accepts a comma-separated list.
+ - name: options
+ required: true
+      description: The nmap options to use, as documented by nmap.
+ outputs:
+ - contextPath: NMAP.Scan.Summary
+ description: Scan summary
+ - contextPath: NMAP.Scan.Version
+ description: nmap version
+ - contextPath: NMAP.Scan.Started
+ description: Start time epoch
+ - contextPath: NMAP.Scan.Ended
+ description: End time epoch
+ - contextPath: NMAP.Scan.CommandLine
+ description: The command line being used
+ - contextPath: NMAP.Scan.ScanType
+ description: The type of discovery scan
+ - contextPath: NMAP.Scan.Hosts.Hostname
+ description: DNS hostname of scanned host
+ - contextPath: NMAP.Scan.Hosts.Address
+ description: Scanned host address
+ - contextPath: NMAP.Scan.Hosts.Status
+      description: Whether the host is up or down
+ - contextPath: NMAP.Scan.Hosts.Services.Port
+ description: The port of the service
+ - contextPath: NMAP.Scan.Hosts.Services.Protocol
+ description: The protocol of the service
+ - contextPath: NMAP.Scan.Hosts.Services.State
+ description: The state of the service
+ - contextPath: NMAP.Scan.Hosts.Services.Banner
+ description: Any captured banner from the service
+ - contextPath: NMAP.Scan.Hosts.Services.Service
+ description: The service name
+ description: Scan targets with the given parameters
+ execution: true
+ dockerimage: demisto/nmap:1.0.0.1977
diff --git a/Integrations/Nmap/Nmap_image.png b/Integrations/Nmap/Nmap_image.png
new file mode 100644
index 000000000000..9fe83e5c9e94
Binary files /dev/null and b/Integrations/Nmap/Nmap_image.png differ
diff --git a/Integrations/Nmap/Pipfile b/Integrations/Nmap/Pipfile
new file mode 100644
index 000000000000..cdaa285def1f
--- /dev/null
+++ b/Integrations/Nmap/Pipfile
@@ -0,0 +1,27 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+
+[packages]
+cybox = "==2.1.0.13"
+lxml = "==3.7.2"
+mixbox = "==1.0.1"
+olefile = "==0.44"
+ordered-set = "==2.0.1"
+python-dateutil = "==2.6.0"
+python-libnmap = "==0.7.0"
+requests = "==2.13.0"
+six = "==1.10.0"
+stix = "==1.2.0.2"
+virtualenv = "==15.0.3"
+weakrefmethod = "==1.0.3"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/Nmap/Pipfile.lock b/Integrations/Nmap/Pipfile.lock
new file mode 100644
index 000000000000..c72c857e0da3
--- /dev/null
+++ b/Integrations/Nmap/Pipfile.lock
@@ -0,0 +1,344 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "9852d8b44b70708c8e21583e5753e026bbdadf921d334bc2978a67c830c85618"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "cybox": {
+ "hashes": [
+ "sha256:d8dba216aa6ef05295757439bf098e1b271cbf50fff13be6bde1bd7fb46b9420",
+ "sha256:daa2456db3c7f211cb92c567970d7d9b95b068cd2efbf04e5bbbcecf5c3ffdb6"
+ ],
+ "index": "pypi",
+ "version": "==2.1.0.13"
+ },
+ "lxml": {
+ "hashes": [
+ "sha256:188d07d7c21b13be43e45b955e0a9f49bb9aaa1982ff3d25f7e6ed7af7cb845c",
+ "sha256:260a7ab24a27f264b99d7eedd4d12e2cd1fdacbbbc7a4506bf1a8405ea2c8b51",
+ "sha256:343993c62bb395ab04026f35c7d459719e33016ecf9256fadbd3b1751a0b7b0a",
+ "sha256:46b4db3ad2473080621f554c629c62150080b5112f8ddbabdab58ad8e4bb5ed1",
+ "sha256:52766500c74c08a9fa6838163b89914a3274d7617f8cc420793fb6ea870cb558",
+ "sha256:59d9176360dbc3919e9d4bfca85c1ca64ab4f4ee00e6f119d7150ba887e3410a",
+ "sha256:9107bb0d12a940d0ccb50240ceef8701ac717102acc564d7cf7242ab810251ec",
+ "sha256:96724831b1657499521ec0340460f2c853808ea86171af688818a1972c463aad",
+ "sha256:cb013b5a5f58b58068be0203d2b7136d8464d7e9b3db01d54f8884d3c4097149",
+ "sha256:f302a725802b56c173bbef29131cc753413e47749fcf96d7a4bde4e6ffd25dee",
+ "sha256:fb99af148e7507e0cde11ea5c064062d960e581d3530c50e0936d6743da8bb2c"
+ ],
+ "index": "pypi",
+ "version": "==3.7.2"
+ },
+ "mixbox": {
+ "hashes": [
+ "sha256:4256ba29f9e629a300b91530871e8dc6ed576d7648f757ce6a990c8cabc1a0be",
+ "sha256:6df0575f483bff4753075a44d3849f709304e5ebd6d290e6ab2500c0f5905f68",
+ "sha256:e70e82c21995240df708a930afed2850ac3476e75746328890ea44ab94ba1b7a"
+ ],
+ "index": "pypi",
+ "version": "==1.0.1"
+ },
+ "olefile": {
+ "hashes": [
+ "sha256:61f2ca0cd0aa77279eb943c07f607438edf374096b66332fae1ee64a6f0f73ad"
+ ],
+ "index": "pypi",
+ "version": "==0.44"
+ },
+ "ordered-set": {
+ "hashes": [
+ "sha256:55567f094481ba204ffede0117ab563e19af050c7cbf33a9a23292b8cb2b0a0e"
+ ],
+ "index": "pypi",
+ "version": "==2.0.1"
+ },
+ "python-dateutil": {
+ "hashes": [
+ "sha256:3acbef017340600e9ff8f2994d8f7afd6eacb295383f286466a6df3961e486f0",
+ "sha256:537bf2a8f8ce6f6862ad705cd68f9e405c0b5db014aa40fa29eab4335d4b1716",
+ "sha256:62a2f8df3d66f878373fd0072eacf4ee52194ba302e00082828e0d263b0418d2"
+ ],
+ "index": "pypi",
+ "version": "==2.6.0"
+ },
+ "python-libnmap": {
+ "hashes": [
+ "sha256:9d14919142395aaca952e129398f0c7371c0f0a034c63de6dad99cd7050177ad"
+ ],
+ "index": "pypi",
+ "version": "==0.7.0"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:1a720e8862a41aa22e339373b526f508ef0c8988baf48b84d3fc891a8e237efb",
+ "sha256:5722cd09762faa01276230270ff16af7acf7c5c45d623868d9ba116f15791ce8"
+ ],
+ "index": "pypi",
+ "version": "==2.13.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:0ff78c403d9bccf5a425a6d31a12aa6b47f1c21ca4dc2573a7e2f32a97335eb1",
+ "sha256:105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
+ ],
+ "index": "pypi",
+ "version": "==1.10.0"
+ },
+ "stix": {
+ "hashes": [
+ "sha256:9475dc56817d26a3da177b1b1a118cc8665cc98ba1af382b42906aa5132a97b4",
+ "sha256:c26de96e1782c303fecabb3e569aa143827c685c446b831aa42e09b06dd47195",
+ "sha256:c7f87fc1ba9df5ee52ec74e2c312f0fa621108fab0583cfad627d88f6b5d321f"
+ ],
+ "index": "pypi",
+ "version": "==1.2.0.2"
+ },
+ "virtualenv": {
+ "hashes": [
+ "sha256:6d9c760d3fc5fa0894b0f99b9de82a4647e1164f0b700a7f99055034bf548b1d",
+ "sha256:cc8164362fc9611d478f784bbc066f3ee74526c50336ec61a6e75d5af97926c8"
+ ],
+ "index": "pypi",
+ "version": "==15.0.3"
+ },
+ "weakrefmethod": {
+ "hashes": [
+ "sha256:37bc1fbb5575acf82172d4eb7b6fc4412d77d5a1d70dff2c1f8a4574301cda66"
+ ],
+ "index": "pypi",
+ "version": "==1.0.3"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:9b3f17b0550f82e28a6776a4e5222441f48e523b0773df4bc505bb6b7c2093b7",
+ "sha256:c7e2e5773d87ccc00d01c273e439386f4d6d63cce61317a79ccce5880162f9fb"
+ ],
+ "version": "==2.3.0"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50",
+ "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef"
+ ],
+ "version": "==2019.9.11"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26",
+ "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af"
+ ],
+ "markers": "python_version < '3.8'",
+ "version": "==0.23"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:02b260c8deb80db09325b99edf62ae344ce9bc64d68b7a634410b8e9a568edbf",
+ "sha256:18f9c401083a4ba6e162355873f906315332ea7035803d0fd8166051e3d402e3",
+ "sha256:1f2c6209a8917c525c1e2b55a716135ca4658a3042b5122d4e3413a4030c26ce",
+ "sha256:2f06d97f0ca0f414f6b707c974aaf8829c2292c1c497642f63824119d770226f",
+ "sha256:616c94f8176808f4018b39f9638080ed86f96b55370b5a9463b2ee5c926f6c5f",
+ "sha256:63b91e30ef47ef68a30f0c3c278fbfe9822319c15f34b7538a829515b84ca2a0",
+ "sha256:77b454f03860b844f758c5d5c6e5f18d27de899a3db367f4af06bec2e6013a8e",
+ "sha256:83fe27ba321e4cfac466178606147d3c0aa18e8087507caec78ed5a966a64905",
+ "sha256:84742532d39f72df959d237912344d8a1764c2d03fe58beba96a87bfa11a76d8",
+ "sha256:874ebf3caaf55a020aeb08acead813baf5a305927a71ce88c9377970fe7ad3c2",
+ "sha256:9f5caf2c7436d44f3cec97c2fa7791f8a675170badbfa86e1992ca1b84c37009",
+ "sha256:a0c8758d01fcdfe7ae8e4b4017b13552efa7f1197dd7358dc9da0576f9d0328a",
+ "sha256:a4def978d9d28cda2d960c279318d46b327632686d82b4917516c36d4c274512",
+ "sha256:ad4f4be843dace866af5fc142509e9b9817ca0c59342fdb176ab6ad552c927f5",
+ "sha256:ae33dd198f772f714420c5ab698ff05ff900150486c648d29951e9c70694338e",
+ "sha256:b4a2b782b8a8c5522ad35c93e04d60e2ba7f7dcb9271ec8e8c3e08239be6c7b4",
+ "sha256:c462eb33f6abca3b34cdedbe84d761f31a60b814e173b98ede3c81bb48967c4f",
+ "sha256:fd135b8d35dfdcdb984828c84d695937e58cc5f49e1c854eb311c4d6aa03f4f1"
+ ],
+ "version": "==1.4.2"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832",
+ "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"
+ ],
+ "version": "==7.2.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47",
+ "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108"
+ ],
+ "version": "==19.2"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6",
+ "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34"
+ ],
+ "version": "==0.13.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:92280a6085fc5e4fec67d6330c0c85eae50817696d02bdc85e9ca6bab830ad58",
+ "sha256:ef796b99c243afeebf7a04b4426126ac837940da6bcd5fc47229c507e056fec1"
+ ],
+ "index": "pypi",
+ "version": "==2.4.0"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:813b99704b22c7d377bbd756ebe56c35252bb710937b46f207100e843440b3c2",
+ "sha256:cc6620b96bc667a0c8d4fa592a8c9c94178a1bd6cc799dbb057dfd9286d31a31"
+ ],
+ "index": "pypi",
+ "version": "==5.1.3"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:1a720e8862a41aa22e339373b526f508ef0c8988baf48b84d3fc891a8e237efb",
+ "sha256:5722cd09762faa01276230270ff16af7acf7c5c45d623868d9ba116f15791ce8"
+ ],
+ "index": "pypi",
+ "version": "==2.13.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:510df890afe08d36eca5bb16b4aa6308a6f85e3159ad3013bac8b9de7bd5a010",
+ "sha256:88d3402dd8b3c69a9e4f9d3a73ad11b15920c6efd36bc27bf1f701cf4a8e4646"
+ ],
+ "index": "pypi",
+ "version": "==1.7.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:0ff78c403d9bccf5a425a6d31a12aa6b47f1c21ca4dc2573a7e2f32a97335eb1",
+ "sha256:105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
+ ],
+ "index": "pypi",
+ "version": "==1.10.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
+ "sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e",
+ "sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0",
+ "sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c",
+ "sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631",
+ "sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4",
+ "sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34",
+ "sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b",
+ "sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a",
+ "sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233",
+ "sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1",
+ "sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36",
+ "sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d",
+ "sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a",
+ "sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12"
+ ],
+ "markers": "implementation_name == 'cpython' and python_version >= '3.7' and python_version < '3.8'",
+ "version": "==1.4.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:2f3eadfea5d92bc7899e75b5968410b749a054b492d5a6379c1344a1481bc2cb",
+ "sha256:9c6c593cb28f52075016307fc26b0a0f8e82bc7d1ff19aaaa959b91710a56c47"
+ ],
+ "version": "==1.25.5"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
+ "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
+ ],
+ "version": "==0.6.0"
+ }
+ }
+}
diff --git a/Integrations/OnboardingIntegration/CHANGELOG.md b/Integrations/OnboardingIntegration/CHANGELOG.md
new file mode 100644
index 000000000000..f90c4b9aed3c
--- /dev/null
+++ b/Integrations/OnboardingIntegration/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+- Fixed a bug where fetch-incidents failed when the frequency parameter was set.
+
diff --git a/Integrations/OnboardingIntegration/OnboardingIntegration.py b/Integrations/OnboardingIntegration/OnboardingIntegration.py
new file mode 100644
index 000000000000..df82d1045fb7
--- /dev/null
+++ b/Integrations/OnboardingIntegration/OnboardingIntegration.py
@@ -0,0 +1,951 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+'''IMPORTS'''
+from faker import Faker
+from faker.providers import internet, misc, lorem, user_agent
+from datetime import datetime
+import json
+import random
+import math
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+
+'''SETUP'''
+fake = Faker()
+fake.add_provider(internet)
+fake.add_provider(misc)
+fake.add_provider(lorem)
+fake.add_provider(user_agent)
+
+'''GLOBAL VARS'''
+PARAMS = demisto.params()
+INCIDENT_TYPE = PARAMS.get('incidentType', 'PhishingDemo')
+INCIDENTS_PER_MINUTE = int(PARAMS.get('incidents_per_minute', '5'))
+MAX_NUM_OF_INCIDENTS = int(PARAMS.get('max_num_of_incidents', '10'))
+FREQUENCY = int(PARAMS.get('frequency')) if PARAMS.get('frequency') else None  # cast to match update_parameters()
+INDICATORS_PER_INCIDENT = 5
+INDICATORS_TO_INCLUDE = ['ipv4_public', 'url', 'domain_name', 'sha1', 'sha256', 'md5']
+EMAIL_PROTOCOLS = ['POP3', 'IMAP', 'SMTP', 'ESMTP', 'HTTP', 'HTTPS']
+# About to drop some mean regex right now disable-secrets-detection-start
+TEMPLATE_1 = '''
+Simple Transactional Email
+
+Hi there,
+
+{}
+
+This is a really simple email template. Its sole purpose
+is to get the recipient to click the button with no distractions.
+
+Good luck! Hope it works.
+'''
+TEMPLATE_2 = '''
+Responsive HTML email templates
+
+{}
+'''
+# Drops the mic disable-secrets-detection-end
+EMAIL_TEMPLATES = [TEMPLATE_1, TEMPLATE_2]
+
+
+'''HELPER FUNCTIONS'''
+
+
+def update_parameters():
+ """
+    Check whether the integration parameters have changed and, if so, update the global vars
+ """
+ params = demisto.params()
+ incidents_per_minute = int(params.get('incidents_per_minute', '5'))
+ max_num_of_incidents = int(params.get('max_num_of_incidents', '10'))
+ frequency = int(params.get('frequency')) if params.get('frequency') else None
+ global INCIDENTS_PER_MINUTE
+ if INCIDENTS_PER_MINUTE != incidents_per_minute:
+ INCIDENTS_PER_MINUTE = incidents_per_minute
+ global MAX_NUM_OF_INCIDENTS
+ if MAX_NUM_OF_INCIDENTS != max_num_of_incidents:
+ MAX_NUM_OF_INCIDENTS = max_num_of_incidents
+ global FREQUENCY
+ if FREQUENCY != frequency:
+ FREQUENCY = frequency
+
+
+def generate_dbot_score(indicator):
+ """ Arbitrary (but consistent) scoring method
+
+ Assign a dbot score according to the last digit of the hash of the indicator.
+
+ parameter: (string) indicator
+ The indicator for which we need to generate a dbot score
+
+ returns:
+        DBot score (0, 1, 2, or 3)
+ """
+ the_hash = hash(indicator)
+ last_digit = abs(the_hash) % 10
+ if last_digit == 0:
+ return 0
+ elif last_digit < 5:
+ return 1
+ elif last_digit < 8:
+ return 2
+ else:
+ return 3
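+
+# Illustrative examples of the mapping above: an indicator whose abs(hash)
+# ends in 0 scores 0 (unknown), 1-4 scores 1 (good), 5-7 scores 2 (suspicious),
+# and 8-9 scores 3 (bad).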
+
+
+def create_content():
+ """
+ Generate fake content to populate the email with
+
+    Generates random text that includes 5 random public IPs, 5 random URLs, 5 random domains,
+    5 random SHA1 hashes, 5 random SHA256 hashes, 5 random MD5 hashes, 5 random email addresses,
+    and about 100 words of filler text.
+
+ returns:
+ The randomly generated data as a string
+ """
+ details = fake.text(600) # pylint: disable=no-member
+ details += '\n'
+ for _ in range(INDICATORS_PER_INCIDENT):
+ ipv4, url, domain = fake.ipv4_public(), fake.url(), fake.domain_name() # pylint: disable=no-member
+ sha1, sha256, md5 = fake.sha1(), fake.sha256(), fake.md5() # pylint: disable=no-member
+ details += ipv4 + ' ' + url + ' ' + domain + ' ' + sha1 + ' ' + sha256 + ' ' + md5 + '\n'
+
+ emails = [fake.email() for _ in range(INDICATORS_PER_INCIDENT)] # pylint: disable=no-member
+ details += ' '.join(emails)
+ return details
+
+
+def inject_content_into_template(plaintext):
+ """
+ Choose an email html template at random and populate the main textual component with randomly generated
+ content passed in the 'plaintext' parameter
+
+ parameter: (string) plaintext
+ The randomly generated content to be used in the email html
+
+ returns:
+ The html template populated with the randomly generated content
+ """
+    # Choose a random email HTML template and inject the generated text
+    chosen_template = random.choice(EMAIL_TEMPLATES)
+    html = chosen_template.format(plaintext)
+ return html
+
+
+def create_email():
+ """
+ Create message object using template and random data
+
+ returns:
+ email.Message object and the email as a standard dictionary
+ """
+ sender = fake.email() # pylint: disable=no-member
+ recipient = fake.email() # pylint: disable=no-member
+ cc = [fake.email() for _ in range(random.randint(0, 2))] # pylint: disable=no-member
+ bcc = [fake.email() for _ in range(random.randint(0, 2))] # pylint: disable=no-member
+ the_time = datetime.now()
+ received = 'from ' + fake.hostname() + ' (' + fake.ipv4_public() # pylint: disable=no-member
+ received += ')\r\n' + 'by ' + fake.domain_word() + '.' # pylint: disable=no-member
+ received += fake.free_email_domain() + ' with ' # pylint: disable=no-member
+ received += EMAIL_PROTOCOLS[random.randint(0, len(EMAIL_PROTOCOLS) - 1)]
+ received += '; ' + the_time.strftime('%c')
+ msg = MIMEMultipart('alternative')
+ msg['Subject'] = fake.sentence() # pylint: disable=no-member
+ msg['From'] = sender
+ msg['Reply-To'] = sender
+ msg['To'] = recipient
+ msg['Message-ID'] = fake.uuid4() # pylint: disable=no-member
+ msg['CC'] = ', '.join(cc) if cc else ''
+ msg['BCC'] = ', '.join(bcc) if bcc else ''
+ msg['User-Agent'] = fake.user_agent() # pylint: disable=no-member
+ msg['Date'] = the_time.strftime("%Y-%m-%dT%H:%M:%SZ")
+ msg['Received'] = received
+
+ plaintext = create_content()
+ html = inject_content_into_template(plaintext)
+ part1 = MIMEText(plaintext, 'plain')
+ part2 = MIMEText(html, 'html')
+ msg.attach(part1)
+ msg.attach(part2)
+ email_object = {}
+ for key, val in msg.items():
+ email_object[key] = val
+ email_object['Text'] = plaintext
+ email_object['Body'] = html
+ return msg, email_object
+
+
+def generate_incidents(last_run):
+ """
+ Determines how many incidents to create and generates them
+
+ parameter: (number) last_run
+ The number of incidents generated in the last fetch
+
+ returns:
+ The number of incidents generated in the current call to fetch_incidents and the incidents themselves
+ """
+ if last_run > 0 and last_run > MAX_NUM_OF_INCIDENTS:
+ demisto.info('last_run is greater than MAX_NUM_OF_INCIDENTS')
+ return 0, []
+
+ incidents = []
+ num_of_incidents_left_to_create = MAX_NUM_OF_INCIDENTS - last_run
+
+    num_of_incidents_to_create = min(INCIDENTS_PER_MINUTE, num_of_incidents_left_to_create)
+
+    for _ in range(num_of_incidents_to_create):
+ email, email_object = create_email()
+ incidents.append({
+ 'name': email_object.get('Subject'),
+ 'details': email.as_string(),
+ 'occurred': email_object.get('Date'),
+ 'type': INCIDENT_TYPE,
+ 'rawJSON': json.dumps(email_object)
+ })
+    return num_of_incidents_to_create, incidents
+
+
+'''MAIN FUNCTIONS'''
+
+
+def fetch_incidents():
+ """
+ Generates and fetches phishing email-like incidents
+
+ Generates phishing email-like incidents, with the number of incidents, the
+ speed of generation and the recurring time period all set by the values
+ entered in the integration instance parameters. This method operates
+ under the assumption that fetch-incidents is called once per minute.
+
+ returns:
+ Email-like incidents
+ """
+ try:
+ update_parameters()
+ minutes_of_generation = MAX_NUM_OF_INCIDENTS / float(INCIDENTS_PER_MINUTE)
+ if not FREQUENCY or minutes_of_generation > FREQUENCY: # Run once
+ last_run = 0 if not demisto.getLastRun() else demisto.getLastRun().get('numOfIncidentsCreated', 0)
+
+ num_of_incidents_created, incidents = generate_incidents(last_run)
+
+ demisto.incidents(incidents)
+ demisto.setLastRun({'numOfIncidentsCreated': last_run + num_of_incidents_created})
+ return
+ else:
+ run_counter = 0 if not demisto.getLastRun() else demisto.getLastRun().get('run_count', 0)
+ last_run = 0 if not demisto.getLastRun() else demisto.getLastRun().get('numOfIncidentsCreated', 0)
+ should_run = run_counter % FREQUENCY
+ if should_run < math.ceil(minutes_of_generation): # then should run
+ if should_run == 0:
+ last_run = 0
+
+ num_of_incidents_created, incidents = generate_incidents(last_run)
+ demisto.incidents(incidents)
+
+ total_incidents_created = last_run + num_of_incidents_created
+ updated_run_count = run_counter + 1
+ demisto.setLastRun({
+ 'numOfIncidentsCreated': total_incidents_created,
+ 'run_count': updated_run_count
+ })
+ return
+ else:
+ updated_run_count = run_counter + 1
+ demisto.setLastRun({
+ 'numOfIncidentsCreated': last_run,
+ 'run_count': updated_run_count
+ })
+ demisto.incidents([])
+ except Exception:
+ raise
+
+
+def demo_ip_command():
+ """
+ Returns the reputation generated by this integration for the IP address passed as an argument
+
+ demisto param: (string) ip
+ The IP address to get the reputation of
+
+ returns:
+ IP Reputation to the context
+ """
+ ip = demisto.args().get('ip')
+ dbotscore = generate_dbot_score(ip)
+
+ dbotscore_output = {
+ 'Indicator': ip,
+ 'Type': 'ip',
+ 'Vendor': 'OnboardingIntegration',
+ 'Score': dbotscore
+ }
+
+ standard_ip_output = {
+ 'Address': ip
+ }
+
+ if dbotscore == 3:
+ standard_ip_output['Malicious'] = {
+ 'Vendor': 'OnboardingIntegration',
+ 'Description': 'Indicator was found to be malicious.'
+ }
+
+ context = {
+ 'DBotScore': dbotscore_output,
+ outputPaths['ip']: standard_ip_output
+ }
+
+ title = 'OnboardingIntegration IP Reputation - {}'.format(ip)
+ human_readable = tableToMarkdown(title, dbotscore_output)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': context
+ })
+
+
+def demo_url_command():
+ """
+ Returns the reputation generated by this integration for the URL passed as an argument
+
+ demisto param: (string) url
+ The URL to get the reputation of
+
+ returns:
+ URL Reputation to the context
+ """
+ url = demisto.args().get('url')
+ dbotscore = generate_dbot_score(url)
+
+ dbotscore_output = {
+ 'Indicator': url,
+ 'Type': 'url',
+ 'Vendor': 'OnboardingIntegration',
+ 'Score': dbotscore
+ }
+
+ standard_url_output = {
+ 'Data': url
+ }
+
+ if dbotscore == 3:
+ standard_url_output['Malicious'] = {
+ 'Vendor': 'OnboardingIntegration',
+ 'Description': 'Indicator was found to be malicious.'
+ }
+
+ context = {
+ 'DBotScore': dbotscore_output,
+ outputPaths['url']: standard_url_output
+ }
+
+ title = 'OnboardingIntegration URL Reputation - {}'.format(url)
+ human_readable = tableToMarkdown(title, dbotscore_output)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': context
+ })
+
+
+def demo_domain_command():
+ """
+ Returns the reputation generated by this integration for the domain passed as an argument
+
+ demisto param: (string) domain
+ The domain to get the reputation of
+
+ returns:
+ Domain Reputation to the context
+ """
+ domain = demisto.args().get('domain')
+ dbotscore = generate_dbot_score(domain)
+
+ dbotscore_output = {
+ 'Indicator': domain,
+ 'Type': 'domain',
+ 'Vendor': 'OnboardingIntegration',
+ 'Score': dbotscore
+ }
+
+ standard_domain_output = {
+ 'Name': domain
+ }
+
+ if dbotscore == 3:
+ standard_domain_output['Malicious'] = {
+ 'Vendor': 'OnboardingIntegration',
+ 'Description': 'Indicator was found to be malicious.'
+ }
+
+ context = {
+ 'DBotScore': dbotscore_output,
+ outputPaths['domain']: standard_domain_output
+ }
+
+ title = 'OnboardingIntegration Domain Reputation - {}'.format(domain)
+ human_readable = tableToMarkdown(title, dbotscore_output)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': context
+ })
+
+
+def demo_file_command():
+ """
+ Returns the reputation generated by this integration for the file hash passed as an argument
+
+ demisto param: (string) file
+ The file hash to get the reputation of
+
+ returns:
+ File-Hash Reputation to the context
+ """
+ file = demisto.args().get('file')
+ hash_type = get_hash_type(file).upper()
+ dbotscore = generate_dbot_score(file)
+
+ dbotscore_output = {
+ 'Indicator': file,
+ 'Type': 'file',
+ 'Vendor': 'OnboardingIntegration',
+ 'Score': dbotscore
+ }
+
+ standard_file_output = {
+ hash_type: file
+ }
+
+ if dbotscore == 3:
+ standard_file_output['Malicious'] = {
+ 'Vendor': 'OnboardingIntegration',
+ 'Description': 'Indicator was found to be malicious.'
+ }
+
+ context = {
+ 'DBotScore': dbotscore_output,
+ outputPaths['file']: standard_file_output
+ }
+
+ title = 'OnboardingIntegration File Reputation - {}'.format(file)
+ human_readable = tableToMarkdown(title, dbotscore_output)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': context
+ })
+
+
+def demo_email_command():
+ """
+ Returns the reputation generated by this integration for the email address passed as an argument
+
+ demisto param: (string) email
+ The email address to get the reputation of
+
+ returns:
+ Email Reputation to the context
+ """
+ email = demisto.args().get('email')
+ dbotscore = generate_dbot_score(email)
+
+ dbotscore_output = {
+ 'Indicator': email,
+ 'Type': 'email',
+ 'Vendor': 'OnboardingIntegration',
+ 'Score': dbotscore
+ }
+
+ standard_email_output = {
+ 'Address': email
+ }
+
+ if dbotscore == 3:
+ standard_email_output['Malicious'] = {
+ 'Vendor': 'OnboardingIntegration',
+ 'Description': 'Indicator was found to be malicious.'
+ }
+
+ context = {
+ 'DBotScore': dbotscore_output,
+ outputPaths['email']: standard_email_output
+ }
+
+ title = 'OnboardingIntegration Email Reputation - {}'.format(email)
+ human_readable = tableToMarkdown(title, dbotscore_output)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': context
+ })
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+COMMANDS = {
+ 'demo-url': demo_url_command,
+ 'demo-ip': demo_ip_command,
+ 'demo-email': demo_email_command,
+ 'demo-file': demo_file_command,
+ 'demo-domain': demo_domain_command,
+ 'fetch-incidents': fetch_incidents
+}
+
+
+def main():
+ try:
+ if demisto.command() == 'test-module':
+ demisto.results('ok')
+ elif demisto.command() in COMMANDS.keys():
+ COMMANDS[demisto.command()]()
+ except Exception as e:
+ return_error(str(e))
+
+
+# python2 uses __builtin__, python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/OnboardingIntegration/OnboardingIntegration.yml b/Integrations/OnboardingIntegration/OnboardingIntegration.yml
new file mode 100644
index 000000000000..ff27ea070a42
--- /dev/null
+++ b/Integrations/OnboardingIntegration/OnboardingIntegration.yml
@@ -0,0 +1,220 @@
+category: Utilities
+commonfields:
+ id: OnboardingIntegration
+ version: -1
+configuration:
+- defaultvalue: '5'
+ display: Number of incidents to create per minute
+ name: incidents_per_minute
+ required: false
+ type: 0
+- defaultvalue: '10'
+ display: Maximum number of incidents to create
+ name: max_num_of_incidents
+ required: false
+ type: 0
+- display: How often to create new incidents (in minutes)
+ name: frequency
+ required: false
+ type: 0
+- defaultvalue: ""
+ display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- defaultvalue: ""
+ display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+description: 'Creates mock email incidents using one of two randomly selected HTML
+ templates. Textual content is randomly generated and defined to include some text
+ (100 random words) and the following data (at least 5 of each data type): IP addresses,
+ URLs, SHA-1 hashes, SHA-256 hashes, MD5 hashes, email addresses, domain names.'
+display: OnboardingIntegration
+name: OnboardingIntegration
+script:
+ script: ''
+ type: python
+ commands:
+ - arguments:
+ - default: false
+ description: The IP address to get the reputation of
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets the simulated reputation of the IP address.
+ execution: false
+ name: demo-ip
+ outputs:
+ - contextPath: IP.Address
+ description: IP address that was tested
+ type: String
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IPs, the vendor that made the decision
+ type: String
+ - contextPath: IP.Malicious.Description
+ description: For malicious IPs, the reason that the vendor made the decision
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: Number
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: String
+ - arguments:
+ - default: false
+ description: The URL to get the reputation of
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ deprecated: false
+  description: Gets the simulated reputation of the URL.
+ execution: false
+ name: demo-url
+ outputs:
+ - contextPath: URL.Data
+    description: The URL that was tested
+ type: String
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision
+ type: String
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the reason that the vendor made the decision
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: Number
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: String
+ - arguments:
+ - default: false
+ description: The domain to get the reputation of
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets the simulated reputation of the domain.
+ execution: false
+ name: demo-domain
+ outputs:
+ - contextPath: Domain.Name
+ description: The domain name that was tested
+ type: String
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision
+ type: String
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the reason that the vendor made the decision
+ type: String
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: Number
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: String
+ - arguments:
+ - default: false
+ description: The file hash to get the reputation of
+ isArray: false
+ name: file
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets the simulated reputation of the file hash.
+ execution: false
+ name: demo-file
+ outputs:
+ - contextPath: File.MD5
+ description: File MD5 hash that was tested
+ type: String
+ - contextPath: File.SHA1
+ description: File SHA-1 hash that was tested
+ type: String
+ - contextPath: File.SHA256
+ description: File SHA-256 hash that was tested
+ type: String
+ - contextPath: File.SHA512
+ description: File SHA-512 hash that was tested
+ type: String
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision
+ type: String
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the reason that the vendor made the decision
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: Number
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: String
+ - arguments:
+ - default: false
+ description: The email address to get the reputation of
+ isArray: false
+ name: email
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets the simulated reputation of the email address.
+ execution: false
+ name: demo-email
+ outputs:
+ - contextPath: Account.Email.Address
+ description: The email address that was tested
+ type: String
+ - contextPath: Account.Email.Malicious.Vendor
+ description: For malicious email addresses, the vendor that made the decision
+ type: String
+ - contextPath: Account.Email.Malicious.Description
+ description: For malicious email addresses, the reason that the vendor made
+ the decision
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: Number
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: String
+ dockerimage: demisto/faker:0.1
+ isfetch: true
+ runonce: false
+ subtype: python2
+tests:
+ - No Tests
diff --git a/Integrations/OnboardingIntegration/OnboardingIntegration_description.md b/Integrations/OnboardingIntegration/OnboardingIntegration_description.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Integrations/OnboardingIntegration/OnboardingIntegration_image.png b/Integrations/OnboardingIntegration/OnboardingIntegration_image.png
new file mode 100644
index 000000000000..bb88f9fc8ee2
Binary files /dev/null and b/Integrations/OnboardingIntegration/OnboardingIntegration_image.png differ
diff --git a/Integrations/OnboardingIntegration/OnboardingIntegration_test.py b/Integrations/OnboardingIntegration/OnboardingIntegration_test.py
new file mode 100644
index 000000000000..35de862c189f
--- /dev/null
+++ b/Integrations/OnboardingIntegration/OnboardingIntegration_test.py
@@ -0,0 +1,22 @@
+import demistomock as demisto
+import OnboardingIntegration
+
+
+def test_frequency(mocker):
+ mocker.patch.object(demisto, 'params',
+ return_value={'frequency': '1'})
+ mocker.patch.object(demisto, 'command',
+ return_value='fetch-incidents')
+ mocker.patch.object(demisto, 'incidents')
+ OnboardingIntegration.main()
+ assert demisto.incidents.call_count == 1
+
+
+def test_no_settings(mocker):
+ mocker.patch.object(demisto, 'params',
+ return_value={})
+ mocker.patch.object(demisto, 'command',
+ return_value='fetch-incidents')
+ mocker.patch.object(demisto, 'incidents')
+ OnboardingIntegration.main()
+ assert demisto.incidents.call_count == 1
diff --git a/Integrations/OnboardingIntegration/Pipfile b/Integrations/OnboardingIntegration/Pipfile
new file mode 100644
index 000000000000..d685d1eec2ab
--- /dev/null
+++ b/Integrations/OnboardingIntegration/Pipfile
@@ -0,0 +1,21 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+flake8 = "*"
+
+[packages]
+ipaddress = "==1.0.22"
+python-dateutil = "==2.7.5"
+six = "==1.12.0"
+text-unidecode = "==1.2"
+Faker = "==1.0.1"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/OnboardingIntegration/Pipfile.lock b/Integrations/OnboardingIntegration/Pipfile.lock
new file mode 100644
index 000000000000..b5bfa5a0413e
--- /dev/null
+++ b/Integrations/OnboardingIntegration/Pipfile.lock
@@ -0,0 +1,396 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "7dd9aa8ec413891cd70e3d16438cbfb36000951391cfa16b3f8c4711728c7d3b"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "faker": {
+ "hashes": [
+ "sha256:228419b0a788a7ac867ebfafdd438461559ab1a0975edb607300852d9acaa78d",
+ "sha256:52a3dcc6a565b15fe1c95090321756d5a8a7c1caf5ab3df2f573ed70936ff518"
+ ],
+ "index": "pypi",
+ "version": "==1.0.1"
+ },
+ "ipaddress": {
+ "hashes": [
+ "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794",
+ "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c"
+ ],
+ "index": "pypi",
+ "version": "==1.0.22"
+ },
+ "python-dateutil": {
+ "hashes": [
+ "sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93",
+ "sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02"
+ ],
+ "index": "pypi",
+ "version": "==2.7.5"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "index": "pypi",
+ "version": "==1.12.0"
+ },
+ "text-unidecode": {
+ "hashes": [
+ "sha256:5a1375bb2ba7968740508ae38d92e1f889a0832913cb1c447d5e2046061a396d",
+ "sha256:801e38bd550b943563660a91de8d4b6fa5df60a542be9093f7abf819f86050cc"
+ ],
+ "index": "pypi",
+ "version": "==1.2"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:45d1272aad6cfd7a8a06cf5c73f2ceb6a190f6acc1fa707e7f82a4c053b28b18",
+ "sha256:bc37850f0cc42a1725a796ef7d92690651bf1af37d744cc63161dac62cabee17"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==3.8.1"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548",
+ "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696"
+ ],
+ "index": "pypi",
+ "version": "==3.7.8"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.3'",
+ "version": "==1.0.2"
+ },
+ "functools32": {
+ "hashes": [
+ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0",
+ "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.3.post2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16",
+ "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.3.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8",
+ "sha256:80d2de76188eabfbfcf27e6a37342c2827801e59c4cc14b0371c56fed43820e3"
+ ],
+ "version": "==0.19"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
+ "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
+ ],
+ "version": "==19.1"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version == '3.4.*' or python_version < '3'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42",
+ "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300"
+ ],
+ "index": "pypi",
+ "version": "==1.9.5"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:8fc39199bdda3d9d025d3b1f4eb99a192c20828030ea7c9a0d2840721de7d347",
+ "sha256:d100a02770f665f5dcf7e3f08202db29857fee6d15f34c942be0a511f39814f0"
+ ],
+ "index": "pypi",
+ "version": "==4.6.5"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "version": "==2.22.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "index": "pypi",
+ "version": "==1.12.0"
+ },
+ "typing": {
+ "hashes": [
+ "sha256:38566c558a0a94d6531012c8e917b1b8518a41e418f7f15f00e129cc80162ad3",
+ "sha256:53765ec4f83a2b720214727e319607879fec4acde22c4fbb54fa2604e79e44ce",
+ "sha256:84698954b4e6719e912ef9a42a2431407fe3755590831699debda6fba92aac55"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==3.7.4"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/PagerDuty/CHANGELOG.md b/Integrations/PagerDuty/CHANGELOG.md
new file mode 100644
index 000000000000..b7e8abf3984f
--- /dev/null
+++ b/Integrations/PagerDuty/CHANGELOG.md
@@ -0,0 +1,12 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+ -
+
+## [19.10.0] - 2019-10-03
+ - Added new arguments to the ***PagerDuty-get-users-on-call-now*** command:
+   - escalation_policy_ids: Filters the results, showing only on-calls for the specified escalation policy IDs.
+   - schedule_ids: Filters the results, showing only on-calls for the specified schedule IDs.
diff --git a/Integrations/PagerDuty/PagerDuty.py b/Integrations/PagerDuty/PagerDuty.py
new file mode 100644
index 000000000000..6202d1000115
--- /dev/null
+++ b/Integrations/PagerDuty/PagerDuty.py
@@ -0,0 +1,750 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+"""" IMPORTS """
+import json
+import requests
+from datetime import datetime, timedelta
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+# PagerDuty API works only with secured communication.
+USE_SSL = not demisto.params().get('insecure', False)
+
+USE_PROXY = demisto.params().get('proxy', True)
+API_KEY = demisto.params()['APIKey']
+SERVICE_KEY = demisto.params()['ServiceKey']
+FETCH_INTERVAL = demisto.params()['FetchInterval']
+
+SERVER_URL = 'https://api.pagerduty.com/'
+CREATE_EVENT_URL = 'https://events.pagerduty.com/v2/enqueue'
+
+DEFAULT_HEADERS = {
+ 'Authorization': 'Token token=' + API_KEY,
+ 'Accept': 'application/vnd.pagerduty+json;version=2'
+}
+
+'''HANDLE PROXY'''
+if not USE_PROXY:
+    # pop() avoids a KeyError when a proxy variable is not present in the environment
+    for proxy_var in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
+        os.environ.pop(proxy_var, None)
+
+'''PARAMS'''
+UTC_PARAM = '&time_zone=UTC'
+STATUSES = 'statuses%5B%5D'
+INCLUDED_FIELDS = '&include%5B%5D=first_trigger_log_entries&include%5B%5D=assignments'
+
+'''SUFFIX ENDPOINTS'''
+GET_SCHEDULES_SUFFIX = 'schedules'
+CREATE_INCIDENT_SUFFIX = 'incidents'
+GET_INCIDENT_SUFFIX = 'incidents/'
+GET_SERVICES_SUFFIX = 'services'
+ON_CALL_BY_SCHEDULE_SUFFIX = 'schedules/{0}/users'
+ON_CALLS_USERS_SUFFIX = 'oncalls?include%5B%5D=users'
+USERS_NOTIFICATION_RULE = 'users/{0}/notification_rules'
+GET_INCIDENTS_SUFFIX = 'incidents?include%5B%5D=assignees'
+USERS_CONTACT_METHODS_SUFFIX = 'users/{0}/contact_methods'
+
+'''CONTACT_METHOD_TYPES'''
+SMS_CONTACT_TYPE = 'sms_contact_method'
+EMAIL_CONTACT_TYPE = 'email_contact_method'
+PHONE_CONTACT_TYPE = 'phone_contact_method'
+PUSH_CONTACT_TYPE = 'push_notification_contact_method'
+
+CONTACT_METHODS_TO_HUMAN_READABLE = {
+ '': 'Unknown',
+ SMS_CONTACT_TYPE: 'SMS',
+ PUSH_CONTACT_TYPE: 'Push',
+ EMAIL_CONTACT_TYPE: 'Email',
+ PHONE_CONTACT_TYPE: 'Phone'
+}
+
+'''TABLE NAMES'''
+SERVICES = 'Service List'
+SCHEDULES = 'All Schedules'
+TRIGGER_EVENT = 'Trigger Event'
+RESOLVE_EVENT = 'Resolve Event'
+ACKNOWLEDGE_EVENT = 'Acknowledge Event'
+USERS_ON_CALL = 'Users On Call'
+INCIDENTS_LIST = 'PagerDuty Incidents'
+INCIDENT = 'PagerDuty Incident'
+CONTACT_METHODS = 'Contact Methods'
+USERS_ON_CALL_NOW = 'Users On Call Now'
+NOTIFICATION_RULES = 'User notification rules'
+
+'''TABLE HEADERS'''
+CONTACT_METHODS_HEADERS = ['ID', 'Type', 'Details']
+SERVICES_HEADERS = ['ID', 'Name', 'Status', 'Created At', 'Integration']
+NOTIFICATION_RULES_HEADERS = ['ID', 'Type', 'Urgency', 'Notification timeout(minutes)']
+SCHEDULES_HEADERS = ['ID', 'Name', 'Today', 'Time Zone', 'Escalation Policy', 'Escalation Policy ID']
+USERS_ON_CALL_NOW_HEADERS = ['ID', 'Email', 'Name', 'Role', 'User Url', 'Time Zone']
+INCIDENTS_HEADERS = ['ID', 'Title', 'Description', 'Status', 'Created On', 'Urgency', 'Html Url',
+ 'Assigned To User', 'Service ID', 'Service Name', 'Escalation Policy', 'Last Status Change On',
+ 'Last Status Change By', 'Number Of Escalations', 'Resolved By User', 'Resolve Reason']
+
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url, params_dict=None, data=None):
+ LOG('running %s request with url=%s\nparams=%s' % (method, url, json.dumps(params_dict)))
+ try:
+ res = requests.request(method,
+ url,
+ verify=USE_SSL,
+ params=params_dict,
+ headers=DEFAULT_HEADERS,
+ data=data
+ )
+ res.raise_for_status()
+
+ return unicode_to_str_recur(res.json())
+
+ except Exception as e:
+ LOG(e)
+ raise
+
+
+def translate_severity(sev):
+    """Map a PagerDuty urgency ('high'/'low') to a Demisto severity."""
+    if sev == 'high':
+        return 3
+    elif sev == 'low':  # urgencies are lowercase in the API; 'Low' would never match
+        return 1
+    return 0
+
+
+def unicode_to_str_recur(obj):
+ """Converts unicode elements of obj (incl. dictionary and list) to string recursively"""
+ if IS_PY3:
+ return obj
+ if isinstance(obj, dict):
+ obj = {unicode_to_str_recur(k): unicode_to_str_recur(v) for k, v in obj.items()}
+ elif isinstance(obj, list):
+ obj = map(unicode_to_str_recur, obj)
+ elif isinstance(obj, unicode):
+ obj = obj.encode('utf-8')
+ return obj
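+
+# Example (illustrative, Python 2): unicode_to_str_recur({u'key': [u'value']})
+# returns {'key': ['value']}, with every unicode element UTF-8 encoded to str.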
+
+
+def test_module():
+ try:
+ get_on_call_now_users_command()
+ except Exception as e:
+ return_error(e)
+
+ demisto.results('ok')
+
+
+def extract_on_call_user_data(users):
+ """Extact data about user from a given schedule."""
+ outputs = []
+ contexts = []
+ for user in users:
+ output = {}
+ context = {}
+
+ output['ID'] = user.get('id')
+ output['Name'] = user.get('name')
+ output['Role'] = user.get('role')
+ output['Email'] = user.get('email')
+ output['Time Zone'] = user.get('time_zone')
+ output['User Url'] = user.get('html_url')
+
+ context['ID'] = output['ID']
+ context['Role'] = output['Role']
+ context['Email'] = output['Email']
+ context['Username'] = output['Name']
+ context['DisplayName'] = output['Name']
+ context['TimeZone'] = output['Time Zone']
+
+ outputs.append(output)
+ contexts.append(context)
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': users,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(USERS_ON_CALL, outputs, USERS_ON_CALL_NOW_HEADERS),
+ 'EntryContext': {
+ 'PagerDutyUser(val.ID==obj.ID)': contexts
+ }
+ }
+
+
+def extract_on_call_now_user_data(users_on_call_now):
+ """Extract the user data from the oncalls json."""
+ outputs = [] # type: List[Dict]
+ contexts = [] # type: List[Dict]
+    oncalls = users_on_call_now.get('oncalls', [])
+
+    for data in oncalls:
+        output = {}
+        context = {}
+
+ user = data.get('user')
+
+ output['ID'] = user.get('id')
+ output['Name'] = user.get('name')
+ output['Role'] = user.get('role')
+ output['Email'] = user.get('email')
+ output['User Url'] = user.get('html_url')
+ output['Time Zone'] = user.get('time_zone')
+
+ context['ID'] = output['ID']
+ context['Role'] = output['Role']
+ context['Email'] = output['Email']
+ context['Username'] = output['Name']
+ context['DisplayName'] = output['Name']
+ context['TimeZone'] = output['Time Zone']
+
+ escal_level = data.get('escalation_level', 1)
+ outputs.insert(escal_level - 1, output)
+ contexts.insert(escal_level - 1, context)
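+        # insert() at escalation_level - 1 keeps the lists ordered so that
+        # level-1 (first-to-be-paged) responders appear first in the output.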
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': users_on_call_now,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(USERS_ON_CALL_NOW, outputs, USERS_ON_CALL_NOW_HEADERS),
+ 'EntryContext': {
+            'PagerDutyUser(val.ID==obj.ID)': contexts
+ }
+ }
+
+
+def parse_incident_data(incidents):
+ """Parse incident data to output,context format"""
+ outputs = []
+ contexts = []
+ raw_response = []
+    for incident in incidents:
+ output = {}
+ context = {}
+
+ context['ID'] = output['ID'] = incident.get('id')
+ context['Title'] = output['Title'] = incident.get('summary')
+ output['Description'] = incident.get('first_trigger_log_entry', {}).get('channel', {}).get('details', '')
+ context['Description'] = output['Description']
+ context['Status'] = output['Status'] = incident.get('status')
+ context['created_at'] = output['Created On'] = incident.get('created_at')
+ context['urgency'] = output['Urgency'] = incident.get('urgency', '')
+ output['Html Url'] = incident.get('html_url')
+
+ if len(incident.get('assignments', [])) > 0:
+ output['Assigned To User'] = incident['assignments'][0].get('assignee', {}).get('name')
+ else:
+ output['Assigned To User'] = '-'
+
+ context['assignee'] = output['Assigned To User']
+
+ context['service_id'] = output['Service ID'] = incident.get('service', {}).get('id')
+ context['service_name'] = output['Service Name'] = incident.get('service', {}).get('summary')
+
+ output['Escalation Policy'] = incident.get('escalation_policy', {}).get('summary')
+ context['escalation_policy'] = output['Escalation Policy']
+
+ context['last_status_change_at'] = output['Last Status Change On'] = incident.get('last_status_change_at')
+ output['Last Status Change By'] = incident.get('last_status_change_by', {}).get('summary')
+ context['last_status_change_by'] = output['Last Status Change By']
+
+ context['number_of_escalations'] = output['Number Of Escalations'] = incident.get('number_of_escalations')
+
+        if output['Status'] == 'resolved':
+            output['Resolved By User'] = output['Last Status Change By']
+        else:
+            output['Resolved By User'] = '-'
+
+        context['resolved_by'] = output['Resolved By User']
+        context['resolve_reason'] = output['Resolve Reason'] = incident.get('resolve_reason', '')
+
+ context['teams'] = []
+ for team in incident.get('teams', []):
+ team_id = team.get('id', '')
+ team_name = team.get('summary', '')
+
+ team_data = {
+ "ID": team_id,
+ "Name": team_name
+ }
+
+ context['teams'].append(team_data)
+
+        assignment = incident.get('assignments', [])
+        if len(assignment) > 0:
+            context['assignment'] = {
+                "time": assignment[0].get('at', ''),
+                "assignee": assignment[0].get('assignee', {}).get('summary', ''),
+            }
+        else:
+            context['assignment'] = {}
+
+        acknowledgements = incident.get('acknowledgements', [])
+        if len(acknowledgements) > 0:
+            context['acknowledgement'] = {
+                "time": acknowledgements[0].get('at', ''),
+                "acknowledger": acknowledgements[0].get('acknowledger', {}).get('summary', ''),
+            }
+        else:
+            context['acknowledgement'] = {}
+
+ outputs.append(output)
+ contexts.append(context)
+ raw_response.append(incident)
+
+ return outputs, contexts, raw_response
+
+
+def extract_incidents_data(incidents, table_name):
+ """Extact data about incidents."""
+ outputs, contexts, _ = parse_incident_data(incidents)
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': incidents,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(table_name, outputs, INCIDENTS_HEADERS, removeNull=True),
+ 'EntryContext': {
+ 'PagerDuty.Incidents(val.ID==obj.ID)': contexts
+ }
+ }
+
+
+def extract_all_schedules_data(schedules):
+ """Extract the data about all the schedules."""
+ outputs = []
+ contexts = []
+    for data in schedules:
+        output = {}
+        context = {}  # type: Dict
+
+ output['ID'] = data.get('id')
+ output['Name'] = data.get('name')
+ output['Time Zone'] = data.get('time_zone')
+ output['Today'] = datetime.today().strftime('%Y-%m-%d')
+ escalation_policies = data.get('escalation_policies', [])
+ if len(escalation_policies) > 0:
+ output['Escalation Policy ID'] = escalation_policies[0].get('id')
+ output['Escalation Policy'] = escalation_policies[0].get('summary')
+
+ context['escalation_policies'] = [{}, ]
+ context['escalation_policies'][0]['name'] = output['Escalation Policy']
+ context['escalation_policies'][0]['id'] = output['Escalation Policy ID']
+ else:
+ output['Escalation Policy'] = '-'
+ output['Escalation Policy ID'] = '-'
+
+ context['id'] = output['ID']
+ context['name'] = output['Name']
+ context['today'] = output['Today']
+ context['time_zone'] = output['Time Zone']
+
+ outputs.append(output)
+ contexts.append(context)
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': schedules,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(SCHEDULES, outputs, SCHEDULES_HEADERS),
+ 'EntryContext': {
+ 'PagerDuty.Schedules(val.id==obj.id)': contexts,
+ }
+ }
+
+
+def create_new_incident(source, summary, severity, action, description='No description', group='',
+ event_class='', component='', incident_key=None, service_key=SERVICE_KEY):
+ """Create a new incident in the PagerDuty instance."""
+ payload = {
+ 'routing_key': service_key,
+ 'event_action': action,
+ 'dedup_key': incident_key,
+ 'images': [],
+ 'links': [],
+ 'payload': {
+ 'summary': summary,
+ 'source': source,
+ 'severity': severity,
+ 'group': group,
+ 'class': event_class,
+ 'component': component,
+ 'custom_details': {
+ 'description': description
+ }
+ }
+ }
+
+ return http_request('POST', CREATE_EVENT_URL, data=json.dumps(payload))
+
+
+def resolve_or_ack_incident(action, incident_key, service_key=SERVICE_KEY):
+ """Resolve or Acknowledge an incident in the PagerDuty instance."""
+ payload = {
+ 'routing_key': service_key,
+ 'event_action': action,
+ 'dedup_key': incident_key
+ }
+
+ return http_request('POST', CREATE_EVENT_URL, data=json.dumps(payload))
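+
+# Example (illustrative) Events API v2 payload produced by resolve_or_ack_incident:
+# {"routing_key": "<service key>", "event_action": "resolve", "dedup_key": "<incident key>"}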
+
+
+def extract_new_event_data(table_name, response):
+ """Extract the data from the response of creating a new command."""
+ output = {}
+ context = {}
+
+ output['Status'] = response.get('status', '')
+ output['Message'] = response.get('message', '')
+ output['Incident key'] = response.get('dedup_key', '')
+
+ context['Status'] = output['Status']
+ context['Message'] = output['Message']
+ context['incident_key'] = output['Incident key']
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(table_name, output),
+ 'EntryContext': {
+ 'PagerDuty.Event(val.incident_key==obj.dedup_key)': context,
+ 'Event.ID(val.ID==obj.dedup_key)': context['incident_key']
+ }
+ }
+
+
+def extract_users_contact_methods(user_contact_methods):
+ """Extract all the contact methods of a given user."""
+ outputs = []
+ contexts = []
+ contact_methods = user_contact_methods.get('contact_methods')
+ for contact_method in contact_methods:
+ output = {}
+
+ output['ID'] = contact_method.get('id')
+ output['Type'] = CONTACT_METHODS_TO_HUMAN_READABLE[contact_method.get('type', '')]
+
+ country_code = str(contact_method.get('country_code', ''))
+ address = contact_method.get('address', '')
+ output['Details'] = country_code + address
+
+ outputs.append(output)
+
+ del contact_method['address']
+ if output['Type'] == 'SMS' or output['Type'] == 'Phone':
+ del contact_method['country_code']
+ contact_method['phone'] = output['Details']
+ else:
+ contact_method['email'] = output['Details']
+
+ contexts.append(contact_method)
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': user_contact_methods,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(CONTACT_METHODS, outputs, CONTACT_METHODS_HEADERS),
+ 'EntryContext': {
+ 'PagerDuty.Contact_methods(val.id==obj.id)': contexts,
+ }
+ }
+
+
+def extract_users_notification_role(user_notification_role):
+    """Extract the notification rules of a given user."""
+    outputs = []
+    notification_rules = user_notification_role.get('notification_rules')
+ for notification_rule in notification_rules:
+ output = {}
+
+ output['ID'] = notification_rule.get('id')
+ output['Type'] = notification_rule.get('type', '')
+ output['Urgency'] = notification_rule.get('urgency')
+ output['Notification timeout(minutes)'] = notification_rule.get('start_delay_in_minutes')
+
+ outputs.append(output)
+
+ return {
+ 'Type': entryTypes['note'],
+        'Contents': user_notification_role,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(NOTIFICATION_RULES, outputs, NOTIFICATION_RULES_HEADERS),
+ 'EntryContext': {
+ 'PagerDuty.Notification_rules(val.id==obj.id)': notification_rules,
+ }
+ }
+
+
+'''COMMANDS'''
+
+
+def fetch_incidents():
+ param_dict = {}
+ now_time = datetime.utcnow()
+ now = datetime.isoformat(now_time)
+ lastRunObject = demisto.getLastRun()
+ if lastRunObject:
+ param_dict['since'] = lastRunObject['time']
+ else:
+ param_dict['since'] = datetime.isoformat(now_time - timedelta(minutes=int(FETCH_INTERVAL)))
+
+ param_dict['until'] = now
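+    # Each fetch covers the window from the previous run's timestamp ('since')
+    # up to now ('until'); the first run falls back to FETCH_INTERVAL minutes ago.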
+
+ url = SERVER_URL + GET_INCIDENTS_SUFFIX + configure_status()
+ res = http_request('GET', url, param_dict)
+ _, parsed_incidents, raw_responses = parse_incident_data(res.get('incidents', []))
+
+ incidents = []
+ for incident, raw_response in zip(parsed_incidents, raw_responses):
+ incidents.append({
+ 'name': incident['ID'] + ' - ' + incident['Title'],
+ 'occurred': incident['created_at'],
+ 'severity': translate_severity(incident['urgency']),
+ 'rawJSON': json.dumps(raw_response)
+ })
+
+ demisto.incidents(incidents)
+ demisto.setLastRun({'time': now})
+
+
+def configure_status(status='triggered,acknowledged'):
+ statuses = status.split(',')
+ statuses_string = "&" + STATUSES + '='
+ statuses = statuses_string.join(statuses)
+ status_request = '&' + STATUSES + '=' + statuses
+
+ status_request = status_request + INCLUDED_FIELDS + UTC_PARAM
+ return status_request
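+
+# Example (illustrative): configure_status('triggered,acknowledged') returns
+# '&statuses%5B%5D=triggered&statuses%5B%5D=acknowledged' followed by the
+# INCLUDED_FIELDS and UTC_PARAM suffixes defined above.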
+
+
+def get_incidents_command(since=None, until=None, status='triggered,acknowledged', sortBy=None):
+ """Get incidents command."""
+ param_dict = {}
+ if since is not None:
+ param_dict['since'] = since
+ if until is not None:
+ param_dict['until'] = until
+ if sortBy is not None:
+ param_dict['sortBy'] = sortBy
+
+ url = SERVER_URL + GET_INCIDENTS_SUFFIX + configure_status(status)
+ res = http_request('GET', url, param_dict)
+    return extract_incidents_data(res.get('incidents', []), INCIDENTS_LIST)
+
+
+def submit_event_command(source, summary, severity, action, description='No description', group='',
+ event_class='', component='', incident_key=None, serviceKey=SERVICE_KEY):
+ """Create new event."""
+    if serviceKey is None:
+        raise Exception('You must enter a ServiceKey in the integration '
+                        'parameters or in the command to process this action.')
+
+ res = create_new_incident(source, summary, severity, action, description,
+ group, event_class, component, incident_key, serviceKey)
+ return extract_new_event_data(TRIGGER_EVENT, res)
+
+
+def get_all_schedules_command(query=None, limit=None):
+ """Get all the schedules."""
+ param_dict = {}
+ if query is not None:
+ param_dict['query'] = query
+ if limit is not None:
+ param_dict['limit'] = limit
+
+ url = SERVER_URL + GET_SCHEDULES_SUFFIX
+ res = http_request('GET', url, param_dict)
+ schedules = res.get('schedules', [])
+ return extract_all_schedules_data(schedules)
+
+
+def get_on_call_users_command(scheduleID, since=None, until=None):
+ """Get the list of user on call in a from scheduleID"""
+ param_dict = {}
+ if since is not None:
+ param_dict['since'] = since
+ if until is not None:
+ param_dict['until'] = until
+
+ url = SERVER_URL + ON_CALL_BY_SCHEDULE_SUFFIX.format(scheduleID)
+ users_on_call = http_request('GET', url, param_dict)
+ return extract_on_call_user_data(users_on_call.get('users', []))
+
+
+def get_on_call_now_users_command(limit=None, escalation_policy_ids=None, schedule_ids=None):
+ """Get the list of users that are on call now."""
+ param_dict = {}
+ if limit is not None:
+ param_dict['limit'] = limit
+ if escalation_policy_ids is not None:
+ param_dict['escalation_policy_ids[]'] = argToList(escalation_policy_ids)
+ if schedule_ids is not None:
+ param_dict['schedule_ids[]'] = argToList(schedule_ids)
+
+ url = SERVER_URL + ON_CALLS_USERS_SUFFIX
+ users_on_call_now = http_request('GET', url, param_dict)
+ return extract_on_call_now_user_data(users_on_call_now)
+
+
+def get_users_contact_methods_command(UserID):
+ """Get the contact methods of a given user."""
+ url = SERVER_URL + USERS_CONTACT_METHODS_SUFFIX.format(UserID)
+ user_contact_methods = http_request('GET', url, {})
+ return extract_users_contact_methods(user_contact_methods)
+
+
+def get_users_notification_command(UserID):
+ """Get the notification rule of a given user"""
+ url = SERVER_URL + USERS_NOTIFICATION_RULE.format(UserID)
+    user_notification_role = http_request('GET', url, {})
+    return extract_users_notification_role(user_notification_role)
+
+
+def resolve_event(incident_key=None, serviceKey=SERVICE_KEY):
+    if serviceKey is None:
+        raise Exception('You must enter a ServiceKey in the integration '
+                        'parameters or in the command to process this action.')
+
+ action_response = resolve_or_ack_incident('resolve', incident_key, serviceKey)
+
+ res = http_request('GET', SERVER_URL + GET_INCIDENTS_SUFFIX, {'incident_key': incident_key})
+ _, contexts, _ = parse_incident_data(res.get('incidents', []))
+    if not contexts or contexts[0]['Status'] != "resolved":
+        raise Exception('Could not resolve the incident; it may have been created with a different Service Key.')
+
+ return extract_new_event_data(RESOLVE_EVENT, action_response)
+
+
+def acknowledge_event(incident_key=None, serviceKey=SERVICE_KEY):
+    if serviceKey is None:
+        raise Exception('You must enter a ServiceKey in the integration '
+                        'parameters or in the command to process this action.')
+
+ action_response = resolve_or_ack_incident('acknowledge', incident_key, serviceKey)
+
+ res = http_request('GET', SERVER_URL + GET_INCIDENTS_SUFFIX, {'incident_key': incident_key})
+ _, contexts, _ = parse_incident_data(res.get('incidents', []))
+    if not contexts or contexts[0]['Status'] != "acknowledged":
+        raise Exception('Could not acknowledge the incident; it may have been created with a different Service Key.')
+
+    return extract_new_event_data(ACKNOWLEDGE_EVENT, action_response)
+
+
+def get_incident_data():
+ incident_id = demisto.args().get('incident_id')
+
+ url = SERVER_URL + GET_INCIDENT_SUFFIX + incident_id
+ res = http_request('GET', url, {})
+ return extract_incidents_data([res.get('incident', {})], INCIDENT)
+
+
+def get_service_keys():
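+    """List all services and their integration keys, paging through results
+    25 at a time (PagerDuty's default REST API page size)."""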
+ offset = 0
+ raw_response = []
+
+ url = SERVER_URL + GET_SERVICES_SUFFIX
+ res = http_request('GET', url, {"offset": offset})
+ raw_response.append(res)
+
+ outputs = []
+ contexts = []
+ while res.get('services', []):
+ services = res.get('services', [])
+ for service in services:
+ output = {}
+ context = {}
+ context['ID'] = output['ID'] = service.get('id')
+ context['Name'] = output['Name'] = service.get('name')
+ context['Status'] = output['Status'] = service.get('status')
+ context['CreatedAt'] = output['Created At'] = service.get('created_at')
+
+ integration_list = []
+ integration_string = ""
+ for integration in service.get('integrations', []):
+ integration_url = integration.get('self', '')
+ if integration_url:
+ integration_data = {}
+ integration_res = http_request('GET', integration_url, {}).get('integration', {})
+ integration_data['Name'] = integration_res.get('service', {}).get('summary', '')
+ integration_data['Key'] = integration_res.get('integration_key', '')
+ vendor_value = integration_res.get('vendor', {})
+ if not vendor_value:
+ integration_data['Vendor'] = 'Missing Vendor information'
+ else:
+ integration_data['Vendor'] = vendor_value.get('summary', 'Missing Vendor information')
+
+ integration_list.append(integration_data)
+ integration_string += "Name: {}, Vendor: {}, Key: {}\n".format(integration_data['Name'],
+ integration_data['Vendor'],
+ integration_data['Key'])
+
+ output['Integration'] = integration_string
+ context['Integration'] = integration_list
+
+ outputs.append(output)
+ contexts.append(context)
+
+ offset += 25
+ res = http_request('GET', url, {"offset": offset})
+ raw_response.append(res)
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': raw_response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(SERVICES, outputs, SERVICES_HEADERS),
+ 'EntryContext': {
+ 'PagerDuty.Service(val.ID==obj.ID)': contexts,
+ }
+ }
+
+
+''' EXECUTION CODE '''
+
+LOG('command is %s' % (demisto.command(), ))
+
+try:
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+ elif demisto.command() == 'PagerDuty-incidents':
+ demisto.results(get_incidents_command(**demisto.args()))
+ elif demisto.command() == 'PagerDuty-submit-event':
+ demisto.results(submit_event_command(**demisto.args()))
+ elif demisto.command() == 'PagerDuty-get-users-on-call':
+ demisto.results(get_on_call_users_command(**demisto.args()))
+ elif demisto.command() == 'PagerDuty-get-all-schedules':
+ demisto.results(get_all_schedules_command(**demisto.args()))
+ elif demisto.command() == 'PagerDuty-get-users-on-call-now':
+ demisto.results(get_on_call_now_users_command(**demisto.args()))
+ elif demisto.command() == 'PagerDuty-get-contact-methods':
+ demisto.results(get_users_contact_methods_command(**demisto.args()))
+ elif demisto.command() == 'PagerDuty-get-users-notification':
+ demisto.results(get_users_notification_command(**demisto.args()))
+ elif demisto.command() == 'PagerDuty-resolve-event':
+ demisto.results(resolve_event(**demisto.args()))
+ elif demisto.command() == 'PagerDuty-acknowledge-event':
+ demisto.results(acknowledge_event(**demisto.args()))
+ elif demisto.command() == 'PagerDuty-get-incident-data':
+ demisto.results(get_incident_data())
+ elif demisto.command() == 'PagerDuty-get-service-keys':
+ demisto.results(get_service_keys())
+
+
+except Exception as e:
+ return_error(e)
diff --git a/Integrations/PagerDuty/PagerDuty.yml b/Integrations/PagerDuty/PagerDuty.yml
new file mode 100644
index 000000000000..4530e6b154f6
--- /dev/null
+++ b/Integrations/PagerDuty/PagerDuty.yml
@@ -0,0 +1,554 @@
+category: Messaging
+commonfields:
+ id: PagerDuty v2
+ version: -1
+configuration:
+- display: API Key
+ name: APIKey
+ required: true
+ type: 4
+- display: Service Key (for triggering events only)
+ name: ServiceKey
+ required: false
+ type: 0
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: ''
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: '1'
+  display: Initial Fetch Interval (in minutes, used only for the first fetch or
+    after Reset last run)
+ name: FetchInterval
+ required: false
+ type: 0
+description: Alert and notify users using PagerDuty
+display: PagerDuty v2
+name: PagerDuty v2
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Show only the schedules whose name matches the query
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+    description: The maximum number of schedules to return (default is 25, max is 100)
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Receive all schedules from PagerDuty
+ execution: false
+ name: PagerDuty-get-all-schedules
+ outputs:
+ - contextPath: PagerDuty.Schedules.id
+ description: The ID of the schedule
+ type: string
+ - contextPath: PagerDuty.Schedules.name
+ description: The name of the schedule
+ type: string
+ - arguments:
+ - default: true
+    description: (default and mandatory) The unique identifier of the schedule
+ isArray: false
+ name: scheduleID
+ required: true
+ secret: false
+ - default: false
+    description: The start of the date range, using ISO 8601 representation. E.g.
+      !PagerDutyGetUsersOnCall since=2011-05-06T17:00Z
+ isArray: false
+ name: since
+ required: false
+ secret: false
+ - default: false
+ description: The end of the date range
+ isArray: false
+ name: until
+ required: false
+ secret: false
+ deprecated: false
+  description: Returns the names and details of on-call users at a certain time
+    or for a specific schedule
+ execution: false
+ name: PagerDuty-get-users-on-call
+ outputs:
+ - contextPath: PagerDutyUser.id
+ description: User's ID
+ type: string
+ - contextPath: PagerDutyUser.Emails
+ description: Email of user
+ type: string
+ - contextPath: PagerDutyUser.Username
+ description: Username of person
+ type: string
+ - contextPath: PagerDutyUser.DisplayName
+ description: Display name of person
+ type: string
+ - contextPath: PagerDutyUser.Role
+ description: Display role of person
+ type: string
+ - contextPath: PagerDutyUser.TimeZone
+ description: The time zone of the user
+ type: string
+ - arguments:
+ - default: false
+    description: The maximum number of users to return (default is 25, max is 100)
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: Filters the results, showing only on-call users for the specified escalation
+ policy IDs.
+ isArray: true
+ name: escalation_policy_ids
+ required: false
+ secret: false
+ - default: false
+ description: Filters the results, showing only on-call users for the specified schedule
      IDs. If the value is null, permanent on-call users are included due to direct user
+ escalation policy targets.
+ isArray: true
+ name: schedule_ids
+ required: false
+ secret: false
+ deprecated: false
+  description: Returns the names and details of the current on-call personnel
+ execution: false
+ name: PagerDuty-get-users-on-call-now
+ outputs:
+ - contextPath: PagerDutyUser.ID
+ description: User's ID
+ type: string
+ - contextPath: PagerDutyUser.Email
+ description: Email of user
+ type: string
+ - contextPath: PagerDutyUser.Username
+ description: Username of person
+ type: string
+ - contextPath: PagerDutyUser.DisplayName
+ description: Display name of person
+ type: string
+ - contextPath: PagerDutyUser.Role
+ description: Role of person
+ type: string
+ - contextPath: PagerDutyUser.TimeZone
+ description: The time zone of the user
+ type: string
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Returns only the incidents currently in the passed status(es).
+      Valid status options are triggered, acknowledged, and resolved. (Default values
+      are triggered and acknowledged)
+ isArray: false
+ name: status
+ predefined:
+ - triggered
+ - acknowledged
+ - resolved
+ required: false
+ secret: false
+ - default: false
+    description: Beginning date and time, using ISO 8601 representation. E.g. PagerDutyIncidents
+      since=2011-05-06T17:00Z (must be used with the until argument)
+ isArray: false
+ name: since
+ required: false
+ secret: false
+ - default: false
+    description: Specifies both the field to sort the results on and the direction
+      of the sort (ascending/descending). See https://v2.developer.pagerduty.com/v2/page/api-reference#!/Incidents/get_incidents
+ isArray: false
+ name: sortBy
+ required: false
+ secret: false
+ - default: false
+    description: Last date and time, using ISO 8601 representation. E.g. PagerDutyIncidents
+      until=2016-05-06T13:00Z
+ isArray: false
+ name: until
+ required: false
+ secret: false
+ deprecated: false
+  description: Shows incidents in PagerDuty. Default status parameters are triggered and acknowledged
+ execution: false
+ name: PagerDuty-incidents
+ outputs:
+ - contextPath: PagerDuty.Incidents.ID
+ description: Incident ID
+ type: string
+ - contextPath: PagerDuty.Incidents.Title
+ description: The title of the incident
+ type: string
+ - contextPath: PagerDuty.Incidents.Status
+ description: Incident Status
+ type: string
+ - contextPath: PagerDuty.Incidents.created_at
+ description: Time in which the incident was created
+ type: date
+ - contextPath: PagerDuty.Incidents.urgency
+ description: Incident Urgency
+ type: string
+ - contextPath: PagerDuty.Incidents.assignee
+ description: 'The assignee of the incident '
+ type: string
+ - contextPath: PagerDuty.Incidents.service_id
+ description: The id of the impacted service
+ type: string
+ - contextPath: PagerDuty.Incidents.service_name
+ description: The name of the impacted service
+ type: string
+ - contextPath: PagerDuty.Incidents.escalation_policy
+ description: The escalation policy
+ type: string
+ - contextPath: PagerDuty.Incidents.last_status_change_at
+ description: Time in which the last status change occurred
+ type: date
+ - contextPath: PagerDuty.Incidents.last_status_change_by
+    description: Name of the user who made the last status change
+ type: string
+ - contextPath: PagerDuty.Incidents.number_of_escalations
+ description: Number of escalations that took place
+ type: number
+ - contextPath: PagerDuty.Incidents.resolved_by
+ description: Name of the User who resolved the incident
+ type: string
+ - contextPath: PagerDuty.Incidents.resolve_reason
+ description: The reason for resolving the issue
+ type: string
+ - contextPath: PagerDuty.Incidents.Description
+ description: The Description of the incident
+ type: string
+  - contextPath: PagerDuty.Incidents.teams.ID
+    description: The ID of the team assigned to the incident.
+    type: string
+  - contextPath: PagerDuty.Incidents.teams.Name
+    description: The name of the team assigned to the incident.
+    type: string
+ - contextPath: PagerDuty.Incidents.assignment.time
+ description: The time of the assignment to the incident
+ type: date
+ - contextPath: PagerDuty.Incidents.assignment.assignee
+ description: The name of the assignee to the incident
+ type: string
+ - contextPath: PagerDuty.Incidents.acknowledgement.time
+ description: The time of the acknowledgement to the incident
+ type: date
+ - contextPath: PagerDuty.Incidents.acknowledgement.acknowledger
+ description: The name of the acknowledger to the incident
+ type: string
+ - arguments:
+ - default: false
+ description: Specific human-readable unique identifier, such as a hostname,
+ for the system having the problem.
+ isArray: false
+ name: source
+ required: true
+ secret: false
+ - default: false
+ description: "\t A high-level, text summary message of the event. Will be used\
+ \ to construct an alert's description."
+ isArray: false
+ name: summary
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The severity of the event
+ isArray: false
+ name: severity
+ predefined:
+ - critical
+ - error
+ - warning
+ - info
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The action to be executed
+ isArray: false
+ name: action
+ predefined:
+ - trigger
+ - acknowledge
+ - resolve
+ required: true
+ secret: false
+ - default: false
+ description: A short description of the problem
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+    description: 'A cluster or grouping of sources. For example, sources "prod-datapipe-02"
+      and "prod-datapipe-03" might both be part of "prod-datapipe". Example: "prod-datapipe"
+      "www"'
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: 'The class/type of the event. Example: "High CPU" "Latency"'
+ isArray: false
+ name: event_class
+ required: false
+ secret: false
+ - default: false
+ description: 'The part or component of the affected system that is broken. Example:
+ "keepalive" "webping"'
+ isArray: false
+ name: component
+ required: false
+ secret: false
+ - default: false
+ description: Incident key, used to acknowledge/resolve specific event
+ isArray: false
+ name: incident_key
+ required: false
+ secret: false
+ - default: false
+ description: Service key for the integration
+ isArray: false
+ name: serviceKey
+ required: false
+ secret: false
+ deprecated: false
+  description: Creates a new event/incident in PagerDuty (to use this command,
+    you must enter the Service Key in the integration settings)
+ execution: false
+ name: PagerDuty-submit-event
+ outputs:
+ - contextPath: PagerDuty.Event.Status
+ description: Status of the action on the event
+ type: string
+ - contextPath: PagerDuty.Event.incident_key
+ description: Incident key
+ type: string
+ - arguments:
+ - default: false
+    description: ID of the user to query
+ isArray: false
+ name: UserID
+ required: true
+ secret: false
+ deprecated: false
+ description: Get the contact methods of a given user
+ execution: false
+ name: PagerDuty-get-contact-methods
+ outputs:
+ - contextPath: PagerDuty.Contact_methods.phone
+ description: The phone number of the user
+ type: string
+ - contextPath: PagerDuty.Contact_methods.id
+ description: ID of the contact method
+ type: string
+ - contextPath: PagerDuty.Contact_methods.type
+ description: The type of the current contact method
+ type: string
+ - contextPath: PagerDuty.Contact_methods.email
+ description: The email of the user
+ type: string
+ - arguments:
+ - default: false
+    description: ID of the user to query
+ isArray: false
+ name: UserID
+ required: true
+ secret: false
+ deprecated: false
+  description: Get the user's notification rules
+ execution: false
+ name: PagerDuty-get-users-notification
+ outputs:
+ - contextPath: PagerDuty.Notification_rules.start_delay_in_minutes
+ description: The delay time for notifying the user
+ type: string
+ - contextPath: PagerDuty.Notification_rules.urgency
+ description: The urgency of the notification
+ type: string
+ - contextPath: PagerDuty.Notification_rules.id
+ description: The id of the notification rule
+ type: string
+ - arguments:
+ - default: false
+ description: Incident key
+ isArray: false
+ name: incident_key
+ required: true
+ secret: false
+ - default: false
+ description: Service key for the integration
+ isArray: false
+ name: serviceKey
+ required: true
+ secret: false
+ deprecated: false
+ description: Resolves an existing event in PagerDuty
+ execution: false
+ name: PagerDuty-resolve-event
+ outputs:
+ - contextPath: PagerDuty.Event.Status
+ description: Status of the action on the event
+ type: string
+ - contextPath: PagerDuty.Event.incident_key
+ description: Incident key
+ type: string
+ - arguments:
+ - default: false
+ description: Incident key
+ isArray: false
+ name: incident_key
+ required: true
+ secret: false
+ - default: false
+ description: Service key for the integration
+ isArray: false
+ name: serviceKey
+ required: true
+ secret: false
+ deprecated: false
+ description: Acknowledges an existing event in PagerDuty
+ execution: false
+ name: PagerDuty-acknowledge-event
+ outputs:
+ - contextPath: PagerDuty.Event.Status
+ description: Status of the action on the event
+ type: string
+ - contextPath: PagerDuty.Event.incident_key
+ description: Incident key
+ type: string
+ - arguments:
+ - default: false
+ description: ID of the incident to get information for.
+ isArray: false
+ name: incident_id
+ required: true
+ secret: false
+ deprecated: false
+  description: Get data about an incident from PagerDuty
+ execution: false
+ name: PagerDuty-get-incident-data
+ outputs:
+ - contextPath: PagerDuty.Incidents.ID
+ description: Incident ID
+ type: string
+ - contextPath: PagerDuty.Incidents.Title
+ description: The title of the incident
+ type: string
+ - contextPath: PagerDuty.Incidents.Status
+ description: Incident Status
+ type: string
+ - contextPath: PagerDuty.Incidents.created_at
+ description: Time in which the incident was created
+ type: date
+ - contextPath: PagerDuty.Incidents.urgency
+ description: Incident Urgency
+ type: string
+ - contextPath: PagerDuty.Incidents.assignee
+ description: 'The assignee of the incident '
+ type: string
+ - contextPath: PagerDuty.Incidents.service_id
+ description: The id of the impacted service
+ type: string
+ - contextPath: PagerDuty.Incidents.service_name
+ description: The name of the impacted service
+ type: string
+ - contextPath: PagerDuty.Incidents.escalation_policy
+ description: The escalation policy
+ type: string
+ - contextPath: PagerDuty.Incidents.last_status_change_at
+ description: Time in which the last status change occurred
+ type: date
+ - contextPath: PagerDuty.Incidents.last_status_change_by
+    description: Name of the user who made the last status change
+ type: string
+ - contextPath: PagerDuty.Incidents.number_of_escalations
+ description: Number of escalations that took place
+ type: number
+ - contextPath: PagerDuty.Incidents.resolved_by
+ description: Name of the User who resolved the incident
+ type: string
+ - contextPath: PagerDuty.Incidents.resolve_reason
+ description: The reason for resolving the issue
+ type: string
+ - contextPath: PagerDuty.Incidents.Description
+ description: The Description of the incident
+ type: string
+  - contextPath: PagerDuty.Incidents.teams.ID
+    description: The ID of the team assigned to the incident.
+    type: string
+  - contextPath: PagerDuty.Incidents.teams.Name
+    description: The name of the team assigned to the incident.
+    type: string
+ - contextPath: PagerDuty.Incidents.assignment.time
+ description: The time of the assignment to the incident
+ type: date
+ - contextPath: PagerDuty.Incidents.assignment.assignee
+ description: The name of the assignee to the incident
+ type: string
+ - contextPath: PagerDuty.Incidents.acknowledgement.time
+ description: The time of the acknowledgement to the incident
+ type: date
+ - contextPath: PagerDuty.Incidents.acknowledgement.acknowledger
+ description: The name of the acknowledger to the incident
+ type: string
+ - deprecated: false
+ description: Get Service keys for each of the services configured in the PagerDuty
+ instance
+ execution: false
+ name: PagerDuty-get-service-keys
+ outputs:
+ - contextPath: PagerDuty.Service.ID
+ description: The ID of the service connected to PagerDuty
+ type: string
+ - contextPath: PagerDuty.Service.Name
+ description: The name of the service connected to PagerDuty
+ type: string
+ - contextPath: PagerDuty.Service.Status
+ description: The status of the service connected to PagerDuty
+ type: string
+ - contextPath: PagerDuty.Service.CreatedAt
+ description: The date in which the service connected to PagerDuty was created
+ type: date
+ - contextPath: PagerDuty.Service.Integration.Name
+ description: The name of the integration used with the service
+ type: string
+ - contextPath: PagerDuty.Service.Integration.Vendor
+    description: The name of the vendor for the integration used with the service.
+      (A value of 'Missing Vendor information' will appear when no vendor information
+      could be found)
+ type: string
+ - contextPath: PagerDuty.Service.Integration.Key
+ description: The key used to control events with the integration
+ type: string
+ isfetch: true
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- PagerDuty Test
diff --git a/Integrations/PagerDuty/PagerDuty_Image.png b/Integrations/PagerDuty/PagerDuty_Image.png
new file mode 100644
index 000000000000..da18ce515002
Binary files /dev/null and b/Integrations/PagerDuty/PagerDuty_Image.png differ
diff --git a/Integrations/PagerDuty/PagerDuty_description.md b/Integrations/PagerDuty/PagerDuty_description.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortex.py b/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortex.py
new file mode 100644
index 000000000000..22b3181487d1
--- /dev/null
+++ b/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortex.py
@@ -0,0 +1,798 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+import os
+import requests
+import json
+from pancloud import LoggingService, Credentials
+import base64
+from cryptography.hazmat.primitives.ciphers.aead import AESGCM
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+AUTH_ID = demisto.params().get('auth_id')
+# Prefer a token stored in the integration context; it is newer than the one configured in the instance parameters
+TOKEN = demisto.getIntegrationContext().get('token')
+if not TOKEN:
+ TOKEN = demisto.params().get('token')
+
+ENC_KEY = demisto.params().get('auth_key')
+
+USE_SSL = not demisto.params().get('insecure', False)
+TOKEN_RETRIEVAL_URL = 'https://demistobot.demisto.com/panw-token'
+FETCH_QUERY = None
+
+FIRST_FETCH_TIMESTAMP = demisto.params().get('first_fetch_timestamp', '').strip()
+if not FIRST_FETCH_TIMESTAMP:
+ FIRST_FETCH_TIMESTAMP = '24 hours'
+
+if not demisto.params().get('proxy', False):
+ os.environ.pop('HTTP_PROXY', '')
+ os.environ.pop('HTTPS_PROXY', '')
+ os.environ.pop('http_proxy', '')
+ os.environ.pop('https_proxy', '')
+
+FETCH_QUERY_DICT = {
+ 'Traps Threats': 'SELECT * FROM tms.threat',
+ 'Firewall Threats': 'SELECT * FROM panw.threat',
+ 'Cortex XDR Analytics': 'SELECT * FROM magnifier.alert'
+}
+
+THREAT_TABLE_HEADERS = [
+ 'id', 'score', 'risk-of-app', 'type', 'action', 'app', 'pcap_id', 'proto', 'dst', 'reportid',
+ 'rule', 'category-of-threatid', 'characteristic-of-app', 'device_name', 'subtype',
+ 'time_received', 'pcap', 'name-of-threatid', 'severity', 'nat', 'natdport', 'natdst',
+ 'natsrc', 'src', 'category-of-app', 'srcloc', 'dstloc', 'category', 'SHA256', 'filetype', 'filename'
+]
+
+TRAFFIC_TABLE_HEADERS = [
+ 'id', 'score', 'aggregations.size', 'action', 'app', 'proto', 'dst', 'rule', 'characteristic-of-app',
+ 'device_name', 'risk-of-app', 'natsport', 'start', 'subcategory-of-app', 'time_received',
+ 'nat', 'natdport', 'natdst', 'natsrc', 'src', 'category-of-app', 'srcloc', 'dstloc'
+]
+
+COMMON_HEADERS = [
+ 'id', 'score', 'action', 'app', 'proto', 'dst', 'rule', 'characteristic-of-app', 'device_name',
+ 'nat', 'natdport', 'natdst', 'natsrc', 'src', 'category-of-app', 'srcloc', 'dstloc', 'filetype',
+ 'SHA256', 'filename'
+]
+
+''' HELPER FUNCTIONS '''
+
+
+def get_encrypted(auth_id: str, key: str) -> str:
+ """
+
+ Args:
+ auth_id (str): auth_id from Demistobot
+ key (str): key from Demistobot
+
+ Returns:
+
+ """
+ def create_nonce() -> bytes:
+ return os.urandom(12)
+
+ def encrypt(string: str, enc_key: str) -> bytes:
+ """
+
+ Args:
+ enc_key (str):
+ string (str):
+
+ Returns:
+ bytes:
+ """
+ # String to bytes
+ enc_key = enc_key.encode()
+ # Create key
+ aes_gcm = AESGCM(enc_key)
+ # Create nonce
+ nonce = create_nonce()
+ # Create ciphered data
+ data = string.encode()
+ ct = aes_gcm.encrypt(nonce, data, None)
+ return base64.b64encode(nonce + ct)
+ now = epoch_seconds()
+ return encrypt(f'{now}:{auth_id}', key).decode('utf-8')
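+# Illustrative round trip (not part of this integration): the receiving side is
+# assumed to split the base64 payload back into the 12-byte nonce and ciphertext:
+#   raw = base64.b64decode(token)
+#   plaintext = AESGCM(enc_key.encode()).decrypt(raw[:12], raw[12:], None)  # b'<epoch>:<auth_id>'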
+
+
+def prepare_fetch_query(fetch_timestamp):
+ query = FETCH_QUERY_DICT[demisto.params().get('fetch_query', 'Traps Threats')]
+ if 'tms' in query:
+ query += f" WHERE serverTime>'{fetch_timestamp}'"
+ FETCH_SEVERITY = demisto.params().get('traps_severity')
+ if not FETCH_SEVERITY:
+ FETCH_SEVERITY = ['all']
+ if 'all' not in FETCH_SEVERITY:
+ query += ' AND ('
+ for index, severity in enumerate(FETCH_SEVERITY):
+ if index == (len(FETCH_SEVERITY) - 1):
+ query += f"messageData.trapsSeverity='{severity}'"
+ else:
+ query += f"messageData.trapsSeverity='{severity}' OR "
+ query += ')'
+ if 'panw' in query:
+ query += f' WHERE receive_time>{fetch_timestamp}'
+ FETCH_SEVERITY = demisto.params().get('firewall_severity')
+ if not FETCH_SEVERITY:
+ FETCH_SEVERITY = ['all']
+ FETCH_SUBTYPE = demisto.params().get('firewall_subtype')
+ if not FETCH_SUBTYPE:
+ FETCH_SUBTYPE = ['all']
+ if 'all' not in FETCH_SUBTYPE:
+ query += ' AND ('
+ for index, subtype in enumerate(FETCH_SUBTYPE):
+ if index == (len(FETCH_SUBTYPE) - 1):
+ query += f"subtype='{subtype}'"
+ else:
+ query += f"subtype='{subtype}' OR "
+ query += ')'
+ if 'all' not in FETCH_SEVERITY:
+ query += ' AND ('
+ for index, severity in enumerate(FETCH_SEVERITY):
+ if index == (len(FETCH_SEVERITY) - 1):
+ query += f"severity='{severity}'"
+ else:
+ query += f"severity='{severity}' OR "
+ query += ')'
+ if 'magnifier' in query:
+ query += f' WHERE time_generated>{fetch_timestamp}'
+ FETCH_SEVERITY = demisto.params().get('xdr_severity')
+ if not FETCH_SEVERITY:
+ FETCH_SEVERITY = ['all']
+ FETCH_CATEGORY = demisto.params().get('xdr_category')
+ if not FETCH_CATEGORY:
+ FETCH_CATEGORY = ['all']
+ if 'all' not in FETCH_CATEGORY:
+ query += ' AND ('
+ for index, subtype in enumerate(FETCH_CATEGORY):
+ if index == (len(FETCH_CATEGORY) - 1):
+ query += f"alert.category.keyword='{subtype}'"
+ else:
+ query += f"alert.category.keyword='{subtype}' OR "
+ query += ')'
+ if 'all' not in FETCH_SEVERITY:
+ query += ' AND ('
+ for index, severity in enumerate(FETCH_SEVERITY):
+ if index == (len(FETCH_SEVERITY) - 1):
+ query += f"alert.severity.keyword='{severity}'"
+ else:
+ query += f"alert.severity.keyword='{severity}' OR "
+ query += ')'
+ # Only get new Alerts
+ query += ' AND sub_type.keyword = \'New\''
+ return query
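+# For illustration (with assumed parameter values): fetch_query='Firewall Threats',
+# firewall_subtype=['url'] and firewall_severity=['critical'] would build:
+#   SELECT * FROM panw.threat WHERE receive_time>1546300800 AND (subtype='url') AND (severity='critical')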
+
+
+def epoch_seconds(d=None):
+ """
+ Return the number of seconds for given date. If no date, return current.
+
+ parameter: (date) d
+ The date to convert to seconds
+
+ returns:
+ The date in seconds
+ """
+ if not d:
+ d = datetime.utcnow()
+ return int((d - datetime.utcfromtimestamp(0)).total_seconds())
+
+
+def get_access_token():
+ integration_context = demisto.getIntegrationContext()
+ access_token = integration_context.get('access_token')
+ stored = integration_context.get('stored')
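+    # Access tokens are assumed to be valid for about an hour; refresh 30 seconds
+    # before the hour is up so a token never expires mid-request.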
+ if access_token and stored:
+ if epoch_seconds() - stored < 60 * 60 - 30:
+ return access_token
+ headers = {
+ 'Authorization': AUTH_ID,
+ 'Accept': 'application/json'
+ }
+
+ dbot_response = requests.get(
+ TOKEN_RETRIEVAL_URL,
+ headers=headers,
+ params={'token': get_encrypted(TOKEN, ENC_KEY)},
+ verify=USE_SSL
+ )
+ if dbot_response.status_code not in {200, 201}:
+ msg = 'Error in authentication. Try checking the credentials you entered.'
+ try:
+ demisto.info('Authentication failure from server: {} {} {}'.format(
+ dbot_response.status_code, dbot_response.reason, dbot_response.text))
+ err_response = dbot_response.json()
+ server_msg = err_response.get('message')
+ if not server_msg:
+ title = err_response.get('title')
+ detail = err_response.get('detail')
+ if title:
+ server_msg = f'{title}. {detail}'
+ if server_msg:
+ msg += ' Server message: {}'.format(server_msg)
+ except Exception as ex:
+            demisto.error('Failed parsing error response: [{}]. Exception: {}'.format(dbot_response.content, ex))
+ raise Exception(msg)
+ try:
+ parsed_response = dbot_response.json()
+ except ValueError:
+ raise Exception(
+ 'There was a problem in retrieving an updated access token.\n'
+ 'The response from the Demistobot server did not contain the expected content.'
+ )
+ access_token = parsed_response.get('access_token')
+ api_url = parsed_response.get('url')
+ token = parsed_response.get('token')
+
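+    # Cache the refreshed credentials and API URL in the integration context so
+    # subsequent runs reuse them until the token nears expiry.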
+ demisto.setIntegrationContext({
+ 'access_token': access_token,
+ 'stored': epoch_seconds(),
+ 'api_url': api_url,
+ 'token': token
+ })
+ return access_token
+
+
+def initial_logging_service():
+ api_url = demisto.getIntegrationContext().get('api_url', 'https://api.us.paloaltonetworks.com')
+ credentials = Credentials(
+ access_token=get_access_token(),
+ verify=USE_SSL
+ )
+ logging_service = LoggingService(
+ url=api_url,
+ credentials=credentials
+ )
+
+ return logging_service
+
+
+def poll_query_result(query_id):
+
+ logging_service = initial_logging_service()
+
+ poll_params = { # Prepare 'poll' params
+ "maxWaitTime": 30000 # waiting for response up to 3000ms
+ }
+
+ # we poll the logging service until we have a complete response
+ response = logging_service.poll(query_id, 0, poll_params)
+
+ return response
+
+
+def query_loggings(query_data):
+ """
+ This function handles all the querying of Cortex Logging service
+ """
+
+ logging_service = initial_logging_service()
+
+ response = logging_service.query(query_data)
+ query_result = response.json()
+
+ if not response.ok:
+ status_code = query_result.get('statusCode', '')
+ error = query_result.get('error', '')
+ message = query_result.get('payload', {}).get('message', '')
+ raise Exception(f"Error in query to Cortex [{status_code}] - {error}: {message}")
+
+ try:
+ query_id = query_result['queryId'] # access 'queryId' from 'query' response
+ except Exception as e:
+ raise Exception('Received error %s when querying logs.' % e)
+
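+    # The Logging Service API is asynchronous: 'query' only returns a queryId,
+    # and the actual records are fetched by polling that ID.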
+ poll_response = poll_query_result(query_id)
+ return poll_response
+
+
+def transform_row_keys(row):
+ transformed_row = {}
+ for metric, value in row.items():
+ if metric == 'filedigest':
+ transformed_row['SHA256'] = value
+ elif metric == 'misc':
+ transformed_row['filename'] = value
+ elif metric == 'category' and str(value) == '1':
+ transformed_row['category'] = 'malicious'
+ else:
+ transformed_row[metric] = value
+ return transformed_row
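+# Example (illustrative input): {'filedigest': 'ab12...', 'misc': 'evil.exe', 'category': '1'}
+# becomes {'SHA256': 'ab12...', 'filename': 'evil.exe', 'category': 'malicious'}.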
+
+
+def results_screener(table_name, full_results):
+ """
+ This function is used to make sure we include only pre-defined metrics in the human readable
+ """
+ screened_results = []
+
+ if table_name == "traffic":
+ for row in full_results:
+ screened_row = {metric: value for metric, value in row.items() if metric in TRAFFIC_TABLE_HEADERS}
+ screened_results.append(screened_row)
+ elif table_name == "threat":
+ for row in full_results:
+ screened_row = {metric: value for metric, value in row.items() if metric in THREAT_TABLE_HEADERS}
+ screened_results.append(screened_row)
+ elif table_name == "common":
+ for row in full_results:
+ screened_row = {metric: value for metric, value in row.items() if metric in COMMON_HEADERS}
+ screened_results.append(screened_row)
+ else:
+ return full_results
+
+ return screened_results
+
+
+def get_start_time(date_type, time_value):
+ current_date = datetime.now()
+ if date_type == 'minutes':
+ return current_date - timedelta(minutes=time_value)
+ elif date_type == 'days':
+ return current_date - timedelta(days=time_value)
+ elif date_type == 'weeks':
+ return current_date - timedelta(weeks=time_value)
+
+
+def convert_log_to_incident(log):
+ log_contents = log.get('_source')
+ if log_contents.get('id'):
+ log_contents['xdr_id'] = log_contents.get('id') # XDR ID before it is overwritten
+ log_contents['id'] = log.get('_id')
+ log_contents['score'] = log.get('_score')
+ if 'Traps' in FETCH_QUERY: # type: ignore
+ occurred = log_contents.get('generatedTime')
+ time_received = log_contents.get('serverTime')
+ elif 'Firewall' in FETCH_QUERY: # type: ignore
+ time_generated = log_contents.get('time_generated')
+ occurred = datetime.utcfromtimestamp(time_generated).isoformat() + 'Z'
+ time_received = log_contents.get('receive_time')
+ elif 'XDR' in FETCH_QUERY: # type: ignore
+ # first_detected_at in alert.schedule can be present or not, can be in s or ms
+ # if not detected, fallback to time_generated
+        try:
+            # 'or 0' inside int() guards against a missing (None) time_generated, which would raise TypeError
+            time_received = int(log_contents.get('time_generated') or 0)
+        except ValueError:
+            time_received = 0
+
+ occurred_raw = 0
+ first_detected_at = None
+ try:
+ first_detected_at = str(log_contents.get('alert', {}).get('schedule', {}).get('first_detected_at'))
+ except AttributeError:
+ first_detected_at = None
+ if first_detected_at is not None:
+ if len(first_detected_at) == 13: # ms
+ occurred_raw = int(float(first_detected_at) / 1000)
+ elif len(first_detected_at) == 10: # s
+ occurred_raw = int(first_detected_at)
+ else: # unknown length, fallback to time_received
+ occurred_raw = int(time_received)
+ else: # not present, fallback to time_received
+ occurred_raw = int(time_received)
+ occurred = datetime.utcfromtimestamp(occurred_raw).isoformat() + 'Z'
+
+ # stringifying dictionary values for fetching. (json.dumps() doesn't stringify dictionary values)
+ event_id = log.get('_id', '')
+ incident = {
+ 'name': 'Cortex Event ' + event_id,
+ 'rawJSON': json.dumps(log_contents, ensure_ascii=False),
+ 'occurred': occurred
+ }
+ return incident, time_received
+
+
+''' COMMANDS FUNCTIONS '''
+
+
+def query_logs_command():
+ """
+ Return the result of querying the Logging service
+ """
+ args = demisto.args()
+ start_time = args.get('startTime')
+ end_time = args.get('endTime')
+ time_range = args.get('timeRange')
+ time_value = args.get('rangeValue')
+
+ if time_range:
+ if time_value:
+ service_end_date = datetime.now()
+ service_start_date = get_start_time(time_range, int(time_value))
+ else:
+            raise Exception('Enter timeRange and rangeValue, or startTime and endTime')
+ else:
+ time_format = '%Y-%m-%d %H:%M:%S'
+        # Note: a date like 'Thu Jan 01 02:00:00 IST 1970' does not match the '%Y-%m-%d %H:%M:%S' format and will raise ValueError
+ service_start_date = datetime.strptime(start_time, time_format)
+ service_end_date = datetime.strptime(end_time, time_format)
+
+ # transforms datetime object to epoch time
+ service_start_date_epoch = int(service_start_date.strftime('%s'))
+ service_end_date_epoch = int(service_end_date.strftime('%s'))
+
+ query = args.get('query')
+
+ if 'limit' not in query.lower():
+ query += ' LIMIT 100'
+
+ query_data = {
+ "query": query,
+ "startTime": service_start_date_epoch,
+ "endTime": service_end_date_epoch,
+ }
+
+ response = query_loggings(query_data)
+
+ try:
+ response_json = response.json()
+ query_status = response_json.get('queryStatus', '')
+ if query_status in {'RUNNING', 'JOB_FAILED'}:
+ raise Exception(f'Logging query job failed with status: {query_status}')
+ result = response_json.get('result', {})
+ pages = result.get('esResult', {}).get('hits', {}).get('hits', [])
+ table_name = result['esQuery']['table'][0].split('.')[1]
+ except ValueError:
+ raise Exception('Failed to parse the response from Cortex')
+
+ output = []
+
+ for page in pages:
+ row_contents = page.get('_source')
+ row_contents['id'] = page.get('_id')
+ row_contents['score'] = page.get('_score')
+ transformed_row = transform_row_keys(row_contents)
+ output.append(transformed_row)
+
+ screened_results = results_screener('common', output)
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': output,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Logs ' + table_name + ' table', screened_results),
+ 'EntryContext': {
+ 'Cortex.Logging(val.id===obj.id)': output
+ }
+ }
+
+ return entry
+
+
+def get_critical_logs_command():
+ """
+ Queries Cortex Logging according to a pre-set query
+ """
+
+ args = demisto.args()
+
+ start_time = args.get('startTime')
+ end_time = args.get('endTime')
+ value = args.get('logsAmount')
+ time_range = args.get('timeRange')
+ time_value = args.get('rangeValue')
+
+ if time_range:
+ if time_value:
+ service_end_date = datetime.now()
+ service_start_date = get_start_time(time_range, int(time_value))
+ else:
+            raise Exception('Enter timeRange and rangeValue, or startTime and endTime')
+ else:
+ # parses user input to datetime object
+ service_start_date = datetime.strptime(start_time, "%Y-%m-%d %H:%M:%S")
+ service_end_date = datetime.strptime(end_time, "%Y-%m-%d %H:%M:%S")
+
+ # transforms datetime object to epoch time
+ service_start_date_epoch = int(service_start_date.strftime("%s"))
+ service_end_date_epoch = int(service_end_date.strftime("%s"))
+
+ api_query = "SELECT * FROM panw.threat WHERE severity = '5' LIMIT " + value
+
+ query_data = {
+ "query": api_query,
+ "startTime": service_start_date_epoch,
+ "endTime": service_end_date_epoch,
+ }
+
+ response = query_loggings(query_data)
+
+ try:
+ result = response.json()['result']
+ pages = result.get('esResult', {}).get('hits', {}).get('hits', [])
+ table_name = result['esQuery']['table'][0].split('.')[1]
+ except ValueError:
+ raise Exception('Failed to parse the response from Cortex')
+
+ output = []
+
+ for page in pages:
+ row_contents = page.get('_source')
+ row_contents['id'] = page.get('_id')
+ row_contents['score'] = page.get('_score')
+ transformed_row = transform_row_keys(row_contents)
+ output.append(transformed_row)
+
+ screened_results = results_screener('threat', output)
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': output,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Logs ' + table_name + ' table', screened_results),
+ 'EntryContext': {
+ 'Cortex.Logging(val.id==obj.id)': output
+ }
+ }
+ return entry
+
+
+def get_social_applications_command():
+ """ Queries Cortex Logging according to a pre-set query """
+
+ args = demisto.args()
+
+ start_time = args.get('startTime')
+ end_time = args.get('endTime')
+ value = args.get('logsAmount')
+ time_range = args.get('timeRange')
+ time_value = args.get('rangeValue')
+
+ if time_range:
+ if time_value:
+ service_end_date = datetime.now()
+ service_start_date = get_start_time(time_range, int(time_value))
+ else:
+            raise Exception('Enter timeRange and rangeValue, or startTime and endTime')
+ else:
+ # parses user input to datetime object
+ service_start_date = datetime.strptime(start_time, "%Y-%m-%d %H:%M:%S")
+ service_end_date = datetime.strptime(end_time, "%Y-%m-%d %H:%M:%S")
+
+ # transforms datetime object to epoch time
+ service_start_date_epoch = int(service_start_date.strftime("%s"))
+ service_end_date_epoch = int(service_end_date.strftime("%s"))
+
+ api_query = "SELECT * FROM panw.traffic WHERE subcategory-of-app = 'social-networking' LIMIT " + value
+
+ query_data = {
+ "query": api_query,
+ "startTime": service_start_date_epoch,
+ "endTime": service_end_date_epoch,
+ }
+
+ response = query_loggings(query_data)
+
+ try:
+ result = response.json()['result']
+ pages = result.get('esResult', {}).get('hits', {}).get('hits', [])
+ table_name = result['esQuery']['table'][0].split('.')[1]
+ except ValueError:
+ raise Exception('Failed to parse the response from Cortex')
+
+ output = []
+
+ for page in pages:
+ row_contents = page.get('_source')
+ row_contents['id'] = page.get('_id')
+ row_contents['score'] = page.get('_score')
+ transformed_row = transform_row_keys(row_contents)
+ output.append(transformed_row)
+
+ screened_results = results_screener('traffic', output)
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': output,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Logs ' + table_name + ' table', screened_results),
+ 'EntryContext': {
+ 'Cortex.Logging(val.id===obj.id)': output
+ }
+ }
+ return entry
+
+
+def search_by_file_hash_command():
+ """
+ Queries Cortex Logging according to a pre-set query
+ """
+
+ args = demisto.args()
+
+ start_time = args.get('startTime')
+ end_time = args.get('endTime')
+ value = args.get('logsAmount')
+ time_range = args.get('timeRange')
+ time_value = args.get('rangeValue')
+ filehash = args.get('SHA256')
+
+    if time_range:
+        if time_value:
+            service_end_date = datetime.now()
+            service_start_date = get_start_time(time_range, int(time_value))
+        else:
+            raise Exception('Enter timeRange and rangeValue, or startTime and endTime')
+ else:
+ # parses user input to datetime object
+ service_start_date = datetime.strptime(start_time, "%Y-%m-%d %H:%M:%S")
+ service_end_date = datetime.strptime(end_time, "%Y-%m-%d %H:%M:%S")
+
+ # transforms datetime object to epoch time
+ service_start_date_epoch = int(service_start_date.strftime("%s"))
+ service_end_date_epoch = int(service_end_date.strftime("%s"))
+
+ api_query = "SELECT * FROM panw.threat WHERE filedigest='" + filehash + "' LIMIT " + value
+
+ query_data = {
+ "query": api_query,
+ "startTime": service_start_date_epoch,
+ "endTime": service_end_date_epoch,
+ }
+
+ response = query_loggings(query_data)
+
+ try:
+ result = response.json()['result']
+ pages = result.get('esResult', {}).get('hits', {}).get('hits', [])
+ table_name = result['esQuery']['table'][0].split('.')[1]
+ except ValueError:
+ raise Exception('Failed to parse the response from Cortex')
+
+ output = []
+
+ for page in pages:
+ row_contents = page.get('_source')
+ row_contents['id'] = page.get('_id')
+ row_contents['score'] = page.get('_score')
+ transformed_row = transform_row_keys(row_contents)
+ output.append(transformed_row)
+
+ screened_results = results_screener('threat', output)
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': output,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Logs ' + table_name + ' table', screened_results),
+ 'EntryContext': {
+ 'Cortex.Logging(val.id==obj.id)': output
+ }
+ }
+ return entry
+
+
+def process_incident_pairs(incident_pairs, max_incidents):
+ sorted_pairs = sorted(incident_pairs, key=lambda x: x[1])
+ sorted_pairs = sorted_pairs[:max_incidents]
+ max_timestamp = sorted_pairs[-1][1]
+ return list(map(lambda x: x[0], sorted_pairs)), max_timestamp
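+# Illustrative example: given [(inc_a, t3), (inc_b, t1), (inc_c, t2)] with t1 < t2 < t3
+# and max_incidents=2, this returns ([inc_b, inc_c], t2): the two oldest incidents
+# plus the newest timestamp among those returned.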
+
+
+def fetch_incidents():
+
+ last_run = demisto.getLastRun()
+ last_fetched_event_timestamp = last_run.get('last_fetched_event_timestamp')
+ last_query_id = last_run.get('last_query_id')
+
+ if last_query_id:
+        # Need to poll query results from last run
+ response = poll_query_result(last_query_id)
+ else:
+ if last_fetched_event_timestamp is not None:
+ last_fetched_event_timestamp = datetime.strptime(last_fetched_event_timestamp, '%Y-%m-%dT%H:%M:%S.%f')
+ else:
+ last_fetched_event_timestamp, _ = parse_date_range(FIRST_FETCH_TIMESTAMP)
+
+        # Use an end time in the future, so the effective upper bound is taken from the query
+ service_end_date_epoch = int(datetime.now().strftime('%s')) + 1000
+
+ if 'Firewall' in FETCH_QUERY or 'XDR' in FETCH_QUERY: # type: ignore
+ fetch_timestamp = int(last_fetched_event_timestamp.strftime('%s'))
+ elif 'Traps' in FETCH_QUERY: # type: ignore
+ fetch_timestamp = last_fetched_event_timestamp.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
+
+ query = prepare_fetch_query(fetch_timestamp)
+
+ query_data = {
+ 'query': query,
+ 'startTime': 0,
+ 'endTime': service_end_date_epoch,
+ }
+
+ response = query_loggings(query_data)
+
+ try:
+ response_json = response.json()
+ query_status = response_json.get('queryStatus', '')
+ if query_status == 'JOB_FAILED':
+ raise Exception(f'Logging query job failed with status: JOB_FAILED\nResponse: {response.text}')
+ elif query_status == 'RUNNING':
+ if isinstance(last_fetched_event_timestamp, datetime):
+ # In case we don't have query ID from previous run
+ last_fetched_event_timestamp = last_fetched_event_timestamp.strftime('%Y-%m-%dT%H:%M:%S.%f')
+ # If query job is still running after 30 seconds (max timeout), pass it to next run
+ demisto.setLastRun({
+ 'last_fetched_event_timestamp': last_fetched_event_timestamp,
+ 'last_query_id': response_json.get('queryId', '')
+ })
+ demisto.incidents([])
+ return
+ result = response_json.get('result', {})
+ pages = result.get('esResult', {}).get('hits', {}).get('hits', [])
+ except ValueError:
+ raise Exception('Failed to parse the response from Cortex')
+
+ incident_pairs = []
+
+ max_fetched_event_timestamp = last_fetched_event_timestamp
+ for page in pages:
+ incident, time_received = convert_log_to_incident(page)
+ if 'Firewall' in FETCH_QUERY or 'XDR' in FETCH_QUERY: # type: ignore
+ time_received_dt = datetime.fromtimestamp(time_received)
+ elif 'Traps' in FETCH_QUERY: # type: ignore
+ time_received_dt = datetime.strptime(time_received, '%Y-%m-%dT%H:%M:%S.%fZ')
+ incident_pairs.append((incident, time_received_dt))
+ if incident_pairs:
+ incidents, max_fetched_event_timestamp = process_incident_pairs(incident_pairs, 100) # max 100 per run
+ demisto.setLastRun({
+ 'last_fetched_event_timestamp': max_fetched_event_timestamp.strftime('%Y-%m-%dT%H:%M:%S.%f')
+ })
+ demisto.incidents(incidents)
+ else:
+ demisto.incidents([])
+
+
+''' EXECUTION CODE '''
+
+
+def main():
+ global FETCH_QUERY
+ FETCH_QUERY = demisto.params().get('fetch_query', 'Traps Threats')
+
+ LOG('command is %s' % (demisto.command(),))
+ try:
+ if demisto.command() == 'test-module':
+ if demisto.params().get('isFetch'):
+ last_fetched_event_timestamp, _ = parse_date_range(FIRST_FETCH_TIMESTAMP)
+ test_args = {
+ "query": "SELECT * FROM panw.threat LIMIT 1",
+ "startTime": 0,
+ "endTime": 1609459200,
+ }
+ if query_loggings(test_args):
+ demisto.results('ok')
+ else:
+ demisto.results('test failed')
+ elif demisto.command() == 'cortex-query-logs':
+ demisto.results(query_logs_command())
+ elif demisto.command() == 'cortex-get-critical-threat-logs':
+ demisto.results(get_critical_logs_command())
+ elif demisto.command() == 'cortex-get-social-applications':
+ demisto.results(get_social_applications_command())
+ elif demisto.command() == 'cortex-search-by-file-hash':
+ demisto.results(search_by_file_hash_command())
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+ except Exception as e:
+ error_message = str(e)
+ if demisto.command() == 'fetch-incidents':
+ LOG(error_message)
+ LOG.print_log()
+ raise
+ else:
+ return_error(error_message)
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortex.yml b/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortex.yml
new file mode 100644
index 000000000000..75254cb10656
--- /dev/null
+++ b/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortex.yml
@@ -0,0 +1,638 @@
+category: Analytics & SIEM
+commonfields:
+ id: Palo Alto Networks Cortex
+ version: -1
+configuration:
+- display: Authentication Token
+ name: token
+ required: true
+ type: 4
+- display: Authentication ID
+ name: auth_id
+ required: true
+ type: 4
+- display: Authentication Key
+ name: auth_key
+ required: true
+ type: 4
+- defaultvalue: 'false'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- defaultvalue: 'false'
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: Traps Threats
+ display: Query for fetching events
+ name: fetch_query
+ options:
+ - Traps Threats
+ - Firewall Threats
+ - Cortex XDR Analytics
+ required: false
+ type: 15
+- defaultvalue: 24 hours
+  display: First fetch time (<number> <time unit>, e.g., 12 hours, 7 days, 3 months,
+    1 year)
+ name: first_fetch_timestamp
+ required: false
+ type: 0
+- display: Severity of events to fetch (Traps)
+ name: traps_severity
+ options:
+ - all
+ - critical
+ - high
+ - medium
+ - low
+ - informational
+ - configuration
+ required: false
+ type: 16
+- display: Severity of events to fetch (Firewall)
+ name: firewall_severity
+ options:
+ - all
+ - critical
+ - high
+ - medium
+ - low
+ - informational
+ - unused
+ required: false
+ type: 16
+- display: Subtype of events to fetch (Firewall)
+ name: firewall_subtype
+ options:
+ - all
+ - url
+ - antivirus
+ - spyware
+ - vulnerability
+ - file
+ - scan
+ - flood
+ - packet
+ - resource
+ - data
+ - wildfire
+ - wildfire-virus
+ required: false
+ type: 16
+- display: Severity of alerts to fetch (XDR Analytics)
+ name: xdr_severity
+ defaultvalue: ""
+ type: 16
+ required: false
+ options:
+ - all
+ - High
+ - Medium
+ - Low
+ - Info
+- display: Category of alerts to fetch (XDR Analytics)
+ name: xdr_category
+ defaultvalue: ""
+ type: 16
+ required: false
+ options:
+ - all
+ - Consecutive Connections
+ - DNS Tunneling
+ - Failed Connections
+ - Failed DNS
+ - Grayware
+ - High Connection Rate
+ - Large Upload (FTP)
+ - Large Upload (Generic)
+ - Large Upload (HTTPS)
+ - Large Upload (SMTP)
+ - Malware
+ - New Administrative Behavior
+ - Port Scan
+ - Random Looking DNS
+ - Recurring Rare Domain Access
+ - Recurring Rare IP Access
+ - Remote Command Execution
+ - Reverse Connection
+ - SMB/KRB Traffic from Non-Standard Process
+ - Script Connecting to Rare External Host
+ - SpamBot Traffic
+ - Tunneling Process
+ - Uncommon ARP Cache Listing via arp.exe
+ - Uncommon IP Configuration Listing via ipconfig.exe
+ - Uncommon Local Scheduled Task Creation via schtasks.exe
+ - Uncommon Net Group Execution
+ - Uncommon Net User
+ - Uncommon Remote Scheduled Task Creation via schtasks.exe
+ - Uncommon Routing Table Listing via route.exe
+ - Uncommon net localgroup Execution
+ - scrons.exe Rare Child Process
+ - wmiprsve.exe Rare Child Process
+ - wsmprovhost.exe Rare Child Process
+description: This integration manages all of Palo Alto Networks' cloud-managed products
+display: Palo Alto Networks Cortex
+name: Palo Alto Networks Cortex
+script:
+ commands:
+ - arguments:
+ - default: false
+ defaultValue: '1970-01-01 00:00:00'
+ description: The query start time. For example, startTime="2018-04-26 00:00:00"
+ isArray: false
+ name: startTime
+ required: false
+ secret: false
+ - default: true
+ defaultValue: '2020-01-01 00:00:00'
+ description: The query end time. For example, endTime="2018-04-26 00:00:00"
+ isArray: false
+ name: endTime
+ required: false
+ secret: false
+ - default: false
+ defaultValue: select * from panw.traffic limit 5
+ description: 'A free-text SQL query. For example, query="select * from panw.traffic
+ limit 5". There are multiple tables in Loggings, for example: threat, traffic,
+ and so on. Refer to the Cortex Logging service schema reference for the full
+ list.'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The time range for the query, used with the rangeValue argument.
+ For example, timeRange="weeks" timeValue="1" would run the query on the previous
+ week.
+ isArray: false
+ name: timeRange
+ predefined:
+ - minutes
+ - days
+ - weeks
+ required: false
+ secret: false
+ - default: false
+ description: The time value for the query, used with the timeRange argument.
+ For example, timeRange="weeks" rangeValue="1" would run the query on the previous
+ week.
+ isArray: false
+ name: rangeValue
+ required: false
+ secret: false
+ deprecated: false
+ description: Runs a query on the Cortex logging service.
+ execution: false
+ name: cortex-query-logs
+ outputs:
+ - contextPath: Cortex.Logging.id
+ description: The ID of the log.
+ type: string
+ - contextPath: Cortex.Logging.score
+ description: The score of the log.
+ type: number
+ - contextPath: Cortex.Logging.action
+ description: The action of the log.
+ type: Unknown
+ - contextPath: Cortex.Logging.app
+ description: The app of the log.
+ type: Unknown
+ - contextPath: Cortex.Logging.proto
+ description: The protocol used.
+ type: string
+ - contextPath: Cortex.Logging.dst
+ description: The destination IP.
+ type: string
+ - contextPath: Cortex.Logging.rule
+ description: The rule used.
+ type: Unknown
+ - contextPath: Cortex.Logging.src
+ description: The source of the action.
+ type: Unknown
+ - contextPath: Cortex.Logging.category-of-app
+ description: The category of the application.
+ type: string
+ - contextPath: Cortex.Logging.srcloc
+ description: The source location.
+ type: string
+ - contextPath: Cortex.Logging.dstloc
+ description: The destination location.
+ type: string
+ - contextPath: Cortex.Logging.characteristic-of-app
+ description: The application's characteristics.
+ type: Unknown
+ - contextPath: Cortex.Logging.device_name
+ description: The name of the device.
+ type: string
+ - contextPath: Cortex.Logging.nat
+ description: Whether NAT was used.
+ type: number
+ - contextPath: Cortex.Logging.natdport
+    description: The NAT destination port.
+ type: Unknown
+ - contextPath: Cortex.Logging.natdst
+ description: The NAT destination.
+ type: Unknown
+ - contextPath: Cortex.Logging.natsrc
+ description: The NAT source.
+ type: Unknown
+ - arguments:
+ - default: false
+ defaultValue: '1970-01-01 00:00:00'
+ description: The query start time. For example, startTime="2018-04-26 00:00:00"
+ isArray: false
+ name: startTime
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '2020-01-01 00:00:00'
+ description: The query end time. For example, endTime="2018-04-26 00:00:00"
+ isArray: false
+ name: endTime
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: The number of logs to return. Default is 10
+ isArray: false
+ name: logsAmount
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The time range for the query, used with the rangeValue argument.
+ For example, timeRange="weeks" rangeValue="1" would run the query on the previous
+ week.
+ isArray: false
+ name: timeRange
+ predefined:
+ - minutes
+ - days
+ - weeks
+ required: false
+ secret: false
+ - default: false
+ description: The time value for the query, used with the timeRange argument.
+ For example, timeRange="weeks" rangeValue="1" would run the query on the previous
+ week.
+ isArray: false
+ name: rangeValue
+ required: false
+ secret: false
+ deprecated: false
+ description: Runs a query on the Cortex logging service, according to preset queries.
+ execution: false
+ name: cortex-get-critical-threat-logs
+ outputs:
+ - contextPath: Cortex.Logging.id
+ description: The ID of the log.
+ type: string
+ - contextPath: Cortex.Logging.score
+ description: The score of the log.
+ type: number
+ - contextPath: Cortex.Logging.action
+ description: The action of the log.
+ type: Unknown
+ - contextPath: Cortex.Logging.app
+ description: The app of the log.
+ type: Unknown
+ - contextPath: Cortex.Logging.proto
+ description: The protocol used.
+ type: string
+ - contextPath: Cortex.Logging.dst
+ description: The destination IP.
+ type: string
+ - contextPath: Cortex.Logging.rule
+ description: The rule used.
+ type: Unknown
+ - contextPath: Cortex.Logging.src
+ description: The source of the action.
+ type: Unknown
+ - contextPath: Cortex.Logging.category-of-app
+ description: The category of the application.
+ type: string
+ - contextPath: Cortex.Logging.srcloc
+ description: The source location.
+ type: string
+ - contextPath: Cortex.Logging.dstloc
+ description: The destination location.
+ type: string
+ - contextPath: Cortex.Logging.characteristic-of-app
+ description: The application's characteristics.
+ type: Unknown
+ - contextPath: Cortex.Logging.device_name
+ description: The name of the device.
+ type: string
+ - contextPath: Cortex.Logging.nat
+ description: Whether NAT was used.
+ type: number
+ - contextPath: Cortex.Logging.natdport
+    description: The NAT destination port.
+ type: Unknown
+ - contextPath: Cortex.Logging.natdst
+ description: The NAT destination.
+ type: Unknown
+ - contextPath: Cortex.Logging.natsrc
+ description: The NAT source.
+ type: Unknown
+ - contextPath: Cortex.Logging.risk-of-app
+ description: The risk of the application.
+ type: Unknown
+ - contextPath: Cortex.Logging.type
+ description: The threat type.
+ type: Unknown
+  - contextPath: Cortex.Logging.pcap_id
+ description: The PCAP ID.
+ type: Unknown
+ - contextPath: Cortex.Logging.reportid
+ description: The report ID.
+ type: number
+ - contextPath: Cortex.Logging.category-of-threatid
+ description: The category of the threat ID.
+ type: Unknown
+ - contextPath: Cortex.Logging.subtype
+ description: The threat sub-type.
+ type: Unknown
+ - contextPath: Cortex.Logging.time_received
+ description: The time the logging was received.
+ type: Unknown
+ - contextPath: Cortex.Logging.pcap
+ description: The PCAP.
+ type: Unknown
+ - contextPath: Cortex.Logging.name-of-threatid
+ description: The name of the threat ID.
+ type: string
+ - contextPath: Cortex.Logging.severity
+ description: The threat severity.
+ type: Unknown
+ - arguments:
+ - default: false
+ defaultValue: '1970-01-01 00:00:00'
+ description: Query start time. For example, startTime="2018-04-26 00:00:00"
+ isArray: false
+ name: startTime
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '2020-01-01 00:00:00'
+    description: The query end time. For example, endTime="2018-04-26 00:00:00".
+ isArray: false
+ name: endTime
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+    description: The number of logs to return. Default is 10.
+ isArray: false
+ name: logsAmount
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The time range for the query, used with the rangeValue argument.
+ For example, timeRange="weeks" rangeValue="1" would run the query on the previous
+ week.
+ isArray: false
+ name: timeRange
+ predefined:
+ - minutes
+ - days
+ - weeks
+ required: false
+ secret: false
+ - default: false
+ description: The time value for the query, used with the timeRange argument.
+ For example, timeRange="weeks" rangeValue="1" would run the query on the previous
+ week.
+ isArray: false
+ name: rangeValue
+ required: false
+ secret: false
+ deprecated: false
+ description: Runs a query on the Cortex logging service, according to preset queries.
+ execution: false
+ name: cortex-get-social-applications
+ outputs:
+ - contextPath: Cortex.Logging.id
+    description: The ID of the log.
+ type: string
+ - contextPath: Cortex.Logging.score
+ description: The score of the log.
+ type: number
+ - contextPath: Cortex.Logging.action
+ description: The action of the log.
+ type: Unknown
+ - contextPath: Cortex.Logging.app
+ description: The app of the log.
+ type: Unknown
+ - contextPath: Cortex.Logging.proto
+ description: The protocol used.
+ type: string
+ - contextPath: Cortex.Logging.dst
+ description: The destination IP.
+ type: string
+ - contextPath: Cortex.Logging.rule
+ description: The rule used.
+ type: Unknown
+ - contextPath: Cortex.Logging.src
+ description: The source of the action.
+ type: Unknown
+ - contextPath: Cortex.Logging.category-of-app
+ description: The category of the application.
+ type: string
+ - contextPath: Cortex.Logging.srcloc
+ description: The source location.
+ type: string
+ - contextPath: Cortex.Logging.dstloc
+ description: The destination location.
+ type: string
+ - contextPath: Cortex.Logging.characteristic-of-app
+ description: The application's characteristics.
+ type: Unknown
+ - contextPath: Cortex.Logging.device_name
+ description: The name of the device.
+ type: string
+ - contextPath: Cortex.Logging.nat
+ description: Whether NAT was used.
+ type: number
+ - contextPath: Cortex.Logging.natdport
+ description: The NAT port.
+ type: Unknown
+ - contextPath: Cortex.Logging.natdst
+ description: The NAT destination.
+ type: Unknown
+ - contextPath: Cortex.Logging.natsrc
+ description: The NAT source.
+ type: Unknown
+ - contextPath: Cortex.Logging.risk-of-app
+ description: The risk of the application.
+ type: Unknown
+ - contextPath: Cortex.Logging.aggregations.size
+ description: The aggregations size.
+ type: Unknown
+ - contextPath: Cortex.Logging.natsport
+ description: The NAT port.
+ type: Unknown
+ - contextPath: Cortex.Logging.start
+ description: The traffic start.
+ type: Unknown
+ - contextPath: Cortex.Logging.subcategory-of-apptime_received
+ description: The sub-category of the application time.
+ type: Unknown
+ - arguments:
+ - default: false
+ defaultValue: '1970-01-01 00:00:00'
+    description: The query start time. For example, startTime="2018-04-26 00:00:00".
+ isArray: false
+ name: startTime
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '2020-01-01 00:00:00'
+    description: The query end time. For example, endTime="2018-04-26 00:00:00".
+ isArray: false
+ name: endTime
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: The number of logs to return. Default is 10.
+ isArray: false
+ name: logsAmount
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The time range for the query, used with the rangeValue argument.
+ For example, timeRange="weeks" rangeValue="1" would run the query on the previous
+ week.
+ isArray: false
+ name: timeRange
+ predefined:
+ - minutes
+ - days
+ - weeks
+ required: false
+ secret: false
+ - default: false
+ description: The time value for the query, used with the timeRange argument.
+      For example, timeRange="weeks" rangeValue="1" would run the query on the previous
+      week.
+ isArray: false
+ name: rangeValue
+ required: false
+ secret: false
+ - default: false
+ description: The SHA256 hash of the file for the query. For example, SHA256="503ca1a4fc0d48b18c0336f544ba0f0abf305ae3a3f49b3c2b86b8645d6572dc"
+ would return all logs associated with this file.
+ isArray: false
+ name: SHA256
+ required: true
+ secret: false
+ deprecated: false
+ description: Runs a query on the Cortex logging service, according to preset queries.
+ execution: false
+ name: cortex-search-by-file-hash
+ outputs:
+ - contextPath: Cortex.Logging.id
+ description: The ID of the log.
+ type: string
+ - contextPath: Cortex.Logging.score
+ description: The score of the log.
+ type: number
+ - contextPath: Cortex.Logging.action
+ description: The action of the log.
+ type: Unknown
+ - contextPath: Cortex.Logging.app
+ description: The app of the log.
+ type: Unknown
+ - contextPath: Cortex.Logging.proto
+ description: The protocol used.
+ type: string
+ - contextPath: Cortex.Logging.dst
+ description: The destination IP address.
+ type: string
+ - contextPath: Cortex.Logging.rule
+ description: The rule used.
+ type: Unknown
+ - contextPath: Cortex.Logging.src
+ description: The source of the action.
+ type: Unknown
+ - contextPath: Cortex.Logging.category-of-app
+ description: The category of the application.
+ type: string
+ - contextPath: Cortex.Logging.srcloc
+ description: The source location.
+ type: string
+ - contextPath: Cortex.Logging.dstloc
+ description: The destination location.
+ type: string
+ - contextPath: Cortex.Logging.characteristic-of-app
+ description: The application's characteristics.
+ type: Unknown
+ - contextPath: Cortex.Logging.device_name
+ description: The name of the device.
+ type: string
+ - contextPath: Cortex.Logging.nat
+ description: Whether NAT was used.
+ type: number
+ - contextPath: Cortex.Logging.natdport
+ description: The NAT port.
+ type: Unknown
+ - contextPath: Cortex.Logging.natdst
+ description: The NAT destination.
+ type: Unknown
+ - contextPath: Cortex.Logging.natsrc
+ description: The NAT source.
+ type: Unknown
+ - contextPath: Cortex.Logging.risk-of-app
+ description: The risk of the application.
+ type: Unknown
+ - contextPath: Cortex.Logging.type
+ description: The threat type.
+ type: Unknown
+ - contextPath: Cortex.Logging.pcad_id
+ description: The PCAP ID.
+ type: Unknown
+ - contextPath: Cortex.Logging.reportid
+ description: The report ID.
+ type: number
+ - contextPath: Cortex.Logging.category-of-threatid
+ description: The category of the threat ID.
+ type: Unknown
+ - contextPath: Cortex.Logging.subtype
+ description: The threat sub-type.
+ type: Unknown
+ - contextPath: Cortex.Logging.time_received
+ description: The time the logging was received.
+ type: Unknown
+ - contextPath: Cortex.Logging.pcap
+ description: The PCAP.
+ type: Unknown
+ - contextPath: Cortex.Logging.name-of-threatid
+ description: The name of the threat ID.
+ type: string
+ - contextPath: Cortex.Logging.severity
+    description: The threat severity.
+ type: Unknown
+ dockerimage: demisto/python_pancloud:1.0.0.286
+ isfetch: true
+ runonce: false
+ script: ''
+ type: python
+ subtype: python3
+tests:
+- Palo Alto Networks Cortex Test
+fromversion: 4.1.0
diff --git a/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortex_image.png b/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortex_image.png
new file mode 100644
index 000000000000..249fc6f403d6
Binary files /dev/null and b/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortex_image.png differ
diff --git a/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortex_test.py b/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortex_test.py
new file mode 100644
index 000000000000..75899dd35338
--- /dev/null
+++ b/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortex_test.py
@@ -0,0 +1,125 @@
+import random
+import string
+import demistomock as demisto
+from datetime import datetime, timedelta
+
+""" Helper functions """
+
+
+def random_string(string_length=10) -> str:
+ """Generate a random string of fixed length
+
+ Args:
+ string_length (int): length of string to return
+
+ Returns:
+ str: random string
+ """
+ letters = string.ascii_lowercase
+    return ''.join(random.choice(letters) for _ in range(string_length))
+
+
+def test_get_start_time(mocker):
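+    # The integration context is mocked so the module under test can run without a live instance.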
+ integration_context = {
+ 'stored': int((datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()),
+ 'access_token': 'dummy'
+ }
+ mocker.patch.object(demisto, 'getIntegrationContext', return_value=integration_context)
+
+ from PaloAltoNetworksCortex import get_start_time
+
+ five_minutes_start_time = get_start_time('minutes', 5)
+ expected_response = datetime.now() - timedelta(minutes=5)
+ assert five_minutes_start_time.replace(microsecond=0) == expected_response.replace(microsecond=0)
+
+ ten_days_start_time = get_start_time('days', 10)
+ expected_response = datetime.now() - timedelta(days=10)
+ assert ten_days_start_time.replace(microsecond=0) == expected_response.replace(microsecond=0)
+
+ four_weeks_start_time = get_start_time('weeks', 4)
+ expected_response = datetime.now() - timedelta(weeks=4)
+ assert four_weeks_start_time.replace(microsecond=0) == expected_response.replace(microsecond=0)
+
+
+def test_process_incident_pairs():
+ from PaloAltoNetworksCortex import process_incident_pairs
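+    # Each pair is (incident, timestamp); the result should be sorted by timestamp and capped at the given limit.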
+ incident_pairs = [
+ (1, datetime.fromtimestamp(1)),
+ (3, datetime.fromtimestamp(3)),
+ (2, datetime.fromtimestamp(2)),
+ ]
+ incidents, max_ts = process_incident_pairs(incident_pairs, 3)
+ assert incidents[2] == 3
+ assert max_ts == datetime.fromtimestamp(3)
+ incidents, max_ts = process_incident_pairs(incident_pairs, 2)
+ assert incidents[1] == 2
+ assert len(incidents) == 2
+ assert max_ts == datetime.fromtimestamp(2)
+
+
+def test_prepare_fetch_query(mocker):
+ from PaloAltoNetworksCortex import prepare_fetch_query, main
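+    # main() is called after each demisto.params patch so prepare_fetch_query sees the patched parameters.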
+
+ traps_params = {
+ 'fetch_query': 'Traps Threats',
+ }
+ mocker.patch.object(demisto, 'params',
+ return_value=traps_params)
+ main()
+ traps_fetch_timestamp = '2018-04-22T10:34:07.371267Z'
+
+ traps_query = prepare_fetch_query(traps_fetch_timestamp)
+ assert traps_query == "SELECT * FROM tms.threat WHERE serverTime>'2018-04-22T10:34:07.371267Z'"
+
+ traps_params['traps_severity'] = ['critical', 'high']
+ traps_query_with_severity = prepare_fetch_query(traps_fetch_timestamp)
+ assert traps_query_with_severity == "SELECT * FROM tms.threat WHERE serverTime>'2018-04-22T10:34:07.371267Z' " \
+ "AND (messageData.trapsSeverity='critical' OR messageData.trapsSeverity='high')"
+
+ firewall_params = {
+ 'fetch_query': 'Firewall Threats',
+ }
+ mocker.patch.object(demisto, 'params',
+ return_value=firewall_params)
+ main()
+ firewall_fetch_timestamp = '1524383011'
+
+ firewall_query = prepare_fetch_query(firewall_fetch_timestamp)
+ assert firewall_query == "SELECT * FROM panw.threat WHERE receive_time>1524383011"
+
+ firewall_params['firewall_severity'] = ['medium']
+ firewall_query_with_severity = prepare_fetch_query(firewall_fetch_timestamp)
+ assert firewall_query_with_severity == "SELECT * FROM panw.threat " \
+ "WHERE receive_time>1524383011 AND (severity='medium')"
+
+ firewall_params['firewall_subtype'] = ['url', 'antivirus']
+ firewall_query_with_severity_and_subtype = prepare_fetch_query(firewall_fetch_timestamp)
+ assert firewall_query_with_severity_and_subtype == "SELECT * FROM panw.threat WHERE receive_time>1524383011 " \
+ "AND (subtype='url' OR subtype='antivirus') " \
+ "AND (severity='medium')"
+
+ xdr_params = {
+ 'fetch_query': 'Cortex XDR Analytics',
+ }
+ mocker.patch.object(demisto, 'params',
+ return_value=xdr_params)
+ main()
+ xdr_fetch_timestamp = '2018-04-22T10:34:07.371267Z'
+
+ xdr_query = prepare_fetch_query(xdr_fetch_timestamp)
+ assert xdr_query == "SELECT * FROM magnifier.alert WHERE time_generated>2018-04-22T10:34:07.371267Z " \
+ "AND sub_type.keyword = 'New'"
+
+ xdr_params['xdr_severity'] = ['High', 'Medium']
+ xdr_query_with_severity = prepare_fetch_query(xdr_fetch_timestamp)
+ assert xdr_query_with_severity == "SELECT * FROM magnifier.alert WHERE " \
+ "time_generated>2018-04-22T10:34:07.371267Z AND " \
+ "(alert.severity.keyword='High' OR alert.severity.keyword='Medium') AND " \
+ "sub_type.keyword = 'New'"
+
+
+def test_get_encrypted():
+ from PaloAltoNetworksCortex import get_encrypted
+ auth_id = random_string(50)
+ auth_key = random_string(32)
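+    # Smoke test: encrypting arbitrary credentials should complete without raising.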
+ get_encrypted(auth_id, auth_key)
diff --git a/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortext_description.md b/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortext_description.md
new file mode 100644
index 000000000000..e660a14aad61
--- /dev/null
+++ b/Integrations/PaloAltoNetworksCortex/PaloAltoNetworksCortext_description.md
@@ -0,0 +1,3 @@
+There are several steps required to configure this integration. You will navigate between Demisto and [Cortex Hub](https://apps.paloaltonetworks.com/marketplace/demisto) to retrieve tokens required later in the process.
+
+For more information, see the [Palo Alto Networks Cortex](https://support.demisto.com/hc/en-us/articles/360004173094) integration documentation.
\ No newline at end of file
diff --git a/Integrations/PaloAltoNetworksCortex/Pipfile b/Integrations/PaloAltoNetworksCortex/Pipfile
new file mode 100644
index 000000000000..3f4c5a84e70a
--- /dev/null
+++ b/Integrations/PaloAltoNetworksCortex/Pipfile
@@ -0,0 +1,19 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+flake8 = "*"
+
+[packages]
+requests = "*"
+pancloud = "*"
+crypto = "*"
+cryptography = "*"
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/PaloAltoNetworksCortex/Pipfile.lock b/Integrations/PaloAltoNetworksCortex/Pipfile.lock
new file mode 100644
index 000000000000..27fb5db65dfd
--- /dev/null
+++ b/Integrations/PaloAltoNetworksCortex/Pipfile.lock
@@ -0,0 +1,387 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "044778e20682923f75aa6dcac76202d8c6b19a4e0bf72738fa9e081affe78db6"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "arrow": {
+ "hashes": [
+ "sha256:3397e5448952e18e1295bf047014659effa5ae8da6a5371d37ff0ddc46fa6872",
+ "sha256:6f54d9f016c0b7811fac9fb8c2c7fa7421d80c54dbdd75ffb12913c55db60b8a"
+ ],
+ "version": "==0.13.1"
+ },
+ "asn1crypto": {
+ "hashes": [
+ "sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87",
+ "sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
+ ],
+ "version": "==0.24.0"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5",
+ "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"
+ ],
+ "version": "==2019.3.9"
+ },
+ "cffi": {
+ "hashes": [
+ "sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774",
+ "sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d",
+ "sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90",
+ "sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b",
+ "sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63",
+ "sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45",
+ "sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25",
+ "sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3",
+ "sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b",
+ "sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647",
+ "sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016",
+ "sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4",
+ "sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb",
+ "sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753",
+ "sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7",
+ "sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9",
+ "sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f",
+ "sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8",
+ "sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f",
+ "sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc",
+ "sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42",
+ "sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3",
+ "sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909",
+ "sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45",
+ "sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d",
+ "sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512",
+ "sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff",
+ "sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201"
+ ],
+ "version": "==1.12.3"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "crypto": {
+ "hashes": [
+ "sha256:8f2ee9756a0265c18845ac097ae447c75cfbde158abe1361b7491619f866a9bd",
+ "sha256:985120aa86f71545388199f96a2a0e00f7ccfe5ecd14c56355eb399e1a63d164"
+ ],
+ "index": "pypi",
+ "version": "==1.4.1"
+ },
+ "cryptography": {
+ "hashes": [
+ "sha256:066f815f1fe46020877c5983a7e747ae140f517f1b09030ec098503575265ce1",
+ "sha256:210210d9df0afba9e000636e97810117dc55b7157c903a55716bb73e3ae07705",
+ "sha256:26c821cbeb683facb966045e2064303029d572a87ee69ca5a1bf54bf55f93ca6",
+ "sha256:2afb83308dc5c5255149ff7d3fb9964f7c9ee3d59b603ec18ccf5b0a8852e2b1",
+ "sha256:2db34e5c45988f36f7a08a7ab2b69638994a8923853dec2d4af121f689c66dc8",
+ "sha256:409c4653e0f719fa78febcb71ac417076ae5e20160aec7270c91d009837b9151",
+ "sha256:45a4f4cf4f4e6a55c8128f8b76b4c057027b27d4c67e3fe157fa02f27e37830d",
+ "sha256:48eab46ef38faf1031e58dfcc9c3e71756a1108f4c9c966150b605d4a1a7f659",
+ "sha256:6b9e0ae298ab20d371fc26e2129fd683cfc0cfde4d157c6341722de645146537",
+ "sha256:6c4778afe50f413707f604828c1ad1ff81fadf6c110cb669579dea7e2e98a75e",
+ "sha256:8c33fb99025d353c9520141f8bc989c2134a1f76bac6369cea060812f5b5c2bb",
+ "sha256:9873a1760a274b620a135054b756f9f218fa61ca030e42df31b409f0fb738b6c",
+ "sha256:9b069768c627f3f5623b1cbd3248c5e7e92aec62f4c98827059eed7053138cc9",
+ "sha256:9e4ce27a507e4886efbd3c32d120db5089b906979a4debf1d5939ec01b9dd6c5",
+ "sha256:acb424eaca214cb08735f1a744eceb97d014de6530c1ea23beb86d9c6f13c2ad",
+ "sha256:c8181c7d77388fe26ab8418bb088b1a1ef5fde058c6926790c8a0a3d94075a4a",
+ "sha256:d4afbb0840f489b60f5a580a41a1b9c3622e08ecb5eec8614d4fb4cd914c4460",
+ "sha256:d9ed28030797c00f4bc43c86bf819266c76a5ea61d006cd4078a93ebf7da6bfd",
+ "sha256:e603aa7bb52e4e8ed4119a58a03b60323918467ef209e6ff9db3ac382e5cf2c6"
+ ],
+ "index": "pypi",
+ "version": "==2.6.1"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "jmespath": {
+ "hashes": [
+ "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6",
+ "sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c"
+ ],
+ "version": "==0.9.4"
+ },
+ "naked": {
+ "hashes": [
+ "sha256:12b76b8a14595d07039422f1d2219ca8fbef8b237f9cdf5d8e947c03e148677e",
+ "sha256:19de9961f4edb29e75cf837e8e031d6b52fbba4f0033515893d26f69c74b3b1f"
+ ],
+ "version": "==0.1.31"
+ },
+ "pancloud": {
+ "hashes": [
+ "sha256:374ca770405f9bfda69489ad9cd1ef3d716287f584771566cf9ff6d22f189a4e"
+ ],
+ "index": "pypi",
+ "version": "==1.5.1"
+ },
+ "pycparser": {
+ "hashes": [
+ "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
+ ],
+ "version": "==2.19"
+ },
+ "python-dateutil": {
+ "hashes": [
+ "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb",
+ "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"
+ ],
+ "version": "==2.8.0"
+ },
+ "pyyaml": {
+ "hashes": [
+ "sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c",
+ "sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95",
+ "sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2",
+ "sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4",
+ "sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad",
+ "sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba",
+ "sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1",
+ "sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e",
+ "sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673",
+ "sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13",
+ "sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19"
+ ],
+ "version": "==5.1"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
+ "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
+ ],
+ "index": "pypi",
+ "version": "==2.21.0"
+ },
+ "shellescape": {
+ "hashes": [
+ "sha256:3ff2aeb6ce2c5a4e6059fe4a2a745a824f5a3834fe8365a39c5ea691073cfdb6",
+ "sha256:e618b2bc13f2553315ca1669995dc10fcc2cae5f1e0fda49035ef02d56f0b358"
+ ],
+ "version": "==3.4.1"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "tinydb": {
+ "hashes": [
+ "sha256:260b1f69168a24518af63f0270c72dc026476607016a0105aef6a966d8d2fbdc",
+ "sha256:a05c4c81e6e867c4f8a2e51c5236d0d897019aa5e9296f5947455b0bdd3c519d"
+ ],
+ "version": "==3.13.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4",
+ "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb"
+ ],
+ "version": "==1.24.3"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661",
+ "sha256:a796a115208f5c03b18f332f7c11729812c8c3ded6c46319c59b53efd3819da8"
+ ],
+ "index": "pypi",
+ "version": "==3.7.7"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:1349c6f7c2a0f7539f5f2ace51a9a8e4a37086ce4de6f78f5f53fb041d0a3cd5",
+ "sha256:f09911f6eb114e5592abe635aded8bf3d2c3144ebcfcaf81ee32e7af7b7d1870"
+ ],
+ "version": "==4.3.18"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:2112d2ca570bb7c3e53ea1a35cd5df42bb0fd10c45f0fb97178679c3c03d64c7",
+ "sha256:c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a"
+ ],
+ "markers": "python_version > '2.7'",
+ "version": "==7.0.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:25a1bc1d148c9a640211872b4ff859878d422bccb59c9965e04eed468a0aa180",
+ "sha256:964cedd2b27c492fbf0b7f58b3284a09cf7f99b0f715941fb24a439b3af1bd1a"
+ ],
+ "version": "==0.11.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:1a8aa4fa958f8f451ac5441f3ac130d9fc86ea38780dd2715e6d5c5882700b24",
+ "sha256:b8bf138592384bd4e87338cb0f256bf5f615398a649d4bd83915f0e4047a5ca6"
+ ],
+ "index": "pypi",
+ "version": "==4.5.0"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:132eae51d6ef3ff4a8c47c393a4ef5ebf0d1aecc96880eb5d6c8ceab7017cc9b",
+ "sha256:18141c1484ab8784006c839be8b985cfc82a2e9725837b0ecfa0203f71c4e39d",
+ "sha256:2baf617f5bbbfe73fd8846463f5aeafc912b5ee247f410700245d68525ec584a",
+ "sha256:3d90063f2cbbe39177e9b4d888e45777012652d6110156845b828908c51ae462",
+ "sha256:4304b2218b842d610aa1a1d87e1dc9559597969acc62ce717ee4dfeaa44d7eee",
+ "sha256:4983ede548ffc3541bae49a82675996497348e55bafd1554dc4e4a5d6eda541a",
+ "sha256:5315f4509c1476718a4825f45a203b82d7fdf2a6f5f0c8f166435975b1c9f7d4",
+ "sha256:6cdfb1b49d5345f7c2b90d638822d16ba62dc82f7616e9b4caa10b72f3f16649",
+ "sha256:7b325f12635598c604690efd7a0197d0b94b7d7778498e76e0710cd582fd1c7a",
+ "sha256:8d3b0e3b8626615826f9a626548057c5275a9733512b137984a68ba1598d3d2f",
+ "sha256:8f8631160c79f53081bd23446525db0bc4c5616f78d04021e6e434b286493fd7",
+ "sha256:912de10965f3dc89da23936f1cc4ed60764f712e5fa603a09dd904f88c996760",
+ "sha256:b010c07b975fe853c65d7bbe9d4ac62f1c69086750a574f6292597763781ba18",
+ "sha256:c908c10505904c48081a5415a1e295d8403e353e0c14c42b6d67f8f97fae6616",
+ "sha256:c94dd3807c0c0610f7c76f078119f4ea48235a953512752b9175f9f98f5ae2bd",
+ "sha256:ce65dee7594a84c466e79d7fb7d3303e7295d16a83c22c7c4037071b059e2c21",
+ "sha256:eaa9cfcb221a8a4c2889be6f93da141ac777eb8819f077e1d09fb12d00a09a93",
+ "sha256:f3376bc31bad66d46d44b4e6522c5c21976bf9bca4ef5987bb2bf727f4506cbb",
+ "sha256:f9202fa138544e13a4ec1a6792c35834250a85958fde1251b6a22e07d1260ae7"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.3.5"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/CHANGELOG.md b/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/CHANGELOG.md
new file mode 100644
index 000000000000..3b679ed73934
--- /dev/null
+++ b/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.8.0] - 2019-08-06
+ - Added the ***pan-os-edl-get-external-file-metadata*** command.
+ - When a non-existent list is specified in the ***pan-os-edl-update-from-external-file*** command, the list is automatically created, and the file data is saved to the list.
diff --git a/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/PaloAltoNetworks_PAN_OS_EDL_Management.py b/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/PaloAltoNetworks_PAN_OS_EDL_Management.py
new file mode 100644
index 000000000000..ca3b64e7045f
--- /dev/null
+++ b/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/PaloAltoNetworks_PAN_OS_EDL_Management.py
@@ -0,0 +1,587 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+'''IMPORTS'''
+
+import tempfile
+import subprocess
+import os
+
+''' GLOBALS '''
+
+
+def create_certificate_file(authentication: dict):
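+    """
+    Writes the SSH certificate from the integration credentials to a temporary file
+    """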
+ password = authentication.get('password', None)
+ certificate = None
+ if 'credentials' in authentication and 'sshkey' in authentication['credentials'] and len(
+ authentication['credentials']['sshkey']) > 0:
+ certificate = authentication.get('credentials', None).get('sshkey')
+
+ cert_file = tempfile.NamedTemporaryFile(delete=False, mode='w')
+ if certificate:
+ cert_file.write(certificate)
+ cert_file.flush()
+ os.chmod(cert_file.name, 0o400)
+ elif password:
+ # check that password field holds a certificate and not a password
+ if password.find('-----') == -1:
+ return_error('Password parameter must contain a certificate.')
+ # split certificate by dashes
+ password_list = password.split('-----')
+ # replace spaces with newline characters
+ password_fixed = '-----'.join(password_list[:2] + [password_list[2].replace(' ', '\n')] + password_list[3:])
+ cert_file.write(password_fixed)
+ cert_file.flush()
+ os.chmod(cert_file.name, 0o400)
+ else:
+ return_error('To connect to the remote server, provide a certificate.')
+
+ return cert_file
+
+
+AUTHENTICATION = demisto.params().get('Authentication')
+
+HOSTNAME = demisto.params().get('hostname')
+USERNAME = AUTHENTICATION.get('identifier')
+PORT = str(demisto.params().get('port')) if demisto.params().get('port', None) and len(
+ demisto.params().get('port')) > 0 else None
+
+SSH_EXTRA_PARAMS = demisto.params().get('ssh_extra_params').split() if demisto.params().get('ssh_extra_params',
+ None) else None
+SCP_EXTRA_PARAMS = demisto.params().get('scp_extra_params').split() if demisto.params().get('scp_extra_params',
+ None) else None
+DOCUMENT_ROOT = '/' + demisto.params().get('document_root') if demisto.params().get('document_root', None) else None
+
+CERTIFICATE_FILE = create_certificate_file(AUTHENTICATION)
+
+''' UTILS '''
+
+
+def ssh_execute(command: str):
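+    # Runs the command on the remote host over SSH, using the temporary certificate as the identity
+    # and disabling strict host-key checking; the port and extra parameters are added when configured.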
+ if PORT and SSH_EXTRA_PARAMS:
+ param_list = ['ssh', '-o', 'StrictHostKeyChecking=no', '-i', CERTIFICATE_FILE.name, '-p',
+ PORT] + SSH_EXTRA_PARAMS + [USERNAME + '@' + HOSTNAME, command]
+ result = subprocess.run(param_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
+ elif PORT:
+ result = subprocess.run(
+ ['ssh', '-o', 'StrictHostKeyChecking=no', '-i', CERTIFICATE_FILE.name, '-p', PORT,
+ USERNAME + '@' + HOSTNAME, command], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
+ elif SSH_EXTRA_PARAMS:
+ param_list = ['ssh', '-o', 'StrictHostKeyChecking=no', '-i', CERTIFICATE_FILE.name] + SSH_EXTRA_PARAMS + [
+ USERNAME + '@' + HOSTNAME, command]
+ result = subprocess.run(param_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
+ else:
+ result = subprocess.run(
+ ['ssh', '-o', 'StrictHostKeyChecking=no', '-i', CERTIFICATE_FILE.name, USERNAME + '@' + HOSTNAME, command],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
+
+ if result.returncode != 0:
+ if result.stderr:
+ if result.stderr.find("Warning: Permanently added") != -1:
+ return result.stdout # ignore addition of new hosts warnings
+ elif result.stderr.find("Permission denied") != -1:
+ return_error(
+ 'Permission denied, check your username and certificate.\n' + 'Got error: ' + result.stderr)
+ else:
+ return_error(result.stderr)
+ elif command.find('grep') != -1 and result.returncode == 1:
+ # a search command that did not find any value
+ demisto.results({
+ 'Type': 11,
+            'Contents': 'Search string was not found in the given external file.',
+ 'ContentsFormat': formats['text']
+ })
+ sys.exit(0)
+ else:
+ return_error('Command failed with exit status: ' + str(result.returncode))
+
+ return result.stdout
+
+
+def scp_execute(file_name: str, file_path: str):
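+    # Copies the local file to the remote path over SCP, using the same temporary certificate as ssh_execute.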
+ if SCP_EXTRA_PARAMS:
+ param_list = ['scp', '-o', 'StrictHostKeyChecking=no', '-i', CERTIFICATE_FILE.name] + SCP_EXTRA_PARAMS + [
+ file_name, USERNAME + '@' + HOSTNAME + ':' + file_path]
+ result = subprocess.run(param_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
+ else:
+ param_list = ['scp', '-o', 'StrictHostKeyChecking=no', '-i', CERTIFICATE_FILE.name, file_name,
+ USERNAME + '@' + HOSTNAME + ':' + file_path]
+ result = subprocess.run(param_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
+
+ if result.returncode != 0:
+ if result.stderr:
+ if result.stderr.find("Warning: Permanently added") != -1:
+ return True # ignore addition of new hosts warnings
+ else:
+ return_error(result.stderr)
+ else:
+ return_error('Command failed with exit status: ' + str(result.returncode))
+ else:
+ return True
+
+
+''' COMMANDS '''
+
+
+def edl_get_external_file(file_path: str):
+ command = f'cat {file_path}'
+ result = ssh_execute(command)
+ return result
+
+
+def edl_get_external_file_command():
+ """
+    Gets the external file from the web server and prints it to the War Room
+ """
+ file_path = demisto.args().get('file_path')
+ if DOCUMENT_ROOT:
+ file_path = os.path.join(DOCUMENT_ROOT, file_path)
+
+ result = edl_get_external_file(file_path)
+
+ md = tableToMarkdown('File Content:', result, headers=['List'])
+ demisto.results({
+ 'ContentsFormat': formats['markdown'],
+ 'Type': entryTypes['note'],
+ 'Contents': md
+ })
+
+
+def edl_search_external_file(file_path: str, search_string: str):
+ return ssh_execute(f'grep {search_string} {file_path}')
+
+
+def edl_search_external_file_command():
+ """
+    Searches the external file and returns all matching entries to the War Room
+ """
+ file_path = demisto.args().get('file_path')
+ if DOCUMENT_ROOT:
+ file_path = os.path.join(DOCUMENT_ROOT, file_path)
+ search_string = demisto.args().get('search_string')
+
+ result = edl_search_external_file(file_path, search_string)
+
+ md = tableToMarkdown('Search Results', result, headers=['Result'])
+
+ demisto.results({
+ 'ContentsFormat': formats['markdown'],
+ 'Type': entryTypes['note'],
+ 'Contents': md
+ })
+
+
+def edl_update_external_file(file_path: str, list_name: str, verbose: bool):
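+    # Writes the internal list to a local temporary file and copies it over the remote file via SCP;
+    # when verbose, returns the remote file contents for verification.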
+ dict_of_lists = demisto.getIntegrationContext()
+ list_data = dict_of_lists.get(list_name)
+
+ file_name = file_path.rsplit('/', 1)[-1] + '.txt'
+ try:
+ with open(file_name, 'w') as file:
+ file.write("\n".join(list_data))
+ success = scp_execute(file_name, file_path)
+ finally:
+        # shutil.rmtree only removes directories; use os.remove for the temporary file
+        if os.path.exists(file_name):
+            os.remove(file_name)
+
+ if not success:
+ return False
+ else:
+ if verbose:
+ return ssh_execute(f'cat {file_path}')
+ else:
+ return True
+
+
+def edl_update():
+ """
+    Updates the instance context with the given list name and items, then
+    overrides the external file at the given path with the internal list
+ """
+ file_path = demisto.args().get('file_path')
+ if DOCUMENT_ROOT:
+ file_path = os.path.join(DOCUMENT_ROOT, file_path)
+ list_name = demisto.args().get('list_name')
+ list_items = argToList(demisto.args().get('list_items'))
+ add = demisto.args().get('add_or_remove') == 'add'
+ verbose = demisto.args().get('verbose') == 'true'
+
+ # update internal list
+ dict_of_lists = demisto.getIntegrationContext()
+ if not dict_of_lists:
+ dict_of_lists = {list_name: list_items}
+ if verbose:
+ md = tableToMarkdown('List items:', list_items, headers=[list_name])
+ else:
+ md = 'Instance context updated successfully'
+ else:
+ if not dict_of_lists.get(list_name, None) and not add:
+ return_error('Cannot remove items from an empty list')
+ if dict_of_lists.get(list_name, None):
+ if add:
+ list_items = list(set(dict_of_lists.get(list_name) + list_items))
+ else:
+ list_items = [item for item in dict_of_lists.get(list_name) if item not in list_items]
+
+ if len(list_items) == 0: # delete list from instance context
+ dict_of_lists.pop(list_name, None)
+ md = 'List is empty, deleted from instance context.'
+ else:
+ dict_of_lists.update({list_name: list_items})
+ if verbose:
+ md = tableToMarkdown('List items:', list_items, headers=[list_name])
+ else:
+ md = 'Instance context updated successfully'
+
+ demisto.setIntegrationContext(dict_of_lists)
+ demisto.results({
+ 'ContentsFormat': formats['markdown'],
+ 'Type': entryTypes['note'],
+ 'Contents': md
+ })
+
+ # scp internal list to file_path
+ result = edl_update_external_file(file_path, list_name, verbose)
+ if result:
+ if verbose:
+ md = tableToMarkdown('Updated File Data:', result, headers=['Data'])
+ else:
+ md = 'External file updated successfully'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': md,
+ 'ContentsFormat': formats['markdown']
+ })
+
+
+def edl_update_from_external_file(list_name: str, file_path: str, type_: str):
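+    # "merge" unions the remote file values into the internal list; "override" replaces it with the file contents.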
+ dict_of_lists = demisto.getIntegrationContext()
+ list_data = dict_of_lists.get(list_name, None)
+ file_data = edl_get_external_file(file_path)
+
+ if list_data:
+ set_internal = set(list_data)
+ set_external = set(file_data.split('\n'))
+ set_external.discard('')
+ if type_ == 'merge':
+ unified = set_internal.union(set_external)
+ list_data_new = list(unified)
+ else: # type_ == 'override'
+ list_data_new = list(set_external)
+ dict_of_lists.update({list_name: list_data_new})
+ demisto.setIntegrationContext(dict_of_lists)
+ return list_data_new
+ else:
+ dict_of_lists.update({list_name: file_data})
+ demisto.setIntegrationContext(dict_of_lists)
+ return file_data
+
+
+def edl_update_from_external_file_command():
+ """
+ Updates internal list data with external file contents
+ """
+ file_path = demisto.args().get('file_path')
+ if DOCUMENT_ROOT:
+ file_path = os.path.join(DOCUMENT_ROOT, file_path)
+ list_name = demisto.args().get('list_name')
+ type_ = demisto.args().get('type')
+ verbose = demisto.args().get('verbose') == 'true'
+
+ list_data_new = edl_update_from_external_file(list_name, file_path, type_)
+
+ if verbose:
+ md = tableToMarkdown('List items:', list_data_new, headers=[list_name])
+ else:
+ md = 'Instance context updated successfully'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': md,
+ 'ContentsFormat': formats['markdown']
+ })
+
+
+def edl_delete_external_file(file_path: str):
+ ssh_execute('rm -f ' + file_path)
+ return 'File deleted successfully'
+
+
+def edl_delete_external_file_command():
+ """
+ Delete external file
+ """
+ file_path = demisto.args().get('file_path')
+ if DOCUMENT_ROOT:
+ file_path = os.path.join(DOCUMENT_ROOT, file_path)
+ result = edl_delete_external_file(file_path)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': result,
+ 'ContentsFormat': formats['text']
+ })
+
+
+def edl_list_internal_lists_command():
+ """
+ List all instance context lists
+ """
+ dict_of_lists = demisto.getIntegrationContext()
+ list_names = list(dict_of_lists.keys())
+
+    md = tableToMarkdown('Instance context lists:', list_names, headers=['List names'])
+
+ demisto.results({
+ 'ContentsFormat': formats['markdown'],
+ 'Type': entryTypes['note'],
+ 'Contents': md
+ })
+
+
+def edl_search_internal_list_command():
+ """
+    Searches for a string in the internal list
+ """
+ list_name = demisto.args().get('list_name')
+ search_string = demisto.args().get('search_string')
+
+ dict_of_lists = demisto.getIntegrationContext()
+ list_data = dict_of_lists.get(list_name, None)
+
+ if not list_data:
+ demisto.results({
+ 'Type': 11,
+ 'Contents': 'List was not found in instance context.',
+ 'ContentsFormat': formats['text']
+ })
+ elif search_string in list_data:
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': 'Search string is in internal list.',
+ 'ContentsFormat': formats['text']
+ })
+ else:
+ demisto.results({
+ 'Type': 11,
+ 'Contents': 'Search string was not found in instance context list.',
+ 'ContentsFormat': formats['text']
+ })
+
+
+def edl_print_internal_list_command():
+ """
+    Prints an instance context list to the War Room
+ """
+ list_name = demisto.args().get('list_name')
+ dict_of_lists = demisto.getIntegrationContext()
+ list_data = dict_of_lists.get(list_name, None)
+
+ if not list_data:
+ demisto.results({
+ 'Type': 11,
+ 'Contents': 'List was not found in instance context.',
+ 'ContentsFormat': formats['text']
+ })
+ else:
+ md = tableToMarkdown('List items:', list_data, headers=[list_name])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': md,
+ 'ContentsFormat': formats['markdown']
+ })
+
+
+def edl_dump_internal_list_command():
+ """
+ Dumps an instance context list to either a file or incident context
+ """
+ destination = demisto.args().get('destination')
+ list_name = demisto.args().get('list_name')
+
+ dict_of_lists = demisto.getIntegrationContext()
+ list_data = dict_of_lists.get(list_name, None)
+ if not list_data:
+ demisto.results({
+ 'Type': 11,
+ 'Contents': 'List was not found in instance context or has no data.',
+ 'ContentsFormat': formats['text']
+ })
+ sys.exit(0)
+ if destination == 'file': # dump list as file
+ internal_file_path = demisto.uniqueFile()
+
+ try:
+ with open(internal_file_path, 'w') as f:
+ f.write("\n".join(list_data))
+ file_type = entryTypes['entryInfoFile']
+ with open(internal_file_path, 'rb') as file:
+ file_entry = fileResult(internal_file_path, file.read(), file_type)
+ demisto.results(file_entry)
+ finally:
+            # shutil.rmtree only removes directories; use os.remove for the temporary file
+            if os.path.exists(internal_file_path):
+                os.remove(internal_file_path)
+
+ else: # update incident context
+ md = tableToMarkdown('List items:', list_data, headers=[list_name])
+ ec = {
+ 'ListName': list_name,
+ 'ListItems': list_data
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': md,
+ 'ContentsFormat': formats['markdown'],
+ 'EntryContext': {
+ "PANOSEDL(val.ListName == obj.ListName)": ec
+ }
+ })
+
+
+def edl_compare_command():
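+    """
+    Compares the internal list with the external file and warns about values unique to either side
+    """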
+ list_name = demisto.args().get('list_name')
+ file_path = demisto.args().get('file_path')
+ if DOCUMENT_ROOT:
+ file_path = os.path.join(DOCUMENT_ROOT, file_path)
+
+ dict_of_lists = demisto.getIntegrationContext()
+ list_data = dict_of_lists.get(list_name, None)
+ if not list_data:
+ demisto.results({
+ 'Type': 11,
+ 'Contents': 'List was not found in instance context.',
+ 'ContentsFormat': formats['text']
+ })
+ sys.exit(0)
+
+ file_data = edl_get_external_file(file_path)
+ if not file_data:
+ demisto.results({
+ 'Type': 11,
+            'Contents': 'File was not found on the external web server.',
+ 'ContentsFormat': formats['text']
+ })
+ sys.exit(0)
+
+ set_internal = set(list_data)
+ set_external = set(file_data.split('\n'))
+ set_external.discard('')
+
+ unique_internal = set_internal - set_external
+ unique_external = set_external - set_internal
+
+ md = ''
+ if unique_external:
+        md += '### Warning: The external file contains values which are not in the internal Demisto list.\n'
+        md += '#### Please check who has write permissions to the external file.\n'
+ md += tableToMarkdown('', list(unique_external),
+ headers=[file_path.rsplit('/')[-1]])
+ if unique_internal:
+        md += '### Warning: The internal list has values which are not in the external file.\n'
+        md += '#### Please check who has write permissions to the external file.\n'
+ md += tableToMarkdown('', list(unique_internal), headers=[list_name])
+ if len(md) == 0:
+        md = 'The internal list and the external file have the same values.'
+
+ demisto.results({
+ 'Type': 11 if unique_external or unique_internal else entryTypes['note'],
+ 'Contents': md,
+ 'ContentsFormat': formats['markdown'],
+ })
+
+
+def edl_get_external_file_metadata_command():
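+    """
+    Prints the name, size, number of lines, and last-modified time of the external file
+    """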
+ file_path = demisto.args().get('file_path')
+ if DOCUMENT_ROOT:
+ file_path = os.path.join(DOCUMENT_ROOT, file_path)
+
+ result = ssh_execute(f'stat {file_path}')
+
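+    # Parse the "Size:" and "Change:" fields of the stat output for the file size and last-modified time.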
+ file_size = int(result.split("Size: ", 1)[1].split(" ", 1)[0])
+ file_name = file_path.split("/")[-1]
+    if len(file_name) == 0:
+ file_name = file_path
+ last_modified_parts = result.split("Change: ", 1)[1].split(" ", 2)[0:2]
+ last_modified = ' '.join(last_modified_parts)
+
+ number_of_lines = int(ssh_execute(f'wc -l < {file_path}')) + 1
+
+ metadata_outputs = {
+ 'FileName': file_name,
+ 'Size': file_size,
+ 'LastModified': last_modified,
+ 'NumberOfLines': number_of_lines
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': result,
+ 'ContentsFormat': formats['text'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('File metadata:', metadata_outputs,
+ ['FileName', 'Size', 'NumberOfLines', 'LastModified'], removeNull=True),
+ 'EntryContext': {"PANOSEDL(val.FileName == obj.FileName)": metadata_outputs}
+ })
+
+
+''' EXECUTION '''
+
+
+def main():
+ LOG('command is %s' % (demisto.command(),))
+ try:
+ if demisto.command() == 'test-module':
+ ssh_execute('echo 1')
+ demisto.results('ok')
+
+ elif demisto.command() == 'pan-os-edl-get-external-file':
+ edl_get_external_file_command()
+
+ elif demisto.command() == 'pan-os-edl-search-external-file':
+ edl_search_external_file_command()
+
+ elif demisto.command() == 'pan-os-edl-update':
+ edl_update()
+
+ elif demisto.command() == 'pan-os-edl-update-from-external-file':
+ edl_update_from_external_file_command()
+
+ elif demisto.command() == 'pan-os-edl-delete-external-file':
+ edl_delete_external_file_command()
+
+ elif demisto.command() == 'pan-os-edl-list-internal-lists':
+ edl_list_internal_lists_command()
+
+ elif demisto.command() == 'pan-os-edl-search-internal-list':
+ edl_search_internal_list_command()
+
+ elif demisto.command() == 'pan-os-edl-print-internal-list':
+ edl_print_internal_list_command()
+
+ elif demisto.command() == 'pan-os-edl-dump-internal-list':
+ edl_dump_internal_list_command()
+
+ elif demisto.command() == 'pan-os-edl-compare':
+ edl_compare_command()
+
+ elif demisto.command() == 'pan-os-edl-get-external-file-metadata':
+ edl_get_external_file_metadata_command()
+
+ else:
+ return_error('Unrecognized command: ' + demisto.command())
+
+ except Exception as ex:
+ if str(ex).find('warning') != -1:
+ LOG(str(ex))
+ else:
+ return_error(str(ex))
+
+ finally:
+        # shutil.rmtree only removes directories; use os.remove for the temporary certificate file
+        if os.path.exists(CERTIFICATE_FILE.name):
+            os.remove(CERTIFICATE_FILE.name)
+ LOG.print_log()
+
+
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/PaloAltoNetworks_PAN_OS_EDL_Management.yml b/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/PaloAltoNetworks_PAN_OS_EDL_Management.yml
new file mode 100644
index 000000000000..4cfa7870f555
--- /dev/null
+++ b/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/PaloAltoNetworks_PAN_OS_EDL_Management.yml
@@ -0,0 +1,272 @@
+category: Utilities
+commonfields:
+ id: palo_alto_networks_pan_os_edl_management
+ version: -1
+configuration:
+- display: Hostname or IP of server
+ name: hostname
+ required: true
+ type: 0
+- display: Server port
+ name: port
+ required: false
+ type: 0
+- display: SSH credentials to server (username and certificate)
+ name: Authentication
+ required: true
+ type: 9
+- display: SSH extra parameters (e.g., "-c ChaCha20")
+ name: ssh_extra_params
+ required: false
+ type: 0
+- display: SCP extra parameters (e.g., "-c ChaCha20 -l 8000")
+ name: scp_extra_params
+ required: false
+ type: 0
+- display: Document root (e.g., var/www/html/files)
+ name: document_root
+ required: false
+ type: 0
+description: This integration enables you to manage and edit files located on a remote
+  web server via SSH, using the integration context as the single source of truth.
+display: Palo Alto Networks PAN-OS EDL Management
+name: palo_alto_networks_pan_os_edl_management
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Unique path to the file on a remote server.
+ isArray: false
+ name: file_path
+ required: true
+ secret: false
+ deprecated: false
+  description: Displays the contents of the specified remote file in the War Room.
+ execution: false
+ name: pan-os-edl-get-external-file
+ - arguments:
+ - default: false
+ description: Unique path to the file on a remote server.
+ isArray: false
+ name: file_path
+ required: true
+ secret: false
+ - default: false
+ description: String to search for in the remote file.
+ isArray: false
+ name: search_string
+ required: true
+ secret: false
+ deprecated: false
+ description: Searches for a string in a remote file.
+ execution: false
+ name: pan-os-edl-search-external-file
+ - arguments:
+ - default: false
+ description: List from the instance context with which to override the remote
+ file.
+ isArray: false
+ name: list_name
+ required: true
+ secret: false
+ - default: false
+    description: Unique path to the file on a remote server.
+ isArray: false
+ name: file_path
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Prints the updated remote file to the War Room. Default is "false".
+ isArray: false
+ name: verbose
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: List items.
+ isArray: true
+ name: list_items
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: add
+    description: Whether to add items to, or remove items from, the list. Default is "add".
+ isArray: false
+ name: add_or_remove
+ predefined:
+ - add
+ - remove
+ required: true
+ secret: false
+ deprecated: false
+  description: Updates the instance context with the specified list name and list
+    items, and then overrides the remote file with the internal list.
+ execution: true
+ name: pan-os-edl-update
+ - arguments:
+ - default: false
+ description: Unique path to the file on a remote server.
+ isArray: false
+ name: file_path
+ required: true
+ secret: false
+ - default: false
+ description: List name.
+ isArray: false
+ name: list_name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: merge
+    description: Update type. "Merge" adds non-duplicate values from the file, "Override"
+      replaces the internal list with the file contents. Default is "merge".
+ isArray: false
+ name: type
+ predefined:
+ - merge
+ - override
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Prints the updated internal list to the War Room. Default is "false".
+ isArray: false
+ name: verbose
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Updates internal list data with the contents of a remote file.
+ execution: true
+ name: pan-os-edl-update-from-external-file
+ - arguments:
+ - default: false
+ description: Unique path to the file on a remote server.
+ isArray: false
+ name: file_path
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes a file from a remote server.
+ execution: true
+ name: pan-os-edl-delete-external-file
+ - arguments:
+ - default: false
+ description: List name.
+ isArray: false
+ name: list_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Displays internal list data in the War Room.
+ execution: false
+ name: pan-os-edl-print-internal-list
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: file
+ description: List data destination. Default is "file".
+ isArray: false
+ name: destination
+ predefined:
+ - file
+ - incident_context
+ required: true
+ secret: false
+ - default: false
+ description: List name.
+ isArray: false
+ name: list_name
+ required: true
+ secret: false
+ deprecated: false
+  description: Dumps (copies) an instance context list to either the incident context
+    or a file.
+ execution: false
+ name: pan-os-edl-dump-internal-list
+ outputs:
+ - contextPath: PANOSEDL.ListItems
+ description: Items of the internal list.
+ type: string
+ - contextPath: PANOSEDL.ListName
+ description: Name of the internal list.
+ type: string
+ - deprecated: false
+ description: Displays instance context list names.
+ execution: false
+ name: pan-os-edl-list-internal-lists
+ - arguments:
+ - default: false
+    description: List name.
+ isArray: false
+ name: list_name
+ required: true
+ secret: false
+ - default: false
+ description: String to search for in the remote file.
+ isArray: false
+ name: search_string
+ required: true
+ secret: false
+ deprecated: false
+  description: Searches for a string in the internal list.
+ execution: false
+ name: pan-os-edl-search-internal-list
+ - arguments:
+ - default: false
+ description: List name.
+ isArray: false
+ name: list_name
+ required: true
+ secret: false
+ - default: false
+ description: Unique path to the file on a remote server.
+ isArray: false
+ name: file_path
+ required: true
+ secret: false
+ deprecated: false
+ description: Compares internal list and external file contents.
+ execution: false
+ name: pan-os-edl-compare
+ - arguments:
+ - default: false
+ description: Unique path to the file on a remote server.
+ isArray: false
+ name: file_path
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets metadata for an external file.
+ execution: false
+ name: pan-os-edl-get-external-file-metadata
+ outputs:
+ - contextPath: PANOSEDL.FileName
+ description: Name of the external file.
+ type: String
+ - contextPath: PANOSEDL.Size
+ description: File size.
+ type: Number
+ - contextPath: PANOSEDL.NumberOfLines
+ description: Number of lines.
+ type: Number
+ - contextPath: PANOSEDL.LastModified
+ description: Date that the file was last modified.
+ type: String
+ dockerimage: demisto/openssh:1.0.0.305
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- PAN OS EDL Management - Test
diff --git a/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/PaloAltoNetworks_PAN_OS_EDL_Management_description.md b/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/PaloAltoNetworks_PAN_OS_EDL_Management_description.md
new file mode 100644
index 000000000000..eac7bbe9ab34
--- /dev/null
+++ b/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/PaloAltoNetworks_PAN_OS_EDL_Management_description.md
@@ -0,0 +1,8 @@
+ ## Set Up a Remote Web Server
+ To use the Palo Alto Networks PAN-OS EDL Management integration, you need to set up a remote web server.
+ 1. Set up a remote server with Apache.
+ 2. Generate an SSH key pair and copy the public key to the Apache server (see the example below).
+ 3. Save the private SSH key in Demisto Credentials.
+ 4. To verify the location of the document root where the files are stored, run the command for your operating system:
+ - **CentOS**: `httpd -S`
+ - **Ubuntu**: `apache2 -S`
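+
+ For example, a minimal key setup might look like this (the key file name, user, and host below are placeholders):
+ ```bash
+ # generate a passphrase-less key pair and install the public key on the web server
+ ssh-keygen -t rsa -b 4096 -f edl_rsa -N ''
+ ssh-copy-id -i edl_rsa.pub webadmin@edl-server.example.com
+ ```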
diff --git a/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/PaloAltoNetworks_PAN_OS_EDL_Management_image.png b/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/PaloAltoNetworks_PAN_OS_EDL_Management_image.png
new file mode 100644
index 000000000000..ff44f2d6baac
Binary files /dev/null and b/Integrations/PaloAltoNetworks_PAN_OS_EDL_Management/PaloAltoNetworks_PAN_OS_EDL_Management_image.png differ
diff --git a/Integrations/PaloAltoNetworks_Traps/CHANGELOG.md b/Integrations/PaloAltoNetworks_Traps/CHANGELOG.md
new file mode 100644
index 000000000000..558f7c152799
--- /dev/null
+++ b/Integrations/PaloAltoNetworks_Traps/CHANGELOG.md
@@ -0,0 +1,17 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+Updated integration category to *Endpoint*.
+
+
+## [19.10.0] - 2019-10-03
+#### New Integration
+Endpoint protection and response stops threats on endpoints and coordinates enforcement with network and cloud security to prevent successful cyberattacks.
+The integration enables the following abilities:
+ - Initiate scans.
+ - Retrieve files from events.
+ - Isolate endpoints.
+ - Quarantine files.
+ - Add and remove hashes from the blacklist.
+ - Get endpoint information.
diff --git a/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps.py b/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps.py
new file mode 100644
index 000000000000..924ade473aba
--- /dev/null
+++ b/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps.py
@@ -0,0 +1,548 @@
+''' IMPORTS '''
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+import json
+import requests
+import copy
+import jwt
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+PARAMS = demisto.params()
+# Remove trailing slash to prevent wrong URL path to service
+SERVER = PARAMS['url'][:-1] \
+ if (PARAMS['url'] and PARAMS['url'].endswith('/')) else PARAMS['url']
+# Should we use SSL
+USE_SSL = not PARAMS.get('insecure', False)
+# Service base URL
+BASE_URL = SERVER + '/xapi/v1/'
+APPLICATION_ID = PARAMS.get('application_id')
+PRIVATE_KEY = PARAMS.get('private_key')
+# Headers to be sent in requests
+REQUEST_HEADERS = {
+ 'Content-Type': 'application/json'
+}
+
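+# Maps each command's context output keys to the field names in the raw API response;
+# consumed by parse_data_from_response() below.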
+OUTPUTS = {
+ 'get_endpoint_by_id': {
+ 'ID': 'guid',
+ 'Name': 'name',
+ 'Domain': 'domain',
+ 'Platform': 'platform',
+ 'Status': 'status',
+ 'IP': 'ip',
+ 'ComputerSid': 'computerSid',
+ 'IsCompromised': 'compromised',
+ 'OsVersion': 'osVersion',
+ 'OsProductType': 'osProductType',
+ 'OsProductName': 'osProductName',
+ 'Is64': 'is64',
+ 'LastSeen': 'lastSeen',
+ 'LastUser': 'lastUser'
+ },
+ 'endpoint_files_retrieve': {
+ 'OperationID': 'operationId'
+ },
+ 'endpoint_isolate': {
+ 'OperationID': 'operationId'
+ },
+ 'endpoint_scan': {
+ 'OperationID': 'operationId'
+ },
+ 'event_bulk_update_status': {
+ 'EventID': 'eventGuid'
+ },
+ 'hashes_blacklist_status': {
+ 'SHA256': 'hash',
+ 'BlacklistStatus': 'status'
+ },
+ 'event_quarantine_result': {
+ 'SHA256': 'fileHash',
+ 'FilePath': 'filePath'
+ },
+ 'endpoint_scan_result': {
+ 'FileScanned': 'filesScanned',
+ 'FilesFailed': 'filesFailed',
+ 'MalwareFound': 'malwareFound'
+ }
+}
+
+
+''' HELPER FUNCTIONS '''
+
+
+def create_headers(with_auth):
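+    # Build the request headers; when with_auth is set, attach a JWT bearer token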
+ headers = copy.deepcopy(REQUEST_HEADERS)
+ if with_auth:
+ token = generate_auth_token().decode('utf-8')
+ headers['Authorization'] = f'Bearer {token}'
+ return headers
+
+
+def http_request(method, url_suffix, plain_url=False, params=None, data=None, operation_err=None, parse_response=True,
+ with_auth=True):
+    # A wrapper around the requests library that builds the URL, attaches headers, and validates responses
+ try:
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix if not plain_url else url_suffix,
+ verify=USE_SSL,
+ params=params,
+ data=json.dumps(data) if data else data,
+ headers=create_headers(with_auth),
+ )
+ except requests.exceptions.ConnectionError:
+        return_error('Error connecting to Traps server. Please check your connection and your server address.')
+ if parse_response:
+ res = extract_and_validate_http_response(res, operation_err, plain_url)
+ return res
+
+
+def extract_and_validate_http_response(resp, operation_err_message, test=False):
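+    # Raise on HTTP error statuses; try to extract an error message from the JSON body first,
+    # then fall back to parsing an XML error body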
+ try:
+ resp.raise_for_status()
+ return resp.json() if not test else resp.content
+ except requests.exceptions.HTTPError:
+ try:
+ err_message = resp.json().get('message')
+ except Exception:
+ try:
+ err_obj = json.loads(xml2json(resp.text))
+ err_message = demisto.get(err_obj, 'Error.Message')
+ except Exception:
+                err_message = 'Could not parse error'
+ return_error(f'{operation_err_message}: \n{err_message}')
+
+
+def health_check():
+ path = f'{SERVER}/xapi/health-check'
+ server_status = http_request('GET', path, plain_url=True).decode('utf-8')
+ if server_status == '"Ok"':
+ return
+ else:
+ return_error(f'Server health-check failed. Status returned was: {server_status}')
+
+
+def generate_auth_token():
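+    # Sign a JWT containing the application ID with the RS256 private key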
+ key = PRIVATE_KEY
+ data = {'appId': APPLICATION_ID}
+ token = jwt.encode(data, key, algorithm='RS256')
+ return token
+
+
+def parse_data_from_response(resp_obj, operation_name=None):
+ new_data_obj = {} # type: dict
+ outputs_obj = OUTPUTS[operation_name]
+ for key, val in outputs_obj.items():
+ new_data_obj[key] = resp_obj.get(val)
+
+ return new_data_obj
+
+
+def get_endpoint_by_id(endpoint_id):
+ path = f'agents/{endpoint_id}'
+ endpoint_data = http_request('GET', path, operation_err=f'Get endpoint {endpoint_id} failed')
+ return parse_data_from_response(endpoint_data, 'get_endpoint_by_id'), endpoint_data
+
+
+def endpoint_files_retrieve(endpoint_id, file_name, event_id):
+ path = f'agents/{endpoint_id}/files-retrieve'
+ data = {
+ 'incidentId': event_id,
+ 'files': [
+ {
+ "path": file_name
+ }
+ ]
+ }
+ resp = http_request('POST', path, data=data,
+ operation_err=f'Files retrieve command on endpoint {endpoint_id} failed')
+ operation_obj = parse_data_from_response(resp, 'endpoint_files_retrieve')
+ operation_obj.update({
+ 'EndpointID': endpoint_id,
+ 'Type': 'files-retrieve'
+ })
+ return operation_obj
+
+
+def endpoint_scan(endpoint_id):
+ path = f'agents/{endpoint_id}/scan'
+ resp = http_request('POST', path, operation_err=f'Scanning endpoint: {endpoint_id} failed')
+ operation_obj = parse_data_from_response(resp, 'endpoint_scan')
+ operation_obj.update({
+ 'EndpointID': endpoint_id,
+ 'Type': 'endpoint-scan'
+ })
+ return operation_obj
+
+
+def endpoint_scan_result(operation_id):
+ status, additional_data = sam_operation(operation_id, f'Could not get scan results')
+ scan_data = parse_data_from_response(additional_data.get('scanData'),
+ 'endpoint_scan_result') if additional_data else {}
+ scan_data['Status'] = status
+ scan_data['OperationID'] = operation_id
+ return scan_data
+
+
+def update_event_status(event_ids, status):
+ path = f'events/status'
+ data = {
+ "guids": event_ids,
+ "status": status
+ }
+ resp = http_request('PATCH', path, data=data, operation_err=f'Update events {event_ids} status failed')
+ return resp
+
+
+def update_event_comment(event_id, comment):
+ path = f'events/{event_id}/comment'
+ data = {
+ "comment": comment
+ }
+ http_request('POST', path, data=data, operation_err=f'Update event: {event_id} comment failed')
+ return
+
+
+def event_update_status_and_command(event_id, status, comment):
+ if not status and not comment:
+ return_error('Please add a status or a comment. Neither was given')
+ if status:
+ resp = update_event_status([event_id], status)
+ if resp.get('failed'):
+ return_error(f'Update status for event: {event_id} has failed')
+ if comment:
+ update_event_comment(event_id, comment)
+ return
+
+
+def event_bulk_update_status(event_ids, status):
+ ids_obj = update_event_status(event_ids, status)
+    # Note: this parsing could be extracted into a separate helper function
+ results = {
+ 'UpdateSuccess': list(
+ map(lambda id_obj: parse_data_from_response(id_obj, 'event_bulk_update_status'), ids_obj.get('succeeded'))),
+ 'UpdateFail': list(
+ map(lambda id_obj: parse_data_from_response(id_obj, 'event_bulk_update_status'), ids_obj.get('failed'))),
+ 'UpdateIgnored': list(
+ map(lambda id_obj: parse_data_from_response(id_obj, 'event_bulk_update_status'), ids_obj.get('ignored')))
+ }
+ return results
+
+
+def hash_blacklist(hash_id):
+ path = f'hashes/{hash_id}/blacklist'
+ result = http_request('POST', path, operation_err=f'Failed to blacklist {hash_id}')
+ return result.get('status')
+
+
+def remove_hash_from_blacklist(hash_id):
+ path = f'hashes/{hash_id}/blacklist-remove'
+ result = http_request('POST', path, operation_err=f'Failed to remove {hash_id} from blacklist')
+ return result.get('status')
+
+
+# TODO: Check whether an error message is needed here.
+def hashes_blacklist_status(hash_ids):
+ path = f'hashes/blacklist-status'
+ data = {
+ 'hashes': hash_ids
+ }
+ ids_obj = http_request('POST', path, data=data, operation_err='Failed to get hashes status')
+ result = list(map(lambda id_obj: parse_data_from_response(id_obj, 'hashes_blacklist_status'), ids_obj))
+ return result
+
+
+def event_quarantine(event_id):
+ path = f'events/{event_id}/quarantine'
+ resp = http_request('POST', path, operation_err=f'Quarantine event {event_id} failed')
+ message_ids = resp.get('operationId').get('samMessageIds')
+ operations = []
+ for op_id in message_ids:
+ operations.append({
+ 'EventID': event_id,
+ 'Type': 'event-quarantine',
+ 'OperationID': op_id
+ })
+ return operations
+
+
+def endpoint_isolate(endpoint_id):
+ path = f'agents/{endpoint_id}/isolate'
+ resp = http_request('POST', path, operation_err=f'Isolation of endpoint: {endpoint_id} failed')
+ operation_obj = parse_data_from_response(resp, 'endpoint_isolate')
+ operation_obj.update({
+ 'EndpointID': endpoint_id,
+ 'Type': 'endpoint-isolate'
+ })
+ return operation_obj
+
+
+def sam_operation(operation_id, operation_err):
+ """
+ This functions invokes an API call to the sam operation endpoint on Traps server to get the operation status and/or
+ results.
+ :param operation_id: the operation on which to get the status/results
+ :param operation_err: The error to return in case of a failure (changes according to the command fired.)
+ :return:
+ status: the status of the operation.
+ additional_data: additional data regarding the operation (like scan results)
+ """
+ path = f'sam/operations/{operation_id}'
+ result = http_request('GET', path, operation_err=operation_err)
+ if result.get('summaryData').get('incompatible'):
+ return_error(f'{operation_err} incompatible operation')
+ if result.get('summaryData').get('samExists'):
+ return 'ignored', None
+ for status_obj in result.get('statuses'):
+ if status_obj.get('count') > 0:
+ return status_obj.get('status'), result.get('additionalData')
+ return_error(f'{operation_err}: Could not retrieve status')
+
+
+def endpoint_isolate_status(operation_id):
+ status, _ = sam_operation(operation_id, f'Could not get endpoint isolate status')
+ return {'Status': status, 'OperationID': operation_id}
+
+
+def event_quarantine_result(operation_id):
+ status, additional_data = sam_operation(operation_id, f'Could not get event quarantine status')
+ quarantine_data = parse_data_from_response(additional_data.get('quarantineData'),
+ 'event_quarantine_result') if additional_data else {}
+ quarantine_data['Status'] = status
+ quarantine_data['OperationID'] = operation_id
+ return quarantine_data
+
+
+def endpoint_files_retrieve_result(operation_id):
+ status, additional_data = sam_operation(operation_id, f'Failed to get file retrieve results')
+ if status == 'finished':
+ file_info = additional_data.get('uploadData')
+ file_name = file_info.get('fileName')
+ url = file_info.get('downloadUrl')
+ data = http_request('GET', url, plain_url=True, operation_err=f'Unable to download file.', with_auth=False)
+ demisto.results(fileResult(filename=file_name, data=data))
+ return {'Status': status, 'OperationID': operation_id}
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module_command():
+ health_check()
+ res = http_request('GET', 'agents/1', parse_response=False)
+ if res.status_code == 403:
+        return_error('Error connecting to server. Check your Application ID and Private Key.')
+ return
+
+
+def get_endpoint_by_id_command():
+ args = demisto.args()
+ endpoint_id = args.get('endpoint_id')
+ endpoint_data, raw_data = get_endpoint_by_id(endpoint_id)
+ md = tableToMarkdown(f'Endpoint {endpoint_id} data:', endpoint_data, headerTransform=pascalToSpace)
+ context = {'Traps.Endpoint(val.ID == obj.ID)': createContext(endpoint_data)}
+ return_outputs(md, context, raw_response=raw_data)
+
+
+def endpoint_files_retrieve_command():
+ args = demisto.args()
+ endpoint_id = args.get('endpoint_id')
+ file_name = args.get('file_name')
+ event_id = args.get('event_id')
+ operation_obj = endpoint_files_retrieve(endpoint_id, file_name, event_id)
+ md = tableToMarkdown(f'Files retrieve command on endpoint: {endpoint_id} received', operation_obj,
+ headerTransform=pascalToSpace)
+ context = {'Traps.FileRetrieve(val.OperationID == obj.OperationID)': operation_obj}
+ return_outputs(md, context)
+
+
+def endpoint_files_retrieve_result_command():
+ args = demisto.args()
+ operation_id = args.get('operation_id')
+ status_obj = endpoint_files_retrieve_result(operation_id)
+ md = f'### File retrieval status is: {status_obj.get("Status")}'
+ context = {'Traps.FileRetrieveResult(val.OperationID == obj.OperationID)': status_obj}
+ return_outputs(md, context)
+
+
+def endpoint_scan_command():
+ args = demisto.args()
+ endpoint_id = args.get('endpoint_id')
+ operation_obj = endpoint_scan(endpoint_id)
+ md = tableToMarkdown(f'Scan command on endpoint: {endpoint_id} received', operation_obj,
+ headerTransform=pascalToSpace)
+ context = {'Traps.Scan(val.OperationID == obj.OperationID)': operation_obj}
+ return_outputs(md, context)
+
+
+def endpoint_scan_result_command():
+ args = demisto.args()
+ operation_id = args.get('operation_id')
+ status_obj = endpoint_scan_result(operation_id)
+ context = {f'Traps.ScanResult(val.OperationID == obj.OperationID)': status_obj}
+ md = tableToMarkdown(f'Status of scan operation: {operation_id}', status_obj, headerTransform=pascalToSpace)
+ return_outputs(md, context)
+
+
+def event_update_command():
+ args = demisto.args()
+ event_id = args.get('event_id')
+ status = args.get('status')
+ comment = args.get('comment')
+ event_update_status_and_command(event_id, status, comment)
+ md = f'### Event: {event_id} was updated'
+ md += f'\n##### New status: {status}' if status else ''
+ md += f'\n##### New comment: {comment}' if comment else ''
+ return_outputs(md, None)
+
+
+def event_bulk_update_status_command():
+ args = demisto.args()
+ event_ids = argToList(args.get('event_ids'))
+ status = args.get('status')
+ results = event_bulk_update_status(event_ids, status)
+ md = tableToMarkdown('Successfully updated', results.get('UpdateSuccess'), headerTransform=pascalToSpace)
+ md += tableToMarkdown('Failed to update', results.get('UpdateFail'), headerTransform=pascalToSpace)
+ md += tableToMarkdown('Ignored', results.get('UpdateIgnored'), headerTransform=pascalToSpace)
+ return_outputs(md, {})
+
+
+def event_quarantine_command():
+ args = demisto.args()
+ event_id = args.get('event_id')
+ operations = event_quarantine(event_id)
+ md = tableToMarkdown(f'Quarantine command on event: {event_id} received', operations,
+ headerTransform=pascalToSpace)
+ context = {'Traps.Quarantine(val.OperationID == obj.OperationID)': operations}
+ return_outputs(md, context)
+
+
+def event_quarantine_result_command():
+ args = demisto.args()
+ operation_id = args.get('operation_id')
+ status_obj = event_quarantine_result(operation_id)
+ context = {f'Traps.QuarantineResult(val.OperationID == obj.OperationID)': status_obj}
+ md = tableToMarkdown(f'Status of quarantine operation: {operation_id}', status_obj, headerTransform=pascalToSpace)
+ return_outputs(md, context)
+
+
+def hash_blacklist_command():
+ args = demisto.args()
+ hash_id = args.get('hash_id')
+ status = hash_blacklist(hash_id)
+ context = {} # type: dict
+ if status == 'success':
+ md = f'#### Successfully blacklisted: {hash_id}'
+ status_obj = {
+ 'SHA256': hash_id,
+ 'BlacklistStatus': 'blacklisted'
+ }
+ context = {'Traps.File(val.SHA256 == obj.SHA256)': status_obj}
+
+ elif status == 'ignore':
+ md = f'#### Hash: {hash_id} already appears in blacklist'
+ else:
+ md = f'#### Failed to blacklist: {hash_id}'
+ return_outputs(md, context)
+
+
+def hash_blacklist_remove_command():
+ args = demisto.args()
+ hash_id = args.get('hash_id')
+ status = remove_hash_from_blacklist(hash_id)
+ context = {} # type: dict
+ if status == 'success':
+ md = f'#### Successfully removed {hash_id} from blacklist'
+ status_obj = {
+ 'SHA256': hash_id,
+ 'BlacklistStatus': 'none'
+ }
+ context = {'Traps.File(val.SHA256 == obj.SHA256)': status_obj}
+ else:
+ md = f'#### Failed to remove {hash_id} from blacklist:'
+
+ return_outputs(md, context)
+
+
+def hashes_blacklist_status_command():
+ args = demisto.args()
+ hash_ids = args.get('hash_ids').split(',')
+ ids_obj = hashes_blacklist_status(hash_ids)
+ md = tableToMarkdown('Hashes status:', ids_obj, headerTransform=pascalToSpace)
+ context = {'Traps.File(val.SHA256 == obj.SHA256)': ids_obj}
+ return_outputs(md, context)
+
+
+def endpoint_isolate_command():
+ args = demisto.args()
+ endpoint_id = args.get('endpoint_id')
+ operation_obj = endpoint_isolate(endpoint_id)
+ md = tableToMarkdown(f'Isolate command on endpoint {endpoint_id} received', operation_obj,
+ headerTransform=pascalToSpace)
+ context = {'Traps.Isolate(val.OperationID == obj.OperationID)': operation_obj}
+ return_outputs(md, context)
+
+
+def endpoint_isolate_status_command():
+ args = demisto.args()
+ operation_id = args.get('operation_id')
+ isolate_status = endpoint_isolate_status(operation_id)
+ md = f'### Isolate status is: {isolate_status.get("Status")}'
+ context = {f'Traps.IsolateResult(val.OperationID == obj.OperationID)': isolate_status}
+ return_outputs(md, context)
+
+
+def main():
+ # Remove proxy if not set to true in params
+ handle_proxy()
+
+ ''' COMMANDS MANAGER / SWITCH PANEL '''
+
+ LOG('Command being called is %s' % (demisto.command()))
+
+ try:
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module_command()
+ demisto.results('ok')
+ elif demisto.command() == 'traps-get-endpoint-by-id':
+ get_endpoint_by_id_command()
+ elif demisto.command() == 'traps-endpoint-files-retrieve':
+ endpoint_files_retrieve_command()
+ elif demisto.command() == 'traps-endpoint-files-retrieve-result':
+ endpoint_files_retrieve_result_command()
+ elif demisto.command() == 'traps-endpoint-scan':
+ endpoint_scan_command()
+ elif demisto.command() == 'traps-endpoint-scan-result':
+ endpoint_scan_result_command()
+ elif demisto.command() == 'traps-event-update':
+ event_update_command()
+ elif demisto.command() == 'traps-event-bulk-update-status':
+ event_bulk_update_status_command()
+ elif demisto.command() == 'traps-hash-blacklist':
+ hash_blacklist_command()
+ elif demisto.command() == 'traps-hash-blacklist-remove':
+ hash_blacklist_remove_command()
+ elif demisto.command() == 'traps-hashes-blacklist-status':
+ hashes_blacklist_status_command()
+ elif demisto.command() == 'traps-event-quarantine':
+ event_quarantine_command()
+ elif demisto.command() == 'traps-event-quarantine-result':
+ event_quarantine_result_command()
+ elif demisto.command() == 'traps-endpoint-isolate':
+ endpoint_isolate_command()
+ elif demisto.command() == 'traps-endpoint-isolate-status':
+ endpoint_isolate_status_command()
+ # Log exceptions
+ except Exception as e:
+ return_error(e)
+
+
+if __name__ in ["__builtin__", "builtins", "__main__"]:
+ main()
diff --git a/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps.yml b/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps.yml
new file mode 100644
index 000000000000..e4cbcfec73c8
--- /dev/null
+++ b/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps.yml
@@ -0,0 +1,387 @@
+category: Endpoint
+commonfields:
+ id: Traps
+ version: -1
+configuration:
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Server URL
+ name: url
+ required: true
+ type: 0
+- display: Application ID
+ name: application_id
+ required: true
+ type: 0
+- display: Private Key
+ name: private_key
+ required: true
+ type: 14
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: |-
+ Endpoint protection and response stops threats on endpoints and coordinates enforcement with network and cloud security to prevent successful cyberattacks.
+ The integration enables the following abilities:
+ - Initiate scans.
+ - Retrieve files from events.
+ - Isolate endpoints.
+ - Quarantine files.
+  - Add and remove hashes from the blacklist.
+  - Get endpoint information.
+display: Palo Alto Networks Traps
+name: Traps
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Endpoint ID.
+ isArray: true
+ name: endpoint_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns details for the specified endpoint.
+ execution: false
+ name: traps-get-endpoint-by-id
+ outputs:
+ - contextPath: Traps.Endpoint.ID
+ description: The ID of the endpoint.
+ type: String
+ - contextPath: Traps.Endpoint.Name
+ description: The name of the endpoint.
+ type: String
+ - contextPath: Traps.Endpoint.Domain
+ description: The domain of the endpoint.
+      type: String
+ - contextPath: Traps.Endpoint.Platform
+ description: The OS of the endpoint.
+ type: String
+ - contextPath: Traps.Endpoint.Status
+ description: The status of the endpoint.
+ type: String
+ - contextPath: Traps.Endpoint.IP
+ description: The IP address of the endpoint.
+ type: String
+ - contextPath: Traps.Endpoint.ComputerSid
+ description: The computer SID.
+ type: String
+ - contextPath: Traps.Endpoint.IsCompromised
+ description: Whether the endpoint is compromised.
+ type: String
+ - contextPath: Traps.Endpoint.OsVersion
+ description: The version of the OS on the endpoint.
+ type: String
+ - contextPath: Traps.Endpoint.OsProductType
+ description: The OS type of the endpoint.
+ type: String
+ - contextPath: Traps.Endpoint.OsProductName
+ description: The name of the OS on the endpoint.
+ type: String
+ - contextPath: Traps.Endpoint.Is64
+ description: The bitness of the OS on the endpoint.
+ type: String
+ - contextPath: Traps.Endpoint.LastSeen
+ description: The date/time of the last active ping.
+ type: String
+ - contextPath: Traps.Endpoint.LastUser
+ description: The last active user on the machine.
+ type: String
+ - arguments:
+ - default: false
+ description: The ID of the endpoint.
+ isArray: false
+ name: endpoint_id
+ required: true
+ secret: false
+ - default: false
+ description: The name of the file to retrieve (including path).
+ isArray: false
+ name: file_name
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the event.
+ isArray: false
+ name: event_id
+ required: true
+ secret: false
+ deprecated: false
+    description: Executes a file retrieve (SAM) operation on the specified agent.
+ execution: false
+ name: traps-endpoint-files-retrieve
+ outputs:
+ - contextPath: Traps.FileRetrieve.EndpointID
+ description: The ID of the endpoint.
+ type: String
+ - contextPath: Traps.FileRetrieve.OperationID
+ description: The ID of the operation.
+ type: String
+ - contextPath: Traps.FileRetrieve.Type
+ description: The type of operation.
+ type: String
+ - arguments:
+ - default: false
+ description: The ID of the endpoint.
+ isArray: false
+ name: endpoint_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Performs a scan operation on the specified endpoint.
+ execution: false
+ name: traps-endpoint-scan
+ outputs:
+ - contextPath: Traps.Scan.EndpointID
+ description: The ID of the endpoint.
+ type: String
+ - contextPath: Traps.Scan.OperationID
+ description: The ID of the operation.
+ type: String
+ - contextPath: Traps.Scan.Type
+ description: The type of operation.
+ type: String
+ - arguments:
+ - default: false
+ description: The ID of the event to modify.
+ isArray: false
+ name: event_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The new status for the event.
+ isArray: false
+ name: status
+ predefined:
+ - new
+ - investigating
+ - closed
+ required: false
+ secret: false
+ - default: false
+ description: A comment for the event.
+ isArray: false
+ name: comment
+ required: false
+ secret: false
+ deprecated: false
+ description: Modifies the status and adds a comment to an existing event.
+ execution: false
+ name: traps-event-update
+ - arguments:
+ - default: false
+ description: A comma-separated list of IDs for events to modify.
+ isArray: true
+ name: event_ids
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The new status for the event.
+ isArray: false
+ name: status
+ predefined:
+ - new
+ - investigating
+ - closed
+ required: true
+ secret: false
+ deprecated: false
+ description: Modifies the status of multiple events.
+ execution: false
+ name: traps-event-bulk-update-status
+ - arguments:
+ - default: false
+ description: The SHA256 hash to add to the blacklist.
+ isArray: false
+ name: hash_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds the specified file hash to the blacklist.
+ execution: false
+ name: traps-hash-blacklist
+ outputs:
+ - contextPath: Traps.File.BlacklistStatus
+ description: The status of the file hash ("blacklisted" or "none").
+ type: String
+ - contextPath: Traps.File.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - arguments:
+ - default: false
+ description: The SHA256 hash to remove from the blacklist.
+ isArray: false
+ name: hash_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes the specified file hash from the blacklist.
+ execution: false
+ name: traps-hash-blacklist-remove
+ outputs:
+ - contextPath: Traps.File.BlacklistStatus
+ description: The status of the file hash ("blacklisted" or "none").
+ type: String
+ - contextPath: Traps.File.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - arguments:
+ - default: false
+ description: A comma-separated list of SHA256 file hashes for which to return the
+ blacklist status.
+ isArray: true
+ name: hash_ids
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the blacklist status of the specified file hashes.
+ execution: false
+ name: traps-hashes-blacklist-status
+ outputs:
+ - contextPath: Traps.File.BlacklistStatus
+ description: The blacklist status of the file hash. Can be "blacklisted" or
+ "none".
+ type: String
+ - contextPath: Traps.File.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - arguments:
+ - default: false
+      description: The ID of the event for which to create a quarantine entry.
+ isArray: false
+ name: event_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Creates a quarantine entry for the specified event.
+ execution: false
+ name: traps-event-quarantine
+ outputs:
+ - contextPath: Traps.Quarantine.EventID
+ description: The ID of the event.
+ type: String
+ - contextPath: Traps.Quarantine.OperationID
+ description: The ID of the operation.
+ type: String
+ - contextPath: Traps.Quarantine.Type
+ description: The type of operation.
+ type: String
+ - arguments:
+ - default: false
+ description: The ID of the endpoint to isolate.
+ isArray: false
+ name: endpoint_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Isolates the specified endpoint.
+ execution: false
+ name: traps-endpoint-isolate
+ outputs:
+ - contextPath: Traps.Isolate.EndpointID
+ description: The ID of the endpoint.
+ type: String
+ - contextPath: Traps.Isolate.OperationID
+ description: The ID of the operation.
+ type: String
+ - contextPath: Traps.Isolate.Type
+ description: The type of operation.
+ type: String
+ - arguments:
+ - default: false
+ description: The ID of the operation for which to get the result of the quarantine
+ operation.
+ isArray: false
+ name: operation_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the result of the specified quarantine operation.
+ execution: false
+ name: traps-event-quarantine-result
+ outputs:
+ - contextPath: Traps.QuarantineResult.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - contextPath: Traps.QuarantineResult.FilePath
+ description: The file path on the endpoint.
+ type: String
+ - contextPath: Traps.QuarantineResult.OperationID
+ description: The ID of the operation.
+ type: String
+ - contextPath: Traps.QuarantineResult.Status
+ description: The status of the quarantine operation.
+ type: String
+ - arguments:
+ - default: false
+ description: The ID of the operation.
+ isArray: false
+ name: operation_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the status of the specified endpoint isolate operation.
+ execution: false
+ name: traps-endpoint-isolate-status
+ outputs:
+ - contextPath: Traps.IsolateResult.OperationID
+      description: The ID of the operation. Use this value to retrieve the operation status or results.
+ type: String
+ - contextPath: Traps.IsolateResult.Status
+ description: The status of the isolation operation.
+ type: String
+ - arguments:
+ - default: false
+ description: The ID of the operation.
+ isArray: false
+ name: operation_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the result of the endpoint file retrieve operation.
+ execution: false
+ name: traps-endpoint-files-retrieve-result
+ - arguments:
+ - default: false
+ description: The ID of the operation.
+ isArray: false
+ name: operation_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the results of an endpoint scan operation.
+ execution: false
+ name: traps-endpoint-scan-result
+ outputs:
+ - contextPath: Traps.ScanResult.FileScanned
+ description: The number of scanned files.
+ type: Number
+ - contextPath: Traps.ScanResult.FilesFailed
+ description: The number of files that were not scanned.
+ type: Number
+ - contextPath: Traps.ScanResult.MalwareFound
+ description: The number of detected malware.
+ type: Number
+ - contextPath: Traps.ScanResult.OperationID
+ description: The ID of the operation.
+ type: String
+ - contextPath: Traps.ScanResult.Status
+ description: The status of the scan.
+ type: String
+ dockerimage: demisto/pyjwt3:1.0.0.1284
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- Traps test
diff --git a/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps_description.md b/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps_description.md
new file mode 100644
index 000000000000..9b3ddb9aa7f3
--- /dev/null
+++ b/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps_description.md
@@ -0,0 +1,11 @@
+ ## Traps TMS
+ **Endpoint protection and response stops threats on endpoints and coordinates enforcement with network and cloud security to prevent successful cyberattacks.**
+
+ ### Integrations credentials
+ 1. To create your API key and client ID, go to the Traps TMS UI.
+ 2. Click the settings button and select **API Keys**.
+ 3. Click the **Add** button to create a new API key.
+ 4. Make sure to copy and save your API key, as you will not be able to restore it. When copying the API key, be sure to include the entire text.
+ 5. Paste the API key and client ID into the Traps integration configuration in Demisto.
+
+
\ No newline at end of file
diff --git a/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps_image.png b/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps_image.png
new file mode 100644
index 000000000000..ff44f2d6baac
Binary files /dev/null and b/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps_image.png differ
diff --git a/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps_test.py b/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps_test.py
new file mode 100644
index 000000000000..31902e9ccab7
--- /dev/null
+++ b/Integrations/PaloAltoNetworks_Traps/PaloAltoNetworks_Traps_test.py
@@ -0,0 +1,121 @@
+import demistomock as demisto
+
+SERVER_MOCK_URL = 'https://demisto.mock.mybrz.net/xapi/v1/'
+
+integration_params = {
+ "application_id": "bcab5b57-6ca4-43ee-a4c0-618a2246d4ac",
+ "insecure": True,
+ "private_key": "-----BEGIN PRIVATE KEY-----\nMIIBVAIBADANBgkqhkiG9w0BAQEFAASCAT4wggE6AgEAAkEAyf1wyfSTygQ/Ogl/\n"
+ "B9DfMIszhnV/TwlygafjvwzruekpHDnJUQ9u+A7BD8zLAnLOaWgL94ioGlUpAXBa\n"
+ "ewC/0wIDAQABAkEAo+egaoConDkuBS5HglQfiAis2uLlV4FXBZby28jkT4pNqs/J\n"
+ "7wv9iRAjxJvV/K/GCa6wPcHqn7dN3XT1QODeQQIhAPaYlDmmqq2O+uftBm5y3ALG\n"
+ "NvFWI7OeO3l/K/I2H8cLAiEA0bFj9GBxmJWCxjk1kWoSNY3fZO9KiOqd5467KUuR\n"
+ "x1kCIH3jfOBFnqKF+L9H+N2P05Oy/z+LWySKZhBrhNLdILHrAiBLb+7OpreXNgpi\n"
+ "94fe9XLxk0WP0UpWMVl3SXDprUcXmQIgEVMV4W44YHywdmSEpzSOI+3YTedfVQzq\n"
+ "a7AVPWWb4tU=\n-----END PRIVATE KEY-----",
+ "proxy": False,
+ "url": "https://demisto.mock.mybrz.net"
+}
+
+
+def test_create_headers(mocker):
+    mocker.patch.object(demisto, 'params', return_value=integration_params)
+    from PaloAltoNetworks_Traps import create_headers
+ headers = create_headers(True)
+ expect_headers = {
+ 'Content-Type': 'application/json',
+ 'Authorization': 'Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJhcHBJZCI6ImJj'
+ 'YWI1YjU3LTZjYTQtNDNlZS1hNGMwLTYxOGEyMjQ2ZDRhYyJ9.iM26FZt0FL6b7Eq95DMq'
+ 'hoNzBCS06dPfayZTaBzFElycBbR0BSyXhmkzudOPui5NCEtyvJ3YxkpZvLK8LuIRYA'
+ }
+ assert headers == expect_headers
+
+
+def test_parse_data_from_response(mocker):
+ mocker.patch.object(demisto, 'params', return_value=integration_params)
+ from PaloAltoNetworks_Traps import parse_data_from_response
+ resp_obj = {
+ 'guid': 'd3339851f18f470182bf2bf98ad5db4b',
+ 'name': 'EC2AMAZ-8IEUJEN',
+ 'domain': 'WORKGROUP',
+ 'platform': 'windows',
+ 'status': 'active',
+ 'scanStatus': 'success',
+ 'trapsVersion': '6.1.0.13046',
+        'contentVersion': '63-10484',
+        'ip': 'xxx.xx.xx.xxx',
+        'computerSid': 'S-1-5-21-202186053-2642234773-3690463397',
+        'installStatus': 'installed',
+        'installTime': '2019-09-05T10:51:35.000Z',
+        'distributionId': {'guid': 'afbf42010b6233624ffc20ca95d51ff3'},
+        'compromised': False,
+        'alias': None,
+ 'osVersion': '10.0.14393',
+ 'osProductType': 'server',
+ 'osProductName': '',
+ 'is64': True,
+ 'lastSeen': '2019-09-24T15:10:21.000Z',
+ 'provisioning': {'name': None, 'domain': None, 'ip': None},
+ 'lastUser': 'Administrator',
+ 'isLicensed': True,
+ 'vdi': 'none',
+ 'isolationStatus': 'isolated',
+ 'wsConnected': False,
+ 'capabilities': {
+ 'quarantine': True,
+ 'networkIsolation': True,
+ 'terminateProcess': True,
+ 'fileRetrieval': True,
+ 'liveTerminal': True,
+ 'scriptExecution': False
+ }
+ }
+ endpoint_data = parse_data_from_response(resp_obj, 'get_endpoint_by_id')
+ expected_endpoint_data = {
+ 'ID': 'd3339851f18f470182bf2bf98ad5db4b',
+ 'Name': 'EC2AMAZ-8IEUJEN',
+ 'Domain': 'WORKGROUP',
+ 'Platform': 'windows',
+ 'Status': 'active',
+ 'IP': 'xxx.xx.xx.xxx',
+ 'ComputerSid': 'S-1-5-21-202186053-2642234773-3690463397',
+ 'IsCompromised': False,
+ 'OsVersion': '10.0.14393',
+ 'OsProductType': 'server',
+ 'OsProductName': '',
+ 'Is64': True,
+ 'LastSeen': '2019-09-24T15:10:21.000Z',
+ 'LastUser': 'Administrator'
+ }
+
+ assert endpoint_data == expected_endpoint_data
+
+
+def test_event_quarantine(requests_mock, mocker):
+ mocker.patch.object(demisto, 'params', return_value=integration_params)
+ from PaloAltoNetworks_Traps import event_quarantine
+ event_id = '7dc177a4df1c41b19ca1e67e8573b6be'
+ quarantine_path = f'events/{event_id}/quarantine'
+ mock_resp_json = {'operationId': {'samMessageIds': ['80cf8859df7811e9acbf0245d8e950da']}}
+ requests_mock.post(SERVER_MOCK_URL + quarantine_path, json=mock_resp_json)
+ operations = event_quarantine(event_id)
+ expected_operations = [{
+ 'EventID': '7dc177a4df1c41b19ca1e67e8573b6be',
+ 'Type': 'event-quarantine',
+ 'OperationID': '80cf8859df7811e9acbf0245d8e950da'
+ }]
+ assert expected_operations == operations
+
+
+def test_endpoint_isolate(requests_mock, mocker):
+ mocker.patch.object(demisto, 'params', return_value=integration_params)
+ from PaloAltoNetworks_Traps import endpoint_isolate
+ endpoint_id = 'd3339851f18f470182bf2bf98ad5db4b'
+ isolate_path = f'agents/{endpoint_id}/isolate'
+ mock_resp_json = {'operationId': '458e2003dfb411e9acbf0245d8e950da'}
+ requests_mock.post(SERVER_MOCK_URL + isolate_path, json=mock_resp_json)
+ operation_obj = endpoint_isolate(endpoint_id)
+ expected_operation = {
+ 'OperationID': '458e2003dfb411e9acbf0245d8e950da',
+ 'EndpointID': 'd3339851f18f470182bf2bf98ad5db4b',
+ 'Type': 'endpoint-isolate'
+ }
+ assert operation_obj == expected_operation
diff --git a/Integrations/PaloAltoNetworks_XDR/CHANGELOG.md b/Integrations/PaloAltoNetworks_XDR/CHANGELOG.md
new file mode 100644
index 000000000000..022821fc8508
--- /dev/null
+++ b/Integrations/PaloAltoNetworks_XDR/CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+Returns a meaningful error when no query arguments are given for the !xdr-get-incidents command.
+
+
+## [19.8.0] - 2019-08-06
+Added instructions to the integration instance Detailed Description section on how to generate an API Key and API Key ID, and how to copy the integration URL.
diff --git a/Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR.py b/Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR.py
new file mode 100644
index 000000000000..7e3e5b4579df
--- /dev/null
+++ b/Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR.py
@@ -0,0 +1,469 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import json
+import requests
+from datetime import datetime, timezone
+import secrets
+import string
+import hashlib
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+# Remove trailing slash to prevent wrong URL path to service
+SERVER = demisto.params()['url'][:-1] \
+ if (demisto.params()['url'] and demisto.params()['url'].endswith('/')) else demisto.params()['url']
+
+USE_SSL = not demisto.params().get('insecure', False)
+API_KEY = demisto.params().get('apikey')
+API_KEY_ID = demisto.params().get('apikey_id')
+FETCH_TIME = demisto.params().get('fetch_time', '3 days')
+BASE_URL = SERVER + '/public_api/v1'
+
+TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
+
+# Remove proxy if not set to true in params
+if not demisto.params().get('proxy'):
+    # Use pop() with a default so missing variables do not raise a KeyError
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+NONCE_LENGTH = 64
+API_KEY_LENGTH = 128
+
+
+def convert_epoch_to_milli(ts):
+ if ts is None:
+ return None
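+    # A 10-12 digit timestamp is assumed to be in seconds; convert it to milliseconds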
+ if 9 < len(str(ts)) < 13:
+ ts = int(ts) * 1000
+ return int(ts)
+
+
+def convert_datetime_to_epoch(the_time=0):
+ if the_time is None:
+ return None
+ else:
+ try:
+ if isinstance(the_time, datetime):
+                # datetime.timestamp() is portable, unlike strftime('%s')
+                return int(the_time.timestamp())
+        except Exception as e:
+            demisto.debug(str(e))
+ return 0
+
+
+def convert_datetime_to_epoch_millis(the_time=0):
+ return convert_epoch_to_milli(convert_datetime_to_epoch(the_time=the_time))
+
+
+def generate_current_epoch_utc():
+ return convert_datetime_to_epoch_millis(datetime.now(timezone.utc))
+
+
+def generate_key():
+ return "".join([secrets.choice(string.ascii_letters + string.digits) for _ in range(API_KEY_LENGTH)])
+
+
+def create_auth(api_key):
+ nonce = "".join([secrets.choice(string.ascii_letters + string.digits) for _ in range(NONCE_LENGTH)])
+ timestamp = str(generate_current_epoch_utc()) # Get epoch time utc millis
+ m = hashlib.sha256()
+ m.update((api_key + nonce + timestamp).encode("utf-8"))
+ return nonce, timestamp, m.hexdigest()
+
+
+# nonce, timestamp, auth = create_auth(API_KEY)
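+# The same computation is performed inline below: sha256(api_key + nonce + timestamp)
+# becomes the advanced-API-key hash sent in the Authorization header.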
+nonce = "".join([secrets.choice(string.ascii_letters + string.digits) for _ in range(64)])
+timestamp = str(int(datetime.now(timezone.utc).timestamp()) * 1000)
+auth_key = "%s%s%s" % (API_KEY, nonce, timestamp)
+auth_key = auth_key.encode("utf-8")
+api_key_hash = hashlib.sha256(auth_key).hexdigest()
+
+HEADERS = {
+ "x-xdr-timestamp": timestamp,
+ "x-xdr-nonce": nonce,
+ "x-xdr-auth-id": str(API_KEY_ID),
+ "Authorization": api_key_hash
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, params=None, data=None):
+ demisto.debug(json.dumps(data, indent=4))
+
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ params=params,
+ json=data,
+ headers=HEADERS
+ )
+ # Handle error responses gracefully
+ if res.status_code not in [200]:
+ if 'err_code' in res.text:
+ error = res.json().get('reply')
+ raise ValueError('Error occurred while doing HTTP request.\nURL: {}\nstatus_code: {}\nerr_code: {}'
+ '\nerr_message: {}\n{}'
+ .format(BASE_URL + url_suffix, res.status_code, error.get('err_code'),
+ error.get('err_msg'), error.get('err_extra')))
+
+ raise ValueError('Error in API call to Palo Alto Networks XDR [%d] - %s' % (res.status_code, res.reason))
+
+ try:
+ return res.json()
+ except Exception:
+ raise ValueError("Failed to parse HTTP response to JSON. Original response: \n\n{}".format(res.text))
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+    Performs a basic request to verify connectivity (fetches a single incident)
+ """
+ last_one_day, _ = parse_date_range(FETCH_TIME, TIME_FORMAT)
+ get_incidents(lte_creation_time=last_one_day, limit=1)
+
+
+def get_incidents_command():
+ """
+    Gets details about incidents, filtered by IDs or other criteria
+ """
+ incident_id_list = argToList(demisto.args().get('incident_id_list'))
+
+ lte_modification_time = demisto.args().get('lte_modification_time')
+ gte_modification_time = demisto.args().get('gte_modification_time')
+ since_modification_time = demisto.args().get('since_modification_time')
+
+ if since_modification_time and gte_modification_time:
+        raise ValueError('Can\'t set both since_modification_time and gte_modification_time')
+ elif since_modification_time:
+ gte_modification_time, _ = parse_date_range(since_modification_time, TIME_FORMAT)
+
+ lte_creation_time = demisto.args().get('lte_creation_time')
+ gte_creation_time = demisto.args().get('gte_creation_time')
+ since_creation_time = demisto.args().get('since_creation_time')
+
+ if since_creation_time and gte_creation_time:
+        raise ValueError('Can\'t set both since_creation_time and gte_creation_time')
+ elif since_creation_time:
+ gte_creation_time, _ = parse_date_range(since_creation_time, TIME_FORMAT)
+
+ sort_by_modification_time = demisto.args().get('sort_by_modification_time')
+ sort_by_creation_time = demisto.args().get('sort_by_creation_time')
+
+ page = int(demisto.args().get('page', 0))
+ limit = int(demisto.args().get('limit', 100))
+
+ # If no filters were given, return a meaningful error message
+ if (not lte_modification_time and not gte_modification_time and not since_modification_time
+ and not lte_creation_time and not gte_creation_time and not since_creation_time):
+ return_error("Specify a query for the incidents.\nFor example:"
+ " !xdr-get-incidents since_creation_time=\"1 year\" sort_by_creation_time=\"desc\" limit=10")
+
+ raw_incidents = get_incidents(
+ incident_id_list=incident_id_list,
+ lte_modification_time=lte_modification_time,
+ gte_modification_time=gte_modification_time,
+ lte_creation_time=lte_creation_time,
+ gte_creation_time=gte_creation_time,
+ sort_by_creation_time=sort_by_creation_time,
+ sort_by_modification_time=sort_by_modification_time,
+ page_number=page,
+ limit=limit
+ )
+
+ return_outputs(
+ readable_output=tableToMarkdown('Incidents', raw_incidents),
+ outputs={
+ 'PaloAltoNetworksXDR.Incident(val.incident_id==obj.incident_id)': raw_incidents
+ },
+ raw_response=raw_incidents
+ )
+
+
+def get_incidents(incident_id_list=None, lte_modification_time=None, gte_modification_time=None,
+ lte_creation_time=None, gte_creation_time=None, sort_by_modification_time=None,
+ sort_by_creation_time=None, page_number=0, limit=100, gte_creation_time_milliseconds=0):
+ """
+ Filters and returns incidents
+
+ :param incident_id_list: List of incident ids - must be list
+ :param lte_modification_time: string of time format "2019-12-31T23:59:00"
+ :param gte_modification_time: string of time format "2019-12-31T23:59:00"
+ :param lte_creation_time: string of time format "2019-12-31T23:59:00"
+ :param gte_creation_time: string of time format "2019-12-31T23:59:00"
+ :param sort_by_modification_time: optional - enum (asc,desc)
+ :param sort_by_creation_time: optional - enum (asc,desc)
+ :param page_number: page number
+ :param limit: maximum number of incidents to return per page
+ :param gte_creation_time_milliseconds: greater than time in milliseconds
+ :return:
+ """
+ search_from = page_number * limit
+ search_to = search_from + limit
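+    # Example: page_number=2, limit=100 -> search_from=200, search_to=300 (incidents 200-299)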
+
+ request_data = {
+ 'search_from': search_from,
+ 'search_to': search_to
+ }
+
+ if sort_by_creation_time and sort_by_modification_time:
+        raise ValueError('Provide either sort_by_creation_time or '
+                         'sort_by_modification_time, not both')
+ elif sort_by_creation_time:
+ request_data['sort'] = {
+ 'field': 'creation_time',
+ 'keyword': sort_by_creation_time
+ }
+ elif sort_by_modification_time:
+ request_data['sort'] = {
+ 'field': 'modification_time',
+ 'keyword': sort_by_modification_time
+ }
+
+ filters = []
+ if incident_id_list is not None and len(incident_id_list) > 0:
+ filters.append({
+ 'field': 'incident_id_list',
+ 'operator': 'in',
+ 'value': incident_id_list
+ })
+
+ if lte_creation_time:
+ filters.append({
+ 'field': 'creation_time',
+ 'operator': 'lte',
+ 'value': date_to_timestamp(lte_creation_time, TIME_FORMAT)
+ })
+
+ if gte_creation_time:
+ filters.append({
+ 'field': 'creation_time',
+ 'operator': 'gte',
+ 'value': date_to_timestamp(gte_creation_time, TIME_FORMAT)
+ })
+
+ if lte_modification_time:
+ filters.append({
+ 'field': 'modification_time',
+ 'operator': 'lte',
+ 'value': date_to_timestamp(lte_modification_time, TIME_FORMAT)
+ })
+
+ if gte_modification_time:
+ filters.append({
+ 'field': 'modification_time',
+ 'operator': 'gte',
+ 'value': date_to_timestamp(gte_modification_time, TIME_FORMAT)
+ })
+
+ if gte_creation_time_milliseconds > 0:
+ filters.append({
+ 'field': 'creation_time',
+ 'operator': 'gte',
+ 'value': gte_creation_time_milliseconds
+ })
+
+ if len(filters) > 0:
+ request_data['filters'] = filters
+
+ res = http_request('POST', '/incidents/get_incidents/', data={'request_data': request_data})
+ incidents = res.get('reply').get('incidents', [])
+
+ return incidents
+
+
+def get_incident_extra_data_command():
+ incident_id = demisto.args().get('incident_id')
+ alerts_limit = int(demisto.args().get('alerts_limit', 1000))
+
+ raw_incident = get_incident_extra_data(incident_id, alerts_limit)
+
+ incident = raw_incident.get('incident')
+ incident_id = incident.get('incident_id')
+ alerts = raw_incident.get('alerts').get('data')
+ file_artifacts = raw_incident.get('file_artifacts').get('data')
+ network_artifacts = raw_incident.get('network_artifacts').get('data')
+
+ readable_output = [tableToMarkdown('Incident {}'.format(incident_id), incident)]
+
+ if len(alerts) > 0:
+ readable_output.append(tableToMarkdown('Alerts', alerts))
+ else:
+ readable_output.append(tableToMarkdown('Alerts', []))
+
+ if len(network_artifacts) > 0:
+ readable_output.append(tableToMarkdown('Network Artifacts', network_artifacts))
+ else:
+ readable_output.append(tableToMarkdown('Network Artifacts', []))
+
+ if len(file_artifacts) > 0:
+ readable_output.append(tableToMarkdown('File Artifacts', file_artifacts))
+ else:
+ readable_output.append(tableToMarkdown('File Artifacts', []))
+
+ incident.update({
+ 'alerts': alerts,
+ 'file_artifacts': file_artifacts,
+ 'network_artifacts': network_artifacts
+ })
+ return_outputs(
+ readable_output='\n'.join(readable_output),
+ outputs={
+ 'PaloAltoNetworksXDR.Incident(val.incident_id==obj.incident_id)': incident
+ },
+ raw_response=raw_incident
+ )
+
+
+def get_incident_extra_data(incident_id, alerts_limit=1000):
+ """
+ Returns incident by id
+
+ :param incident_id: The id of incident
+ :param alerts_limit: Maximum number alerts to get
+ :return:
+ """
+ request_data = {
+ 'incident_id': incident_id,
+ 'alerts_limit': alerts_limit
+ }
+
+ reply = http_request('POST', '/incidents/get_incident_extra_data/', data={'request_data': request_data})
+ incident = reply.get('reply')
+
+ return incident
+
+
+def update_incident_command():
+ incident_id = demisto.args().get('incident_id')
+ assigned_user_mail = demisto.args().get('assigned_user_mail')
+ assigned_user_pretty_name = demisto.args().get('assigned_user_pretty_name')
+ status = demisto.args().get('status')
+ severity = demisto.args().get('manual_severity')
+ unassign_user = demisto.args().get('unassign_user') == 'true'
+ resolve_comment = demisto.args().get('resolve_comment')
+
+ update_incident(
+ incident_id=incident_id,
+ assigned_user_mail=assigned_user_mail,
+ assigned_user_pretty_name=assigned_user_pretty_name,
+ unassign_user=unassign_user,
+ status=status,
+ severity=severity,
+ resolve_comment=resolve_comment
+ )
+
+ return_outputs('Incident {} has been updated'.format(incident_id), outputs=None)
+
+
+def update_incident(incident_id, assigned_user_mail, assigned_user_pretty_name, status, severity, resolve_comment,
+ unassign_user):
+ update_data = {}
+
+ if unassign_user and (assigned_user_mail or assigned_user_pretty_name):
+ raise ValueError("Can't provide both assignee_email/assignee_name and unassign_user")
+ elif unassign_user:
+ update_data['assigned_user_mail'] = 'none'
+
+ if assigned_user_mail:
+ update_data['assigned_user_mail'] = assigned_user_mail
+
+ if assigned_user_pretty_name:
+ update_data['assigned_user_pretty_name'] = assigned_user_pretty_name
+
+ if status:
+ update_data['status'] = status
+
+ if severity:
+ update_data['manual_severity'] = severity
+
+ if resolve_comment:
+ update_data['resolve_comment'] = resolve_comment
+
+ request_data = {
+ 'incident_id': incident_id,
+ 'update_data': update_data
+ }
+ demisto.info(json.dumps(request_data, indent=4))
+ http_request('POST', '/incidents/update_incident/', data={'request_data': request_data})
+
+
+def fetch_incidents():
+ last_run = demisto.getLastRun()
+ # Get the last fetch time, if exists
+ last_fetch = last_run.get('time')
+
+ # Handle first time fetch, fetch incidents retroactively
+ if last_fetch is None:
+ last_fetch, _ = parse_date_range(FETCH_TIME, to_timestamp=True)
+
+ incidents = []
+ raw_incidents = get_incidents(gte_creation_time_milliseconds=last_fetch,
+ limit=50, sort_by_creation_time='asc')
+
+ for raw_incident in raw_incidents:
+ incident_id = raw_incident.get('incident_id')
+ description = raw_incident.get('description')
+ occurred = timestamp_to_datestring(raw_incident['creation_time'], TIME_FORMAT + 'Z')
+ incident = {
+ 'name': '#{} - {}'.format(incident_id, description),
+ 'occurred': occurred,
+ 'rawJSON': json.dumps(raw_incident)
+ }
+
+        # Advance last_fetch if this incident is newer than the current last fetch time
+ if raw_incident['creation_time'] > last_fetch:
+ last_fetch = raw_incident['creation_time']
+
+ incidents.append(incident)
+
+ demisto.setLastRun({'time': last_fetch + 1})
+ demisto.incidents(incidents)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+
+def main():
+ LOG('Command being called is %s' % (demisto.command()))
+
+ try:
+ if demisto.command() == 'test-module':
+ test_module()
+ demisto.results('ok')
+
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+
+ elif demisto.command() == 'xdr-get-incidents':
+ get_incidents_command()
+
+ elif demisto.command() == 'xdr-get-incident-extra-data':
+ get_incident_extra_data_command()
+
+ elif demisto.command() == 'xdr-update-incident':
+ update_incident_command()
+
+ # Log exceptions
+ except Exception as e:
+ if demisto.command() == 'fetch-incidents':
+ LOG(str(e))
+ raise
+ else:
+ return_error(e)
+
+
+if __name__ in ['__main__', '__builtin__', 'builtins']:
+ main()
diff --git a/Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR.yml b/Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR.yml
new file mode 100644
index 000000000000..fd5f00713663
--- /dev/null
+++ b/Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR.yml
@@ -0,0 +1,420 @@
+category: Endpoint
+fromversion: 4.1.0
+commonfields:
+ id: Cortex XDR - IR
+ version: -1
+configuration:
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- display: Server URL (copy the URL from XDR; press ? for more info)
+ name: url
+ required: true
+ type: 0
+- display: API Key ID
+ name: apikey_id
+ required: true
+ type: 4
+- display: API Key
+ name: apikey
+ required: true
+ type: 4
+- defaultvalue: 'true'
+ display: Trust any certificate (insecure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+- defaultvalue: 3 days
+  display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days)
+ name: fetch_time
+ required: false
+ type: 0
+description: Cortex XDR is the world's first detection and response app that natively integrates network, endpoint and cloud data to stop sophisticated attacks.
+display: Palo Alto Networks Cortex XDR - Investigation and Response
+name: Cortex XDR - IR
+script:
+ commands:
+ - arguments:
+ - default: false
+    description: Filters returned incidents that were created on or before the specified date/time, in the format 2019-12-31T23:59:00.
+ isArray: false
+ name: lte_creation_time
+ required: false
+ secret: false
+ - default: false
+    description: Filters returned incidents that were created on or after the specified date/time, in the format 2019-12-31T23:59:00.
+ isArray: false
+ name: gte_creation_time
+ required: false
+ secret: false
+ - default: false
+    description: Filters returned incidents that were modified on or before the specified date/time, in the format 2019-12-31T23:59:00.
+ isArray: false
+ name: lte_modification_time
+ required: false
+ secret: false
+ - default: false
+ description: Filters returned incidents that were modified on or after the specified date/time, in the format 2019-12-31T23:59:00.
+ isArray: false
+ name: gte_modification_time
+ required: false
+ secret: false
+ - default: false
+ description: An array or CSV string of incident IDs.
+ isArray: true
+ name: incident_id_list
+ required: false
+ secret: false
+ - default: false
+ isArray: false
+ name: since_creation_time
+ required: false
+ secret: false
+    description: Filters returned incidents to those created within the specified relative time range, for example, 1 month, 2 days, 1 hour, and so on.
+ - default: false
+ isArray: false
+ name: since_modification_time
+ required: false
+ secret: false
+    description: Filters returned incidents to those modified within the specified relative time range, for example, 1 month, 2 days, 1 hour, and so on.
+ - auto: PREDEFINED
+ default: false
+ isArray: false
+ name: sort_by_modification_time
+ predefined:
+ - asc
+ - desc
+ required: false
+ secret: false
+ description: Sorts returned incidents by the date/time that the incident was last modified ("asc" - ascending, "desc" - descending).
+ - auto: PREDEFINED
+ default: false
+ isArray: false
+ name: sort_by_creation_time
+ predefined:
+ - asc
+ - desc
+ required: false
+ secret: false
+ description: Sorts returned incidents by the date/time that the incident was created ("asc" - ascending, "desc" - descending).
+ - default: false
+ defaultValue: '0'
+ description: Page number (for pagination). The default is 0 (the first page).
+ isArray: false
+ name: page
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: Maximum number of incidents to return per page. The default and maximum is 100.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: |-
+ Returns a list of incidents, which you can filter by a list of incident IDs (max 100), the time the incident was last modified, and the time the incident was created.
+    If you pass multiple filtering arguments, they will be concatenated using the AND condition. The OR condition is not supported.
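+    For example: !xdr-get-incidents since_creation_time="1 year" sort_by_creation_time="desc" limit=10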
+ execution: false
+ name: xdr-get-incidents
+ outputs:
+ - contextPath: PaloAltoNetworksXDR.Incident.incident_id
+ description: Unique ID assigned to each returned incident.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.manual_severity
+ description: Incident severity assigned by the user. This does not affect the calculated severity (LOW, MEDIUM, HIGH).
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.manual_description
+ description: Incident description provided by the user.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.assigned_user_mail
+ description: Email address of the assigned user.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.high_severity_alert_count
+ description: Number of alerts with the severity HIGH.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.host_count
+ description: Number of hosts involved in the incident.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.xdr_url
+ description: A link to the incident view on XDR.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.assigned_user_pretty_name
+ description: Full name of the user assigned to the incident.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.alert_count
+ description: Total number of alerts in the incident.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.med_severity_alert_count
+ description: Number of alerts with the severity MEDIUM.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.user_count
+ description: Number of users involved in the incident.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.severity
+ description: Calculated severity of the incident (LOW, MEDIUM, HIGH).
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.low_severity_alert_count
+ description: Number of alerts with the severity LOW.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.status
+ description: Current status of the incident (NEW, UNDER_INVESTIGATION, RESOLVED_THREAT_HANDLED,
+ RESOLVED_KNOWN_ISSUE, RESOLVED_DUPLICATE, RESOLVED_FALSE_POSITIVE, RESOLVED_OTHER).
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.description
+ description: Dynamic calculated description of the incident.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.resolve_comment
+ description: Comments entered by the user when the incident was resolved.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.notes
+ description: Comments entered by the user regarding the incident.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.creation_time
+ description: Date and time the incident was created on XDR.
+ type: date
+ - contextPath: PaloAltoNetworksXDR.Incident.detection_time
+ description: Date and time that the first alert occurred in the incident.
+ type: date
+ - contextPath: PaloAltoNetworksXDR.Incident.modification_time
+ description: Date and time that the incident was last modified.
+ type: date
+ - arguments:
+ - default: false
+ description: The ID of the incident for which to get additional data.
+ isArray: false
+ name: incident_id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '1000'
+ description: Maximum number of alerts to return. Default is 1,000.
+ isArray: false
+ name: alerts_limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns additional data for the specified incident, for example, related alerts,
+ file artifacts, network artifacts, and so on.
+ execution: false
+ name: xdr-get-incident-extra-data
+ outputs:
+ - contextPath: PaloAltoNetworksXDR.Incident.incident_id
+ description: Unique ID assigned to each returned incident.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.manual_severity
+ description: Incident severity assigned by the user. This does not affect the calculated severity (LOW, MEDIUM, HIGH).
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.manual_description
+ description: Incident description provided by the user.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.assigned_user_mail
+ description: Email address of the assigned user.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.high_severity_alert_count
+ description: Number of alerts with the severity HIGH.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.host_count
+ description: Number of hosts involved in the incident.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.xdr_url
+ description: A link to the incident view on XDR.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.assigned_user_pretty_name
+ description: Full name of the user assigned to the incident.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.alert_count
+ description: Total number of alerts in the incident.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.med_severity_alert_count
+ description: Number of alerts with the severity MEDIUM.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.user_count
+ description: Number of users involved in the incident.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.severity
+ description: Calculated severity of the incident (LOW, MEDIUM, HIGH).
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.low_severity_alert_count
+ description: Number of alerts with the severity LOW.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.status
+ description: Current status of the incident (NEW, UNDER_INVESTIGATION, RESOLVED_THREAT_HANDLED,
+ RESOLVED_KNOWN_ISSUE, RESOLVED_DUPLICATE, RESOLVED_FALSE_POSITIVE, RESOLVED_OTHER).
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.description
+ description: Dynamic calculated description of the incident.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.resolve_comment
+ description: Comments entered by the user when the incident was resolved.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.notes
+ description: Comments entered by the user regarding the incident.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.creation_time
+ description: Date and time the incident was created on XDR.
+ type: date
+ - contextPath: PaloAltoNetworksXDR.Incident.detection_time
+ description: Date and time that the first alert occurred in the incident.
+ type: date
+ - contextPath: PaloAltoNetworksXDR.Incident.modification_time
+ description: Date and time that the incident was last modified.
+ type: date
+ - contextPath: PaloAltoNetworksXDR.Incident.alerts.category
+ description: Category of the alert, for example, Spyware Detected via Anti-Spyware profile.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.alerts.action_pretty
+ description: The action that triggered the alert.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.alerts.description
+ description: Textual description of the alert.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.alerts.severity
+ description: Severity of the alert.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.alerts.host_ip
+ description: Host IP involved in the alert.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.alerts.source
+ description: Source of the alert.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.alerts.user_name
+ description: User name involved with the alert.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.alerts.alert_id
+ description: Unique ID for each alert.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.alerts.host_name
+ description: Host name involved in the alert.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.alerts.detection_timestamp
+ description: Date and time that the alert occurred.
+ type: date
+ - contextPath: PaloAltoNetworksXDR.Incident.alerts.name
+ description: Calculated name of the alert.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.network_artifacts.network_remote_port
+ description: The remote port related to the artifact.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.network_artifacts.alert_count
+ description: Number of alerts related to the artifact.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.network_artifacts.network_remote_ip
+ description: The remote IP related to the artifact.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.network_artifacts.is_manual
+ description: Whether the artifact was created by the user (manually).
+ type: boolean
+ - contextPath: PaloAltoNetworksXDR.Incident.network_artifacts.network_domain
+ description: The domain related to the artifact.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.network_artifacts.type
+ description: The artifact type, for example, IP.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.file_artifacts.file_signature_status
+ description: Digital signature status of the file.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.file_artifacts.is_process
+ description: Whether the file artifact is related to a process execution.
+ type: boolean
+ - contextPath: PaloAltoNetworksXDR.Incident.file_artifacts.file_name
+ description: Name of the file.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.file_artifacts.file_wildfire_verdict
+ description: The file verdict, calculated by WildFire.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.file_artifacts.alert_count
+ description: Number of alerts related to the artifact.
+ type: number
+ - contextPath: PaloAltoNetworksXDR.Incident.file_artifacts.is_malicious
+ description: Whether the artifact is malicious, as determined by the WildFire verdict.
+ type: boolean
+ - contextPath: PaloAltoNetworksXDR.Incident.file_artifacts.is_manual
+ description: Whether the artifact was created by the user (manually).
+ type: boolean
+ - contextPath: PaloAltoNetworksXDR.Incident.file_artifacts.type
+ description: The artifact type, for example, hash.
+ type: String
+ - contextPath: PaloAltoNetworksXDR.Incident.file_artifacts.file_sha256
+ description: SHA-256 hash of the file.
+ type: String
+ - arguments:
+ - default: false
+ description: XDR incident ID. You can get the incident ID from the output of the 'xdr-get-incidents' command or the 'xdr-get-incident-extra-data' command.
+ isArray: false
+ name: incident_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Severity to assign to the incident (LOW, MEDIUM, or HIGH).
+ isArray: false
+ name: manual_severity
+ predefined:
+ - HIGH
+ - MEDIUM
+ - LOW
+ required: false
+ secret: false
+ - default: false
+ description: Email address of the user to assign to the incident.
+ isArray: false
+ name: assigned_user_mail
+ required: false
+ secret: false
+ - default: false
+ description: Full name of the user assigned to the incident.
+ isArray: false
+ name: assigned_user_pretty_name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Status of the incident (NEW, UNDER_INVESTIGATION, RESOLVED_THREAT_HANDLED, RESOLVED_KNOWN_ISSUE, RESOLVED_DUPLICATE, RESOLVED_FALSE_POSITIVE, RESOLVED_OTHER).
+ isArray: false
+ name: status
+ predefined:
+ - NEW
+ - UNDER_INVESTIGATION
+ - RESOLVED_THREAT_HANDLED
+ - RESOLVED_KNOWN_ISSUE
+ - RESOLVED_DUPLICATE
+ - RESOLVED_FALSE_POSITIVE
+ - RESOLVED_OTHER
+ required: false
+ secret: false
+ - default: false
+ description: Comment explaining why the incident was resolved. This should be set when the incident is resolved.
+ isArray: false
+ name: resolve_comment
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: If true, will remove all assigned users from the incident.
+ isArray: false
+ name: unassign_user
+ predefined:
+ - 'true'
+ required: false
+ secret: false
+ deprecated: false
+ description: |-
+ Updates one or more fields of the specified incident. Missing fields are ignored. To remove an incident's assignment, pass a null value in the assignee email argument.
+ execution: false
+ name: xdr-update-incident
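+ # Illustrative usage (argument values below are examples only):
+ # !xdr-update-incident incident_id="1" status="RESOLVED_THREAT_HANDLED" resolve_comment="Handled by SOC"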
+ dockerimage: demisto/python3:3.7.4.977
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
diff --git a/Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR_description.md b/Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR_description.md
new file mode 100644
index 000000000000..b8ce94e2ccc4
--- /dev/null
+++ b/Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR_description.md
@@ -0,0 +1,13 @@
+## Palo Alto Networks Cortex XDR - IR
+Cortex XDR is the world's first detection and response app that natively integrates network, endpoint and cloud data to stop sophisticated attacks.
+
+### Generate an API Key and API Key ID
+1. In your Cortex XDR platform, go to **Settings**.
+2. Click the **+New Key** button in the top right corner.
+3. Generate a key of type **Advanced**.
+4. Copy the generated key.
+5. From the ID column, copy the Key ID.
+
+### URL
+1. In your Cortex XDR platform, go to **Settings**.
+2. Click the **Copy URL** button in the top right corner.
diff --git a/Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR_image.png b/Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR_image.png
new file mode 100644
index 000000000000..249fc6f403d6
Binary files /dev/null and b/Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR_image.png differ
diff --git a/Integrations/PaloAlto_MineMeld/CHANGELOG.md b/Integrations/PaloAlto_MineMeld/CHANGELOG.md
new file mode 100644
index 000000000000..c519459791e5
--- /dev/null
+++ b/Integrations/PaloAlto_MineMeld/CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+ - Improved error handling for API errors.
+ - Changed the name of the proxy parameter from *Use system proxy* to *Use system proxy settings*.
+
+## [19.9.0] - 2019-09-04
+Added support for non-root URL structures.
diff --git a/Integrations/PaloAlto_MineMeld/PaloAlto_MineMeld.py b/Integrations/PaloAlto_MineMeld/PaloAlto_MineMeld.py
new file mode 100644
index 000000000000..b4f825af851a
--- /dev/null
+++ b/Integrations/PaloAlto_MineMeld/PaloAlto_MineMeld.py
@@ -0,0 +1,724 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import urllib2
+import json
+import base64
+import ssl
+import os.path
+import os
+import time
+import re
+
+# globals and constants
+IPV4_CLASS = 'minemeld.ft.local.YamlIPv4FT'
+IPV6_CLASS = 'minemeld.ft.local.YamlIPv6FT'
+URL_CLASS = 'minemeld.ft.local.YamlURLFT'
+DOMAIN_CLASS = 'minemeld.ft.local.YamlDomainFT'
+LOCALDB_CLASS = 'minemeld.ft.localdb.Miner'
+SUPPORTED_MINER_CLASSES = [IPV4_CLASS, IPV6_CLASS, URL_CLASS, DOMAIN_CLASS, LOCALDB_CLASS]
+SERVER_URL = demisto.params()['url']
+USERNAME = demisto.params()['credentials']['identifier']
+PASSWORD = demisto.params()['credentials']['password']
+USE_PROXY = demisto.params()['proxy']
+WHITELISTS = [] # type: list
+BLACKLISTS = [] # type: list
+WHITELISTS = argToList(demisto.params().get('whitelist'))
+BLACKLISTS = argToList(demisto.params().get('blacklist'))
+
+if not USE_PROXY:
+ os.environ['NO_PROXY'] = SERVER_URL
+if not isinstance(WHITELISTS, list) or not isinstance(BLACKLISTS, list):
+ return_error(
+ 'Either the blacklist or whitelist parameter is misconfigured - expecting a comma-separated list, e.g. miner_a,miner_b,miner_c')
+
+
+# API class
+class APIClient(object):
+
+ def __init__(self, url, username, password, capath):
+ self.url = url
+ self.username = username
+ self.password = password
+
+ self.cafile = None
+ self.capath = None
+ self.context = None
+ self.data_file_type = None
+
+ if capath is None:
+ self.context = ssl.create_default_context()
+ self.context.check_hostname = False
+ self.context.verify_mode = ssl.CERT_NONE
+ else:
+ if os.path.isfile(capath):
+ self.cafile = capath
+ elif os.path.isdir(capath):
+ self.capath = capath
+ else:
+ return_error('CA path should be a file or a directory: {}'.format(capath))
+
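+ # Thin urllib2 wrapper: joins the base URL with the endpoint, attaches an HTTP
+ # Basic Authorization header, and returns the raw response body. On HTTP 400
+ # it returns an empty result set instead of failing the command.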
+ def _call_api(self, uri, data=None, headers=None, method=None):
+ if headers is None:
+ headers = {}
+
+ api_url = ''.join([self.url, uri])
+ api_request = urllib2.Request(api_url, headers=headers)
+ basic_authorization = base64.b64encode('{}:{}'.format(self.username, self.password))
+ api_request.add_header(
+ 'Authorization',
+ 'Basic {}'.format(basic_authorization)
+ )
+
+ if method is not None:
+ api_request.get_method = lambda: method # type: ignore
+ try:
+ result = urllib2.urlopen(
+ api_request,
+ data=data,
+ timeout=30,
+ capath=self.capath,
+ cafile=self.cafile,
+ context=self.context
+ )
+ content = result.read()
+ result.close()
+
+ except urllib2.HTTPError, e:
+ demisto.debug(e.reason)
+ if e.code != 400:
+ return_error('{0}: {1} \nCheck your MineMeld instance.'.format(e.reason, e.code))
+ content = '{ "result":[] }'
+
+ return content
+
+ def get_all_nodes(self):
+ content = self._call_api('/status/minemeld')
+ minemeld_status = json.loads(content)['result']
+
+ return minemeld_status
+
+ def validate_miner(self, miner):
+ content = self._call_api('/status/minemeld')
+ minemeld_status = json.loads(content)['result']
+
+ for node in minemeld_status:
+ if node['name'] == miner:
+ if not node['class'] in SUPPORTED_MINER_CLASSES:
+ return_error('Unsupported miner class of type: {}'.format(node['class']))
+ self.data_file_type = 'localdb' if node['class'] == LOCALDB_CLASS else 'yaml'
+ return True
+
+ return_error('Miner {} was not found in miners list'.format(miner))
+ return False
+
+ def retrieve_miner(self, miner):
+ content = self._call_api('/config/data/{}_indicators?t={}'.format(miner, self.data_file_type))
+ return json.loads(content)['result']
+
+ def upload(self, miner, data):
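+ # localdb miners are updated with an append call (POST); YAML miners are
+ # replaced wholesale with the full indicator list (PUT)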
+ if self.data_file_type == 'localdb':
+ ts = time.time()
+ self._call_api(
+ '/config/data/{}_indicators/append?_{}&h={}&t={}'.format(miner, ts, miner, self.data_file_type),
+ data=data,
+ headers={'Content-Type': 'application/json'},
+ method='POST'
+ )
+ return
+
+ self._call_api(
+ '/config/data/{}_indicators?h={}'.format(miner, miner),
+ data=data,
+ headers={'Content-Type': 'application/json'},
+ method='PUT'
+ )
+
+
+# system funcs
+def get_miner_list(MineMeldClient, miner, type_=False):
+ MineMeldClient.validate_miner(miner)
+ miner_list = MineMeldClient.retrieve_miner(miner)
+ return miner_list
+
+
+def add_indicator_to_miner(MineMeldClient, miner, indicators, type_, comment=''):
+ miner_list = get_miner_list(MineMeldClient, miner)
+ updated_miner_list = {
+ e['indicator']: json.dumps(e, sort_keys=True) for e in miner_list
+ }
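+ # keying by indicator value de-duplicates entries: re-adding an existing
+ # indicator overwrites its previous entry instead of creating a duplicate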
+ request_params = {} # type: dict
+
+ if not isinstance(indicators, list):
+ indicators = indicators.split(',')
+
+ if type_ is False:
+ type_ = ''
+
+ for indicator in indicators:
+ if MineMeldClient.data_file_type == 'localdb':
+ request_params = {
+ 'indicator': indicator,
+ 'comment': comment,
+ 'type': type_,
+ 'ttl': 'disabled'
+ }
+ else:
+ request_params = {
+ 'indicator': indicator,
+ 'comment': comment
+ }
+ updated_miner_list[indicator] = json.dumps(request_params)
+
+ MineMeldClient.upload(miner, '[{}]'.format(','.join(updated_miner_list.values())))
+
+
+def remove_indicator_from_miner(MineMeldClient, miner, indicators):
+ miner_list = get_miner_list(MineMeldClient, miner)
+ updated_miner_list = {
+ e['indicator']: json.dumps(e, sort_keys=True) for e in miner_list
+ }
+ request_params = {} # type: dict
+
+ if not isinstance(indicators, list):
+ indicators = indicators.split(',')
+
+ if MineMeldClient.data_file_type == 'localdb':
+ # check that all indicators to remove are on localdb miner
+ miner_list_indicators = [o['indicator'] for o in miner_list]
+ contain_all_indicators = all(elem in miner_list_indicators for elem in indicators)
+ if not contain_all_indicators:
+ return_error('Did not find all indicators on miner {}'.format(miner))
+
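+ # localdb removal works by re-appending each indicator with ttl -1,
+ # which the miner treats as expired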
+ for indicator in indicators:
+ request_params = {
+ 'indicator': indicator,
+ 'type': json.loads(updated_miner_list[indicator])['type'],
+ 'ttl': -1
+ }
+ updated_miner_list[indicator] = json.dumps(request_params)
+ else:
+ # remove each indicator from the miner; if nothing was removed, the indicator was not on the miner
+ for indicator in indicators:
+ indicator_from_list = updated_miner_list.pop(indicator, None)
+ if not indicator_from_list:
+ return_error('Did not find indicator {} on miner {}'.format(indicator, miner))
+
+ MineMeldClient.upload(miner, '[{}]'.format(','.join(updated_miner_list.values())))
+
+
+def get_indicators_from_miner(miner_name, indicator_value=False):
+ result_indicator = []
+ miner_list = get_miner_list(MineMeldClient, miner_name)
+ for indicator in miner_list:
+ if indicator['indicator'] == indicator_value or indicator_value is False:
+ indicator['miner'] = miner_name
+ result_indicator.append(indicator)
+
+ return result_indicator
+
+
+def get_indicator_type(indicator):
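+ # best-effort classification: try URL, then IPv4, then IPv6 regexes, and
+ # finally fall back to hash-type detection; returns '' when nothing matches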
+ indicator_type = ''
+
+ if not indicator_type:
+ url = re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+',
+ indicator) # guardrails-disable-line
+ if url:
+ indicator_type = 'URL'
+
+ if not indicator_type:
+ ipv4 = re.findall(
+ '^(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$',
+ indicator)
+ if ipv4 and not indicator_type:
+ indicator_type = 'IPv4'
+
+ if not indicator_type:
+ ipv6 = re.findall(
+ '^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4]'
+ '[0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]'
+ '{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|'
+ '25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]'
+ '{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.)'
+ '{3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::'
+ '(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]'
+ '|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}'
+ '[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]'
+ '|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|'
+ '(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|'
+ '(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]'
+ '|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|'
+ '(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|'
+ '25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|'
+ '(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)$',
+ indicator)
+ if ipv6:
+ indicator_type = 'IPv6'
+
+ if not indicator_type:
+ file = get_hash_type(indicator)
+ if file != 'Unknown':
+ indicator_type = file
+
+ return indicator_type
+
+
+# commands
+def domain():
+ domain = demisto.args()['domain']
+ # output vars
+ result_indicator = [] # type: list
+ miner_name = ''
+ dbotscore = 0
+ indicator_context_data = {} # type: dict
+ entry_context = {} # type: dict
+
+ # search for indicator in all miners defined by user
+ for blacklist in BLACKLISTS:
+ result_indicator = get_indicators_from_miner(blacklist, domain)
+ if result_indicator:
+ dbotscore = 3
+ break
+ if dbotscore != 3:
+ for whitelist in WHITELISTS:
+ result_indicator = get_indicators_from_miner(whitelist, domain)
+ if result_indicator:
+ dbotscore = 1
+ break
+
+ # start building output and context
+ dbotscore_list = {
+ 'Indicator': domain,
+ 'Type': 'domain',
+ 'Vendor': 'Palo Alto MineMeld',
+ 'Score': dbotscore
+ }
+
+ if result_indicator:
+ miner_name = result_indicator[0]['miner']
+ # add only malicious to context
+ if dbotscore == 3:
+ indicator_context_data = {
+ 'MineMeld': {
+ 'Indicators': result_indicator
+ },
+ 'Malicious': {
+ 'Vendor': 'Palo Alto MineMeld',
+ 'Description': 'Indicator was found in MineMeld\'s blacklist: {}'.format(miner_name)
+ },
+ 'Name': domain
+ }
+ else:
+ indicator_context_data = {
+ 'MineMeld': {
+ 'Miner': {'name': miner_name},
+ 'Indicators': result_indicator
+ },
+ 'Name': domain
+ }
+
+ entry_context = {
+ 'DBotScore': dbotscore_list,
+ outputPaths['domain']: indicator_context_data,
+ 'MineMeld.Indicators(val.indicator == obj.indicator)': result_indicator,
+ 'MineMeld.Miner(val.name == obj.name)': {'name': miner_name},
+ }
+ result_text = 'MineMeld Domain found at miner: {}'.format(miner_name)
+ else:
+ result_text = 'MineMeld Domain severity - unknown'
+ entry_context = {
+ 'DBotScore': dbotscore_list,
+ 'MineMeld.Indicators(val.indicator == obj.indicator)': result_indicator,
+ 'MineMeld.Miner(val.name == obj.name)': {'name': miner_name},
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': result_indicator,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(result_text, result_indicator, ['indicator', 'type', 'comment']),
+ 'EntryContext': entry_context
+ })
+
+
+def url():
+ url = demisto.args()['url']
+ # output vars
+ result_indicator = [] # type: list
+ miner_name = ''
+ dbotscore = 0
+ indicator_context_data = {} # type: dict
+ entry_context = {} # type: dict
+
+ # search for indicator in all miners defined by user
+ for blacklist in BLACKLISTS:
+ result_indicator = get_indicators_from_miner(blacklist, url)
+ if result_indicator:
+ dbotscore = 3
+ break
+ if dbotscore != 3:
+ for whitelist in WHITELISTS:
+ result_indicator = get_indicators_from_miner(whitelist, url)
+ if result_indicator:
+ dbotscore = 1
+ break
+
+ # start building output and context
+ dbotscore_list = {
+ 'Indicator': url,
+ 'Type': 'url',
+ 'Vendor': 'Palo Alto MineMeld',
+ 'Score': dbotscore
+ }
+
+ if result_indicator:
+ miner_name = result_indicator[0]['miner']
+ # add only malicious to context
+ if dbotscore == 3:
+ indicator_context_data = {
+ 'MineMeld': {
+ 'Indicators': result_indicator
+ },
+ 'Malicious': {
+ 'Vendor': 'Palo Alto MineMeld',
+ 'Description': 'Indicator was found in MineMeld\'s blacklist: {}'.format(miner_name)
+ },
+ 'Data': url
+ }
+ else:
+ indicator_context_data = {
+ 'MineMeld': {
+ 'Miner': {'name': miner_name},
+ 'Indicators': result_indicator
+ },
+ 'Data': url
+ }
+
+ entry_context = {
+ 'DBotScore': dbotscore_list,
+ outputPaths['url']: indicator_context_data,
+ 'MineMeld.Indicators(val.indicator == obj.indicator)': result_indicator,
+ 'MineMeld.Miner(val.name == obj.name)': {'name': miner_name},
+ }
+ result_text = 'MineMeld URL found at miner: {}'.format(miner_name)
+ else:
+ result_text = 'MineMeld URL severity - unknown'
+ entry_context = {
+ 'DBotScore': dbotscore_list,
+ 'MineMeld.Indicators(val.indicator == obj.indicator)': result_indicator,
+ 'MineMeld.Miner(val.name == obj.name)': {'name': miner_name},
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': result_indicator,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(result_text, result_indicator, ['indicator', 'type', 'comment']),
+ 'EntryContext': entry_context
+ })
+
+
+def file():
+ file = demisto.args()['file']
+ # output vars
+ result_indicator = [] # type: list
+ miner_name = ''
+ dbotscore = 0
+ indicator_context_data = {} # type: dict
+ entry_context = {} # type: dict
+
+ # search for indicator in all miners defined by user
+ for blacklist in BLACKLISTS:
+ result_indicator = get_indicators_from_miner(blacklist, file)
+ if result_indicator:
+ dbotscore = 3
+ break
+ if dbotscore != 3:
+ for whitelist in WHITELISTS:
+ result_indicator = get_indicators_from_miner(whitelist, file)
+ if result_indicator:
+ dbotscore = 1
+ break
+
+ # start building output and context
+ dbotscore_list = {
+ 'Indicator': file,
+ 'Type': 'hash',
+ 'Vendor': 'Palo Alto MineMeld',
+ 'Score': dbotscore
+ }
+
+ if result_indicator:
+ miner_name = result_indicator[0]['miner']
+ # add only malicious to context
+ if dbotscore == 3:
+ indicator_context_data = {
+ 'MineMeld': {
+ 'Indicators': result_indicator
+ },
+ 'Malicious': {
+ 'Vendor': 'Palo Alto MineMeld',
+ 'Description': 'Indicator was found in MineMeld\'s blacklist: {}'.format(miner_name)
+ },
+ get_hash_type(file): file
+ }
+ else:
+ indicator_context_data = {
+ 'MineMeld': {
+ 'Miner': {'name': miner_name},
+ 'Indicators': result_indicator
+ },
+ get_hash_type(file): file
+ }
+
+ entry_context = {
+ 'DBotScore': dbotscore_list,
+ outputPaths['file']: indicator_context_data,
+ 'MineMeld.Indicators(val.indicator == obj.indicator)': result_indicator,
+ 'MineMeld.Miner(val.name == obj.name)': {'name': miner_name},
+ }
+ result_text = 'MineMeld File found at miner: {}'.format(miner_name)
+ else:
+ result_text = 'MineMeld File severity - unknown'
+ entry_context = {
+ 'DBotScore': dbotscore_list,
+ 'MineMeld.Indicators(val.indicator == obj.indicator)': result_indicator,
+ 'MineMeld.Miner(val.name == obj.name)': {'name': miner_name},
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': result_indicator,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(result_text, result_indicator, ['indicator', 'type', 'comment']),
+ 'EntryContext': entry_context
+ })
+
+
+def ip():
+ ip = demisto.args()['ip']
+ # output vars
+ result_indicator = [] # type: list
+ miner_name = ''
+ dbotscore = 0
+ indicator_context_data = {} # type: dict
+ entry_context = {} # type: dict
+
+ # search for indicator in all miners defined by user
+ for blacklist in BLACKLISTS:
+ result_indicator = get_indicators_from_miner(blacklist, ip)
+ if result_indicator:
+ dbotscore = 3
+ break
+ if dbotscore != 3:
+ for whitelist in WHITELISTS:
+ result_indicator = get_indicators_from_miner(whitelist, ip)
+ if result_indicator:
+ dbotscore = 1
+ break
+
+ # start building output and context
+ dbotscore_list = {
+ 'Indicator': ip,
+ 'Type': 'ip',
+ 'Vendor': 'Palo Alto MineMeld',
+ 'Score': dbotscore
+ }
+
+ if result_indicator:
+ miner_name = result_indicator[0]['miner']
+ # add only malicious to context
+ if dbotscore == 3:
+ indicator_context_data = {
+ 'MineMeld': {
+ 'Indicators': result_indicator
+ },
+ 'Malicious': {
+ 'Vendor': 'Palo Alto MineMeld',
+ 'Description': 'Indicator was found in MineMeld\'s blacklist: {}'.format(miner_name)
+ },
+ 'Address': ip
+ }
+ else:
+ indicator_context_data = {
+ 'MineMeld': {
+ 'Miner': {'name': miner_name},
+ 'Indicators': result_indicator
+ },
+ 'Address': ip
+ }
+
+ entry_context = {
+ 'DBotScore': dbotscore_list,
+ outputPaths['ip']: indicator_context_data,
+ 'MineMeld.Indicators(val.indicator == obj.indicator)': result_indicator,
+ 'MineMeld.Miner(val.name == obj.name)': {'name': miner_name},
+ }
+ result_text = 'MineMeld IP found at miner: {}'.format(miner_name)
+ else:
+ result_text = 'MineMeld IP severity - unknown'
+ entry_context = {
+ 'DBotScore': dbotscore_list,
+ 'MineMeld.Indicators(val.indicator == obj.indicator)': result_indicator,
+ 'MineMeld.Miner(val.name == obj.name)': {'name': miner_name},
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': result_indicator,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(result_text, result_indicator, ['indicator', 'type', 'comment']),
+ 'EntryContext': entry_context
+ })
+
+
+def get_all_miner_names():
+ miners_list = MineMeldClient.get_all_nodes()
+ supported_miners = []
+
+ for miner in miners_list:
+ if miner['class'] in SUPPORTED_MINER_CLASSES:
+ supported_miners.append({
+ 'name': miner['name'],
+ 'indicators': miner['length'],
+ 'class': miner['class']
+ })
+
+ if supported_miners:
+ result_text = 'Miners found: '
+ else:
+ result_text = 'No miners found'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': supported_miners,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(result_text, supported_miners, ['name', 'indicators', 'class']),
+ 'EntryContext': {
+ 'MineMeld.Miner(val.name == obj.name)': supported_miners
+ }
+ })
+
+
+def get_indicator_from_miner():
+ miner_name = demisto.args()['miner']
+ indicator = demisto.args()['indicator']
+ supported_miners = [] # type: list
+
+ supported_miners = get_indicators_from_miner(miner_name, indicator)
+
+ if supported_miners:
+ result_text = 'Items found at miner: {}'.format(miner_name)
+ else:
+ result_text = 'No items found at miner'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': supported_miners,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(result_text, supported_miners, ['indicator', 'type', 'comment']),
+ 'EntryContext': {
+ 'MineMeld.Miner(val.name == obj.name)': {'name': miner_name},
+ 'MineMeld.Indicators(val.miner == obj.miner && val.indicator == obj.indicator)': supported_miners
+ }
+ })
+
+
+def retrieve_miner_indicators():
+ miner_name = demisto.args()['miner']
+ result_list = [] # type: list
+ markdown_headers = ['indicator', 'comment', 'type']
+ miners_context = []
+
+ if miner_name == 'all':
+ markdown_headers.insert(0, 'miner')
+ miner_name = 'all miners'
+ miners_list = MineMeldClient.get_all_nodes()
+
+ for miner in miners_list:
+ if miner['class'] in SUPPORTED_MINER_CLASSES:
+ miners_context.append(
+ {
+ 'name': miner['name'],
+ 'class': miner['class']
+ }
+ )
+ miner_list = get_indicators_from_miner(miner['name'])
+ result_list.extend(miner_list)
+
+ else:
+ result_list = get_indicators_from_miner(miner_name)
+ miners_context = {'name': miner_name} # type: ignore
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': result_list,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('MineMeld indicators {}'.format(miner_name), result_list, markdown_headers),
+ 'EntryContext': {
+ 'MineMeld.Miner(val.name == obj.name)': miners_context,
+ 'MineMeld.Indicators(val.miner == obj.miner && val.indicator == obj.indicator)': result_list
+ }
+ })
+
+
+def update_miner():
+ miner = demisto.args()['miner']
+ indicators = argToList(demisto.args()['indicator'])
+ if len(indicators) < 1:
+ return_error('Provide at least one indicator')
+ type_ = demisto.args().get('type', get_indicator_type(indicators[0]))
+ comment = demisto.args().get('comment', '')
+
+ for indicator in indicators:
+ if ' ' in indicator:
+ return_error("Don't use space in indicator")
+
+ if demisto.command() == 'minemeld-add-to-miner':
+ add_indicator_to_miner(MineMeldClient, miner, indicators, type_, comment)
+ elif demisto.command() == 'minemeld-remove-from-miner':
+ remove_indicator_from_miner(MineMeldClient, miner, indicators)
+
+ demisto.results('Performed action successfully')
+
+
+def test():
+ if MineMeldClient.get_all_nodes():
+ demisto.results('ok')
+
+
+# code starts here
+
+MineMeldClient = APIClient(
+ url=SERVER_URL,
+ username=USERNAME,
+ password=PASSWORD,
+ capath=None
+)
+
+if demisto.command() == 'test-module':
+ test()
+elif demisto.command() == 'minemeld-add-to-miner' or demisto.command() == 'minemeld-remove-from-miner':
+ update_miner()
+elif demisto.command() == 'minemeld-retrieve-miner':
+ retrieve_miner_indicators()
+elif demisto.command() == 'minemeld-get-indicator-from-miner':
+ get_indicator_from_miner()
+elif demisto.command() == 'minemeld-get-all-miners-names':
+ get_all_miner_names()
+elif demisto.command() == 'domain':
+ domain()
+elif demisto.command() == 'url':
+ url()
+elif demisto.command() == 'file':
+ file()
+elif demisto.command() == 'ip':
+ ip()
diff --git a/Integrations/PaloAlto_MineMeld/PaloAlto_MineMeld.yml b/Integrations/PaloAlto_MineMeld/PaloAlto_MineMeld.yml
new file mode 100644
index 000000000000..3478ae73b02b
--- /dev/null
+++ b/Integrations/PaloAlto_MineMeld/PaloAlto_MineMeld.yml
@@ -0,0 +1,479 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: Palo Alto Minemeld
+ version: -1
+configuration:
+- display: MineMeld URL, e.g. https://192.0.0.1/
+ name: url
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- display: Blacklist names, e.g. BlackListIPv4, BlackListURL, Malicious
+ name: blacklist
+ required: false
+ type: 0
+- display: Whitelist names, e.g. wlWhiteListIPv4, wlWhiteListDomain, Unmalicious
+ name: whitelist
+ required: false
+ type: 0
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: MineMeld streamlines the aggregation, enforcement and sharing of threat
+ intelligence.
+display: Palo Alto Networks MineMeld
+name: Palo Alto Minemeld
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Miner name - you can find it on the Nodes page of the MineMeld UI
+ (nodes of type "miner").
+ isArray: false
+ name: miner
+ required: true
+ secret: false
+ - default: false
+ description: The indicator value to add. Supports a list or comma-separated values.
+ isArray: true
+ name: indicator
+ required: true
+ secret: false
+ - default: false
+ description: Comment for the indicator.
+ isArray: false
+ name: comment
+ required: false
+ secret: false
+ deprecated: false
+ description: Add indicator to a miner.
+ execution: false
+ name: minemeld-add-to-miner
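+ # Illustrative usage (argument values below are examples only):
+ # !minemeld-add-to-miner miner="BlackListIPv4" indicator="192.0.2.1,192.0.2.2" comment="known bad IPs"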
+ - arguments:
+ - default: false
+ description: Miner name - you can find it on the Nodes page of the MineMeld UI
+ (nodes of type "miner").
+ isArray: false
+ name: miner
+ required: true
+ secret: false
+ - default: false
+ description: The value of the indicator to remove.
+ isArray: false
+ name: indicator
+ required: true
+ secret: false
+ deprecated: false
+ description: Remove an indicator from a miner.
+ execution: false
+ name: minemeld-remove-from-miner
+ - arguments:
+ - default: false
+ description: Miner name. Use 'all' to retrieve indicators from all miners.
+ isArray: false
+ name: miner
+ required: true
+ secret: false
+ deprecated: false
+ description: Get the list of indicators for a miner.
+ execution: false
+ name: minemeld-retrieve-miner
+ outputs:
+ - contextPath: MineMeld.Miner
+ description: Entire miner object
+ type: Unknown
+ - contextPath: MineMeld.Miner.name
+ description: Miner name
+ type: string
+ - contextPath: MineMeld.Miner.class
+ description: Miner class
+ type: string
+ - contextPath: MineMeld.Indicators
+ description: Entire indicator object
+ type: Unknown
+ - contextPath: MineMeld.Indicators.miner
+ description: Indicator's miner
+ type: string
+ - contextPath: MineMeld.Indicators.type
+ description: Indicator type
+ type: string
+ - contextPath: MineMeld.Indicators.indicator
+ description: Indicator's value
+ type: string
+ - contextPath: MineMeld.Indicators.comment
+ description: Indicator's comment
+ type: string
+ - arguments:
+ - default: false
+ description: Miner name
+ isArray: false
+ name: miner
+ required: true
+ secret: false
+ - default: false
+ description: The indicator value to look up.
+ isArray: false
+ name: indicator
+ required: true
+ secret: false
+ deprecated: false
+ description: Get an indicator from a miner's list.
+ execution: false
+ name: minemeld-get-indicator-from-miner
+ outputs:
+ - contextPath: MineMeld.Miner
+ description: Entire miner object
+ type: Unknown
+ - contextPath: MineMeld.Miner.name
+ description: Miner name
+ type: string
+ - contextPath: MineMeld.Indicators
+ description: Entire indicator object
+ type: Unknown
+ - contextPath: MineMeld.Indicators.miner
+ description: Indicator's miner
+ type: string
+ - contextPath: MineMeld.Indicators.type
+ description: Indicator type
+ type: string
+ - contextPath: MineMeld.Indicators.indicator
+ description: Indicator's value
+ type: string
+ - contextPath: MineMeld.Indicators.comment
+ description: Indicator's comment
+ type: string
+ - arguments:
+ - default: true
+ description: The IP address to check.
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: Search for an IP address in the configured miners.
+ execution: false
+ name: ip
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IPs, the vendor that made the decision
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IPs, the reason for the vendor to make the decision
+ type: string
+ - contextPath: IP.Address
+ description: IP address
+ type: string
+ - contextPath: IP.MineMeld.Indicators
+ description: Entire indicator object
+ type: Unknown
+ - contextPath: IP.MineMeld.Indicators.indicator
+ description: Indicator's value
+ type: string
+ - contextPath: IP.MineMeld.Indicators.miner
+ description: Indicator's miner
+ type: string
+ - contextPath: IP.MineMeld.Indicators.type
+ description: Indicator type
+ type: string
+ - contextPath: IP.MineMeld.Indicators.comment
+ description: Indicator's comment
+ type: string
+ - contextPath: IP.MineMeld.Miner
+ description: Entire miner object
+ type: Unknown
+ - contextPath: IP.MineMeld.Miner.name
+ description: Miner name
+ type: string
+ - contextPath: MineMeld.Indicators
+ description: Entire indicator object
+ type: Unknown
+ - contextPath: MineMeld.Indicators.indicator
+ description: Indicator's value
+ type: string
+ - contextPath: MineMeld.Indicators.miner
+ description: Indicator's miner
+ type: string
+ - contextPath: MineMeld.Indicators.type
+ description: Indicator type
+ type: string
+ - contextPath: MineMeld.Indicators.comment
+ description: Indicator's comment
+ type: string
+ - contextPath: MineMeld.Miner
+ description: Entire miner object
+ type: Unknown
+ - contextPath: MineMeld.Miner.name
+ description: Miner name
+ type: string
+ - arguments:
+ - default: true
+ description: The file hash to check.
+ isArray: false
+ name: file
+ required: true
+ secret: false
+ deprecated: false
+ description: Search for a file hash in the configured lists.
+ execution: false
+ name: file
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the reason for the vendor to make the decision
+ type: string
+ - contextPath: File.MineMeld.Indicators
+ description: Entire indicator object
+ type: Unknown
+ - contextPath: File.MineMeld.Indicators.indicator
+ description: Indicator's value
+ type: string
+ - contextPath: File.MineMeld.Indicators.miner
+ description: Indicator's miner
+ type: string
+ - contextPath: File.MineMeld.Indicators.type
+ description: Indicator type
+ type: string
+ - contextPath: File.MineMeld.Indicators.comment
+ description: Indicator's comment
+ type: string
+ - contextPath: File.MineMeld.Miner
+ description: Entire miner object
+ type: Unknown
+ - contextPath: File.MineMeld.Miner.name
+ description: Miner name
+ type: string
+ - contextPath: MineMeld.Indicators
+ description: Entire indicator object
+ type: Unknown
+ - contextPath: MineMeld.Indicators.indicator
+ description: Indicator's value
+ type: string
+ - contextPath: MineMeld.Indicators.miner
+ description: Indicator's miner
+ type: string
+ - contextPath: MineMeld.Indicators.type
+ description: Indicator type
+ type: string
+ - contextPath: MineMeld.Indicators.comment
+ description: Indicator's comment
+ type: string
+ - contextPath: MineMeld.Miner
+ description: Entire miner object
+ type: Unknown
+ - contextPath: MineMeld.Miner.name
+ description: Miner name
+ type: string
+ - contextPath: File.MD5
+ description: MD5 hash of the file
+ type: string
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file
+ type: string
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file
+ type: string
+ - arguments:
+ - default: true
+ description: The domain name to check.
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Search for a domain in the configured lists.
+ execution: false
+ name: domain
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious Domains, the vendor that made the decision
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious Domains, the reason for the vendor to make the decision
+ type: string
+ - contextPath: Domain.Name
+ description: Domain's name (value)
+ type: string
+ - contextPath: Domain.MineMeld.Indicators
+ description: Entire indicator object
+ type: Unknown
+ - contextPath: Domain.MineMeld.Indicators.indicator
+ description: Indicator's value
+ type: string
+ - contextPath: Domain.MineMeld.Indicators.miner
+ description: Indicator's miner
+ type: string
+ - contextPath: Domain.MineMeld.Indicators.type
+ description: Indicator type
+ type: string
+ - contextPath: Domain.MineMeld.Indicators.comment
+ description: Indicator's comment
+ type: string
+ - contextPath: Domain.MineMeld.Miner
+ description: Entire miner object
+ type: Unknown
+ - contextPath: Domain.MineMeld.Miner.name
+ description: Miner name
+ type: string
+ - contextPath: MineMeld.Indicators
+ description: Entire indicator object
+ type: Unknown
+ - contextPath: MineMeld.Indicators.indicator
+ description: Indicator's value
+ type: string
+ - contextPath: MineMeld.Indicators.miner
+ description: Indicator's miner
+ type: string
+ - contextPath: MineMeld.Indicators.type
+ description: Indicator type
+ type: string
+ - contextPath: MineMeld.Indicators.comment
+ description: Indicator's comment
+ type: string
+ - contextPath: MineMeld.Miner
+ description: Entire miner object
+ type: Unknown
+ - contextPath: MineMeld.Miner.name
+ description: Miner name
+ type: string
+ - arguments:
+ - default: true
+ description: The URL to check.
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ deprecated: false
+ description: Search for a URL in the configured lists.
+ execution: false
+ name: url
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the reason for the vendor to make the decision
+ type: string
+ - contextPath: URL.Data
+ description: URL's data (value)
+ type: string
+ - contextPath: URL.MineMeld.Indicators
+ description: Entire indicator object
+ type: Unknown
+ - contextPath: URL.MineMeld.Indicators.indicator
+ description: Indicator's value
+ type: string
+ - contextPath: URL.MineMeld.Indicators.miner
+ description: Indicator's miner
+ type: string
+ - contextPath: URL.MineMeld.Indicators.type
+ description: Indicator type
+ type: string
+ - contextPath: URL.MineMeld.Indicators.comment
+ description: Indicator's comment
+ type: string
+ - contextPath: URL.MineMeld.Miner
+ description: Entire miner object
+ type: Unknown
+ - contextPath: URL.MineMeld.Miner.name
+ description: Miner name
+ type: string
+ - contextPath: MineMeld.Indicators
+ description: Entire indicator object
+ type: Unknown
+ - contextPath: MineMeld.Indicators.indicator
+ description: Indicator's value
+ type: string
+ - contextPath: MineMeld.Indicators.miner
+ description: Indicator's miner
+ type: string
+ - contextPath: MineMeld.Indicators.type
+ description: Indicator type
+ type: string
+ - contextPath: MineMeld.Indicators.comment
+ description: Indicator's comment
+ type: string
+ - contextPath: MineMeld.Miner
+ description: Entire miner object
+ type: Unknown
+ - contextPath: MineMeld.Miner.name
+ description: Miner name
+ type: string
+ - deprecated: false
+ description: Returns the names of all miners whose class is a supported custom
+ indicator list.
+ execution: false
+ name: minemeld-get-all-miners-names
+ outputs:
+ - contextPath: MineMeld.Miner
+ description: Entire miner object
+ type: Unknown
+ - contextPath: MineMeld.Miner.name
+ description: Miner's name
+ type: string
+ - contextPath: MineMeld.Miner.class
+ description: Miner's class
+ type: string
+ - contextPath: MineMeld.Miner.indicators
+ description: Number of indicators in the miner.
+ type: string
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- minemeld_test
diff --git a/Integrations/PaloAlto_MineMeld/PaloAlto_MineMeld_description.md b/Integrations/PaloAlto_MineMeld/PaloAlto_MineMeld_description.md
new file mode 100644
index 000000000000..52a7fdebf74f
--- /dev/null
+++ b/Integrations/PaloAlto_MineMeld/PaloAlto_MineMeld_description.md
@@ -0,0 +1,5 @@
+There are two list types - blacklist and whitelist. Each list type holds the names of the miners (lists) that are treated as threat intel lists.
+Each list type maps to a different DbotScore in Demisto.
+Blacklist - if an indicator is found in one of the blacklists, it gets a DbotScore of 3, which is considered malicious in Demisto.
+Whitelist - if an indicator is found in one of the whitelists, it gets a DbotScore of 1, which is considered good in Demisto.
+If an indicator is not found in any list, it gets a DbotScore of 0 - unknown severity.
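+
+A minimal sketch of this scoring logic (illustrative only; `found_in` is a hypothetical lookup helper, not part of the integration):
+
+```python
+def dbot_score(indicator, blacklists, whitelists, found_in):
+    # blacklists take precedence, so they are checked first
+    if any(found_in(miner, indicator) for miner in blacklists):
+        return 3  # malicious
+    if any(found_in(miner, indicator) for miner in whitelists):
+        return 1  # good
+    return 0  # unknown severity
+```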
\ No newline at end of file
diff --git a/Integrations/PaloAlto_MineMeld/PaloAlto_MineMeld_image.png b/Integrations/PaloAlto_MineMeld/PaloAlto_MineMeld_image.png
new file mode 100644
index 000000000000..2d476d086688
Binary files /dev/null and b/Integrations/PaloAlto_MineMeld/PaloAlto_MineMeld_image.png differ
diff --git a/Integrations/Palo_Alto_Networks_WildFire_v2/CHANGELOG.md b/Integrations/Palo_Alto_Networks_WildFire_v2/CHANGELOG.md
new file mode 100644
index 000000000000..7a8cad1750c2
--- /dev/null
+++ b/Integrations/Palo_Alto_Networks_WildFire_v2/CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+Fixed an issue in which testing the integration instance failed.
+
+## [19.9.1] - 2019-09-18
+ - Fixed an issue in which the ***wildfire-report*** command failed for specific hash values.
+ - Fixed an issue in which the ***wildfire-report*** command failed when issuing it for an in-progress analysis.
+
+## [19.9.0] - 2019-09-04
+Fixed an issue in which the ***wildfire-report*** command failed when setting the *verbose* argument to *true*.
diff --git a/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2.py b/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2.py
new file mode 100644
index 000000000000..26e596ee1aa0
--- /dev/null
+++ b/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2.py
@@ -0,0 +1,747 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+import json
+import requests
+import os
+import shutil
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+URL = demisto.getParam('server')
+TOKEN = demisto.getParam('token')
+USE_SSL = not demisto.params().get('insecure', False)
+FILE_TYPE_SUPPRESS_ERROR = demisto.getParam('suppress_file_type_error')
+DEFAULT_HEADERS = {'Content-Type': 'application/x-www-form-urlencoded'}
+MULTIPART_HEADERS = {'Content-Type': "multipart/form-data; boundary=upload_boundry"}
+
+URL_DICT = {
+ 'verdict': '/get/verdict',
+ 'verdicts': '/get/verdicts',
+ 'upload_file': '/submit/file',
+ 'upload_url': '/submit/link',
+ 'upload_file_url': '/submit/url',
+ 'report': '/get/report',
+ 'sample': '/get/sample'
+}
+
+ERROR_DICT = {
+ '401': 'Unauthorized, API key invalid',
+ '404': 'Not Found, The report was not found',
+ '405': 'Method Not Allowed, Method other than POST used',
+ '413': 'Request Entity Too Large, Sample file size over max limit',
+ '415': 'Unsupported Media Type',
+ '418': 'Unsupported File Type Sample, file type is not supported',
+ '419': 'Request quota exceeded',
+ '420': 'Insufficient arguments',
+ '421': 'Invalid arguments',
+ '500': 'Internal error',
+ '502': 'Bad Gateway',
+ '513': 'File upload failed'
+}
+
+VERDICTS_DICT = {
+ '0': 'benign',
+ '1': 'malware',
+ '2': 'grayware',
+ '4': 'phishing',
+ '-100': 'pending, the sample exists, but there is currently no verdict',
+ '-101': 'error',
+ '-102': 'unknown, cannot find sample record in the database',
+ '-103': 'invalid hash value'
+}
+
+VERDICTS_TO_DBOTSCORE = {
+ '0': 1,
+ '1': 3,
+ '2': 2,
+ '4': 3,
+ '-100': 0,
+ '-101': 0,
+ '-102': 0,
+ '-103': 0
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(url, method, headers=None, body=None, params=None, files=None):
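+ # wraps requests.request, converts WildFire's XML responses to JSON via
+ # xml2json, and short-circuits "Not Found" replies on the sample and report
+ # endpoints with user-friendly messages instead of raising an error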
+ LOG('running request with url=%s' % url)
+ result = requests.request(
+ method,
+ url,
+ headers=headers,
+ data=body,
+ verify=USE_SSL,
+ params=params,
+ files=files
+ )
+
+ if str(result.reason) == 'Not Found':
+ # sample not found
+ if url.find(URL_DICT["sample"]) != -1:
+ demisto.results(
+ 'Sample was not found. '
+ 'Please note that grayware and benign samples are available for 14 days only. '
+ 'For more info contact your WildFire representative.')
+ sys.exit(0)
+ # report not found
+ if url.find(URL_DICT["report"]) != -1:
+ demisto.results('Report not found.')
+ sys.exit(0)
+
+ if result.status_code < 200 or result.status_code >= 300:
+ if str(result.status_code) in ERROR_DICT:
+ if result.status_code == 418 and FILE_TYPE_SUPPRESS_ERROR:
+ demisto.results({
+ 'Type': 11,  # 11 is entryTypes['warning']
+ 'Contents': f'Request Failed with status: {result.status_code}'
+ f' Reason is: {ERROR_DICT[str(result.status_code)]}',
+ 'ContentsFormat': formats['text']
+ })
+ sys.exit(0)
+ else:
+ return_error(f'Request Failed with status: {result.status_code}'
+ f' Reason is: {ERROR_DICT[str(result.status_code)]}')
+ else:
+ return_error(f'Request Failed with status: {result.status_code} Reason is: {result.reason}')
+ if result.text.find("Forbidden. (403)") != -1:
+ return_error('Request Forbidden - 403, check SERVER URL and API Key')
+
+ if result.headers['Content-Type'] == 'application/octet-stream':
+ return result
+ else:
+ try:
+ json_res = json.loads(xml2json(result.text))
+ return json_res
+ except Exception:
+ return_error(f'Failed to parse response to json. response: {result.text}')
+
+
+def prettify_upload(upload_body):
+ pretty_upload = {
+ 'MD5': upload_body["md5"],
+ 'SHA256': upload_body["sha256"],
+ 'Status': 'Pending'
+ }
+ if 'filetype' in upload_body:
+ pretty_upload["FileType"] = upload_body["filetype"]
+ if 'size' in upload_body:
+ pretty_upload["Size"] = upload_body["size"]
+ if 'url' in upload_body:
+ pretty_upload["URL"] = upload_body["url"]
+
+ return pretty_upload
+
+
+def prettify_report_entry(file_info):
+ pretty_report = {
+ 'MD5': file_info["md5"],
+ 'SHA256': file_info["sha256"],
+ 'Status': 'Completed'
+ }
+ if 'filetype' in file_info:
+ pretty_report["FileType"] = file_info["filetype"]
+ if 'size' in file_info:
+ pretty_report["Size"] = file_info["size"]
+ if 'url' in file_info:
+ pretty_report["URL"] = file_info["url"]
+
+ return pretty_report
+
+
+def prettify_verdict(verdict_data):
+ pretty_verdict = {}
+
+ if 'md5' in verdict_data:
+ pretty_verdict["MD5"] = verdict_data["md5"]
+ if 'sha256' in verdict_data:
+ pretty_verdict["SHA256"] = verdict_data["sha256"]
+
+ pretty_verdict["Verdict"] = verdict_data["verdict"]
+ pretty_verdict["VerdictDescription"] = VERDICTS_DICT[verdict_data["verdict"]]
+
+ return pretty_verdict
+
+
+def create_dbot_score_from_verdict(pretty_verdict):
+ if 'SHA256' not in pretty_verdict and 'MD5' not in pretty_verdict:
+ return_error('Hash is missing in WildFire verdict.')
+ if pretty_verdict["Verdict"] not in VERDICTS_TO_DBOTSCORE:
+ return_error('This hash verdict is not mapped to a DBotScore. Contact Demisto support for more information.')
+ dbot_score = {
+ 'Indicator': pretty_verdict["SHA256"] if 'SHA256' in pretty_verdict else pretty_verdict["MD5"],
+ 'Type': 'hash',
+ 'Vendor': 'WildFire',
+ 'Score': VERDICTS_TO_DBOTSCORE[pretty_verdict["Verdict"]]
+ }
+ return dbot_score
+
+
+def prettify_verdicts(verdicts_data):
+ pretty_verdicts_arr = []
+
+ for verdict_data in verdicts_data:
+ pretty_verdict = {}
+ if 'md5' in verdict_data:
+ pretty_verdict["MD5"] = verdict_data["md5"]
+ if 'sha256' in verdict_data:
+ pretty_verdict["SHA256"] = verdict_data["sha256"]
+
+ pretty_verdict["Verdict"] = verdict_data["verdict"]
+ pretty_verdict["VerdictDescription"] = VERDICTS_DICT[verdict_data["verdict"]]
+
+ pretty_verdicts_arr.append(pretty_verdict)
+
+ return pretty_verdicts_arr
+
+
+def create_dbot_score_from_verdicts(pretty_verdicts):
+ dbot_score_arr = []
+
+ for pretty_verdict in pretty_verdicts:
+
+ if 'SHA256' not in pretty_verdict and 'MD5' not in pretty_verdict:
+ return_error('Hash is missing in WildFire verdict.')
+ if pretty_verdict["Verdict"] not in VERDICTS_TO_DBOTSCORE:
+ return_error(
+ 'This hash verdict is not mapped to a DBotScore. Contact Demisto support for more information.')
+
+ dbot_score = {
+ 'Indicator': pretty_verdict["SHA256"] if "SHA256" in pretty_verdict else pretty_verdict["MD5"],
+ 'Type': 'hash',
+ 'Vendor': 'WildFire',
+ 'Score': VERDICTS_TO_DBOTSCORE[pretty_verdict["Verdict"]]
+ }
+ dbot_score_arr.append(dbot_score)
+
+ return dbot_score_arr
+
+
+def create_upload_entry(upload_body, title, result):
+ pretty_upload_body = prettify_upload(upload_body)
+ md = tableToMarkdown(title, pretty_upload_body, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': result,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': md,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'EntryContext': {
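+ # the DT key below merges this entry with any existing WildFire.Report context object that shares a SHA256 or MD5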
+ "WildFire.Report(val.SHA256 == obj.SHA256 || val.MD5 == obj.MD5)": pretty_upload_body
+ }
+ })
+
+
+def hash_args_handler(sha256=None, md5=None):
+ # hash argument used in wildfire-report, wildfire-verdict commands
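+ # argToList accepts a single value or a comma-separated list, so these commands transparently handle multiple hashes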
+ inputs = argToList(sha256) if sha256 else argToList(md5)
+ for element in inputs:
+ if sha256Regex.match(element) or md5Regex.match(element):
+ continue
+ else:
+ return_error('Invalid hash. Only SHA256 and MD5 are supported.')
+
+ return inputs
+
+
+def file_args_handler(file=None, sha256=None, md5=None):
+ # file/md5/sha256 are used in file command
+ if (file and not md5 and not sha256) or (not file and md5 and not sha256) or (not file and not md5 and sha256):
+ if file:
+ inputs = argToList(file)
+ elif md5:
+ inputs = argToList(md5)
+ else:
+ inputs = argToList(sha256)
+
+ for element in inputs:
+ if sha256Regex.match(element) or md5Regex.match(element) or sha1Regex.match(element):
+ continue
+ else:
+ return_error('Invalid hash. Only MD5, SHA1, and SHA256 hashes are supported.')
+
+ return inputs
+
+ else:
+ return_error('Specify exactly 1 of the following arguments: file, sha256, md5.')
+
+
+def hash_list_to_file(hash_list):
+ file_path = demisto.uniqueFile()
+ with open(file_path, 'w') as f:
+ f.write("\n".join(hash_list))
+
+ return [file_path]
+
+
+''' COMMANDS '''
+
+
+def test_module():
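+ # connectivity check: submit a test URL for analysis and expect upload info back from the API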
+ if wildfire_upload_url('https://www.demisto.com')[1]:
+ demisto.results('ok')
+
+
+@logger
+def wildfire_upload_file(upload):
+ upload_file_uri = URL + URL_DICT["upload_file"]
+ body = {'apikey': TOKEN}
+
+ file_path = demisto.getFilePath(upload)['path']
+ file_name = demisto.getFilePath(upload)['name']
+
+ try:
+ shutil.copy(file_path, file_name)
+ except Exception:
+ return_error('Failed to prepare file for upload.')
+
+ try:
+ with open(file_name, 'rb') as f:
+ result = http_request(
+ upload_file_uri,
+ 'POST',
+ body=body,
+ files={'file': f}
+ )
+ finally:
+ # remove the temporary copy of the file (shutil.rmtree only handles directories; os is exposed by the CommonServerPython star import)
+ if os.path.exists(file_name):
+ os.remove(file_name)
+
+ upload_file_data = result["wildfire"]["upload-file-info"]
+
+ return result, upload_file_data
+
+
+def wildfire_upload_file_command():
+ uploads = argToList(demisto.args().get('upload'))
+ for upload in uploads:
+ result, upload_file_data = wildfire_upload_file(upload)
+ create_upload_entry(upload_file_data, 'WildFire Upload File', result)
+
+
+@logger
+def wildfire_upload_file_url(upload):
+ upload_file_url_uri = URL + URL_DICT["upload_file_url"]
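+ # the multipart body is assembled manually; the 'upload_boundry' marker must match the boundary declared in MULTIPART_HEADERS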
+ body = f'''--upload_boundry
+Content-Disposition: form-data; name="apikey"
+
+{TOKEN}
+--upload_boundry
+Content-Disposition: form-data; name="url"
+
+{upload}
+--upload_boundry--'''
+
+ result = http_request(
+ upload_file_url_uri,
+ 'POST',
+ headers=MULTIPART_HEADERS,
+ body=body
+ )
+
+ upload_file_url_data = result["wildfire"]["upload-file-info"]
+
+ return result, upload_file_url_data
+
+
+def wildfire_upload_file_url_command():
+ uploads = argToList(demisto.args().get('upload'))
+ for upload in uploads:
+ result, upload_file_url_data = wildfire_upload_file_url(upload)
+ create_upload_entry(upload_file_url_data, 'WildFire Upload File URL', result)
+
+
+@logger
+def wildfire_upload_url(upload):
+ upload_url_uri = URL + URL_DICT["upload_url"]
+ body = '''--upload_boundry
+Content-Disposition: form-data; name="apikey"
+
+{apikey}
+--upload_boundry
+Content-Disposition: form-data; name="link"
+
+{link}
+--upload_boundry--'''.format(apikey=TOKEN, link=upload)
+
+ result = http_request(
+ upload_url_uri,
+ 'POST',
+ headers=MULTIPART_HEADERS,
+ body=body
+ )
+
+ upload_url_data = result["wildfire"]["submit-link-info"]
+
+ return result, upload_url_data
+
+
+def wildfire_upload_url_command():
+ uploads = argToList(demisto.args().get('upload'))
+ for upload in uploads:
+ result, upload_url_data = wildfire_upload_url(upload)
+ create_upload_entry(upload_url_data, 'WildFire Upload URL', result)
+
+
+@logger
+def wildfire_get_verdict(file_hash):
+ get_verdict_uri = URL + URL_DICT["verdict"]
+ body = 'apikey=' + TOKEN + '&hash=' + file_hash
+
+ result = http_request(get_verdict_uri, 'POST', headers=DEFAULT_HEADERS, body=body)
+ verdict_data = result["wildfire"]["get-verdict-info"]
+
+ return result, verdict_data
+
+
+def wildfire_get_verdict_command():
+ inputs = hash_args_handler(demisto.args().get('hash'))
+ for element in inputs:
+ result, verdict_data = wildfire_get_verdict(element)
+
+ pretty_verdict = prettify_verdict(verdict_data)
+ md = tableToMarkdown('WildFire Verdict', pretty_verdict, removeNull=True)
+
+ dbot_score = create_dbot_score_from_verdict(pretty_verdict)
+ ec = {
+ "WildFire.Verdicts(val.SHA256 == obj.SHA256 || val.MD5 == obj.MD5)": pretty_verdict,
+ "DBotScore(val.Indicator == obj.Indicator)": dbot_score
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': result,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': md,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+
+
+@logger
+def wildfire_get_verdicts(file_path):
+ get_verdicts_uri = URL + URL_DICT["verdicts"]
+ body = {'apikey': TOKEN}
+
+ try:
+ with open(file_path, 'rb') as f:
+ result = http_request(
+ get_verdicts_uri,
+ 'POST',
+ body=body,
+ files={'file': f}
+ )
+ finally:
+ shutil.rmtree(file_path, ignore_errors=True)
+
+ verdicts_data = result["wildfire"]["get-verdict-info"]
+
+ return result, verdicts_data
+
+
+def wildfire_get_verdicts_command():
+ if ('EntryID' in demisto.args() and 'hash_list' in demisto.args()) or (
+ 'EntryID' not in demisto.args() and 'hash_list' not in demisto.args()):
+ return_error('Specify exactly 1 of the following arguments: EntryID, hash_list.')
+
+ if 'EntryID' in demisto.args():
+ inputs = argToList(demisto.args().get('EntryID'))
+ paths = [demisto.getFilePath(element)['path'] for element in inputs]
+
+ else:
+ paths = hash_list_to_file(argToList(demisto.args().get('hash_list')))
+
+ for file_path in paths:
+ result, verdicts_data = wildfire_get_verdicts(file_path)
+
+ pretty_verdicts = prettify_verdicts(verdicts_data)
+ md = tableToMarkdown('WildFire Verdicts', pretty_verdicts, removeNull=True)
+
+ dbot_score = create_dbot_score_from_verdicts(pretty_verdicts)
+ ec = {
+ "WildFire.Verdicts(val.SHA256 == obj.SHA256 || val.MD5 == obj.MD5)": pretty_verdicts,
+ "DBotScore(val.Indicator == obj.Indicator)": dbot_score
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': result,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': md,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+
+
+def create_report(file_hash, reports, file_info, format_='xml', verbose=False):
+ udp_ip = []
+ udp_port = []
+ tcp_ip = []
+ tcp_port = []
+ dns_query = []
+ dns_response = []
+ evidence_md5 = []
+ evidence_text = []
+
+ # When only one report is in response, it's returned as a single json object and not a list.
+ if not isinstance(reports, list):
+ reports = [reports]
+
+ for report in reports:
+ if 'network' in report and report["network"]:
+ if 'UDP' in report["network"]:
+ if '-ip' in report["network"]["UDP"]:
+ udp_ip.append(report["network"]["UDP"]["-ip"])
+ if '-port' in report["network"]["UDP"]:
+ udp_port.append(report["network"]["UDP"]["-port"])
+ if 'TCP' in report["network"]:
+ if '-ip' in report["network"]["TCP"]:
+ tcp_ip.append(report["network"]["TCP"]["-ip"])
+ if '-port' in report["network"]["TCP"]:
+ tcp_port.append(report["network"]["TCP"]['-port'])
+ if 'dns' in report["network"]:
+ # as with reports, a single DNS record is returned as a dict rather than a list
+ dns_entries = report["network"]["dns"]
+ if not isinstance(dns_entries, list):
+ dns_entries = [dns_entries]
+ for dns_obj in dns_entries:
+ if '-query' in dns_obj:
+ dns_query.append(dns_obj['-query'])
+ if '-response' in dns_obj:
+ dns_response.append(dns_obj['-response'])
+
+ if 'evidence' in report and report["evidence"]:
+ if 'file' in report["evidence"]:
+ if isinstance(report["evidence"]["file"], dict) and 'entry' in report["evidence"]["file"]:
+ if '-md5' in report["evidence"]["file"]["entry"]:
+ evidence_md5.append(report["evidence"]["file"]["entry"]["-md5"])
+ if '-text' in report["evidence"]["file"]["entry"]:
+ evidence_text.append(report["evidence"]["file"]["entry"]["-text"])
+
+ outputs = {
+ 'Status': 'Success',
+ 'SHA256': file_info["sha256"]
+ }
+
+ if udp_ip or udp_port or tcp_ip or tcp_port or dns_query or dns_response:
+ outputs["Network"] = {}
+
+ if udp_ip or udp_port:
+ outputs["Network"]["UDP"] = {}
+ if udp_ip:
+ outputs["Network"]["UDP"]["IP"] = udp_ip
+ if udp_port:
+ outputs["Network"]["UDP"]["Port"] = udp_port
+
+ if tcp_ip or tcp_port:
+ outputs["Network"]["TCP"] = {}
+ if tcp_ip:
+ outputs["Network"]["TCP"]["IP"] = tcp_ip
+ if tcp_port:
+ outputs["Network"]["TCP"]["Port"] = tcp_port
+
+ if dns_query or dns_response:
+ outputs["Network"]["DNS"] = {}
+ if dns_query:
+ outputs["Network"]["DNS"]["Query"] = dns_query
+ if dns_response:
+ outputs["Network"]["DNS"]["Response"] = dns_response
+
+ if evidence_md5 or evidence_text:
+ outputs["Evidence"] = {}
+ if evidence_md5:
+ outputs["Evidence"]["md5"] = evidence_md5
+ if evidence_text:
+ outputs["Evidence"]["Text"] = evidence_text
+
+ ec = {}
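+ # DBotScore scale: 0 unknown, 1 good, 2 suspicious, 3 bad; start at unknown and raise it below based on file_info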
+ ec["DBotScore"] = {
+ 'Indicator': file_hash,
+ 'Type': 'hash',
+ 'Vendor': 'WildFire',
+ 'Score': 0
+ }
+ ec["WildFire.Report(val.SHA256 === obj.SHA256)"] = outputs
+
+ if file_info:
+ if file_info["malware"] == 'yes':
+ ec["DBotScore"]["Score"] = 3
+ ec[outputPaths['file']] = {
+ 'Type': file_info["filetype"],
+ 'MD5': file_info["md5"],
+ 'SHA1': file_info["sha1"],
+ 'SHA256': file_info["sha256"],
+ 'Size': file_info["size"],
+ 'Name': file_info["filename"] if 'filename' in file_info else None,
+ 'Malicious': {'Vendor': 'WildFire'}
+ }
+ else:
+ ec["DBotScore"]["Score"] = 1
+ if format_ == 'pdf':
+ get_report_uri = URL + URL_DICT["report"]
+ params = {
+ 'apikey': TOKEN,
+ 'format': 'pdf',
+ 'hash': file_hash
+ }
+
+ res_pdf = http_request(get_report_uri, 'POST', headers=DEFAULT_HEADERS, params=params)
+
+ file_name = 'wildfire_report_' + file_hash + '.pdf'
+ file_type = entryTypes['entryInfoFile']
+ result = fileResult(file_name, res_pdf.content,
+ file_type) # will be saved under 'InfoFile' in the context.
+ result['EntryContext'] = ec
+
+ demisto.results(result)
+
+ else:
+ md = tableToMarkdown('WildFire Report', prettify_report_entry(file_info))
+ if verbose:
+ for report in reports:
+ if isinstance(report, dict):
+ md += tableToMarkdown('Report ', report, list(report), removeNull=True)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': reports,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': md,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+
+
+@logger
+def wildfire_get_report(file_hash):
+ get_report_uri = URL + URL_DICT["report"]
+ params = {
+ 'apikey': TOKEN,
+ 'format': 'xml',
+ 'hash': file_hash
+ }
+
+ json_res = http_request(get_report_uri, 'POST', headers=DEFAULT_HEADERS, params=params)
+
+ if not json_res:
+ demisto.results('Report not found.')
+ sys.exit(0)
+
+ task_info = json_res["wildfire"].get('task_info', None)
+ reports = task_info.get('report', None) if task_info else None
+ file_info = json_res["wildfire"].get('file_info', None)
+
+ if not reports or not file_info:
+ demisto.results('The sample is still being analyzed. Please wait before downloading the report.')
+ sys.exit(0)
+ return file_hash, reports, file_info
+
+
+def wildfire_get_report_command():
+ if 'sha256' in demisto.args():
+ sha256 = demisto.args().get('sha256', None)
+ elif 'hash' in demisto.args():
+ sha256 = demisto.args().get('hash', None)
+ else:
+ sha256 = None
+ md5 = demisto.args().get('md5', None)
+ inputs = hash_args_handler(sha256, md5)
+
+ verbose = demisto.args().get('verbose', 'false').lower() == 'true'
+ format_ = demisto.args().get('format', 'xml')
+ for element in inputs:
+ file_hash, report, file_info = wildfire_get_report(element)
+ create_report(file_hash, report, file_info, format_, verbose)
+
+
+def wildfire_file_command():
+ inputs = file_args_handler(demisto.args().get('file'), demisto.args().get('md5'), demisto.args().get('sha256'))
+ for element in inputs:
+ if sha1Regex.match(element):
+ demisto.results({
+ 'Type': 11,
+ 'Contents': 'WildFire file hash reputation supports only MD5 and SHA256 hashes',
+ 'ContentsFormat': formats['text']
+ })
+ else:
+ file_hash, report, file_info = wildfire_get_report(element)
+ create_report(file_hash, report, file_info, 'xml', False)
+
+
+def wildfire_get_sample(file_hash):
+ get_report_uri = URL + URL_DICT["sample"]
+ params = {
+ 'apikey': TOKEN,
+ 'hash': file_hash
+ }
+
+ result = http_request(get_report_uri, 'POST', headers=DEFAULT_HEADERS, params=params)
+ return result
+
+
+def wildfire_get_sample_command():
+ if 'sha256' in demisto.args() or 'hash' in demisto.args():
+ sha256 = demisto.args().get('sha256') or demisto.args().get('hash')
+ else:
+ sha256 = None
+ md5 = demisto.args().get('md5', None)
+ inputs = hash_args_handler(sha256, md5)
+
+ for element in inputs:
+ result = wildfire_get_sample(element)
+
+ headers_string = str(result.headers)
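+ # the sample's filename is parsed out of the response headers (typically from the Content-Disposition header)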
+ file_name = headers_string.split("filename=", 1)[1]
+
+ # will be saved under 'File' in the context, can be farther investigated.
+ file_entry = fileResult(file_name, result.content)
+
+ demisto.results(file_entry)
+
+
+''' EXECUTION '''
+
+
+def main():
+ LOG('command is %s' % (demisto.command(),))
+
+ try:
+ # Remove proxy if not set to true in params
+ handle_proxy()
+
+ if demisto.command() == 'test-module':
+ test_module()
+
+ elif demisto.command() == 'wildfire-upload':
+ wildfire_upload_file_command()
+
+ elif demisto.command() in ['wildfire-upload-file-remote', 'wildfire-upload-file-url']:
+ wildfire_upload_file_url_command()
+
+ elif demisto.command() == 'wildfire-upload-url':
+ wildfire_upload_url_command()
+
+ elif demisto.command() == 'wildfire-report':
+ wildfire_get_report_command()
+
+ elif demisto.command() == 'file':
+ wildfire_file_command()
+
+ elif demisto.command() == 'wildfire-get-sample':
+ wildfire_get_sample_command()
+
+ elif demisto.command() == 'wildfire-get-verdict':
+ wildfire_get_verdict_command()
+
+ elif demisto.command() == 'wildfire-get-verdicts':
+ wildfire_get_verdicts_command()
+
+ except Exception as ex:
+ return_error(str(ex))
+
+ finally:
+ LOG.print_log()
+
+
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2.yml b/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2.yml
new file mode 100644
index 000000000000..18fda9008197
--- /dev/null
+++ b/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2.yml
@@ -0,0 +1,361 @@
+category: Forensics & Malware Analysis
+commonfields:
+ id: WildFire-v2
+ version: -1
+configuration:
+- display: Server URL (e.g. https://192.168.0.1)
+ name: server
+ defaultvalue: https://wildfire.paloaltonetworks.com/publicapi
+ type: 0
+ required: true
+- display: API Key
+ name: token
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Return warning entry for unsupported file types
+ name: suppress_file_type_error
+ defaultvalue: ""
+ type: 8
+ required: false
+description: Performs malware dynamic analysis.
+display: Palo Alto Networks WildFire v2
+name: WildFire-v2
+fromversion: 4.0.0
+script:
+ commands:
+ - name: file
+ arguments:
+ - name: file
+ default: true
+ description: File hash to check.
+ isArray: true
+ - name: md5
+ description: MD5 hash to check.
+ isArray: true
+ - name: sha256
+ description: SHA256 hash to check.
+ isArray: true
+ outputs:
+ - contextPath: File.Name
+ description: Name of the file.
+ type: string
+ - contextPath: File.Type
+ description: 'File type, for example: "PE"'
+ type: string
+ - contextPath: File.Size
+ description: Size of the file.
+ type: string
+ - contextPath: File.MD5
+ description: MD5 hash of the file.
+ type: string
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: WildFire.Report.Status
+ description: The status of the submission.
+ type: string
+ - contextPath: WildFire.Report.SHA256
+ description: SHA256 hash of the submission.
+ type: string
+ - contextPath: InfoFile.EntryID
+ description: The EntryID of the report file.
+ type: Unknown
+ - contextPath: InfoFile.Extension
+ description: Extension of the report file.
+ type: string
+ - contextPath: InfoFile.Name
+ description: Name of the report file.
+ type: string
+ - contextPath: InfoFile.Info
+ description: Details of the report file.
+ type: string
+ - contextPath: InfoFile.Size
+ description: Size of the report file.
+ type: number
+ - contextPath: InfoFile.Type
+ description: The report file type.
+ type: string
+ description: Retrieves results for a file hash using WildFire.
+ - name: wildfire-upload
+ arguments:
+ - name: upload
+ required: true
+ description: ID of the entry containing the file to upload.
+ isArray: true
+ outputs:
+ - contextPath: WildFire.Report.MD5
+ description: MD5 hash of the submission.
+ type: string
+ - contextPath: WildFire.Report.SHA256
+ description: SHA256 hash of the submission.
+ type: string
+ - contextPath: WildFire.Report.FileType
+ description: The submission type.
+ type: string
+ - contextPath: WildFire.Report.Size
+ description: The size of the submission.
+ type: number
+ - contextPath: WildFire.Report.Status
+ description: The status of the submission.
+ type: string
+ description: Uploads a file to WildFire for analysis.
+ - name: wildfire-upload-file-url
+ arguments:
+ - name: upload
+ required: true
+ description: URL of the remote file to upload.
+ outputs:
+ - contextPath: WildFire.Report.MD5
+ description: MD5 hash of the submission.
+ type: string
+ - contextPath: WildFire.Report.SHA256
+ description: SHA256 hash of the submission.
+ type: string
+ - contextPath: WildFire.Report.Status
+ description: The status of the submission.
+ type: string
+ - contextPath: WildFire.Report.URL
+ description: URL of the submission.
+ type: string
+ description: Uploads the URL of a remote file to WildFire for analysis.
+ - name: wildfire-report
+ arguments:
+ - name: md5
+ description: MD5 hash to check.
+ isArray: true
+ - name: sha256
+ description: SHA256 hash to check.
+ isArray: true
+ - name: hash
+ description: Deprecated - Use the sha256 argument instead.
+ isArray: true
+ - name: format
+ auto: PREDEFINED
+ predefined:
+ - xml
+ - pdf
+ description: Request a structured report (XML or PDF).
+ defaultValue: pdf
+ - name: verbose
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Receive extended information from WildFire.
+ defaultValue: "false"
+ outputs:
+ - contextPath: File.Name
+ description: Name of the file.
+ type: string
+ - contextPath: File.Type
+ description: 'File type, for example: "PE"'
+ type: string
+ - contextPath: File.Size
+ description: Size of the file.
+ type: number
+ - contextPath: File.MD5
+ description: MD5 hash of the file.
+ type: string
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: WildFire.Report.Status
+ description: The status of the submission.
+ type: string
+ - contextPath: WildFire.Report.SHA256
+ description: SHA256 hash of the submission.
+ type: string
+ - contextPath: InfoFile.EntryID
+ description: The EntryID of the report file.
+ type: string
+ - contextPath: InfoFile.Extension
+ description: The extension of the report file.
+ type: string
+ - contextPath: InfoFile.Name
+ description: The name of the report file.
+ type: string
+ - contextPath: InfoFile.Info
+ description: Details of the report file.
+ type: string
+ - contextPath: InfoFile.Size
+ description: The size of the report file.
+ type: number
+ - contextPath: InfoFile.Type
+ description: The report file type.
+ type: string
+ - contextPath: WildFire.Report.Network.UDP.IP
+ description: Submission-related IPs, over the UDP protocol.
+ type: string
+ - contextPath: WildFire.Report.Network.UDP.Port
+ description: Submission-related ports, over the UDP protocol.
+ type: string
+ - contextPath: WildFire.Report.Network.TCP.IP
+ description: Submission-related IPs, over the TCP protocol.
+ type: string
+ - contextPath: WildFire.Report.Network.TCP.Port
+ description: Submission-related ports, over the TCP protocol.
+ type: string
+ - contextPath: WildFire.Report.Network.DNS.Query
+ description: Submission DNS queries.
+ type: string
+ - contextPath: WildFire.Report.Network.DNS.Response
+ description: Submission DNS responses.
+ type: string
+ - contextPath: WildFire.Report.Evidence.md5
+ description: Submission evidence MD5 hash.
+ type: string
+ - contextPath: WildFire.Report.Evidence.Text
+ description: Submission evidence text.
+ type: string
+ description: Retrieves results for a file hash using WildFire.
+ - name: wildfire-get-verdict
+ arguments:
+ - name: hash
+ required: true
+ description: Hash to get the verdict for.
+ isArray: true
+ outputs:
+ - contextPath: WildFire.Verdicts.MD5
+ description: MD5 hash of the file.
+ type: string
+ - contextPath: WildFire.Verdicts.SHA256
+ description: SHA256 hash of the file.
+ type: string
+ - contextPath: WildFire.Verdicts.Verdict
+ description: Verdict of the file.
+ type: number
+ - contextPath: WildFire.Verdicts.VerdictDescription
+ description: Description of the file verdict.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ description: Returns a verdict for a hash.
+ - name: wildfire-get-verdicts
+ arguments:
+ - name: EntryID
+ description: EntryID of the text file that contains multiple hashes. Limit is
+ 500 hashes.
+ isArray: true
+ - name: hash_list
+ description: A list of hashes to get verdicts for.
+ isArray: true
+ outputs:
+ - contextPath: WildFire.Verdicts.MD5
+ description: MD5 hash of the file.
+ type: string
+ - contextPath: WildFire.Verdicts.SHA256
+ description: SHA256 hash of the file.
+ type: string
+ - contextPath: WildFire.Verdicts.Verdict
+ description: Verdict of the file.
+ type: number
+ - contextPath: WildFire.Verdicts.VerdictDescription
+ description: Description of the file verdict.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ description: Returns verdicts for multiple hashes, either stored in a TXT file or
+ given as a list.
+ - name: wildfire-upload-url
+ arguments:
+ - name: upload
+ required: true
+ description: URL to submit to WildFire.
+ isArray: true
+ outputs:
+ - contextPath: WildFire.Report.MD5
+ description: MD5 of the submission.
+ type: string
+ - contextPath: WildFire.Report.SHA256
+ description: SHA256 of the submission.
+ type: string
+ - contextPath: WildFire.Report.Status
+ description: The status of the submission.
+ type: string
+ - contextPath: WildFire.Report.URL
+ description: URL of the submission.
+ type: string
+ description: Uploads a URL of a webpage to WildFire for analysis.
+ - name: wildfire-get-sample
+ arguments:
+ - name: md5
+ description: MD5 hash of the sample.
+ isArray: true
+ - name: sha256
+ description: SHA256 hash of the sample.
+ description: Retrieves a sample.
+ dockerimage: demisto/python3:3.7.3.221
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- Wildfire Test
diff --git a/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2_description.md b/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2_description.md
new file mode 100644
index 000000000000..8b2fe47f28e4
--- /dev/null
+++ b/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2_description.md
@@ -0,0 +1,5 @@
+Don't have a WildFire API key?
+1. Log in to your WildFire account: https://wildfire.paloaltonetworks.com/wildfire/account
+2. Select the "Account" tab.
+3. Copy your API key.
\ No newline at end of file
diff --git a/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2_image.png b/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2_image.png
new file mode 100644
index 000000000000..ff44f2d6baac
Binary files /dev/null and b/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2_image.png differ
diff --git a/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2_test.py b/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2_test.py
new file mode 100644
index 000000000000..40029e7acfad
--- /dev/null
+++ b/Integrations/Palo_Alto_Networks_WildFire_v2/Palo_Alto_Networks_WildFire_v2_test.py
@@ -0,0 +1,68 @@
+from Palo_Alto_Networks_WildFire_v2 import prettify_upload, prettify_report_entry, prettify_verdict, \
+ create_dbot_score_from_verdict, prettify_verdicts, create_dbot_score_from_verdicts, hash_args_handler, \
+ file_args_handler
+
+
+def test_will_return_ok():
+ assert 1 == 1
+
+
+def test_prettify_upload():
+ expected_upload_dict = dict({
+ 'MD5': "md5_hash", 'SHA256': "sha256_hash", 'FileType': "pdf", 'Size': 5, 'Status': "Pending"})
+ prettify_upload_res = prettify_upload(
+ {'md5': "md5_hash", 'sha256': "sha256_hash", 'filetype': "pdf", 'size': 5})
+ assert expected_upload_dict == prettify_upload_res
+
+
+def test_prettify_report_entry():
+ expected_report_dict = dict({
+ 'MD5': "md5_hash", 'SHA256': "sha256_hash", 'FileType': "pdf", 'Size': 5, 'Status': "Completed"})
+ prettify_report_entry_res = prettify_report_entry(
+ {'md5': "md5_hash", 'sha256': "sha256_hash", 'filetype': "pdf", 'size': 5})
+ assert expected_report_dict == prettify_report_entry_res
+
+
+def test_prettify_verdict():
+ expected_verdict_dict = dict({
+ 'MD5': "md5_hash", 'SHA256': "sha256_hash", 'Verdict': "1", 'VerdictDescription': 'malware'})
+ prettify_verdict_res = prettify_verdict(
+ {'md5': "md5_hash", 'sha256': "sha256_hash", 'verdict': "1"})
+ assert expected_verdict_dict == prettify_verdict_res
+
+
+def test_create_dbot_score_from_verdict():
+ expected_dbot_score = dict({
+ 'Indicator': "sha256_hash", 'Type': "hash", 'Vendor': "WildFire", 'Score': 3})
+ dbot_score_dict = create_dbot_score_from_verdict({'SHA256': "sha256_hash", 'Verdict': "1"})
+ assert expected_dbot_score == dbot_score_dict
+
+
+def test_prettify_verdicts():
+ expected_verdicts_dict = [
+ {'MD5': "md5_hash", 'SHA256': "sha256_hash", 'Verdict': "1", 'VerdictDescription': 'malware'}]
+ prettify_verdicts_res = prettify_verdicts(
+ [{'md5': "md5_hash", 'sha256': "sha256_hash", 'verdict': "1"}])
+ assert expected_verdicts_dict == prettify_verdicts_res
+
+
+def test_create_dbot_score_from_verdicts():
+ expected_dbot_scores = [{'Indicator': "sha256_hash", 'Type': "hash", 'Vendor': "WildFire", 'Score': 3},
+ {'Indicator': "md5_hash", 'Type': "hash", 'Vendor': "WildFire", 'Score': 1}]
+ dbot_score_dict = create_dbot_score_from_verdicts(
+ [{'SHA256': "sha256_hash", 'Verdict': "1"}, {'MD5': "md5_hash", 'Verdict': "0"}])
+ assert expected_dbot_scores == dbot_score_dict
+
+
+def test_hash_args_handler():
+ expected_hash_list = ['12345678901234567890123456789012']
+ hash_list = hash_args_handler(md5='12345678901234567890123456789012')
+ assert expected_hash_list == hash_list
+
+
+def test_file_args_handler():
+ expected_file_hash_list = ['12345678901234567890123456789012',
+ '1d457069cb511af47a587287d59817148d404a2a7f39e1032d16094811f648e3']
+ file_hash_list = file_args_handler(
+ file="12345678901234567890123456789012,1d457069cb511af47a587287d59817148d404a2a7f39e1032d16094811f648e3")
+ assert expected_file_hash_list == file_hash_list
diff --git a/Integrations/Palo_Alto_Networks_WildFire_v2/Pipfile b/Integrations/Palo_Alto_Networks_WildFire_v2/Pipfile
new file mode 100644
index 000000000000..41c7519a7a9f
--- /dev/null
+++ b/Integrations/Palo_Alto_Networks_WildFire_v2/Pipfile
@@ -0,0 +1,14 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/Palo_Alto_Networks_WildFire_v2/Pipfile.lock b/Integrations/Palo_Alto_Networks_WildFire_v2/Pipfile.lock
new file mode 100644
index 000000000000..643d94a1edbe
--- /dev/null
+++ b/Integrations/Palo_Alto_Networks_WildFire_v2/Pipfile.lock
@@ -0,0 +1,174 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "030517bfcc68d7e2f82fb5831e88abe2f6540ec99eefed71048ae95c58697218"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:18c796c2cd35eb1a1d3f012a214a542790a1aed95e29768bdcb9f2197eccbd0b",
+ "sha256:96151fca2c6e736503981896495d344781b60d18bfda78dc11b290c6125ebdb6"
+ ],
+ "version": "==4.3.15"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33",
+ "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39",
+ "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019",
+ "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088",
+ "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b",
+ "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e",
+ "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6",
+ "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b",
+ "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5",
+ "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff",
+ "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd",
+ "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7",
+ "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff",
+ "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d",
+ "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2",
+ "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35",
+ "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4",
+ "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514",
+ "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252",
+ "sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109",
+ "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f",
+ "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c",
+ "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92",
+ "sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577",
+ "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d",
+ "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d",
+ "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f",
+ "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a",
+ "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"
+ ],
+ "version": "==1.3.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:0125e8f60e9e031347105eb1682cef932f5e97d7b9a1a28d9bf00c22a5daef40",
+ "sha256:590044e3942351a1bdb1de960b739ff4ce277960f2425ad4509446dbace8d9d1"
+ ],
+ "markers": "python_version > '2.7'",
+ "version": "==6.0.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f",
+ "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746"
+ ],
+ "version": "==0.9.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:592eaa2c33fae68c7d75aacf042efc9f77b27c08a6224a4f59beab8d9a420523",
+ "sha256:ad3ad5c450284819ecde191a654c09b0ec72257a2c711b9633d677c71c9850c4"
+ ],
+ "index": "pypi",
+ "version": "==4.3.1"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:4d0d06d173eecf172703219a71dbd4ade0e13904e6bbce1ce660e2e0dc78b5c4",
+ "sha256:bfdf02789e3d197bd682a758cae0a4a18706566395fbe2803badcd1335e0173e"
+ ],
+ "index": "pypi",
+ "version": "==1.10.1"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:035a54ede6ce1380599b2ce57844c6554666522e376bd111eb940fbc7c3dad23",
+ "sha256:037c35f2741ce3a9ac0d55abfcd119133cbd821fffa4461397718287092d9d15",
+ "sha256:049feae7e9f180b64efacbdc36b3af64a00393a47be22fa9cb6794e68d4e73d3",
+ "sha256:19228f7940beafc1ba21a6e8e070e0b0bfd1457902a3a81709762b8b9039b88d",
+ "sha256:2ea681e91e3550a30c2265d2916f40a5f5d89b59469a20f3bad7d07adee0f7a6",
+ "sha256:3a6b0a78af298d82323660df5497bcea0f0a4a25a0b003afd0ce5af049bd1f60",
+ "sha256:5385da8f3b801014504df0852bf83524599df890387a3c2b17b7caa3d78b1773",
+ "sha256:606d8afa07eef77280c2bf84335e24390055b478392e1975f96286d99d0cb424",
+ "sha256:69245b5b23bbf7fb242c9f8f08493e9ecd7711f063259aefffaeb90595d62287",
+ "sha256:6f6d839ab09830d59b7fa8fb6917023d8cb5498ee1f1dbd82d37db78eb76bc99",
+ "sha256:730888475f5ac0e37c1de4bd05eeb799fdb742697867f524dc8a4cd74bcecc23",
+ "sha256:9819b5162ffc121b9e334923c685b0d0826154e41dfe70b2ede2ce29034c71d8",
+ "sha256:9e60ef9426efab601dd9aa120e4ff560f4461cf8442e9c0a2b92548d52800699",
+ "sha256:af5fbdde0690c7da68e841d7fc2632345d570768ea7406a9434446d7b33b0ee1",
+ "sha256:b64efdbdf3bbb1377562c179f167f3bf301251411eb5ac77dec6b7d32bcda463",
+ "sha256:bac5f444c118aeb456fac1b0b5d14c6a71ea2a42069b09c176f75e9bd4c186f6",
+ "sha256:bda9068aafb73859491e13b99b682bd299c1b5fd50644d697533775828a28ee0",
+ "sha256:d659517ca116e6750101a1326107d3479028c5191f0ecee3c7203c50f5b915b0",
+ "sha256:eddd3fb1f3e0f82e5915a899285a39ee34ce18fd25d89582bc89fc9fb16cd2c6"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.3.1"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/Panorama/CHANGELOG.md b/Integrations/Panorama/CHANGELOG.md
new file mode 100644
index 000000000000..ee40fcd141c0
--- /dev/null
+++ b/Integrations/Panorama/CHANGELOG.md
@@ -0,0 +1,31 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+- Added the *tag* argument to several commands:
+ - List commands - filter by a tag.
+ - Create and edit commands.
+- Added the *Tags* context output to all list, create, edit, and get commands.
+- Added support in the ***panorama-query-logs*** command to supply a list of arguments, which are separated using the "OR" operator.
+- Improved error messaging when trying to configure a *device-group* that does not exist.
+
+## [19.9.0] - 2019-09-04
+- Added 3 commands:
+ - ***panorama-query-logs***
+ - ***panorama-check-logs-status***
+ - ***panorama-get-logs***
+- Added the **Panorama Query Logs** playbook.
+- Added *log-forwarding* as an option for the *element_to_change* argument in the ***panorama-edit-rule*** command.
+- Added support for Shared objects and Rules in Panorama instances.
+- Added the *device-group* argument to all relevant commands.
+
+
+## [19.8.2] - 2019-08-22
+- Improved error handling when trying to refresh an EDL object on a Panorama instance.
+
+## [19.8.0] - 2019-08-06
+- Improved error handling for URL filtering licensing.
+- Improved error handling when trying to edit an uncommitted Custom URL category.
+- Added the ***panorama-list-rules*** command.
+- Added *edl* as an option for the *object_type* argument in the ***panorama-custom-block-rule*** command.
+
diff --git a/Integrations/Panorama/Panorama.py b/Integrations/Panorama/Panorama.py
new file mode 100644
index 000000000000..91c5123c3610
--- /dev/null
+++ b/Integrations/Panorama/Panorama.py
@@ -0,0 +1,4110 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+from datetime import datetime
+from typing import Dict, List, Any, Optional
+import uuid
+import json
+import requests
+
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS '''
+if not demisto.params().get('port'):
+ return_error('Set a port for the instance')
+
+URL = demisto.params()['server'].rstrip('/:') + ':' + demisto.params().get('port') + '/api/'
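+# e.g. a server of 'https://panorama.example.com' (hypothetical) and port '443' resolve to 'https://panorama.example.com:443/api/'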
+API_KEY = str(demisto.params().get('key'))
+USE_SSL = not demisto.params().get('insecure')
+
+# determine a vsys or a device-group
+VSYS = demisto.params().get('vsys')
+if demisto.args() and demisto.args().get('device-group', None):
+ DEVICE_GROUP = demisto.args().get('device-group')
+else:
+ DEVICE_GROUP = demisto.params().get('device_group', None)
+
+# configuration check
+if DEVICE_GROUP and VSYS:
+ return_error('Cannot configure both vsys and Device group. Set vsys for firewall, set Device group for Panorama.')
+if not DEVICE_GROUP and not VSYS:
+ return_error('Set vsys for firewall or Device group for Panorama.')
+
+# setting security xpath relevant to FW or panorama management
+if DEVICE_GROUP:
+ device_group_shared = DEVICE_GROUP.lower()
+ if device_group_shared == 'shared':
+ XPATH_SECURITY_RULES = "/config/shared/"
+ DEVICE_GROUP = device_group_shared
+ else:
+ XPATH_SECURITY_RULES = "/config/devices/entry/device-group/entry[@name=\'" + DEVICE_GROUP + "\']/"
+else:
+ XPATH_SECURITY_RULES = "/config/devices/entry/vsys/entry[@name=\'" + VSYS + "\']/rulebase/security/rules/entry"
+
+# setting objects xpath relevant to FW or panorama management
+if DEVICE_GROUP:
+ device_group_shared = DEVICE_GROUP.lower()
+ if device_group_shared == 'shared':
+ XPATH_OBJECTS = "/config/shared/"
+ DEVICE_GROUP = device_group_shared
+ else:
+ XPATH_OBJECTS = "/config/devices/entry/device-group/entry[@name=\'" + DEVICE_GROUP + "\']/"
+else:
+ XPATH_OBJECTS = "/config/devices/entry/vsys/entry[@name=\'" + VSYS + "\']/"
+
+# Security rule arguments for output handling
+SECURITY_RULE_ARGS = {
+ 'rulename': 'Name',
+ 'source': 'Source',
+ 'destination': 'Destination',
+ 'negate_source': 'NegateSource',
+ 'negate_destination': 'NegateDestination',
+ 'action': 'Action',
+ 'service': 'Service',
+ 'disable': 'Disabled',
+ 'application': 'Application',
+ 'source_user': 'SourceUser',
+ 'disable_server_response_inspection': 'DisableServerResponseInspection',
+ 'description': 'Description',
+ 'target': 'Target',
+ 'log_forwarding': 'LogForwarding',
+ 'log-setting': 'LogForwarding',
+ 'tag': 'Tags'
+}
+
+PAN_OS_ERROR_DICT = {
+ '1': 'Unknown command - The specific config or operational command is not recognized.',
+ '2': 'Internal errors - Check with technical support when seeing these errors.',
+ '3': 'Internal errors - Check with technical support when seeing these errors.',
+ '4': 'Internal errors - Check with technical support when seeing these errors.',
+ '5': 'Internal errors - Check with technical support when seeing these errors.',
+ '6': 'Bad Xpath - The xpath specified in one or more attributes of the command is invalid. '
+ 'Check the API browser for proper xpath values.',
+ '7': 'Object not present - Object specified by the xpath is not present. For example, '
+ 'entry[@name=value] where no object with name value is present.',
+ '8': 'Object not unique - For commands that operate on a single object, the specified object is not unique.',
+ '10': 'Reference count not zero - Object cannot be deleted as there are other objects that refer to it. '
+ 'For example, address object still in use in policy.',
+ '11': 'Internal error - Check with technical support when seeing these errors.',
+ '12': 'Invalid object - Xpath or element values provided are not complete.',
+ '14': 'Operation not possible - Operation is allowed but not possible in this case. '
+ 'For example, moving a rule up one position when it is already at the top.',
+ '15': 'Operation denied - Operation is allowed. For example, Admin not allowed to delete own account, '
+ 'running a command that is not allowed on a passive device.',
+ '16': 'Unauthorized - The API role does not have access rights to run this query.',
+ '17': 'Invalid command - Invalid command or parameters.',
+ '18': 'Malformed command - The XML is malformed.',
+ # 19,20: success
+ '21': 'Internal error - Check with technical support when seeing these errors.',
+ '22': 'Session timed out - The session for this query timed out.'
+}
+
+''' HELPERS '''
+
+
+def http_request(uri: str, method: str, headers: Dict = {},
+ body: Dict = {}, params: Dict = {}, files=None) -> Any:
+ """
+ Makes an API call with the given arguments
+ """
+ result = requests.request(
+ method,
+ uri,
+ headers=headers,
+ data=body,
+ verify=USE_SSL,
+ params=params,
+ files=files
+ )
+
+ if result.status_code < 200 or result.status_code >= 300:
+ return_error('Request Failed. with status: ' + str(result.status_code) + '. Reason is: ' + str(result.reason))
+
+ # if pcap download
+ if params.get('type') == 'export':
+ return result
+
+ json_result = json.loads(xml2json(result.text))
+
+ # handle non success
+ if json_result['response']['@status'] != 'success':
+ if 'msg' in json_result['response'] and 'line' in json_result['response']['msg']:
+ # catch non existing object error and display a meaningful message
+ if json_result['response']['msg']['line'] == 'No such node':
+ return_error(
+ 'Object was not found, verify that the name is correct and that the instance was committed.')
+
+ # catch urlfiltering error and display a meaningful message
+ elif str(json_result['response']['msg']['line']).find('test -> url is unexpected') != -1:
+ return_error('The URL filtering license is either expired or not active. '
+ 'Please contact your PAN-OS representative.')
+
+ # catch non valid jobID errors and display a meaningful message
+ elif isinstance(json_result['response']['msg']['line'], str) and \
+ json_result['response']['msg']['line'].find('job') != -1 and \
+ (json_result['response']['msg']['line'].find('not found') != -1
+ or json_result['response']['msg']['line'].find('No such query job') != -1):
+ return_error('Invalid Job ID error: ' + json_result['response']['msg']['line'])
+
+ # catch already at the top/bottom error for rules and return this as an entry.note
+ elif str(json_result['response']['msg']['line']).find('already at the') != -1:
+ demisto.results('Rule ' + str(json_result['response']['msg']['line']))
+ sys.exit(0)
+
+ # catch already registered ip tags and return this as an entry.note
+ elif str(json_result['response']['msg']['line']).find('already exists, ignore') != -1:
+ if isinstance(json_result['response']['msg']['line']['uid-response']['payload']['register']['entry'],
+ list):
+ ips = [o['@ip'] for o in
+ json_result['response']['msg']['line']['uid-response']['payload']['register']['entry']]
+ else:
+ ips = json_result['response']['msg']['line']['uid-response']['payload']['register']['entry']['@ip']
+ demisto.results(
+ 'IP ' + str(ips) + ' already exist in the tag. All submitted IPs were not registered to the tag.')
+ sys.exit(0)
+
+ # catch timed out log queries and return this as an entry.note
+ elif str(json_result['response']['msg']['line']).find('Query timed out') != -1:
+ demisto.results(str(json_result['response']['msg']['line']) + '. Rerun the query.')
+ sys.exit(0)
+
+ if '@code' in json_result['response']:
+ return_error(
+ 'Request Failed.\nStatus code: ' + str(json_result['response']['@code']) + '\nWith message: ' + str(
+ json_result['response']['msg']['line']))
+ else:
+ return_error('Request Failed.\n' + str(json_result['response']))
+
+ # handle @code
+ if 'response' in json_result and '@code' in json_result['response']:
+ if json_result['response']['@code'] in PAN_OS_ERROR_DICT:
+ error_message = 'Request Failed.\n' + PAN_OS_ERROR_DICT[json_result['response']['@code']]
+ if json_result['response']['@code'] == '7' and DEVICE_GROUP:
+ device_group_names = get_device_groups_names()
+ if DEVICE_GROUP not in device_group_names:
+ error_message += (f'\nDevice Group: {DEVICE_GROUP} does not exist.'
+ f' The available Device Groups for this instance:'
+ f' {", ".join(device_group_names)}.')
+ return_error(error_message)
+ if json_result['response']['@code'] not in ['19', '20']:
+ # error code non exist in dict and not of success
+ if 'msg' in json_result['response']:
+ return_error(
+ 'Request Failed.\nStatus code: ' + str(json_result['response']['@code']) + '\nWith message: ' + str(
+ json_result['response']['msg']))
+ else:
+ return_error('Request Failed.\n' + str(json_result['response']))
+
+ return json_result
+
+
+def add_argument_list(arg: Any, field_name: str, member: Optional[bool]) -> str:
+ member_stringify_list = ''
+ if arg:
+ for item in arg:
+ member_stringify_list += '<member>' + item + '</member>'
+ if field_name == 'member':
+ return member_stringify_list
+ elif member:
+ return '<' + field_name + '>' + member_stringify_list + '</' + field_name + '>'
+ else:
+ return '<' + field_name + '>' + arg + '</' + field_name + '>'
+ else:
+ return ''
+
+
+def add_argument(arg: Optional[str], field_name: str, member: bool) -> str:
+ if arg:
+ if member:
+ return '<' + field_name + '><member>' + arg + '</member></' + field_name + '>'
+ else:
+ return '<' + field_name + '>' + arg + '</' + field_name + '>'
+ else:
+ return ''
+
+
+def add_argument_open(arg: Optional[str], field_name: str, member: bool) -> str:
+ if arg:
+ if member:
+ return '<' + field_name + '><member>' + arg + '</member></' + field_name + '>'
+ else:
+ return '<' + field_name + '>' + arg + '</' + field_name + '>'
+ else:
+ if member:
+ return '<' + field_name + '><member>any</member></' + field_name + '>'
+ else:
+ return '<' + field_name + '>any</' + field_name + '>'
+
+
+def add_argument_yes_no(arg: Optional[str], field_name: str, option: bool = False) -> str:
+ if arg and arg == 'No':
+ result = '<' + field_name + '>no</' + field_name + '>'
+ else:
+ result = '<' + field_name + '>' + ('yes' if arg else 'no') + '</' + field_name + '>'
+
+ if option:
+ result = '<option>' + result + '</option>'
+
+ return result
+
+
+def add_argument_target(arg: Optional[str], field_name: str) -> str:
+ if arg:
+ return '<' + field_name + '><devices><entry name="' + arg + '"/></devices></' + field_name + '>'
+ else:
+ return ''
+
+
+def prepare_security_rule_params(api_action: Optional[str] = None, rulename: Optional[str] = None,
+ source: Optional[str] = None, destination: Optional[str] = None, negate_source: Optional[str] = None,
+ negate_destination: Optional[str] = None, action: Optional[str] = None, service: Optional[str] = None,
+ disable: Optional[str] = None, application: Optional[str] = None, source_user: Optional[str] = None,
+ category: Optional[str] = None, from_: Optional[str] = None, to: Optional[str] = None,
+ description: Optional[str] = None, target: Optional[str] = None, log_forwarding: Optional[str] = None,
+ disable_server_response_inspection: Optional[str] = None, tags: Optional[List[str]] = None) -> Dict:
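+ # when no rule name is given, generate one from a uuid4 prefix, e.g. 'demisto-1a2b3c4d' (illustrative)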
+ rulename = rulename if rulename else ('demisto-' + (str(uuid.uuid4()))[:8])
+ params = {
+ 'type': 'config',
+ 'action': api_action,
+ 'key': API_KEY,
+ 'element': add_argument_open(action, 'action', False)
+ + add_argument_target(target, 'target')
+ + add_argument_open(description, 'description', False)
+ + add_argument_open(source, 'source', True)
+ + add_argument_open(destination, 'destination', True)
+ + add_argument_open(application, 'application', True)
+ + add_argument_open(category, 'category', True)
+ + add_argument_open(source_user, 'source-user', True)
+ + add_argument_open(from_, 'from', True) # default from will always be any
+ + add_argument_open(to, 'to', True) # default to will always be any
+ + add_argument_open(service, 'service', True)
+ + add_argument_yes_no(negate_source, 'negate-source')
+ + add_argument_yes_no(negate_destination, 'negate-destination')
+ + add_argument_yes_no(disable, 'disabled')
+ + add_argument_yes_no(disable_server_response_inspection, 'disable-server-response-inspection', True)
+ + add_argument(log_forwarding, 'log-setting', False)
+ + add_argument_list(tags, 'tag', True)
+ }
+ if DEVICE_GROUP:
+ if 'pre_post' not in demisto.args():
+ return_error('Please provide the pre_post argument when configuring a security rule in Panorama instance.')
+ else:
+ params['xpath'] = XPATH_SECURITY_RULES + demisto.args()[
+ 'pre_post'] + '/security/rules/entry' + '[@name=\'' + rulename + '\']'
+ else:
+ params['xpath'] = XPATH_SECURITY_RULES + '[@name=\'' + rulename + '\']'
+
+ return params
+
+
+''' FUNCTIONS'''
+
+
+def panorama_test():
+ """
+ test module
+ """
+ params = {
+ 'type': 'op',
+ 'cmd': '<show><system><info></info></system></show>',
+ 'key': API_KEY
+ }
+
+ http_request(
+ URL,
+ 'GET',
+ params=params
+ )
+
+ if DEVICE_GROUP and DEVICE_GROUP != 'shared':
+ device_group_test()
+
+ demisto.results('ok')
+
+
+def get_device_groups_names():
+ """
+ Get device group names in the Panorama
+ """
+ params = {
+ 'action': 'get',
+ 'type': 'config',
+ 'xpath': "/config/devices/entry/device-group/entry",
+ 'key': API_KEY
+ }
+
+ result = http_request(
+ URL,
+ 'GET',
+ params=params
+ )
+
+ device_groups = result['response']['result']['entry']
+ device_group_names = []
+ if isinstance(device_groups, dict):
+ # only one device group in the panorama
+ device_group_names.append(device_groups.get('@name'))
+ else:
+ for device_group in device_groups:
+ device_group_names.append(device_group.get('@name'))
+
+ return device_group_names
+
+
+def device_group_test():
+ """
+ Test module for the Device group specified
+ """
+ device_group_names = get_device_groups_names()
+ if DEVICE_GROUP not in device_group_names:
+ return_error(f'Device Group: {DEVICE_GROUP} does not exist.'
+ f' The available Device Groups for this instance: {", ".join(device_group_names)}.')
+
+
+@logger
+def panorama_command():
+ """
+ Executes a command
+ """
+ params = {'key': API_KEY}
+ params.update(demisto.args())
+
+ result = http_request(
+ URL,
+ 'POST',
+ params=params
+ )
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Command was executed successfully.',
+ })
+
+
+@logger
+def panorama_commit():
+ params = {
+ 'type': 'commit',
+ 'cmd': '<commit></commit>',
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'POST',
+ params=params
+ )
+
+ return result
+
+
+def panorama_commit_command():
+ """
+ Commit the configuration and show the result in the War Room
+ """
+ result = panorama_commit()
+
+ if 'result' in result['response']:
+ # commit has been given a jobid
+ commit_output = {
+ 'JobID': result['response']['result']['job'],
+ 'Status': 'Pending'
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Commit:', commit_output, ['JobID', 'Status'], removeNull=True),
+ 'EntryContext': {
+ "Panorama.Commit(val.JobID == obj.JobID)": commit_output
+ }
+ })
+ else:
+ # no changes to commit
+ demisto.results(result['response']['msg'])
+
+
+@logger
+def panorama_commit_status():
+ params = {
+ 'type': 'op',
+ 'cmd': '<show><jobs><id>' + demisto.args()['job_id'] + '</id></jobs></show>',
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'GET',
+ params=params
+ )
+
+ return result
+
+
+def panorama_commit_status_command():
+ """
+ Check jobID of commit status
+ """
+ result = panorama_commit_status()
+
+ if result['response']['result']['job']['type'] != 'Commit':
+ return_error('JobID given is not of a commit.')
+
+ commit_status_output = {'JobID': result['response']['result']['job']['id']}
+ if result['response']['result']['job']['status'] == 'FIN':
+ if result['response']['result']['job']['result'] == 'OK':
+ commit_status_output['Status'] = 'Completed'
+ else:
+ # result['response']['job']['result'] == 'FAIL'
+ commit_status_output['Status'] = 'Failed'
+ commit_status_output['Details'] = result['response']['result']['job']['details']['line']
+
+ if result['response']['result']['job']['status'] == 'ACT':
+ if result['response']['result']['job']['result'] == 'PEND':
+ commit_status_output['Status'] = 'Pending'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Commit status:', commit_status_output, ['JobID', 'Status', 'Details'],
+ removeNull=True),
+ 'EntryContext': {"Panorama.Commit(val.JobID == obj.JobID)": commit_status_output}
+ })
+
+
+@logger
+def panorama_push_to_device_group():
+ params = {
+ 'type': 'commit',
+ 'action': 'all',
+ 'cmd': '<commit-all><shared-policy><device-group><entry name="' + DEVICE_GROUP + '"/></device-group></shared-policy></commit-all>',
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'POST',
+ params=params
+ )
+
+ return result
+
+
+def panorama_push_to_device_group_command():
+ """
+ Push the Panorama configuration to the device group and show the result in the War Room
+ """
+ if not DEVICE_GROUP:
+ return_error("The 'panorama-push-to-device-group' command is relevant for a Palo Alto Panorama instance.")
+
+ result = panorama_push_to_device_group()
+ if 'result' in result['response']:
+ # commit has been given a jobid
+ push_output = {
+ 'DeviceGroup': DEVICE_GROUP,
+ 'JobID': result['response']['result']['job'],
+ 'Status': 'Pending'
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Push to Device Group:', push_output, ['JobID', 'Status'],
+ removeNull=True),
+ 'EntryContext': {
+ "Panorama.Push(val.JobID == obj.JobID)": push_output
+ }
+ })
+ else:
+ # no changes to commit
+ demisto.results(result['response']['msg']['line'])
+
+
+@logger
+def panorama_push_status():
+ params = {
+ 'type': 'op',
+        'cmd': '<show><jobs><id>' + demisto.args()['job_id'] + '</id></jobs></show>',
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'GET',
+ params=params
+ )
+
+ return result
+
+
+def panorama_push_status_command():
+ """
+    Check the status of a push job by its job ID
+ """
+ result = panorama_push_status()
+ if result['response']['result']['job']['type'] != 'CommitAll':
+ return_error('JobID given is not of a Push.')
+
+ push_status_output = {'JobID': result['response']['result']['job']['id']}
+ if result['response']['result']['job']['status'] == 'FIN':
+ if result['response']['result']['job']['result'] == 'OK':
+ push_status_output['Status'] = 'Completed'
+ else:
+            # result['response']['result']['job']['result'] == 'FAIL'
+ push_status_output['Status'] = 'Failed'
+ push_status_output['Details'] = result['response']['result']['job']['devices']['entry']['status']
+
+ if result['response']['result']['job']['status'] == 'PEND':
+ push_status_output['Status'] = 'Pending'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Push to Device Group status:', push_status_output,
+ ['JobID', 'Status', 'Details'], removeNull=True),
+ 'EntryContext': {"Panorama.Push(val.JobID == obj.JobID)": push_status_output}
+ })
+
+
+''' Addresses Commands '''
+
+
+def prettify_addresses_arr(addresses_arr: list) -> List:
+ if not isinstance(addresses_arr, list):
+ return prettify_address(addresses_arr)
+ pretty_addresses_arr = []
+ for address in addresses_arr:
+ pretty_address = {'Name': address['@name']}
+ if DEVICE_GROUP:
+ pretty_address['DeviceGroup'] = DEVICE_GROUP
+ if 'description' in address:
+ pretty_address['Description'] = address['description']
+
+ if 'ip-netmask' in address:
+ pretty_address['IP_Netmask'] = address['ip-netmask']
+
+ if 'ip-range' in address:
+ pretty_address['IP_Range'] = address['ip-range']
+
+ if 'fqdn' in address:
+ pretty_address['FQDN'] = address['fqdn']
+
+ if 'tag' in address and 'member' in address['tag']:
+ pretty_address['Tags'] = address['tag']['member']
+
+ pretty_addresses_arr.append(pretty_address)
+
+ return pretty_addresses_arr
+
+
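+# Note: the XML API deserializes a single entry as a dict rather than a
+# one-element list, which is why the prettify_*_arr helpers fall back to the
+# single-object prettifier.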
+@logger
+def panorama_list_addresses(tag=None):
+ params = {
+ 'action': 'get',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "address/entry",
+ 'key': API_KEY
+ }
+
+ if tag:
+ params['xpath'] += f'[( tag/member = \'{tag}\')]'
+
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result['response']['result']['entry']
+
+
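+# action=get reads the candidate configuration, whereas action=show (used for
+# single-object lookups below) reads the running configuration, so listings
+# also include objects that have not been committed yet.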
+def panorama_list_addresses_command():
+ """
+ Get all addresses
+ """
+ tag = demisto.args().get('tag')
+
+ addresses_arr = panorama_list_addresses(tag)
+ addresses_output = prettify_addresses_arr(addresses_arr)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': addresses_arr,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Addresses:', addresses_output,
+ ['Name', 'IP_Netmask', 'IP_Range', 'FQDN', 'Tags'], removeNull=True),
+ 'EntryContext': {
+ "Panorama.Addresses(val.Name == obj.Name)": addresses_output
+ }
+ })
+
+
+def prettify_address(address: Dict) -> Dict:
+ pretty_address = {'Name': address['@name']}
+ if DEVICE_GROUP:
+ pretty_address['DeviceGroup'] = DEVICE_GROUP
+ if 'description' in address:
+ pretty_address['Description'] = address['description']
+
+ if 'ip-netmask' in address:
+ pretty_address['IP_Netmask'] = address['ip-netmask']
+
+ if 'ip-range' in address:
+ pretty_address['IP_Range'] = address['ip-range']
+
+ if 'fqdn' in address:
+ pretty_address['FQDN'] = address['fqdn']
+
+ if 'tag' in address and 'member' in address['tag']:
+ pretty_address['Tags'] = address['tag']['member']
+
+ return pretty_address
+
+
+@logger
+def panorama_get_address(address_name: str) -> Dict:
+ params = {
+ 'action': 'show',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "address/entry[@name='" + address_name + "']",
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result['response']['result']['entry']
+
+
+def panorama_get_address_command():
+ """
+ Get an address
+ """
+ address_name = demisto.args()['name']
+
+ address = panorama_get_address(address_name)
+ address_output = prettify_address(address)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': address,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Address:', address_output,
+ ['Name', 'IP_Netmask', 'IP_Range', 'FQDN', 'Tags'], removeNull=True),
+ 'EntryContext': {
+ "Panorama.Addresses(val.Name == obj.Name)": address_output
+ }
+ })
+
+
+@logger
+def panorama_create_address(address_name: str, fqdn: str = None, ip_netmask: str = None, ip_range: str = None,
+ description: str = None, tags: list = None):
+ params = {'action': 'set',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "address/entry[@name='" + address_name + "']",
+ 'key': API_KEY,
+ 'element': (add_argument(fqdn, 'fqdn', False)
+ + add_argument(ip_netmask, 'ip-netmask', False)
+ + add_argument(ip_range, 'ip-range', False)
+ + add_argument(description, 'description', False)
+ + add_argument_list(tags, 'tag', True))
+ }
+
+ http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+
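+# add_argument/add_argument_list (defined earlier in this file) render XML
+# fragments; e.g. add_argument('10.0.0.0/24', 'ip-netmask', False) is expected
+# to produce '<ip-netmask>10.0.0.0/24</ip-netmask>', so 'element' becomes the
+# XML body of the new address entry.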
+def panorama_create_address_command():
+ """
+ Create an address object
+ """
+ address_name = demisto.args()['name']
+ description = demisto.args().get('description')
+ tags = argToList(demisto.args()['tag']) if 'tag' in demisto.args() else None
+
+ fqdn = demisto.args().get('fqdn')
+ ip_netmask = demisto.args().get('ip_netmask')
+ ip_range = demisto.args().get('ip_range')
+
+ if not fqdn and not ip_netmask and not ip_range:
+ return_error('Please specify exactly one of the following: fqdn, ip_netmask, ip_range.')
+
+ if (fqdn and ip_netmask) or (fqdn and ip_range) or (ip_netmask and ip_range):
+ return_error('Please specify exactly one of the following: fqdn, ip_netmask, ip_range.')
+
+ address = panorama_create_address(address_name, fqdn, ip_netmask, ip_range, description, tags)
+
+ address_output = {'Name': address_name}
+ if DEVICE_GROUP:
+ address_output['DeviceGroup'] = DEVICE_GROUP
+ if fqdn:
+ address_output['FQDN'] = fqdn
+ if ip_netmask:
+ address_output['IP_Netmask'] = ip_netmask
+ if ip_range:
+ address_output['IP_Range'] = ip_range
+ if description:
+ address_output['Description'] = description
+ if tags:
+ address_output['Tags'] = tags
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': address,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Address was created successfully.',
+ 'EntryContext': {
+ "Panorama.Addresses(val.Name == obj.Name)": address_output
+ }
+ })
+
+
+@logger
+def panorama_delete_address(address_name: str):
+ params = {
+ 'action': 'delete',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "address/entry[@name='" + address_name + "']",
+ 'element': " ",
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_delete_address_command():
+ """
+ Delete an address
+ """
+ address_name = demisto.args()['name']
+
+ address = panorama_delete_address(address_name)
+ address_output = {'Name': address_name}
+ if DEVICE_GROUP:
+ address_output['DeviceGroup'] = DEVICE_GROUP
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': address,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Address was deleted successfully.',
+ 'EntryContext': {
+ "Panorama.Addresses(val.Name == obj.Name)": address_output
+ }
+ })
+
+
+''' Address Group Commands '''
+
+
+def prettify_address_groups_arr(address_groups_arr: list) -> List:
+ if not isinstance(address_groups_arr, list):
+ return prettify_address_group(address_groups_arr)
+ pretty_address_groups_arr = []
+ for address_group in address_groups_arr:
+ pretty_address_group = {
+ 'Name': address_group['@name'],
+ 'Type': 'static' if 'static' in address_group else 'dynamic'
+ }
+ if DEVICE_GROUP:
+ pretty_address_group['DeviceGroup'] = DEVICE_GROUP
+ if 'description' in address_group:
+ pretty_address_group['Description'] = address_group['description']
+ if 'tag' in address_group and 'member' in address_group['tag']:
+ pretty_address_group['Tags'] = address_group['tag']['member']
+
+ if pretty_address_group['Type'] == 'static':
+ # static address groups can have empty lists
+ if address_group['static']:
+ pretty_address_group['Addresses'] = address_group['static']['member']
+ else:
+ pretty_address_group['Match'] = address_group['dynamic']['filter']
+
+ pretty_address_groups_arr.append(pretty_address_group)
+
+ return pretty_address_groups_arr
+
+
+@logger
+def panorama_list_address_groups(tag: str = None):
+ params = {
+ 'action': 'get',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "address-group/entry",
+ 'key': API_KEY
+ }
+
+ if tag:
+ params['xpath'] += f'[( tag/member = \'{tag}\')]'
+
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result['response']['result']['entry']
+
+
+def panorama_list_address_groups_command():
+ """
+ Get all address groups
+ """
+ tag = demisto.args().get('tag')
+ address_groups_arr = panorama_list_address_groups(tag)
+ address_groups_output = prettify_address_groups_arr(address_groups_arr)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': address_groups_arr,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Address groups:', address_groups_output,
+ ['Name', 'Type', 'Addresses', 'Match', 'Description', 'Tags'], removeNull=True),
+ 'EntryContext': {
+ "Panorama.AddressGroups(val.Name == obj.Name)": address_groups_output
+ }
+ })
+
+
+def prettify_address_group(address_group: Dict) -> Dict:
+ pretty_address_group = {
+ 'Name': address_group['@name'],
+ 'Type': 'static' if 'static' in address_group else 'dynamic'
+ }
+ if DEVICE_GROUP:
+ pretty_address_group['DeviceGroup'] = DEVICE_GROUP
+
+ if 'description' in address_group:
+ pretty_address_group['Description'] = address_group['description']
+ if 'tag' in address_group and 'member' in address_group['tag']:
+ pretty_address_group['Tags'] = address_group['tag']['member']
+
+ if pretty_address_group['Type'] == 'static':
+ pretty_address_group['Addresses'] = address_group['static']['member']
+ else:
+ pretty_address_group['Match'] = address_group['dynamic']['filter']
+
+ return pretty_address_group
+
+
+@logger
+def panorama_get_address_group(address_group_name: str):
+ params = {
+ 'action': 'show',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "address-group/entry[@name='" + address_group_name + "']",
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result['response']['result']['entry']
+
+
+def panorama_get_address_group_command():
+ """
+ Get an address group
+ """
+ address_group_name = demisto.args()['name']
+
+ result = panorama_get_address_group(address_group_name)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Address group:', prettify_address_group(result),
+ ['Name', 'Type', 'Addresses', 'Match', 'Description', 'Tags'],
+ removeNull=True),
+ 'EntryContext': {
+ "Panorama.AddressGroups(val.Name == obj.Name)": prettify_address_group(result)
+ }
+ })
+
+
+@logger
+def panorama_create_static_address_group(address_group_name: str, addresses: list,
+ description: str = None, tags: list = None):
+ params = {'action': 'set',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "address-group/entry[@name='" + address_group_name + "']",
+ 'key': API_KEY,
+ 'element': (
+ "" + add_argument_list(addresses, 'member', True)
+ + " " + add_argument(description, 'description', False)
+ + add_argument_list(tags, 'tag', True)
+ )}
+
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_create_dynamic_address_group(address_group_name: str, match: str,
+ description: str = None, tags: list = None):
+ params = {
+ 'action': 'set',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "address-group/entry[@name='" + address_group_name + "']",
+ 'element': "" + add_argument(match, 'filter', False)
+ + " " + add_argument(description, 'description', False)
+ + add_argument_list(tags, 'tag', True),
+ 'key': API_KEY
+ }
+
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
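+# Static groups enumerate their members explicitly, while dynamic groups are
+# defined by a tag-based 'filter' expression that the firewall evaluates
+# continuously; the command below therefore accepts either addresses or match,
+# but never both.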
+def panorama_create_address_group_command():
+ """
+ Create an address group
+ """
+ address_group_name = demisto.args()['name']
+ type_ = demisto.args()['type']
+ description = demisto.args().get('description')
+ tags = argToList(demisto.args()['tags']) if 'tags' in demisto.args() else None
+ match = demisto.args().get('match')
+ addresses = argToList(demisto.args()['addresses']) if 'addresses' in demisto.args() else None
+ if match and addresses:
+ return_error('Please specify only one of the following: addresses, match.')
+ if type_ == 'static':
+ if not addresses:
+ return_error('Please specify addresses in order to create a static address group.')
+ if type_ == 'dynamic':
+ if not match:
+ return_error('Please specify a match in order to create a dynamic address group.')
+
+ if type_ == 'static':
+ result = panorama_create_static_address_group(address_group_name, addresses, description, tags)
+ else:
+ result = panorama_create_dynamic_address_group(address_group_name, match, description, tags)
+
+ address_group_output = {
+ 'Name': address_group_name,
+ 'Type': type_
+ }
+ if DEVICE_GROUP:
+ address_group_output['DeviceGroup'] = DEVICE_GROUP
+ if match:
+ address_group_output['Match'] = match
+ if addresses:
+ address_group_output['Addresses'] = addresses
+ if description:
+ address_group_output['Description'] = description
+ if tags:
+ address_group_output['Tags'] = tags
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Address group was created successfully.',
+ 'EntryContext': {
+ "Panorama.AddressGroups(val.Name == obj.Name)": address_group_output
+ }
+ })
+
+
+@logger
+def panorama_delete_address_group(address_group_name: str):
+ params = {
+ 'action': 'delete',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "address-group/entry[@name='" + address_group_name + "']",
+ 'element': " ",
+ 'key': API_KEY
+ }
+
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_delete_address_group_command():
+ """
+ Delete an address group
+ """
+ address_group_name = demisto.args()['name']
+
+ address_group = panorama_delete_address_group(address_group_name)
+ address_group_output = {'Name': address_group_name}
+ if DEVICE_GROUP:
+ address_group_output['DeviceGroup'] = DEVICE_GROUP
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': address_group,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Address group was deleted successfully.',
+ 'EntryContext': {
+ "Panorama.AddressGroups(val.Name == obj.Name)": address_group_output
+ }
+ })
+
+
+def panorama_edit_address_group_command():
+ """
+ Edit an address group
+ """
+ address_group_name = demisto.args()['name']
+ type_ = demisto.args()['type']
+ match = demisto.args().get('match')
+ element_to_add = argToList(demisto.args()['element_to_add']) if 'element_to_add' in demisto.args() else None
+ element_to_remove = argToList(
+ demisto.args()['element_to_remove']) if 'element_to_remove' in demisto.args() else None
+
+ if type_ == 'dynamic':
+ if not match:
+            return_error('To edit a dynamic address group, please provide a match.')
+ match_param = add_argument_open(match, 'filter', False)
+ match_path = XPATH_OBJECTS + "address-group/entry[@name='" + address_group_name + "']/dynamic/filter"
+
+ if type_ == 'static':
+ if (element_to_add and element_to_remove) or (not element_to_add and not element_to_remove):
+            return_error('To edit a static address group, '
+                         'please specify exactly one of the following: element_to_add, element_to_remove.')
+ address_group_prev = panorama_get_address_group(address_group_name)
+ address_group_list: List[str] = []
+ if 'static' in address_group_prev:
+ if address_group_prev['static']:
+ address_group_list = argToList(address_group_prev['static']['member'])
+ if element_to_add:
+ addresses = list(set(element_to_add + address_group_list))
+ else:
+ addresses = [item for item in address_group_list if item not in element_to_remove]
+ addresses_param = add_argument_list(addresses, 'member', False)
+ addresses_path = XPATH_OBJECTS + "address-group/entry[@name='" + address_group_name + "']/static"
+
+ description = demisto.args().get('description')
+ tags = argToList(demisto.args()['tags']) if 'tags' in demisto.args() else None
+
+ params = {
+ 'action': 'edit',
+ 'type': 'config',
+ 'key': API_KEY,
+ 'xpath': '',
+ 'element': ''
+ }
+
+ address_group_output = {'Name': address_group_name}
+
+ if DEVICE_GROUP:
+ address_group_output['DeviceGroup'] = DEVICE_GROUP
+
+ if type_ == 'dynamic' and match:
+ params['xpath'] = match_path
+ params['element'] = match_param
+ result = http_request(
+ URL,
+ 'POST',
+ params=params
+ )
+ address_group_output['Match'] = match
+
+ if type_ == 'static' and addresses:
+ params['xpath'] = addresses_path
+ params['element'] = "" + addresses_param + " "
+ result = http_request(
+ URL,
+ 'POST',
+ params=params
+ )
+ address_group_output['Addresses'] = addresses
+
+ if description:
+ description_param = add_argument_open(description, 'description', False)
+ description_path = XPATH_OBJECTS + "address-group/entry[@name='" + address_group_name + "']/description"
+ params['xpath'] = description_path
+ params['element'] = description_param
+ result = http_request(
+ URL,
+ 'POST',
+ params=params
+ )
+ address_group_output['Description'] = description
+
+ if tags:
+ tag_param = add_argument_list(tags, 'tag', True)
+ tag_path = XPATH_OBJECTS + "address-group/entry[@name='" + address_group_name + "']/tag"
+ params['xpath'] = tag_path
+ params['element'] = tag_param
+ result = http_request(
+ URL,
+ 'POST',
+ params=params
+ )
+ address_group_output['Tags'] = tags
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Address Group was edited successfully.',
+ 'EntryContext': {
+ "Panorama.AddressGroups(val.Name == obj.Name)": address_group_output
+ }
+ })
+
+
+''' Services Commands '''
+
+
+def prettify_services_arr(services_arr: list):
+ if not isinstance(services_arr, list):
+ return prettify_service(services_arr)
+
+ pretty_services_arr = []
+ for service in services_arr:
+ pretty_service = {'Name': service['@name']}
+ if DEVICE_GROUP:
+ pretty_service['DeviceGroup'] = DEVICE_GROUP
+ if 'description' in service:
+ pretty_service['Description'] = service['description']
+ if 'tag' in service and 'member' in service['tag']:
+ pretty_service['Tags'] = service['tag']['member']
+
+ protocol = ''
+ if 'protocol' in service:
+ if 'tcp' in service['protocol']:
+ protocol = 'tcp'
+ elif 'udp' in service['protocol']:
+ protocol = 'udp'
+ else:
+ protocol = 'sctp'
+ pretty_service['Protocol'] = protocol
+
+ if 'port' in service['protocol'][protocol]:
+ pretty_service['DestinationPort'] = service['protocol'][protocol]['port']
+ if 'source-port' in service['protocol'][protocol]:
+ pretty_service['SourcePort'] = service['protocol'][protocol]['source-port']
+
+ pretty_services_arr.append(pretty_service)
+
+ return pretty_services_arr
+
+
+@logger
+def panorama_list_services(tag: str = None):
+ params = {
+ 'action': 'get',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "service/entry",
+ 'key': API_KEY
+ }
+
+ if tag:
+ params['xpath'] += f'[( tag/member = \'{tag}\')]'
+
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result['response']['result']['entry']
+
+
+def panorama_list_services_command():
+ """
+ Get all Services
+ """
+ tag = demisto.args().get('tag')
+
+ services_arr = panorama_list_services(tag)
+ services_output = prettify_services_arr(services_arr)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': services_arr,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Services:', services_output,
+ ['Name', 'Protocol', 'SourcePort', 'DestinationPort', 'Description', 'Tags'],
+ removeNull=True),
+ 'EntryContext': {
+ "Panorama.Services(val.Name == obj.Name)": services_output
+ }
+ })
+
+
+def prettify_service(service: Dict):
+ pretty_service = {
+ 'Name': service['@name'],
+ }
+ if DEVICE_GROUP:
+ pretty_service['DeviceGroup'] = DEVICE_GROUP
+ if 'description' in service:
+ pretty_service['Description'] = service['description']
+ if 'tag' in service and 'member' in service['tag']:
+ pretty_service['Tags'] = service['tag']['member']
+
+ protocol = ''
+ if 'protocol' in service:
+ if 'tcp' in service['protocol']:
+ protocol = 'tcp'
+ elif 'udp' in service['protocol']:
+ protocol = 'udp'
+ else:
+ protocol = 'sctp'
+ pretty_service['Protocol'] = protocol
+
+ if 'port' in service['protocol'][protocol]:
+ pretty_service['DestinationPort'] = service['protocol'][protocol]['port']
+ if 'source-port' in service['protocol'][protocol]:
+ pretty_service['SourcePort'] = service['protocol'][protocol]['source-port']
+
+ return pretty_service
+
+
+@logger
+def panorama_get_service(service_name: str):
+ params = {
+ 'action': 'show',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "service/entry[@name='" + service_name + "']",
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result['response']['result']['entry']
+
+
+def panorama_get_service_command():
+ """
+ Get a service
+ """
+ service_name = demisto.args()['name']
+
+ service = panorama_get_service(service_name)
+ service_output = prettify_service(service)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': service,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Address:', service_output,
+ ['Name', 'Protocol', 'SourcePort', 'DestinationPort', 'Description', 'Tags'],
+ removeNull=True),
+ 'EntryContext': {
+ "Panorama.Services(val.Name == obj.Name)": service_output
+ }
+ })
+
+
+@logger
+def panorama_create_service(service_name: str, protocol: str, destination_port: str,
+ source_port: str = None, description: str = None, tags: list = None):
+ params = {
+ 'action': 'set',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "service/entry[@name='" + service_name + "']",
+ 'key': API_KEY,
+ 'element': '' + '<' + protocol + '>'
+ + add_argument(destination_port, 'port', False)
+ + add_argument(source_port, 'source-port', False)
+ + '' + protocol + '>' + ' '
+ + add_argument(description, 'description', False)
+ + add_argument_list(tags, 'tag', True)
+ }
+
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
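+# The service element nests the ports under the protocol, e.g. (sketch):
+# <protocol><tcp><port>443</port><source-port>1024</source-port></tcp></protocol>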
+def panorama_create_service_command():
+ """
+ Create a service object
+ """
+ service_name = demisto.args()['name']
+ protocol = demisto.args()['protocol']
+ destination_port = demisto.args()['destination_port']
+ source_port = demisto.args().get('source_port')
+ description = demisto.args().get('description')
+ tags = argToList(demisto.args()['tags']) if 'tags' in demisto.args() else None
+
+ service = panorama_create_service(service_name, protocol, destination_port, source_port, description, tags)
+
+ service_output = {
+ 'Name': service_name,
+ 'Protocol': protocol,
+ 'DestinationPort': destination_port
+ }
+ if DEVICE_GROUP:
+ service_output['DeviceGroup'] = DEVICE_GROUP
+ if source_port:
+ service_output['SourcePort'] = source_port
+ if description:
+ service_output['Description'] = description
+ if tags:
+ service_output['Tags'] = tags
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': service,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Service was created successfully.',
+ 'EntryContext': {
+ "Panorama.Services(val.Name == obj.Name)": service_output
+ }
+ })
+
+
+@logger
+def panorama_delete_service(service_name: str):
+ params = {
+ 'action': 'delete',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "service/entry[@name='" + service_name + "']",
+ 'element': " ",
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_delete_service_command():
+ """
+ Delete a service
+ """
+ service_name = demisto.args()['name']
+
+ service = panorama_delete_service(service_name)
+ service_output = {'Name': service_name}
+ if DEVICE_GROUP:
+ service_output['DeviceGroup'] = DEVICE_GROUP
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': service,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Service was deleted successfully.',
+ 'EntryContext': {
+ "Panorama.Services(val.Name == obj.Name)": service_output
+ }
+ })
+
+
+''' Service Group Commands '''
+
+
+def prettify_service_groups_arr(service_groups_arr: list):
+ if not isinstance(service_groups_arr, list):
+ return prettify_service_group(service_groups_arr)
+
+ pretty_service_groups_arr = []
+ for service_group in service_groups_arr:
+ pretty_service_group = {
+ 'Name': service_group['@name'],
+ 'Services': service_group['members']['member']
+ }
+ if DEVICE_GROUP:
+ pretty_service_group['DeviceGroup'] = DEVICE_GROUP
+ if 'tag' in service_group and 'member' in service_group['tag']:
+ pretty_service_group['Tags'] = service_group['tag']['member']
+
+ pretty_service_groups_arr.append(pretty_service_group)
+
+ return pretty_service_groups_arr
+
+
+@logger
+def panorama_list_service_groups(tag: str = None):
+ params = {
+ 'action': 'get',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "service-group/entry",
+ 'key': API_KEY
+ }
+
+ if tag:
+ params['xpath'] += f'[( tag/member = \'{tag}\')]'
+
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result['response']['result']['entry']
+
+
+def panorama_list_service_groups_command():
+ """
+    Get all service groups
+ """
+ tag = demisto.args().get('tag')
+ service_groups_arr = panorama_list_service_groups(tag)
+ service_groups_output = prettify_service_groups_arr(service_groups_arr)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': service_groups_arr,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Service groups:', service_groups_output, ['Name', 'Services', 'Tags'],
+ removeNull=True),
+ 'EntryContext': {
+ "Panorama.ServiceGroups(val.Name == obj.Name)": service_groups_output
+ }
+ })
+
+
+def prettify_service_group(service_group: dict):
+ pretty_service_group = {
+ 'Name': service_group['@name'],
+ 'Services': service_group['members']['member']
+ }
+ if DEVICE_GROUP:
+ pretty_service_group['DeviceGroup'] = DEVICE_GROUP
+ if 'tag' in service_group and 'member' in service_group['tag']:
+ pretty_service_group['Tags'] = service_group['tag']['member']
+
+ return pretty_service_group
+
+
+@logger
+def panorama_get_service_group(service_group_name):
+ params = {
+ 'action': 'show',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "service-group/entry[@name='" + service_group_name + "']",
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result['response']['result']['entry']
+
+
+def panorama_get_service_group_command():
+ """
+    Get a service group
+ """
+ service_group_name = demisto.args()['name']
+
+ result = panorama_get_service_group(service_group_name)
+ pretty_service_group = prettify_service_group(result)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Service group:', pretty_service_group, ['Name', 'Services', 'Tags'],
+ removeNull=True),
+ 'EntryContext': {
+ "Panorama.ServiceGroups(val.Name == obj.Name)": pretty_service_group
+ }
+ })
+
+
+def panorama_create_service_group(service_group_name, services, tags):
+ params = {
+ 'action': 'set',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "service-group/entry[@name='" + service_group_name + "']",
+        'element': '<members>' + add_argument_list(services, 'member', True) + '</members>'
+ + add_argument_list(tags, 'tag', True),
+ 'key': API_KEY
+ }
+
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_create_service_group_command():
+ """
+ Create a service group
+ """
+ service_group_name = demisto.args()['name']
+ services = argToList(demisto.args()['services'])
+ tags = argToList(demisto.args()['tags']) if 'tags' in demisto.args() else None
+
+ result = panorama_create_service_group(service_group_name, services, tags)
+
+ service_group_output = {
+ 'Name': service_group_name,
+ 'Services': services
+ }
+ if DEVICE_GROUP:
+ service_group_output['DeviceGroup'] = DEVICE_GROUP
+ if tags:
+ service_group_output['Tags'] = tags
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Service group was created successfully.',
+ 'EntryContext': {
+ "Panorama.ServiceGroups(val.Name == obj.Name)": service_group_output
+ }
+ })
+
+
+@logger
+def panorama_delete_service_group(service_group_name):
+ params = {
+ 'action': 'delete',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "service-group/entry[@name='" + service_group_name + "']",
+ 'element': " ",
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_delete_service_group_command():
+ """
+ Delete a service group
+ """
+ service_group_name = demisto.args()['name']
+
+ service_group = panorama_delete_service_group(service_group_name)
+ service_group_output = {'Name': service_group_name}
+ if DEVICE_GROUP:
+ service_group_output['DeviceGroup'] = DEVICE_GROUP
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': service_group,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Service group was deleted successfully.',
+ 'EntryContext': {
+ "Panorama.ServiceGroups(val.Name == obj.Name)": service_group_output
+ }
+ })
+
+
+@logger
+def panorama_edit_service_group(service_group_name, services, tag):
+ params = {
+ 'action': 'edit',
+ 'type': 'config',
+ 'xpath': '',
+ 'element': '',
+ 'key': API_KEY,
+ }
+
+ if services:
+ services_xpath = XPATH_OBJECTS + "service-group/entry[@name='" + service_group_name + "']/members"
+        services_element = '<members>' + add_argument_list(services, 'member', False) + '</members>'
+ params['xpath'] = services_xpath
+ params['element'] = services_element
+ result = http_request(
+ URL,
+ 'POST',
+ params=params
+ )
+
+ if tag:
+ tag_xpath = XPATH_OBJECTS + "service-group/entry[@name='" + service_group_name + "']/tag"
+ tag_element = add_argument_list(tag, 'tag', True)
+ params['xpath'] = tag_xpath
+ params['element'] = tag_element
+ result = http_request(
+ URL,
+ 'POST',
+ params=params
+ )
+
+ return result
+
+
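+# action=edit replaces the node addressed by xpath in its entirety, so the
+# command below recomputes the full member list instead of sending a delta.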
+def panorama_edit_service_group_command():
+ """
+ Edit a service group
+ """
+ service_group_name = demisto.args()['name']
+ services_to_add = argToList(demisto.args()['services_to_add']) if 'services_to_add' in demisto.args() else None
+ services_to_remove = argToList(
+ demisto.args()['services_to_remove']) if 'services_to_remove' in demisto.args() else None
+ tag = argToList(demisto.args()['tag']) if 'tag' in demisto.args() else None
+
+ if not services_to_add and not services_to_remove and not tag:
+ return_error('Specify at least one of the following arguments: services_to_add, services_to_remove, tag')
+
+ if services_to_add and services_to_remove:
+ return_error('Specify at most one of the following arguments: services_to_add, services_to_remove')
+
+ services: List[str] = []
+ if services_to_add or services_to_remove:
+ service_group_prev = panorama_get_service_group(service_group_name)
+ service_group_list = argToList(service_group_prev['members']['member'])
+ if services_to_add:
+ services = list(set(services_to_add + service_group_list))
+ else:
+ services = [item for item in service_group_list if item not in services_to_remove]
+
+ if len(services) == 0:
+ return_error('A Service group must have at least one service.')
+
+ result = panorama_edit_service_group(service_group_name, services, tag)
+
+ service_group_output = {'Name': service_group_name}
+ if DEVICE_GROUP:
+ service_group_output['DeviceGroup'] = DEVICE_GROUP
+ if len(services) > 0:
+ service_group_output['Services'] = services
+ if tag:
+ service_group_output['Tag'] = tag
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Service group was edited successfully.',
+ 'EntryContext': {
+ "Panorama.ServiceGroups(val.Name == obj.Name)": service_group_output
+ }
+ })
+
+
+''' Custom URL Category Commands '''
+
+
+def prettify_custom_url_category(custom_url_category):
+ pretty_custom_url_category = {
+ 'Name': custom_url_category['@name'],
+ }
+ if DEVICE_GROUP:
+ pretty_custom_url_category['DeviceGroup'] = DEVICE_GROUP
+
+ if 'description' in custom_url_category:
+ pretty_custom_url_category['Description'] = custom_url_category['description']
+
+ if 'list' in custom_url_category:
+ pretty_custom_url_category['Sites'] = custom_url_category['list']['member']
+
+ return pretty_custom_url_category
+
+
+@logger
+def panorama_get_custom_url_category(name):
+ params = {
+ 'action': 'get',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "profiles/custom-url-category/entry[@name='" + name + "']",
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result['response']['result']['entry']
+
+
+def panorama_get_custom_url_category_command():
+ """
+ Get a custom url category
+ """
+ name = demisto.args()['name']
+
+ custom_url_category = panorama_get_custom_url_category(name)
+ custom_url_category_output = prettify_custom_url_category(custom_url_category)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': custom_url_category,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Custom URL Category:', custom_url_category_output,
+ ['Name', 'Sites', 'Description'], removeNull=True),
+ 'EntryContext': {
+ "Panorama.CustomURLCategory(val.Name == obj.Name)": custom_url_category_output
+ }
+ })
+
+
+@logger
+def panorama_create_custom_url_category(custom_url_category_name: str, sites, description: str = None):
+ params = {
+ 'action': 'set',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "profiles/custom-url-category/entry[@name='" + custom_url_category_name + "']",
+ 'element': add_argument(description, 'description', False) + add_argument_list(sites, 'list', True),
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ custom_url_category_output = {'Name': custom_url_category_name}
+ if DEVICE_GROUP:
+ custom_url_category_output['DeviceGroup'] = DEVICE_GROUP
+ if sites:
+ custom_url_category_output['Sites'] = sites
+ if description:
+ custom_url_category_output['Description'] = description
+
+ return result, custom_url_category_output
+
+
+def panorama_create_custom_url_category_command():
+ """
+ Create a custom URL category
+ """
+ custom_url_category_name = demisto.args()['name']
+ sites = argToList(demisto.args()['sites']) if 'sites' in demisto.args() else None
+ description = demisto.args().get('description')
+
+ custom_url_category, custom_url_category_output = panorama_create_custom_url_category(custom_url_category_name,
+ sites, description)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': custom_url_category,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Created Custom URL Category:', custom_url_category_output,
+ ['Name', 'Sites', 'Description'], removeNull=True),
+ 'EntryContext': {
+ "Panorama.CustomURLCategory(val.Name == obj.Name)": custom_url_category_output
+ }
+ })
+
+
+@logger
+def panorama_delete_custom_url_category(custom_url_category_name):
+ params = {
+ 'action': 'delete',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "profiles/custom-url-category/entry[@name='" + custom_url_category_name + "']",
+ 'element': " ",
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_delete_custom_url_category_command():
+ """
+ Delete a custom url category
+ """
+ custom_url_category_name = demisto.args()['name']
+
+ result = panorama_delete_custom_url_category(custom_url_category_name)
+ custom_url_category_output = {'Name': custom_url_category_name}
+ if DEVICE_GROUP:
+ custom_url_category_output['DeviceGroup'] = DEVICE_GROUP
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Custom URL category was deleted successfully.',
+ 'EntryContext': {
+ "Panorama.CustomURLCategory(val.Name == obj.Name)": custom_url_category_output
+ }
+ })
+
+
+@logger
+def panorama_edit_custom_url_category(custom_url_category_name, sites, description=None):
+ params = {
+ 'action': 'edit',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "profiles/custom-url-category/entry[@name='" + custom_url_category_name + "']",
+ 'element': ""
+ + add_argument(description, 'description', False)
+ + add_argument_list(sites, 'list', True) + " ",
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ custom_url_category_output = {'Name': custom_url_category_name}
+ if DEVICE_GROUP:
+ custom_url_category_output['DeviceGroup'] = DEVICE_GROUP
+ if sites:
+ custom_url_category_output['Sites'] = sites
+ if description:
+ custom_url_category_output['Description'] = description
+
+ return result, custom_url_category_output
+
+
+def panorama_custom_url_category_add_sites_command():
+ """
+ Add sites to a configured custom url category
+ """
+ custom_url_category_name = demisto.args()['name']
+
+ custom_url_category = panorama_get_custom_url_category(custom_url_category_name)
+
+ if '@dirtyId' in custom_url_category:
+ return_error('Please commit the instance prior to editing the Custom URL Category.')
+ description = custom_url_category.get('description')
+
+ custom_url_category_sites: List[str] = []
+ if 'list' in custom_url_category:
+ if custom_url_category['list']:
+ custom_url_category_sites = argToList(custom_url_category['list']['member'])
+
+ sites = argToList(demisto.args()['sites'])
+ merged_sites = list((set(sites)).union(set(custom_url_category_sites)))
+
+ result, custom_url_category_output = panorama_edit_custom_url_category(custom_url_category_name, merged_sites,
+ description)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Updated Custom URL Category:', custom_url_category_output,
+ ['Name', 'Sites', 'Description'], removeNull=True),
+ 'EntryContext': {
+ "Panorama.CustomURLCategory(val.Name == obj.Name)": custom_url_category_output
+ }
+ })
+
+
+def panorama_custom_url_category_remove_sites_command():
+ """
+    Remove sites from a configured custom url category
+ """
+ custom_url_category_name = demisto.args()['name']
+
+ custom_url_category = panorama_get_custom_url_category(custom_url_category_name)
+ if '@dirtyId' in custom_url_category:
+ return_error('Please commit the instance prior to editing the Custom URL Category.')
+ description = custom_url_category.get('description')
+
+    custom_url_category_sites: List[str] = []
+    if 'list' in custom_url_category:
+        if 'member' in custom_url_category['list']:
+            custom_url_category_sites = argToList(custom_url_category['list']['member'])
+
+    if not custom_url_category_sites:
+        return_error('Custom url category does not contain sites')
+
+ sites = argToList(demisto.args()['sites'])
+
+ subtracted_sites = [item for item in custom_url_category_sites if item not in sites]
+ result, custom_url_category_output = panorama_edit_custom_url_category(custom_url_category_name, subtracted_sites,
+ description)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Updated Custom URL Category:', custom_url_category_output,
+ ['Name', 'Sites', 'Description'], removeNull=True),
+ 'EntryContext': {
+ "Panorama.CustomURLCategory(val.Name == obj.Name)": custom_url_category_output
+ }
+ })
+
+
+''' URL Filtering '''
+
+
+@logger
+def panorama_get_url_category(url):
+ params = {
+ 'action': 'show',
+ 'type': 'op',
+ 'key': API_KEY,
+        'cmd': '<test><url>' + url + '</url></test>'
+ }
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ s = result['response']['result'].splitlines()[1]
+ return s.split(' ')[1]
+
+
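+# The 'test url' op command returns plain text rather than XML; the second line
+# has the form '<url> <category> ...', so the category is the second
+# whitespace-separated token.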
+def populate_url_filter_category_from_context(category):
+ url_filter_category = demisto.dt(demisto.context(), f'Panorama.URLFilter(val.Category === "{category}")')
+ if not url_filter_category:
+ return []
+
+ if type(url_filter_category) is list:
+ return url_filter_category[0].get("URL")
+ else: # url_filter_category is a dict
+ context_urls = url_filter_category.get("URL", None) # pylint: disable=no-member
+ if type(context_urls) is str:
+ return [context_urls]
+ else:
+ return context_urls
+
+
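+# demisto.dt runs a DT (transformer) query over the investigation context,
+# returning any Panorama.URLFilter entries already stored for this category so
+# their URLs can be merged with the fresh lookup results.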
+def panorama_get_url_category_command():
+ """
+ Get the url category from Palo Alto URL Filtering
+ """
+ urls = argToList(demisto.args()['url'])
+
+ categories_dict: Dict[str, list] = {}
+ for url in urls:
+ category = panorama_get_url_category(url)
+ if category in categories_dict:
+ categories_dict[category].append(url)
+ else:
+ categories_dict[category] = [url]
+ context_urls = populate_url_filter_category_from_context(category)
+ categories_dict[category] = list((set(categories_dict[category])).union(set(context_urls)))
+
+ url_category_output = []
+ for key, value in categories_dict.items():
+ url_category_output.append({
+ 'Category': key,
+ 'URL': value
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': categories_dict,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('URL Filtering:', url_category_output, ['URL', 'Category'], removeNull=True),
+ 'EntryContext': {
+ "Panorama.URLFilter(val.Category === obj.Category)": url_category_output
+ }
+ })
+
+
+def prettify_get_url_filter(url_filter):
+ pretty_url_filter = {'Name': url_filter['@name']}
+ if DEVICE_GROUP:
+ pretty_url_filter['DeviceGroup'] = DEVICE_GROUP
+ if 'description' in url_filter:
+ pretty_url_filter['Description'] = url_filter['description']
+
+ pretty_url_filter['Category'] = []
+ url_category_list: List[str] = []
+ action: str
+ if 'alert' in url_filter:
+ url_category_list = url_filter['alert']['member']
+ action = 'alert'
+ elif 'allow' in url_filter:
+ url_category_list = url_filter['allow']['member']
+ action = 'allow'
+ elif 'block' in url_filter:
+ url_category_list = url_filter['block']['member']
+ action = 'block'
+ elif 'continue' in url_filter:
+ url_category_list = url_filter['continue']['member']
+ action = 'continue'
+ elif 'override' in url_filter:
+ url_category_list = url_filter['override']['member']
+ action = 'override'
+
+ for category in url_category_list:
+ pretty_url_filter['Category'].append({
+ 'Name': category,
+ 'Action': action
+ })
+
+    if 'allow-list' in url_filter:
+        pretty_url_filter['OverrideAllowList'] = url_filter['allow-list']['member']
+    if 'block-list' in url_filter:
+        pretty_url_filter['OverrideBlockList'] = url_filter['block-list']['member']
+
+ return pretty_url_filter
+
+
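+# A URL filtering profile stores category names under the action applied to
+# them (alert/allow/block/continue/override); prettify_get_url_filter flattens
+# that into a list of {Name, Action} pairs.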
+@logger
+def panorama_get_url_filter(name):
+ params = {
+ 'action': 'get',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "profiles/url-filtering/entry[@name='" + name + "']",
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result['response']['result']['entry']
+
+
+def panorama_get_url_filter_command():
+ """
+ Get a URL Filter
+ """
+ name = demisto.args()['name']
+
+ url_filter = panorama_get_url_filter(name)
+
+ url_filter_output = prettify_get_url_filter(url_filter)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': url_filter,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('URL Filter:', url_filter_output,
+ ['Name', 'Category', 'OverrideAllowList', 'OverrideBlockList', 'Description'],
+ removeNull=True),
+ 'EntryContext': {
+ "Panorama.URLFilter(val.Name == obj.Name)": url_filter_output
+ }
+ })
+
+
+@logger
+def panorama_create_url_filter(
+ url_filter_name, action,
+ url_category_list,
+ override_allow_list=None,
+ override_block_list=None,
+ description=None):
+    element = (add_argument_list(url_category_list, action, True)
+               + add_argument_list(override_allow_list, 'allow-list', True)
+               + add_argument_list(override_block_list, 'block-list', True)
+               + add_argument(description, 'description', False)
+               + "<action>block</action>")
+
+ params = {
+ 'action': 'set',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "profiles/url-filtering/entry[@name='" + url_filter_name + "']",
+ 'element': element,
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+ return result
+
+
+def panorama_create_url_filter_command():
+ """
+ Create a URL Filter
+ """
+ url_filter_name = demisto.args()['name']
+ action = demisto.args()['action']
+ url_category_list = argToList(demisto.args()['url_category'])
+ override_allow_list = argToList(demisto.args().get('override_allow_list'))
+ override_block_list = argToList(demisto.args().get('override_block_list'))
+ description = demisto.args().get('description')
+
+ result = panorama_create_url_filter(url_filter_name, action, url_category_list, override_allow_list,
+ override_block_list, description)
+
+ url_filter_output = {'Name': url_filter_name}
+ if DEVICE_GROUP:
+ url_filter_output['DeviceGroup'] = DEVICE_GROUP
+ url_filter_output['Category'] = []
+ for category in url_category_list:
+ url_filter_output['Category'].append({
+ 'Name': category,
+ 'Action': action
+ })
+ if override_allow_list:
+ url_filter_output['OverrideAllowList'] = override_allow_list
+ if override_block_list:
+ url_filter_output['OverrideBlockList'] = override_block_list
+ if description:
+ url_filter_output['Description'] = description
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'URL Filter was created successfully.',
+ 'EntryContext': {
+ "Panorama.URLFilter(val.Name == obj.Name)": url_filter_output
+ }
+ })
+
+
+@logger
+def panorama_edit_url_filter(url_filter_name, element_to_change, element_value, add_remove_element=None):
+ url_filter_prev = panorama_get_url_filter(url_filter_name)
+ if '@dirtyId' in url_filter_prev:
+ return_error('Please commit the instance prior to editing the URL Filter')
+
+ url_filter_output = {'Name': url_filter_name}
+ if DEVICE_GROUP:
+ url_filter_output['DeviceGroup'] = DEVICE_GROUP
+ params = {
+ 'action': 'edit',
+ 'type': 'config',
+ 'key': API_KEY,
+ }
+
+ if element_to_change == 'description':
+        params['xpath'] = (XPATH_OBJECTS + "profiles/url-filtering/entry[@name='" + url_filter_name + "']/"
+                           + element_to_change)
+ params['element'] = add_argument_open(element_value, 'description', False)
+ result = http_request(URL, 'POST', params=params)
+ url_filter_output['Description'] = element_value
+
+ elif element_to_change == 'override_allow_list':
+ prev_override_allow_list = argToList(url_filter_prev['allow-list']['member'])
+ if add_remove_element == 'add':
+ new_override_allow_list = list((set(prev_override_allow_list)).union(set([element_value])))
+ else:
+ new_override_allow_list = [url for url in prev_override_allow_list if url != element_value]
+
+ params['xpath'] = XPATH_OBJECTS + "profiles/url-filtering/entry[@name='" + url_filter_name + "']/allow-list"
+ params['element'] = add_argument_list(new_override_allow_list, 'allow-list', True)
+ result = http_request(URL, 'POST', params=params)
+ url_filter_output[element_to_change] = new_override_allow_list
+
+ # element_to_change == 'override_block_list'
+ else:
+ prev_override_block_list = argToList(url_filter_prev['block-list']['member'])
+ if add_remove_element == 'add':
+ new_override_block_list = list((set(prev_override_block_list)).union(set([element_value])))
+ else:
+ new_override_block_list = [url for url in prev_override_block_list if url != element_value]
+
+ params['xpath'] = XPATH_OBJECTS + "profiles/url-filtering/entry[@name='" + url_filter_name + "']/block-list"
+ params['element'] = add_argument_list(new_override_block_list, 'block-list', True)
+ result = http_request(URL, 'POST', params=params)
+ url_filter_output[element_to_change] = new_override_block_list
+
+ return result, url_filter_output
+
+
+def panorama_edit_url_filter_command():
+ """
+ Edit a URL Filter
+ """
+ url_filter_name = demisto.args()['name']
+ element_to_change = demisto.args()['element_to_change']
+ add_remove_element = demisto.args()['add_remove_element']
+ element_value = demisto.args()['element_value']
+
+ result, url_filter_output = panorama_edit_url_filter(url_filter_name, element_to_change, element_value,
+ add_remove_element)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'URL Filter was edited successfully.',
+ 'EntryContext': {
+ "Panorama.URLFilter(val.Name == obj.Name)": url_filter_output
+ }
+ })
+
+
+@logger
+def panorama_delete_url_filter(url_filter_name):
+ params = {
+ 'action': 'delete',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "profiles/url-filtering/entry[@name='" + url_filter_name + "']",
+ 'element': " ",
+ 'key': API_KEY
+ }
+
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_delete_url_filter_command():
+ """
+ Delete a custom url category
+ """
+ url_filter_name = demisto.args()['name']
+ result = panorama_delete_url_filter(url_filter_name)
+
+ url_filter_output = {'Name': url_filter_name}
+ if DEVICE_GROUP:
+ url_filter_output['DeviceGroup'] = DEVICE_GROUP
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'URL Filter was deleted successfully.',
+ 'EntryContext': {
+ "Panorama.URLFilter(val.Name == obj.Name)": url_filter_output
+ }
+ })
+
+
+''' Security Rules Managing '''
+
+
+def prettify_rule(rule):
+ pretty_rule = {
+ 'Name': rule['@name'],
+ 'Action': rule['action']
+ }
+ if DEVICE_GROUP:
+ pretty_rule['DeviceGroup'] = DEVICE_GROUP
+ if '@loc' in rule:
+ pretty_rule['Location'] = rule['@loc']
+ if 'category' in rule and 'member' in rule['category']:
+ pretty_rule['CustomUrlCategory'] = rule['category']['member']
+ if 'application' in rule and 'member' in rule['application']:
+ pretty_rule['Application'] = rule['application']['member']
+ if 'destination' in rule and 'member' in rule['destination']:
+ pretty_rule['Destination'] = rule['destination']['member']
+ if 'from' in rule and 'member' in rule['from']:
+ pretty_rule['From'] = rule['from']['member']
+ if 'service' in rule and 'member' in rule['service']:
+ pretty_rule['Service'] = rule['service']['member']
+ if 'to' in rule and 'member' in rule['to']:
+ pretty_rule['To'] = rule['to']['member']
+ if 'source' in rule and 'member' in rule['source']:
+ pretty_rule['Source'] = rule['source']['member']
+ if 'tag' in rule and 'member' in rule['tag']:
+ pretty_rule['Tags'] = rule['tag']['member']
+ if 'log-setting' in rule and '#text' in rule['log-setting']:
+ pretty_rule['LogForwardingProfile'] = rule['log-setting']['#text']
+
+ return pretty_rule
+
+
+def prettify_rules(rules):
+ if not isinstance(rules, list):
+ return prettify_rule(rules)
+ pretty_rules_arr = []
+ for rule in rules:
+ pretty_rule = prettify_rule(rule)
+ pretty_rules_arr.append(pretty_rule)
+
+ return pretty_rules_arr
+
+
+@logger
+def panorama_list_rules(xpath: str, tag: str = None):
+ params = {
+ 'action': 'get',
+ 'type': 'config',
+ 'xpath': xpath,
+ 'key': API_KEY
+ }
+
+ if tag:
+ params['xpath'] += f'[( tag/member = \'{tag}\')]'
+
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result['response']['result']['entry']
+
+
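+# On Panorama, security rules live in the device group's pre- or post-rulebase
+# (hence the mandatory pre_post argument below); on a standalone firewall the
+# rulebase xpath needs no such qualifier.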
+def panorama_list_rules_command():
+ """
+ List security rules
+ """
+ if DEVICE_GROUP:
+ if 'pre_post' not in demisto.args():
+            return_error('Please provide the pre_post argument when listing rules in a Panorama instance.')
+ else:
+ xpath = XPATH_SECURITY_RULES + demisto.args()['pre_post'] + '/security/rules/entry'
+ else:
+ xpath = XPATH_SECURITY_RULES
+
+ tag = demisto.args().get('tag')
+
+ rules = panorama_list_rules(xpath, tag)
+ pretty_rules = prettify_rules(rules)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': rules,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Security Rules:', pretty_rules,
+ ['Name', 'Location', 'Action', 'From', 'To',
+ 'CustomUrlCategory', 'Service', 'Tags'],
+ removeNull=True),
+ 'EntryContext': {
+ "Panorama.SecurityRule(val.Name == obj.Name)": pretty_rules
+ }
+ })
+
+
+@logger
+def panorama_move_rule_command():
+ """
+ Move a security rule
+ """
+ rulename = demisto.args()['rulename']
+ params = {
+ 'type': 'config',
+ 'action': 'move',
+ 'key': API_KEY,
+ 'where': demisto.args()['where'],
+ }
+
+ if DEVICE_GROUP:
+ if 'pre_post' not in demisto.args():
+            return_error('Please provide the pre_post argument when moving a rule in a Panorama instance.')
+ else:
+ params['xpath'] = XPATH_SECURITY_RULES + demisto.args()[
+ 'pre_post'] + '/security/rules/entry' + '[@name=\'' + rulename + '\']'
+ else:
+ params['xpath'] = XPATH_SECURITY_RULES + '[@name=\'' + rulename + '\']'
+
+ if 'dst' in demisto.args():
+ params['dst'] = demisto.args()['dst']
+
+ result = http_request(URL, 'POST', params=params)
+ rule_output = {'Name': rulename}
+ if DEVICE_GROUP:
+ rule_output['DeviceGroup'] = DEVICE_GROUP
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Rule ' + rulename + ' moved successfully.',
+ 'EntryContext': {
+ "Panorama.SecurityRule(val.Name == obj.Name)": rule_output
+ }
+ })
+
+
+''' Security Rule Configuration '''
+
+
+@logger
+def panorama_create_rule_command():
+ """
+ Create a security rule
+ """
+ rulename = demisto.args()['rulename'] if 'rulename' in demisto.args() else ('demisto-' + (str(uuid.uuid4()))[:8])
+ source = demisto.args().get('source')
+ destination = demisto.args().get('destination')
+ negate_source = demisto.args().get('negate_source')
+ negate_destination = demisto.args().get('negate_destination')
+ action = demisto.args().get('action')
+ service = demisto.args().get('service')
+ disable = demisto.args().get('disable')
+ application = demisto.args().get('application')
+ source_user = demisto.args().get('source_user')
+ disable_server_response_inspection = demisto.args().get('disable_server_response_inspection')
+ description = demisto.args().get('description')
+ target = demisto.args().get('target')
+ log_forwarding = demisto.args().get('log_forwarding', None)
+ tags = argToList(demisto.args()['tags']) if 'tags' in demisto.args() else None
+
+ if not DEVICE_GROUP:
+ if target:
+ return_error('The target argument is relevant only for a Palo Alto Panorama instance.')
+ elif log_forwarding:
+ return_error('The log_forwarding argument is relevant only for a Palo Alto Panorama instance.')
+
+ params = prepare_security_rule_params(api_action='set', rulename=rulename, source=source, destination=destination,
+ negate_source=negate_source, negate_destination=negate_destination,
+ action=action, service=service,
+ disable=disable, application=application, source_user=source_user,
+ disable_server_response_inspection=disable_server_response_inspection,
+ description=description, target=target,
+ log_forwarding=log_forwarding, tags=tags)
+
+ result = http_request(
+ URL,
+ 'POST',
+ params=params
+ )
+
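+    # echo the supplied arguments into context, renamed via the SECURITY_RULE_ARGS mapping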
+ rule_output = {SECURITY_RULE_ARGS[key]: value for key, value in demisto.args().items() if key in SECURITY_RULE_ARGS}
+ rule_output['Name'] = rulename
+ if DEVICE_GROUP:
+ rule_output['DeviceGroup'] = DEVICE_GROUP
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Rule configured successfully.',
+ 'EntryContext': {
+ "Panorama.SecurityRule(val.Name == obj.Name)": rule_output
+ }
+ })
+
+
+@logger
+def panorama_edit_rule_command():
+ """
+ Edit a security rule
+ """
+ rulename = demisto.args()['rulename']
+ element_to_change = demisto.args()['element_to_change']
+ if element_to_change == 'log-forwarding':
+ element_to_change = 'log-setting'
+ element_value = demisto.args()['element_value']
+
+ if element_to_change == 'target' and not DEVICE_GROUP:
+ return_error('The target argument is relevant only for a Palo Alto Panorama instance.')
+
+ params = {
+ 'type': 'config',
+ 'action': 'edit',
+ 'key': API_KEY
+ }
+
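+    # build the element body according to the field type: free text, member list, target, tag list, or yes/no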
+ if element_to_change in ['action', 'description', 'log-setting']:
+ params['element'] = add_argument_open(element_value, element_to_change, False)
+ elif element_to_change in ['source', 'destination', 'application', 'category', 'source-user', 'service']:
+ params['element'] = add_argument_open(element_value, element_to_change, True)
+ elif element_to_change == 'target':
+ params['element'] = add_argument_target(element_value, 'target')
+ elif element_to_change == 'tag':
+ tags = argToList(element_value)
+ params['element'] = add_argument_list(tags, 'tag', True)
+ else: # element_to_change in ['negate_source', 'negate_destination', 'disable']
+ params['element'] = add_argument_yes_no(element_value, element_to_change)
+
+ if DEVICE_GROUP:
+ if 'pre_post' not in demisto.args():
+            return_error('Please provide the pre_post argument when editing a rule in a Panorama instance.')
+ else:
+ params['xpath'] = XPATH_SECURITY_RULES + demisto.args()[
+ 'pre_post'] + '/security/rules/entry' + '[@name=\'' + rulename + '\']'
+ else:
+ params['xpath'] = XPATH_SECURITY_RULES + '[@name=\'' + rulename + '\']'
+
+ params['xpath'] += '/' + element_to_change
+
+ result = http_request(
+ URL,
+ 'POST',
+ params=params
+ )
+
+ rule_output = {'Name': rulename}
+ if DEVICE_GROUP:
+ rule_output['DeviceGroup'] = DEVICE_GROUP
+ rule_output[SECURITY_RULE_ARGS[element_to_change]] = element_value
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Rule edited successfully.',
+ 'EntryContext': {
+ "Panorama.SecurityRule(val.Name == obj.Name)": rule_output
+ }
+ })
+
+
+@logger
+def panorama_delete_rule_command():
+ """
+ Delete a security rule
+ """
+ rulename = demisto.args()['rulename']
+
+ params = {
+ 'type': 'config',
+ 'action': 'delete',
+ 'key': API_KEY
+ }
+ if DEVICE_GROUP:
+ if 'pre_post' not in demisto.args():
+            return_error('Please provide the pre_post argument when deleting a rule in a Panorama instance.')
+ else:
+ params['xpath'] = XPATH_SECURITY_RULES + demisto.args()[
+ 'pre_post'] + '/security/rules/entry' + '[@name=\'' + rulename + '\']'
+ else:
+ params['xpath'] = XPATH_SECURITY_RULES + '[@name=\'' + rulename + '\']'
+
+ result = http_request(
+ URL,
+ 'POST',
+ params=params
+ )
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Rule deleted successfully.',
+ })
+
+
+@logger
+def panorama_custom_block_rule_command():
+ """
+ Block an object in Panorama
+ """
+ object_type = demisto.args()['object_type']
+ object_value = demisto.args()['object_value']
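+    # direction determines which rules are created: 'from' blocks the object as a source,
+    # 'to' blocks it as a destination, and 'both' (the default) creates both rules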
+    direction = demisto.args().get('direction', 'both')
+    rulename = demisto.args().get('rulename', 'demisto-' + str(uuid.uuid4())[:8])
+    block_destination = direction != 'from'
+    block_source = direction != 'to'
+ target = demisto.args().get('target')
+ log_forwarding = demisto.args().get('log_forwarding', None)
+ tags = argToList(demisto.args()['tags']) if 'tags' in demisto.args() else None
+
+ if not DEVICE_GROUP:
+ if target:
+ return_error('The target argument is relevant only for a Palo Alto Panorama instance.')
+ elif log_forwarding:
+ return_error('The log_forwarding argument is relevant only for a Palo Alto Panorama instance.')
+
+ custom_block_output = {
+ 'Name': rulename,
+ 'Direction': direction,
+ 'Disabled': False
+ }
+ if DEVICE_GROUP:
+ custom_block_output['DeviceGroup'] = DEVICE_GROUP
+ if log_forwarding:
+ custom_block_output['LogForwarding'] = log_forwarding
+ if target:
+ custom_block_output['Target'] = target
+ if tags:
+ custom_block_output['Tags'] = tags
+
+ if object_type == 'ip':
+ if block_source:
+ params = prepare_security_rule_params(api_action='set', action='drop', source=object_value,
+ destination='any', rulename=rulename + '-from', target=target,
+ log_forwarding=log_forwarding, tags=tags)
+ result = http_request(URL, 'POST', params=params)
+ if block_destination:
+ params = prepare_security_rule_params(api_action='set', action='drop', destination=object_value,
+ source='any', rulename=rulename + '-to', target=target,
+ log_forwarding=log_forwarding, tags=tags)
+ result = http_request(URL, 'POST', params=params)
+ custom_block_output['IP'] = object_value
+
+    elif object_type in ('address-group', 'edl'):
+ if block_source:
+ params = prepare_security_rule_params(api_action='set', action='drop', source=object_value,
+ destination='any', rulename=rulename + '-from', target=target,
+ log_forwarding=log_forwarding, tags=tags)
+ result = http_request(URL, 'POST', params=params)
+ if block_destination:
+ params = prepare_security_rule_params(api_action='set', action='drop', destination=object_value,
+ source='any', rulename=rulename + '-to', target=target,
+ log_forwarding=log_forwarding, tags=tags)
+ result = http_request(URL, 'POST', params=params)
+ custom_block_output['AddressGroup'] = object_value
+
+ elif object_type == 'url-category':
+ params = prepare_security_rule_params(api_action='set', action='drop', source='any', destination='any',
+ category=object_value, rulename=rulename, target=target,
+ log_forwarding=log_forwarding, tags=tags)
+ result = http_request(URL, 'POST', params=params)
+ custom_block_output['CustomURLCategory'] = object_value
+
+ elif object_type == 'application':
+ params = prepare_security_rule_params(api_action='set', action='drop', source='any', destination='any',
+ application=object_value, rulename=rulename, target=target,
+ log_forwarding=log_forwarding, tags=tags)
+ result = http_request(URL, 'POST', params=params)
+ custom_block_output['Application'] = object_value
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Object was blocked successfully.',
+ 'EntryContext': {
+ "Panorama.SecurityRule(val.Name == obj.Name)": custom_block_output
+ }
+ })
+
+
+''' PCAPS '''
+
+
+@logger
+def panorama_list_pcaps_command():
+ """
+ Get list of pcap files
+ """
+ params = {
+ 'type': 'export',
+ 'key': API_KEY,
+ 'category': demisto.args()['pcapType']
+ }
+
+ if 'password' in demisto.args():
+ params['dlp-password'] = demisto.args()['password']
+ elif demisto.args()['pcapType'] == 'dlp-pcap':
+        return_error('Cannot list dlp-pcap files without a password.')
+
+ result = http_request(URL, 'GET', params=params)
+
+ json_result = json.loads(xml2json(result.text))['response']
+ if json_result['@status'] != 'success':
+        return_error('Request to get list of PCAPs failed.\nStatus code: ' + str(
+            json_result['response']['@code']) + '\nWith message: ' + str(json_result['response']['msg']['line']))
+
+ pcap_list = json_result['result']['dir-listing']['file']
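+    # returned file names carry a leading '/', strip it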
+ pcap_list = [pcap[1:] for pcap in pcap_list]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': json_result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('List of Pcaps:', pcap_list, ['Pcap name']),
+ 'EntryContext': {
+ "Panorama.Pcaps(val.Name == obj.Name)": pcap_list
+ }
+ })
+
+
+@logger
+def panorama_get_pcap_command():
+ """
+ Get pcap file
+ """
+ params = {
+ 'type': 'export',
+ 'key': API_KEY,
+ 'category': demisto.args()['pcapType']
+ }
+
+ if 'password' in demisto.args():
+ params['dlp-password'] = demisto.args()['password']
+ elif demisto.args()['pcapType'] == 'dlp-pcap':
+        return_error('Cannot retrieve a dlp-pcap file without a password.')
+
+ if 'pcapID' in demisto.args():
+ params['pcap-id'] = demisto.args()['pcapID']
+ elif demisto.args()['pcapType'] == 'threat-pcap':
+        return_error('Cannot retrieve a threat-pcap file without a pcap-id.')
+
+ pcap_name = demisto.args().get('from')
+ local_name = demisto.args().get('localName')
+ serial_no = demisto.args().get('serialNo')
+ search_time = demisto.args().get('searchTime')
+
+ file_name = None
+ if pcap_name:
+ params['from'] = pcap_name
+ file_name = pcap_name
+ if local_name:
+ params['to'] = local_name
+ file_name = local_name
+ if serial_no:
+ params['serialno'] = serial_no
+ if search_time:
+ params['search-time'] = search_time
+
+ # set file name to the current time if from/to were not specified
+ if not file_name:
+ file_name = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S')
+
+ result = http_request(URL, 'GET', params=params)
+
+    # due to the PCAP file size limitation in the product, the response may not be a pcap file;
+    # for more details, see the documentation
+    if result.headers['Content-Type'] != 'application/octet-stream':
+        return_error(
+            'PCAP download failed. Most likely cause is the file size limitation. '
+            'For information on how to download manually, see the documentation for this integration.')
+
+ file = fileResult(file_name + ".pcap", result.content)
+ demisto.results(file)
+
+
+''' Applications '''
+
+
+def prettify_applications_arr(applications_arr):
+ pretty_application_arr = []
+    for application in applications_arr:
+ pretty_application_arr.append({
+ 'SubCategory': application['subcategory'],
+ 'Risk': application['risk'],
+ 'Technology': application['technology'],
+ 'Name': application['@name'],
+ 'Description': application['description'],
+ 'Id': application['@id']
+ })
+ return pretty_application_arr
+
+
+@logger
+def panorama_list_applications():
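+    # op command; the application catalog is read from the shared content-preview tree of the response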
+ params = {
+ 'type': 'op',
+        'command': '<show><objects></objects></show>',
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'POST',
+ params=params
+ )
+ return result['response']['result']['config']['shared']['content-preview']['application']['entry']
+
+
+def panorama_list_applications_command():
+ """
+ List all applications
+ """
+ applications_arr = panorama_list_applications()
+
+ applications_arr_output = prettify_applications_arr(applications_arr)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': applications_arr,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Applications', applications_arr_output,
+ ['Name', 'Id', 'Risk', 'Category', 'SubCategory', 'Technology',
+ 'Description']),
+ 'EntryContext': {
+ "Panorama.Applications(val.Id == obj.Id)": applications_arr_output
+ }
+ })
+
+
+''' External Dynamic Lists Commands '''
+
+
+def prettify_edls_arr(edls_arr):
+ pretty_edls_arr = []
+ if not isinstance(edls_arr, list): # handle case of only one edl in the instance
+ return prettify_edl(edls_arr)
+ for edl in edls_arr:
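+        # edl['type'] is a single-key dict keyed by the EDL type (e.g. 'ip', 'domain', 'url')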
+ pretty_edl = {
+ 'Name': edl['@name'],
+ 'Type': ''.join(edl['type'].keys())
+ }
+ edl_type = pretty_edl['Type']
+
+ if edl['type'][edl_type]:
+ if 'url' in edl['type'][edl_type]:
+ pretty_edl['URL'] = edl['type'][edl_type]['url']
+ if 'certificate-profile' in edl['type'][edl_type]:
+ pretty_edl['CertificateProfile'] = edl['type'][edl_type]['certificate-profile']
+ if 'recurring' in edl['type'][edl_type]:
+ pretty_edl['Recurring'] = ''.join(edl['type'][edl_type]['recurring'].keys())
+ if 'description' in edl['type'][edl_type]:
+ pretty_edl['Description'] = edl['type'][edl_type]['description']
+
+ if DEVICE_GROUP:
+ pretty_edl['DeviceGroup'] = DEVICE_GROUP
+
+ pretty_edls_arr.append(pretty_edl)
+
+ return pretty_edls_arr
+
+
+@logger
+def panorama_list_edls():
+ params = {
+ 'action': 'get',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "external-list/entry",
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+ return result['response']['result']['entry']
+
+
+def panorama_list_edls_command():
+ """
+ Get all EDLs
+ """
+ edls_arr = panorama_list_edls()
+ edls_output = prettify_edls_arr(edls_arr)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': edls_arr,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('External Dynamic Lists:', edls_output,
+ ['Name', 'Type', 'URL', 'Recurring', 'CertificateProfile', 'Description'],
+ removeNull=True),
+ 'EntryContext': {
+ "Panorama.EDL(val.Name == obj.Name)": edls_output
+ }
+ })
+
+
+def prettify_edl(edl):
+ pretty_edl = {
+ 'Name': edl['@name'],
+ 'Type': ''.join(edl['type'].keys())
+ }
+ edl_type = pretty_edl['Type']
+
+ if edl['type'][edl_type]:
+ if 'url' in edl['type'][edl_type]:
+ pretty_edl['URL'] = edl['type'][edl_type]['url']
+ if 'certificate-profile' in edl['type'][edl_type]:
+ pretty_edl['CertificateProfile'] = edl['type'][edl_type]['certificate-profile']
+ if 'recurring' in edl['type'][edl_type]:
+ pretty_edl['Recurring'] = ''.join(edl['type'][edl_type]['recurring'].keys())
+ if 'description' in edl['type'][edl_type]:
+ pretty_edl['Description'] = edl['type'][edl_type]['description']
+
+ if DEVICE_GROUP:
+ pretty_edl['DeviceGroup'] = DEVICE_GROUP
+
+ return pretty_edl
+
+
+@logger
+def panorama_get_edl(edl_name):
+ params = {
+ 'action': 'show',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "external-list/entry[@name='" + edl_name + "']",
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result['response']['result']['entry']
+
+
+def panorama_get_edl_command():
+ """
+ Get an EDL
+ """
+ edl_name = demisto.args()['name']
+ edl = panorama_get_edl(edl_name)
+ edl_output = prettify_edl(edl)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': edl,
+ 'ReadableContentsFormat': formats['markdown'],
+        'HumanReadable': tableToMarkdown('External Dynamic List:', edl_output,
+                                         ['Name', 'Type', 'URL', 'Recurring', 'CertificateProfile', 'Description'],
+                                         removeNull=True),
+ 'EntryContext': {
+ "Panorama.EDL(val.Name == obj.Name)": edl_output
+ }
+ })
+
+
+@logger
+def panorama_create_edl(edl_name, url, type_, recurring, certificate_profile=None, description=None):
+ params = {
+ 'action': 'set',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "external-list/entry[@name='" + edl_name + "']/type/" + type_,
+ 'key': API_KEY
+ }
+
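+    # assemble the XML element: url, recurring interval, and the optional certificate profile and description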
+    params['element'] = add_argument(url, 'url', False) + '<recurring><' + recurring + '/></recurring>' + add_argument(
+        certificate_profile, 'certificate-profile', False) + add_argument(description, 'description', False)
+
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_create_edl_command():
+ """
+    Create an EDL object
+ """
+ edl_name = demisto.args().get('name')
+ url = demisto.args().get('url')
+ type_ = demisto.args().get('type')
+ recurring = demisto.args().get('recurring')
+ certificate_profile = demisto.args().get('certificate_profile')
+ description = demisto.args().get('description')
+
+ edl = panorama_create_edl(edl_name, url, type_, recurring, certificate_profile, description)
+
+ edl_output = {
+ 'Name': edl_name,
+ 'URL': url,
+ 'Type': type_,
+ 'Recurring': recurring
+ }
+
+ if DEVICE_GROUP:
+ edl_output['DeviceGroup'] = DEVICE_GROUP
+ if description:
+ edl_output['Description'] = description
+ if certificate_profile:
+ edl_output['CertificateProfile'] = certificate_profile
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': edl,
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'External Dynamic List was created successfully.',
+ 'EntryContext': {
+ "Panorama.EDL(val.Name == obj.Name)": edl_output
+ }
+ })
+
+
+@logger
+def panorama_edit_edl(edl_name, element_to_change, element_value):
+ edl_prev = panorama_get_edl(edl_name)
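+    # a '@dirtyId' attribute in the response marks uncommitted candidate configuration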
+ if '@dirtyId' in edl_prev:
+ return_error('Please commit the instance prior to editing the External Dynamic List')
+ edl_type = ''.join(edl_prev['type'].keys())
+ edl_output = {'Name': edl_name}
+ if DEVICE_GROUP:
+ edl_output['DeviceGroup'] = DEVICE_GROUP
+ params = {
+ 'action': 'edit',
+ 'type': 'config',
+ 'key': API_KEY
+ }
+
+ params['xpath'] = XPATH_OBJECTS + "external-list/entry[@name='" + edl_name + "']/type/" + edl_type + "/" + element_to_change
+
+ if element_to_change == 'url':
+ params['element'] = add_argument_open(element_value, 'url', False)
+ result = http_request(URL, 'POST', params=params)
+ edl_output['URL'] = element_value
+
+ elif element_to_change == 'certificate_profile':
+ params['element'] = add_argument_open(element_value, 'certificate-profile', False)
+ result = http_request(URL, 'POST', params=params)
+ edl_output['CertificateProfile'] = element_value
+
+ elif element_to_change == 'description':
+ params['element'] = add_argument_open(element_value, 'description', False)
+ result = http_request(URL, 'POST', params=params)
+ edl_output['Description'] = element_value
+
+ # element_to_change == 'recurring'
+ else:
+ if element_value not in ['five-minute', 'hourly']:
+            return_error('The recurring interval must be either five-minute or hourly.')
+        params['element'] = '<recurring><' + element_value + '/></recurring>'
+ result = http_request(URL, 'POST', params=params)
+ edl_output['Recurring'] = element_value
+
+ return result, edl_output
+
+
+def panorama_edit_edl_command():
+ """
+ Edit an EDL
+ """
+ edl_name = demisto.args()['name']
+ element_to_change = demisto.args()['element_to_change']
+ element_value = demisto.args()['element_value']
+
+ result, edl_output = panorama_edit_edl(edl_name, element_to_change, element_value)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+        'HumanReadable': 'External Dynamic List was edited successfully.',
+ 'EntryContext': {
+ "Panorama.EDL(val.Name == obj.Name)": edl_output
+ }
+ })
+
+
+@logger
+def panorama_delete_edl(edl_name):
+ params = {
+ 'action': 'delete',
+ 'type': 'config',
+ 'xpath': XPATH_OBJECTS + "external-list/entry[@name='" + edl_name + "']",
+ 'element': " ",
+ 'key': API_KEY
+ }
+
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_delete_edl_command():
+ """
+ Delete an EDL
+ """
+ edl_name = demisto.args()['name']
+
+ edl = panorama_delete_edl(edl_name)
+ edl_output = {'Name': edl_name}
+ if DEVICE_GROUP:
+ edl_output['DeviceGroup'] = DEVICE_GROUP
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': edl,
+ 'ReadableContentsFormat': formats['text'],
+        'HumanReadable': 'External Dynamic List was deleted successfully.',
+ 'EntryContext': {
+ "Panorama.EDL(val.Name == obj.Name)": edl_output
+ }
+ })
+
+
+def panorama_refresh_edl(edl_name):
+ edl = panorama_get_edl(edl_name)
+ edl_type = ''.join(edl['type'].keys())
+
+ params = {
+ 'type': 'op',
+        'cmd': '<request><system><external-list><refresh><type><' + edl_type + '><name>' + edl_name
+        + '</name></' + edl_type + '></type></refresh></external-list></system></request>',
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_refresh_edl_command():
+ """
+ Refresh an EDL
+ """
+ if DEVICE_GROUP:
+ return_error('EDL refresh is only supported on Firewall (not Panorama).')
+
+ edl_name = demisto.args()['name']
+
+ result = panorama_refresh_edl(edl_name)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+        'HumanReadable': 'Refreshed External Dynamic List successfully.',
+ })
+
+
+''' IP Tags '''
+
+
+@logger
+def panorama_register_ip_tag(tag: str, ips: List, persistent: str):
+ entry: str = ''
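+    # each IP becomes an <entry> element inside the user-id API's uid-message payload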
+ for ip in ips:
+        entry += f'<entry ip="{ip}" persistent="{persistent}"><tag><member>{tag}</member></tag></entry>'
+
+ params = {
+ 'type': 'user-id',
+        'cmd': '<uid-message><version>2.0</version><type>update</type><payload><register>' + entry
+        + '</register></payload></uid-message>',
+ 'key': API_KEY
+ }
+
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_register_ip_tag_command():
+ """
+ Register IPs to a Tag
+ """
+ tag = demisto.args()['tag']
+ ips = argToList(demisto.args()['IPs'])
+
+    persistent = demisto.args().get('persistent', 'true')
+    persistent = '1' if persistent == 'true' else '0'
+
+    result = panorama_register_ip_tag(tag, ips, persistent)
+
+ registered_ip: Dict[str, str] = {}
+ # update context only if IPs are persistent
+ if persistent == '1':
+ # get existing IPs for this tag
+ context_ips = demisto.dt(demisto.context(), 'Panorama.DynamicTags(val.Tag ==\"' + tag + '\").IPs')
+
+ if context_ips:
+ all_ips = ips + context_ips
+ else:
+ all_ips = ips
+
+ registered_ip = {
+ 'Tag': tag,
+ 'IPs': all_ips
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+        'HumanReadable': 'Registered ip-tag successfully.',
+ 'EntryContext': {
+ "Panorama.DynamicTags(val.Tag == obj.Tag)": registered_ip
+ }
+ })
+
+
+@logger
+def panorama_unregister_ip_tag(tag: str, ips: list):
+ entry = ''
+ for ip in ips:
+        entry += '<entry ip="' + ip + '"><tag><member>' + tag + '</member></tag></entry>'
+
+ params = {
+ 'type': 'user-id',
+        'cmd': '<uid-message><version>2.0</version><type>update</type><payload><unregister>' + entry
+        + '</unregister></payload></uid-message>',
+ 'key': API_KEY
+ }
+ result = http_request(
+ URL,
+ 'POST',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_unregister_ip_tag_command():
+ """
+    Unregister IPs from a Tag
+ """
+ tag = demisto.args()['tag']
+ ips = argToList(demisto.args()['IPs'])
+
+ result = panorama_unregister_ip_tag(tag, ips)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['text'],
+        'HumanReadable': 'Unregistered ip-tag successfully.'
+ })
+
+
+''' Traffic Logs '''
+
+
+def build_traffic_logs_query(source=None, destination=None, receive_time=None,
+ application=None, to_port=None, action=None):
+ query = ''
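+    # resulting query looks like: (addr.src in 1.1.1.1) and (app eq dns) and (action eq allow)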
+ if source and len(source) > 0:
+ query += '(addr.src in ' + source + ')'
+ if destination and len(destination) > 0:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+        query += '(addr.dst in ' + destination + ')'
+ if receive_time and len(receive_time) > 0:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+ query += '(receive_time geq ' + receive_time + ')'
+ if application and len(application) > 0:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+ query += '(app eq ' + application + ')'
+ if to_port and len(to_port) > 0:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+ query += '(port.dst eq ' + to_port + ')'
+ if action and len(action) > 0:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+ query += '(action eq ' + action + ')'
+ return query
+
+
+@logger
+def panorama_query_traffic_logs(number_of_logs, direction, query,
+ source, destination, receive_time, application, to_port, action):
+ params = {
+ 'type': 'log',
+ 'log-type': 'traffic',
+ 'key': API_KEY
+ }
+
+ if query and len(query) > 0:
+ params['query'] = query
+ else:
+ params['query'] = build_traffic_logs_query(source, destination, receive_time, application, to_port, action)
+ if number_of_logs:
+ params['nlogs'] = number_of_logs
+ if direction:
+ params['dir'] = direction
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_query_traffic_logs_command():
+ """
+ Query the traffic logs
+ """
+ number_of_logs = demisto.args().get('number_of_logs')
+ direction = demisto.args().get('direction')
+ query = demisto.args().get('query')
+ source = demisto.args().get('source')
+ destination = demisto.args().get('destination')
+ receive_time = demisto.args().get('receive_time')
+ application = demisto.args().get('application')
+ to_port = demisto.args().get('to_port')
+ action = demisto.args().get('action')
+
+ if query and (source or destination or receive_time or application or to_port or action):
+        return_error('Use either the query argument or the '
+                     'source, destination, receive_time, application, to_port, and action arguments to build the query.')
+
+ result = panorama_query_traffic_logs(number_of_logs, direction, query,
+ source, destination, receive_time, application, to_port, action)
+
+ if result['response']['@status'] == 'error':
+ if 'msg' in result['response'] and 'line' in result['response']['msg']:
+ message = '. Reason is: ' + result['response']['msg']['line']
+ return_error('Query traffic logs failed' + message)
+ else:
+ return_error('Query traffic logs failed')
+
+ if 'response' not in result or 'result' not in result['response'] or 'job' not in result['response']['result']:
+ return_error('Missing JobID in response')
+ query_traffic_output = {
+ 'JobID': result['response']['result']['job'],
+ 'Status': 'Pending'
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Query Traffic Logs:', query_traffic_output, ['JobID', 'Status'],
+ removeNull=True),
+ 'EntryContext': {"Panorama.TrafficLogs(val.JobID == obj.JobID)": query_traffic_output}
+ })
+
+
+@logger
+def panorama_get_traffic_logs(job_id):
+ params = {
+ 'action': 'get',
+ 'type': 'log',
+ 'job-id': job_id,
+ 'key': API_KEY
+ }
+
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_check_traffic_logs_status_command():
+ job_id = demisto.args().get('job_id')
+ result = panorama_get_traffic_logs(job_id)
+
+ if result['response']['@status'] == 'error':
+ if 'msg' in result['response'] and 'line' in result['response']['msg']:
+ message = '. Reason is: ' + result['response']['msg']['line']
+ return_error('Query traffic logs failed' + message)
+ else:
+ return_error('Query traffic logs failed')
+
+ query_traffic_status_output = {
+ 'JobID': job_id,
+ 'Status': 'Pending'
+ }
+
+ if 'response' not in result or 'result' not in result['response'] or 'job' not in result['response']['result'] \
+ or 'status' not in result['response']['result']['job']:
+ return_error('Missing JobID status in response')
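+    # PAN-OS reports a finished job with status 'FIN'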
+ if result['response']['result']['job']['status'] == 'FIN':
+ query_traffic_status_output['Status'] = 'Completed'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Query Traffic Logs status:', query_traffic_status_output, ['JobID', 'Status'],
+ removeNull=True),
+ 'EntryContext': {"Panorama.TrafficLogs(val.JobID == obj.JobID)": query_traffic_status_output}
+ })
+
+
+def prettify_traffic_logs(traffic_logs):
+ pretty_traffic_logs_arr = []
+ for traffic_log in traffic_logs:
+ pretty_traffic_log = {}
+ if 'action' in traffic_log:
+ pretty_traffic_log['Action'] = traffic_log['action']
+ if 'action_source' in traffic_log:
+ pretty_traffic_log['ActionSource'] = traffic_log['action_source']
+ if 'application' in traffic_log:
+ pretty_traffic_log['Application'] = traffic_log['application']
+ if 'category' in traffic_log:
+ pretty_traffic_log['Category'] = traffic_log['category']
+ if 'device_name' in traffic_log:
+ pretty_traffic_log['DeviceName'] = traffic_log['device_name']
+ if 'dst' in traffic_log:
+ pretty_traffic_log['Destination'] = traffic_log['dst']
+ if 'dport' in traffic_log:
+ pretty_traffic_log['DestinationPort'] = traffic_log['dport']
+ if 'from' in traffic_log:
+ pretty_traffic_log['FromZone'] = traffic_log['from']
+ if 'proto' in traffic_log:
+ pretty_traffic_log['Protocol'] = traffic_log['proto']
+ if 'rule' in traffic_log:
+ pretty_traffic_log['Rule'] = traffic_log['rule']
+ if 'receive_time' in traffic_log:
+ pretty_traffic_log['ReceiveTime'] = traffic_log['receive_time']
+ if 'session_end_reason' in traffic_log:
+ pretty_traffic_log['SessionEndReason'] = traffic_log['session_end_reason']
+ if 'src' in traffic_log:
+ pretty_traffic_log['Source'] = traffic_log['src']
+ if 'sport' in traffic_log:
+ pretty_traffic_log['SourcePort'] = traffic_log['sport']
+ if 'start' in traffic_log:
+ pretty_traffic_log['StartTime'] = traffic_log['start']
+ if 'to' in traffic_log:
+ pretty_traffic_log['ToZone'] = traffic_log['to']
+
+ pretty_traffic_logs_arr.append(pretty_traffic_log)
+ return pretty_traffic_logs_arr
+
+
+def panorama_get_traffic_logs_command():
+ job_id = demisto.args().get('job_id')
+ result = panorama_get_traffic_logs(job_id)
+
+ if result['response']['@status'] == 'error':
+ if 'msg' in result['response'] and 'line' in result['response']['msg']:
+ message = '. Reason is: ' + result['response']['msg']['line']
+ return_error('Query traffic logs failed' + message)
+ else:
+ return_error('Query traffic logs failed')
+
+ query_traffic_logs_output = {
+ 'JobID': job_id,
+ 'Status': 'Pending'
+ }
+
+ if 'response' not in result or 'result' not in result['response'] or 'job' not in result['response']['result'] \
+ or 'status' not in result['response']['result']['job']:
+ return_error('Missing JobID status in response')
+
+ if result['response']['result']['job']['status'] != 'FIN':
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Query Traffic Logs status:', query_traffic_logs_output,
+ ['JobID', 'Status'], removeNull=True),
+ 'EntryContext': {"Panorama.TrafficLogs(val.JobID == obj.JobID)": query_traffic_logs_output}
+ })
+ else: # FIN
+ query_traffic_logs_output['Status'] = 'Completed'
+ if 'response' not in result or 'result' not in result['response'] or 'log' not in result['response']['result'] \
+ or 'logs' not in result['response']['result']['log']:
+ return_error('Missing logs in response')
+
+ logs = result['response']['result']['log']['logs']
+ if logs['@count'] == '0':
+ demisto.results('No traffic logs matched the query')
+ else:
+ pretty_traffic_logs = prettify_traffic_logs(logs['entry'])
+ query_traffic_logs_output['Logs'] = pretty_traffic_logs
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Query Traffic Logs:', pretty_traffic_logs,
+ ['JobID', 'Source', 'SourcePort', 'Destination', 'DestinationPort',
+ 'Application', 'Action'], removeNull=True),
+ 'EntryContext': {"Panorama.TrafficLogs(val.JobID == obj.JobID)": query_traffic_logs_output}
+ })
+
+
+''' Logs '''
+
+
+def build_array_query(query, arg_string, string, operator):
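+    # e.g. build_array_query('', '1.1.1.1,2.2.2.2', 'addr.src', 'in')
+    # returns "((addr.src in '1.1.1.1') or (addr.src in '2.2.2.2'))"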
+ list_string = argToList(arg_string)
+ list_string_length = len(list_string)
+
+ if list_string_length > 1:
+ query += '('
+
+ for i, item in enumerate(list_string):
+ query += f'({string} {operator} \'{item}\')'
+ if i < list_string_length - 1:
+ query += ' or '
+
+ if list_string_length > 1:
+ query += ')'
+
+ return query
+
+
+def build_logs_query(address_src=None, address_dst=None,
+ zone_src=None, zone_dst=None, time_generated=None, action=None,
+ port_dst=None, rule=None, url=None, filedigest=None):
+ query = ''
+ if address_src:
+        query = build_array_query(query, address_src, 'addr.src', 'in')
+ if address_dst:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+        query = build_array_query(query, address_dst, 'addr.dst', 'in')
+ if zone_src:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+        query = build_array_query(query, zone_src, 'zone.src', 'eq')
+ if zone_dst:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+        query = build_array_query(query, zone_dst, 'zone.dst', 'eq')
+ if port_dst:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+        query = build_array_query(query, port_dst, 'port.dst', 'eq')
+ if time_generated:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+ query += '(time_generated leq ' + time_generated + ')'
+ if action:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+        query = build_array_query(query, action, 'action', 'eq')
+ if rule:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+        query = build_array_query(query, rule, 'rule', 'eq')
+ if url:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+        query = build_array_query(query, url, 'url', 'eq')
+ if filedigest:
+ if len(query) > 0 and query[-1] == ')':
+ query += ' and '
+        query = build_array_query(query, filedigest, 'filedigest', 'eq')
+
+ return query
+
+
+@logger
+def panorama_query_logs(log_type, number_of_logs, query, address_src, address_dst,
+ zone_src, zone_dst, time_generated, action,
+ port_dst, rule, url, filedigest):
+ params = {
+ 'type': 'log',
+ 'log-type': log_type,
+ 'key': API_KEY
+ }
+
+ if filedigest and log_type != 'wildfire':
+ return_error('The filedigest argument is only relevant to wildfire log type.')
+ if url and log_type == 'traffic':
+ return_error('The url argument is not relevant to traffic log type.')
+
+ if query:
+ params['query'] = query
+ else:
+ params['query'] = build_logs_query(address_src, address_dst,
+ zone_src, zone_dst, time_generated, action,
+ port_dst, rule, url, filedigest)
+ if number_of_logs:
+ params['nlogs'] = number_of_logs
+
+ result = http_request(
+ URL,
+ 'GET',
+ params=params,
+ )
+
+ return result
+
+
+def panorama_query_logs_command():
+ """
+ Query logs
+ """
+ log_type = demisto.args().get('log-type')
+ number_of_logs = demisto.args().get('number_of_logs')
+ query = demisto.args().get('query')
+ address_src = demisto.args().get('addr-src')
+ address_dst = demisto.args().get('addr-dst')
+ zone_src = demisto.args().get('zone-src')
+ zone_dst = demisto.args().get('zone-dst')
+ time_generated = demisto.args().get('time-generated')
+ action = demisto.args().get('action')
+ port_dst = demisto.args().get('port-dst')
+ rule = demisto.args().get('rule')
+ filedigest = demisto.args().get('filedigest')
+ url = demisto.args().get('url')
+ if url and url[-1] != '/':
+ url += '/'
+
+ if query and (address_src or address_dst or zone_src or zone_dst
+ or time_generated or action or port_dst or rule or url or filedigest):
+        return_error('Use either the free-text query argument or the fixed search parameter arguments to build the query.')
+
+ result = panorama_query_logs(log_type, number_of_logs, query, address_src, address_dst,
+ zone_src, zone_dst, time_generated, action,
+ port_dst, rule, url, filedigest)
+
+ if result['response']['@status'] == 'error':
+ if 'msg' in result['response'] and 'line' in result['response']['msg']:
+ message = '. Reason is: ' + result['response']['msg']['line']
+            return_error('Query logs failed' + message)
+        else:
+            return_error('Query logs failed')
+
+ if 'response' not in result or 'result' not in result['response'] or 'job' not in result['response']['result']:
+ return_error('Missing JobID in response')
+
+ query_logs_output = {
+ 'JobID': result['response']['result']['job'],
+ 'Status': 'Pending',
+ 'LogType': log_type,
+ 'Message': result['response']['result']['msg']['line']
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Query Logs:', query_logs_output, ['JobID', 'Status'], removeNull=True),
+ 'EntryContext': {"Panorama.Monitor(val.JobID == obj.JobID)": query_logs_output}
+ })
+
+
+def panorama_check_logs_status_command():
+ """
+ Check query logs status
+ """
+ job_id = demisto.args().get('job_id')
+ result = panorama_get_traffic_logs(job_id)
+
+ if result['response']['@status'] == 'error':
+ if 'msg' in result['response'] and 'line' in result['response']['msg']:
+ message = '. Reason is: ' + result['response']['msg']['line']
+ return_error('Query logs failed' + message)
+ else:
+ return_error('Query logs failed')
+
+ query_logs_status_output = {
+ 'JobID': job_id,
+ 'Status': 'Pending'
+ }
+
+ if 'response' not in result or 'result' not in result['response'] or 'job' not in result['response']['result'] \
+ or 'status' not in result['response']['result']['job']:
+ return_error('Missing JobID status in response')
+ if result['response']['result']['job']['status'] == 'FIN':
+ query_logs_status_output['Status'] = 'Completed'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Query Logs status:', query_logs_status_output, ['JobID', 'Status'],
+ removeNull=True),
+ 'EntryContext': {"Panorama.Monitor(val.JobID == obj.JobID)": query_logs_status_output}
+ })
+
+
+def prettify_log(log):
+ pretty_log = {}
+
+ if 'action' in log:
+ pretty_log['Action'] = log['action']
+ if 'app' in log:
+ pretty_log['Application'] = log['app']
+ if 'category' in log:
+ pretty_log['CategoryOrVerdict'] = log['category']
+ if 'device_name' in log:
+ pretty_log['DeviceName'] = log['device_name']
+ if 'dst' in log:
+ pretty_log['DestinationAddress'] = log['dst']
+ if 'dstuser' in log:
+ pretty_log['DestinationUser'] = log['dstuser']
+ if 'dstloc' in log:
+ pretty_log['DestinationCountry'] = log['dstloc']
+ if 'dport' in log:
+ pretty_log['DestinationPort'] = log['dport']
+ if 'filedigest' in log:
+ pretty_log['FileDigest'] = log['filedigest']
+ if 'filename' in log:
+ pretty_log['FileName'] = log['filename']
+ if 'filetype' in log:
+ pretty_log['FileType'] = log['filetype']
+ if 'from' in log:
+ pretty_log['FromZone'] = log['from']
+ if 'misc' in log:
+ pretty_log['URLOrFilename'] = log['misc']
+ if 'natdst' in log:
+ pretty_log['NATDestinationIP'] = log['natdst']
+ if 'natdport' in log:
+ pretty_log['NATDestinationPort'] = log['natdport']
+ if 'natsrc' in log:
+ pretty_log['NATSourceIP'] = log['natsrc']
+ if 'natsport' in log:
+ pretty_log['NATSourcePort'] = log['natsport']
+ if 'pcap_id' in log:
+ pretty_log['PCAPid'] = log['pcap_id']
+ if 'proto' in log:
+ pretty_log['IPProtocol'] = log['proto']
+ if 'recipient' in log:
+ pretty_log['Recipient'] = log['recipient']
+ if 'rule' in log:
+ pretty_log['Rule'] = log['rule']
+ if 'rule_uuid' in log:
+ pretty_log['RuleID'] = log['rule_uuid']
+ if 'receive_time' in log:
+ pretty_log['ReceiveTime'] = log['receive_time']
+ if 'sender' in log:
+ pretty_log['Sender'] = log['sender']
+ if 'sessionid' in log:
+ pretty_log['SessionID'] = log['sessionid']
+ if 'serial' in log:
+ pretty_log['DeviceSN'] = log['serial']
+ if 'severity' in log:
+ pretty_log['Severity'] = log['severity']
+ if 'src' in log:
+ pretty_log['SourceAddress'] = log['src']
+ if 'srcloc' in log:
+ pretty_log['SourceCountry'] = log['srcloc']
+ if 'srcuser' in log:
+ pretty_log['SourceUser'] = log['srcuser']
+ if 'sport' in log:
+ pretty_log['SourcePort'] = log['sport']
+ if 'thr_category' in log:
+ pretty_log['ThreatCategory'] = log['thr_category']
+ if 'threatid' in log:
+ pretty_log['Name'] = log['threatid']
+ if 'tid' in log:
+ pretty_log['ID'] = log['tid']
+ if 'to' in log:
+ pretty_log['ToZone'] = log['to']
+ if 'time_generated' in log:
+ pretty_log['TimeGenerated'] = log['time_generated']
+ if 'url_category_list' in log:
+ pretty_log['URLCategoryList'] = log['url_category_list']
+
+ return pretty_log
+
+
+def prettify_logs(logs):
+ if not isinstance(logs, list): # handle case of only one log that matched the query
+ return prettify_log(logs)
+ pretty_logs_arr = []
+ for log in logs:
+ pretty_log = prettify_log(log)
+ pretty_logs_arr.append(pretty_log)
+ return pretty_logs_arr
+
+
+def panorama_get_logs_command():
+ job_id = demisto.args().get('job_id')
+ result = panorama_get_traffic_logs(job_id)
+ ignore_auto_extract = demisto.args().get('ignore_auto_extract') == 'true'
+ log_type_dt = demisto.dt(demisto.context(), f'Panorama.Monitor(val.JobID === "{job_id}").LogType')
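+    # the log type was stored in context by panorama-query-logs; dt may return a list of matches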
+ if isinstance(log_type_dt, list):
+ log_type = log_type_dt[0]
+ else:
+ log_type = log_type_dt
+
+ if result['response']['@status'] == 'error':
+ if 'msg' in result['response'] and 'line' in result['response']['msg']:
+ message = '. Reason is: ' + result['response']['msg']['line']
+ return_error('Query logs failed' + message)
+ else:
+ return_error('Query logs failed')
+
+ query_logs_output = {
+ 'JobID': job_id,
+ 'Status': 'Pending'
+ }
+
+ if 'response' not in result or 'result' not in result['response'] or 'job' not in result['response']['result'] \
+ or 'status' not in result['response']['result']['job']:
+ return_error('Missing JobID status in response')
+
+ if result['response']['result']['job']['status'] != 'FIN':
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Query Logs status:', query_logs_output,
+ ['JobID', 'Status'], removeNull=True),
+ 'EntryContext': {"Panorama.Monitor(val.JobID == obj.JobID)": query_logs_output}
+ })
+ else: # FIN
+ query_logs_output['Status'] = 'Completed'
+ if 'response' not in result or 'result' not in result['response'] or 'log' not in result['response']['result'] \
+ or 'logs' not in result['response']['result']['log']:
+ return_error('Missing logs in response')
+
+ logs = result['response']['result']['log']['logs']
+ if logs['@count'] == '0':
+ demisto.results('No ' + log_type + ' logs matched the query')
+ else:
+ pretty_logs = prettify_logs(logs['entry'])
+ query_logs_output['Logs'] = pretty_logs
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': result,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Query ' + log_type + ' Logs:', query_logs_output['Logs'],
+ ['TimeGenerated', 'SourceAddress', 'DestinationAddress', 'Application',
+ 'Action', 'Rule', 'URLOrFilename'], removeNull=True),
+ 'IgnoreAutoExtract': ignore_auto_extract,
+ 'EntryContext': {"Panorama.Monitor(val.JobID == obj.JobID)": query_logs_output}
+ })
+
+
+''' EXECUTION '''
+
+
+def main():
+ LOG('command is %s' % (demisto.command(),))
+
+ try:
+ # Remove proxy if not set to true in params
+ handle_proxy()
+
+ if demisto.command() == 'test-module':
+ panorama_test()
+
+ elif demisto.command() == 'panorama':
+ panorama_command()
+
+ elif demisto.command() == 'panorama-commit':
+ panorama_commit_command()
+
+ elif demisto.command() == 'panorama-commit-status':
+ panorama_commit_status_command()
+
+ elif demisto.command() == 'panorama-push-to-device-group':
+ panorama_push_to_device_group_command()
+
+ elif demisto.command() == 'panorama-push-status':
+ panorama_push_status_command()
+
+ # Addresses commands
+ elif demisto.command() == 'panorama-list-addresses':
+ panorama_list_addresses_command()
+
+ elif demisto.command() == 'panorama-get-address':
+ panorama_get_address_command()
+
+ elif demisto.command() == 'panorama-create-address':
+ panorama_create_address_command()
+
+ elif demisto.command() == 'panorama-delete-address':
+ panorama_delete_address_command()
+
+ # Address groups commands
+ elif demisto.command() == 'panorama-list-address-groups':
+ panorama_list_address_groups_command()
+
+ elif demisto.command() == 'panorama-get-address-group':
+ panorama_get_address_group_command()
+
+ elif demisto.command() == 'panorama-create-address-group':
+ panorama_create_address_group_command()
+
+ elif demisto.command() == 'panorama-delete-address-group':
+ panorama_delete_address_group_command()
+
+ elif demisto.command() == 'panorama-edit-address-group':
+ panorama_edit_address_group_command()
+
+ # Services commands
+ elif demisto.command() == 'panorama-list-services':
+ panorama_list_services_command()
+
+ elif demisto.command() == 'panorama-get-service':
+ panorama_get_service_command()
+
+ elif demisto.command() == 'panorama-create-service':
+ panorama_create_service_command()
+
+ elif demisto.command() == 'panorama-delete-service':
+ panorama_delete_service_command()
+
+ # Service groups commands
+ elif demisto.command() == 'panorama-list-service-groups':
+ panorama_list_service_groups_command()
+
+ elif demisto.command() == 'panorama-get-service-group':
+ panorama_get_service_group_command()
+
+ elif demisto.command() == 'panorama-create-service-group':
+ panorama_create_service_group_command()
+
+ elif demisto.command() == 'panorama-delete-service-group':
+ panorama_delete_service_group_command()
+
+ elif demisto.command() == 'panorama-edit-service-group':
+ panorama_edit_service_group_command()
+
+ # Custom Url Category commands
+ elif demisto.command() == 'panorama-get-custom-url-category':
+ panorama_get_custom_url_category_command()
+
+ elif demisto.command() == 'panorama-create-custom-url-category':
+ panorama_create_custom_url_category_command()
+
+ elif demisto.command() == 'panorama-delete-custom-url-category':
+ panorama_delete_custom_url_category_command()
+
+ elif demisto.command() == 'panorama-edit-custom-url-category':
+ if demisto.args()['action'] == 'remove':
+ panorama_custom_url_category_remove_sites_command()
+ else:
+ panorama_custom_url_category_add_sites_command()
+
+ # URL Filtering capabilities
+ elif demisto.command() == 'panorama-get-url-category':
+ panorama_get_url_category_command()
+
+ elif demisto.command() == 'panorama-get-url-filter':
+ panorama_get_url_filter_command()
+
+ elif demisto.command() == 'panorama-create-url-filter':
+ panorama_create_url_filter_command()
+
+ elif demisto.command() == 'panorama-edit-url-filter':
+ panorama_edit_url_filter_command()
+
+ elif demisto.command() == 'panorama-delete-url-filter':
+ panorama_delete_url_filter_command()
+
+ # EDL
+ elif demisto.command() == 'panorama-list-edls':
+ panorama_list_edls_command()
+
+ elif demisto.command() == 'panorama-get-edl':
+ panorama_get_edl_command()
+
+ elif demisto.command() == 'panorama-create-edl':
+ panorama_create_edl_command()
+
+ elif demisto.command() == 'panorama-edit-edl':
+ panorama_edit_edl_command()
+
+ elif demisto.command() == 'panorama-delete-edl':
+ panorama_delete_edl_command()
+
+ elif demisto.command() == 'panorama-refresh-edl':
+ panorama_refresh_edl_command()
+
+ # Registered IPs
+ elif demisto.command() == 'panorama-register-ip-tag':
+ panorama_register_ip_tag_command()
+
+ elif demisto.command() == 'panorama-unregister-ip-tag':
+ panorama_unregister_ip_tag_command()
+
+ # Security Rules Managing
+ elif demisto.command() == 'panorama-list-rules':
+ panorama_list_rules_command()
+
+ elif demisto.command() == 'panorama-move-rule':
+ panorama_move_rule_command()
+
+ # Security Rules Configuration
+ elif demisto.command() == 'panorama-create-rule':
+ panorama_create_rule_command()
+
+ elif demisto.command() == 'panorama-custom-block-rule':
+ panorama_custom_block_rule_command()
+
+ elif demisto.command() == 'panorama-edit-rule':
+ panorama_edit_rule_command()
+
+ elif demisto.command() == 'panorama-delete-rule':
+ panorama_delete_rule_command()
+
+ # Traffic Logs - deprecated
+ elif demisto.command() == 'panorama-query-traffic-logs':
+ panorama_query_traffic_logs_command()
+
+ elif demisto.command() == 'panorama-check-traffic-logs-status':
+ panorama_check_traffic_logs_status_command()
+
+ elif demisto.command() == 'panorama-get-traffic-logs':
+ panorama_get_traffic_logs_command()
+
+ # Logs
+ elif demisto.command() == 'panorama-query-logs':
+ panorama_query_logs_command()
+
+ elif demisto.command() == 'panorama-check-logs-status':
+ panorama_check_logs_status_command()
+
+ elif demisto.command() == 'panorama-get-logs':
+ panorama_get_logs_command()
+
+ # Pcaps
+ elif demisto.command() == 'panorama-list-pcaps':
+ panorama_list_pcaps_command()
+
+ elif demisto.command() == 'panorama-get-pcap':
+ panorama_get_pcap_command()
+
+ # Application
+ elif demisto.command() == 'panorama-list-applications':
+ panorama_list_applications_command()
+
+ except Exception as ex:
+ return_error(str(ex))
+
+ finally:
+ LOG.print_log()
+
+
+if __name__ in ["__builtin__", "builtins"]:
+ main()
diff --git a/Integrations/Panorama/Panorama.yml b/Integrations/Panorama/Panorama.yml
new file mode 100644
index 000000000000..498699710fda
--- /dev/null
+++ b/Integrations/Panorama/Panorama.yml
@@ -0,0 +1,2809 @@
+category: Network Security
+commonfields:
+ id: Panorama
+ version: -1
+configuration:
+- display: Server URL (e.g., https://192.168.0.1)
+ name: server
+ required: true
+ type: 0
+- defaultvalue: '443'
+  display: Port (e.g., 443)
+ name: port
+ required: false
+ type: 0
+- display: API Key
+ name: key
+ required: true
+ type: 4
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Device group - Panorama instances only (write shared for Shared location)
+ name: device_group
+ required: false
+ type: 0
+- display: Vsys - Firewall instances only
+ name: vsys
+ required: false
+ type: 0
+description: Manage Palo Alto Networks Firewall and Panorama. For more information,
+  see the Panorama documentation.
+display: Palo Alto Networks PAN-OS
+name: Panorama
+script:
+ commands:
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Action to be taken, such as show, get, set, edit, delete, rename,
+ clone, move, override, multi-move, multi-clone, or complete.
+ isArray: false
+ name: action
+ predefined:
+ - set
+ - edit
+ - delete
+ - rename
+ - clone
+ - move
+ - override
+  - multi-move
+ - multi-clone
+ - complete
+ - show
+ - get
+ required: false
+ secret: false
+ - default: false
+ description: Category parameter. For example, when exporting a configuration
+ file, use "category=configuration".
+ isArray: false
+ name: category
+ required: false
+ secret: false
+ - default: false
+ description: Specifies the xml structure that defines the command. Used for
+ operation commands.
+ isArray: false
+ name: cmd
+ required: false
+ secret: false
+ - default: false
+    description: Run a command. For example, command =<show><arp><entry name='all'/></arp></show>
+ isArray: false
+ name: command
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a destination.
+ isArray: false
+ name: dst
+ required: false
+ secret: false
+ - default: false
+ description: Used to define a new value for an object.
+ isArray: false
+ name: element
+ required: false
+ secret: false
+ - default: false
+ description: End time (used when cloning an object).
+ isArray: false
+ name: to
+ required: false
+ secret: false
+ - default: false
+ description: Start time (used when cloning an object).
+ isArray: false
+ name: from
+ required: false
+ secret: false
+ - default: false
+ description: Sets a key value.
+ isArray: false
+ name: key
+ required: false
+ secret: false
+ - default: false
+ description: Retrieves log types. For example, log-type=threat for threat logs.
+ isArray: false
+ name: log-type
+ required: false
+ secret: false
+ - default: false
+ description: Specifies the type of a move operation (for example, where=after,
+ where=before, where=top, where=bottom).
+ isArray: false
+ name: where
+ required: false
+ secret: false
+ - default: false
+ description: Time period. For example, period=last-24-hrs
+ isArray: false
+ name: period
+ required: false
+ secret: false
+ - default: false
+ description: xpath location. For example, xpath=/config/predefined/application/entry[@name='hotmail']
+ isArray: false
+ name: xpath
+ required: false
+ secret: false
+ - default: false
+ description: PCAP ID included in the threat log.
+ isArray: false
+ name: pcap-id
+ required: false
+ secret: false
+ - default: false
+ description: Specifies the device serial number.
+ isArray: false
+ name: serialno
+ required: false
+ secret: false
+ - default: false
+ description: Chooses the report type, such as dynamic, predefined or custom.
+ isArray: false
+ name: reporttype
+ required: false
+ secret: false
+ - default: false
+ description: Report name.
+ isArray: false
+ name: reportname
+ required: false
+ secret: false
+ - default: false
+ defaultValue: keygen,config,commit,op,report,log,import,export,user-id,version
+ description: Request type (e.g. export, import, log, config).
+ isArray: false
+ name: type
+ required: false
+ secret: false
+ - default: false
+ description: The time that the PCAP was received on the firewall. Used for threat
+ PCAPs.
+ isArray: false
+ name: search-time
+ required: false
+ secret: false
+ - default: false
+ description: Target number of the firewall. Use only on a Panorama instance.
+ isArray: false
+ name: target
+ required: false
+ secret: false
+ - default: false
+ description: Job ID.
+ isArray: false
+ name: job-id
+ required: false
+ secret: false
+ - default: false
+ description: Query string.
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ deprecated: false
+ description: Run any command supported in the API.
+ execution: false
+ name: panorama
+ - deprecated: false
+ description: Commits a configuration to Palo Alto Firewall or Panorama, but does
+ not validate if the commit was successful. Committing to Panorama does not push
+ the configuration to the Firewalls. To push the configuration, run the panorama-push-to-device-group
+ command.
+ execution: false
+ name: panorama-commit
+ outputs:
+ - contextPath: Panorama.Commit.JobID
+ description: Job ID to commit.
+ type: number
+ - contextPath: Panorama.Commit.Status
+    description: Commit status.
+ type: string
+ - arguments:
+ - default: true
+    description: The device group to which to push (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Pushes rules from PAN-OS to the configured device group.
+ execution: false
+ name: panorama-push-to-device-group
+ outputs:
+ - contextPath: Panorama.Push.DeviceGroup
+ description: Device group in which the policies were pushed.
+ type: String
+ - contextPath: Panorama.Push.JobID
+    description: Job ID of the policies that were pushed.
+ type: Number
+ - contextPath: Panorama.Push.Status
+ description: Push status.
+ type: String
+ - arguments:
+ - default: false
+ description: The device group for which to return addresses (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ - default: false
+ description: Tag for which to filter the list of addresses.
+ isArray: false
+ name: tag
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of addresses.
+ execution: false
+ name: panorama-list-addresses
+ outputs:
+ - contextPath: Panorama.Addresses.Name
+ description: Address name.
+ type: string
+ - contextPath: Panorama.Addresses.Description
+ description: Address description.
+ type: string
+ - contextPath: Panorama.Addresses.FQDN
+ description: Address FQDN.
+ type: string
+ - contextPath: Panorama.Addresses.IP_Netmask
+ description: Address IP Netmask.
+ type: string
+ - contextPath: Panorama.Addresses.IP_Range
+ description: Address IP range.
+ type: string
+ - contextPath: Panorama.Addresses.DeviceGroup
+ description: Address device group.
+ type: String
+ - contextPath: Panorama.Addresses.Tags
+ description: Address tags.
+ type: String
+ - arguments:
+ - default: false
+ description: Address name.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group for which to return addresses (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns address details for the supplied address name.
+ execution: false
+ name: panorama-get-address
+ outputs:
+ - contextPath: Panorama.Addresses.Name
+ description: Address name.
+ type: string
+ - contextPath: Panorama.Addresses.Description
+ description: Address description.
+ type: string
+ - contextPath: Panorama.Addresses.FQDN
+ description: Address FQDN.
+ type: string
+ - contextPath: Panorama.Addresses.IP_Netmask
+ description: Address IP Netmask.
+ type: string
+ - contextPath: Panorama.Addresses.IP_Range
+ description: Address IP range.
+ type: string
+ - contextPath: Panorama.Addresses.DeviceGroup
+ description: Device group for the address (Panorama instances).
+ type: String
+ - contextPath: Panorama.Addresses.Tags
+ description: Address tags.
+ type: String
+ - arguments:
+ - default: false
+ description: New address name.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: New address description.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: FQDN of the new address.
+ isArray: false
+ name: fqdn
+ required: false
+ secret: false
+ - default: false
+ description: IP Netmask of the new address. For example, 10.10.10.10/24
+ isArray: false
+ name: ip_netmask
+ required: false
+ secret: false
+ - default: false
+ description: IP range of the new address. For example, 10.10.10.0-10.10.10.255
+ isArray: false
+ name: ip_range
+ required: false
+ secret: false
+ - default: false
+ description: The device group in which to create the address (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ - default: false
+ description: The tag for the new address.
+ isArray: true
+ name: tag
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates an address object.
+ execution: false
+ name: panorama-create-address
+ outputs:
+ - contextPath: Panorama.Addresses.Name
+ description: Address name.
+ type: string
+ - contextPath: Panorama.Addresses.Description
+ description: Address description.
+ type: string
+ - contextPath: Panorama.Addresses.FQDN
+ description: Address FQDN.
+ type: string
+ - contextPath: Panorama.Addresses.IP_Netmask
+ description: Address IP Netmask.
+ type: string
+ - contextPath: Panorama.Addresses.IP_Range
+ description: Address IP range.
+ type: string
+ - contextPath: Panorama.Addresses.DeviceGroup
+ description: Device group for the address (Panorama instances).
+ type: String
+ - contextPath: Panorama.Addresses.Tag
+ description: Address tag.
+ type: String
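+ # Example invocation (hypothetical values): one of fqdn, ip_netmask, or ip_range
+ # would typically be supplied, e.g.:
+ #   !panorama-create-address name="web-srv-1" ip_netmask="10.10.10.10/24"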
+ - arguments:
+ - default: false
+ description: Name of the address to delete.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group of the address to delete (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes an address object.
+ execution: false
+ name: panorama-delete-address
+ outputs:
+ - contextPath: Panorama.Addresses.Name
+ description: Address name that was deleted.
+ type: string
+ - contextPath: Panorama.Addresses.DeviceGroup
+ description: Device group for the address (Panorama instances).
+ type: String
+ - arguments:
+ - default: false
+ description: The device group for which to return address groups (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ - default: false
+ description: Tag for which to filter the Address groups.
+ isArray: false
+ name: tag
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of address groups.
+ execution: false
+ name: panorama-list-address-groups
+ outputs:
+ - contextPath: Panorama.AddressGroups.Name
+ description: Address group name.
+ type: string
+ - contextPath: Panorama.AddressGroups.Type
+ description: Address group type.
+ type: string
+ - contextPath: Panorama.AddressGroups.Match
+ description: Dynamic Address group match.
+ type: string
+ - contextPath: Panorama.AddressGroups.Description
+ description: Address group description.
+ type: string
+ - contextPath: Panorama.AddressGroups.Addresses
+ description: Static Address group addresses.
+ type: String
+ - contextPath: Panorama.AddressGroups.DeviceGroup
+ description: Device group for the address group (Panorama instances).
+ type: String
+ - contextPath: Panorama.AddressGroups.Tag
+ description: Address group tag.
+ type: String
+ - arguments:
+ - default: false
+ description: Address group name.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group of the address group (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns details for the specified address group.
+ execution: false
+ name: panorama-get-address-group
+ outputs:
+ - contextPath: Panorama.AddressGroups.Name
+ description: Address group name.
+ type: string
+ - contextPath: Panorama.AddressGroups.Type
+ description: Address group type.
+ type: string
+ - contextPath: Panorama.AddressGroups.Match
+ description: Dynamic Address group match.
+ type: string
+ - contextPath: Panorama.AddressGroups.Description
+ description: Address group description.
+ type: string
+ - contextPath: Panorama.AddressGroups.Addresses
+ description: Static Address group addresses.
+ type: string
+ - contextPath: Panorama.AddressGroups.DeviceGroup
+ description: Device group for the address group (Panorama instances).
+ type: String
+ - contextPath: Panorama.AddressGroups.Tags
+ description: Address group tags.
+ type: String
+ - arguments:
+ - default: false
+ description: Address group name.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Address group type.
+ isArray: false
+ name: type
+ predefined:
+ - dynamic
+ - static
+ required: true
+ secret: false
+ - default: false
+ description: 'Dynamic Address group match. For example: "1.1.1.1 or 2.2.2.2".'
+ isArray: false
+ name: match
+ required: false
+ secret: false
+ - default: false
+ description: Static address group list of addresses.
+ isArray: true
+ name: addresses
+ required: false
+ secret: false
+ - default: false
+ description: Address group description.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The device group in which to create the address group (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ - default: false
+ description: The tags for the Address group.
+ isArray: true
+ name: tags
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a static or dynamic address group.
+ execution: false
+ name: panorama-create-address-group
+ outputs:
+ - contextPath: Panorama.AddressGroups.Name
+ description: Address group name.
+ type: string
+ - contextPath: Panorama.AddressGroups.Type
+ description: Address group type.
+ type: string
+ - contextPath: Panorama.AddressGroups.Match
+ description: Dynamic Address group match.
+ type: string
+ - contextPath: Panorama.AddressGroups.Addresses
+ description: Static Address group list of addresses.
+ type: string
+ - contextPath: Panorama.AddressGroups.Description
+ description: Address group description.
+ type: string
+ - contextPath: Panorama.AddressGroups.DeviceGroup
+ description: Device group for the address group (Panorama instances).
+ type: String
+ - contextPath: Panorama.AddressGroups.Tag
+ description: Address group tags.
+ type: String
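+ # Example invocations (hypothetical values): a static group takes a list of existing
+ # address objects, while a dynamic group takes a match filter, e.g.:
+ #   !panorama-create-address-group name="static-grp" type="static" addresses="web-srv-1,web-srv-2"
+ #   !panorama-create-address-group name="dyn-grp" type="dynamic" match="malware-tag or c2-tag"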
+ - arguments:
+ - default: false
+ description: Name of address group to delete.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group of the address group to delete (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes an address group.
+ execution: false
+ name: panorama-delete-address-group
+ outputs:
+ - contextPath: Panorama.AddressGroups.Name
+ description: Name of address group that was deleted.
+ type: string
+ - contextPath: Panorama.AddressGroups.DeviceGroup
+ description: Device group for the address group (Panorama instances).
+ type: String
+ - arguments:
+ - default: false
+ description: Name of the address group to edit.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Address group type.
+ isArray: false
+ name: type
+ predefined:
+ - static
+ - dynamic
+ required: true
+ secret: false
+ - default: false
+ description: New match for the address group. For example, '1.1.1.1 and 2.2.2.2'.
+ isArray: false
+ name: match
+ required: false
+ secret: false
+ - default: false
+ description: Element to add to the list of the static address group. Only existing
+ Address objects can be added.
+ isArray: false
+ name: element_to_add
+ required: false
+ secret: false
+ - default: false
+ description: Element to remove from the list of the static address group. Only
+ existing Address objects can be removed.
+ isArray: false
+ name: element_to_remove
+ required: false
+ secret: false
+ - default: false
+ description: New description for the address group.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The tags of the address group to edit.
+ isArray: true
+ name: tags
+ required: false
+ secret: false
+ deprecated: false
+ description: Edits a static or dynamic address group.
+ execution: false
+ name: panorama-edit-address-group
+ outputs:
+ - contextPath: Panorama.AddressGroups.Name
+ description: Address group name.
+ type: string
+ - contextPath: Panorama.AddressGroups.Type
+ description: Address group type.
+ type: string
+ - contextPath: Panorama.AddressGroups.Filter
+ description: Dynamic Address group match.
+ type: string
+ - contextPath: Panorama.AddressGroups.Description
+ description: Address group description.
+ type: string
+ - contextPath: Panorama.AddressGroups.Addresses
+ description: Static Address group addresses.
+ type: string
+ - contextPath: Panorama.AddressGroups.DeviceGroup
+ description: Device group for the address group (Panorama instances).
+ type: String
+ - contextPath: Panorama.AddressGroups.Tags
+ description: Address group tags.
+ type: String
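+ # Example (hypothetical values): adding an existing address object to a static group:
+ #   !panorama-edit-address-group name="static-grp" type="static" element_to_add="web-srv-3"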
+ - arguments:
+ - default: false
+ description: The device group for which to return services (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ - default: false
+ description: Tag for which to filter the Services.
+ isArray: false
+ name: tag
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of services.
+ execution: false
+ name: panorama-list-services
+ outputs:
+ - contextPath: Panorama.Services.Name
+ description: Service name.
+ type: string
+ - contextPath: Panorama.Services.Protocol
+ description: Service protocol.
+ type: string
+ - contextPath: Panorama.Services.Description
+ description: Service description.
+ type: string
+ - contextPath: Panorama.Services.DestinationPort
+ description: Service destination port.
+ type: string
+ - contextPath: Panorama.Services.SourcePort
+ description: Service source port.
+ type: string
+ - contextPath: Panorama.Services.DeviceGroup
+ description: Device group in which the service was configured (Panorama instances).
+ type: string
+ - contextPath: Panorama.Services.Tags
+ description: Service tags.
+ type: String
+ - arguments:
+ - default: false
+ description: Service name.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ description: The device group of the service (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns service details for the supplied service name.
+ execution: false
+ name: panorama-get-service
+ outputs:
+ - contextPath: Panorama.Services.Name
+ description: Service name.
+ type: string
+ - contextPath: Panorama.Services.Protocol
+ description: Service protocol.
+ type: string
+ - contextPath: Panorama.Services.Description
+ description: Service description.
+ type: string
+ - contextPath: Panorama.Services.DestinationPort
+ description: Service destination port.
+ type: string
+ - contextPath: Panorama.Services.SourcePort
+ description: Service source port.
+ type: string
+ - contextPath: Panorama.Services.DeviceGroup
+ description: Device group for the service (Panorama instances).
+ type: string
+ - contextPath: Panorama.Service.Tags
+ description: Service tags.
+ type: String
+ - arguments:
+ - default: false
+ description: Name for the new service.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Protocol for the new service.
+ isArray: false
+ name: protocol
+ predefined:
+ - tcp
+ - udp
+ - sctp
+ required: true
+ secret: false
+ - default: false
+ description: Destination port for the new service.
+ isArray: false
+ name: destination_port
+ required: true
+ secret: false
+ - default: false
+ description: Source port for the new service.
+ isArray: false
+ name: source_port
+ required: false
+ secret: false
+ - default: false
+ description: Description for the new service.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The device group in which to create the service (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ - default: false
+ description: Tags for the new service.
+ isArray: true
+ name: tags
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a service.
+ execution: false
+ name: panorama-create-service
+ outputs:
+ - contextPath: Panorama.Services.Name
+ description: Service name.
+ type: string
+ - contextPath: Panorama.Services.Protocol
+ description: Service protocol.
+ type: string
+ - contextPath: Panorama.Services.Description
+ description: Service description.
+ type: string
+ - contextPath: Panorama.Services.DestinationPort
+ description: Service destination port.
+ type: string
+ - contextPath: Panorama.Services.SourcePort
+ description: Service source port.
+ type: string
+ - contextPath: Panorama.Services.DeviceGroup
+ description: Device group for the service (Panorama instances).
+ type: string
+ - contextPath: Panorama.Services.Tags
+ description: Service tags.
+ type: String
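+ # Example (hypothetical values):
+ #   !panorama-create-service name="svc-https-8443" protocol="tcp" destination_port="8443"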
+ - arguments:
+ - default: false
+ description: Name of the service to delete.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group of the service to delete (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes a service.
+ execution: false
+ name: panorama-delete-service
+ outputs:
+ - contextPath: Panorama.Services.Name
+ description: Name of the deleted service.
+ type: string
+ - contextPath: Panorama.Services.DeviceGroup
+ description: Device group for the service (Panorama instances).
+ type: string
+ - arguments:
+ - default: false
+ description: The device group for which to return service groups (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ - default: false
+ description: Tags for which to filter the Service groups.
+ isArray: true
+ name: tag
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of service groups.
+ execution: false
+ name: panorama-list-service-groups
+ outputs:
+ - contextPath: Panorama.ServiceGroups.Name
+ description: Service group name.
+ type: string
+ - contextPath: Panorama.ServiceGroups.Services
+ description: Service group related services.
+ type: string
+ - contextPath: Panorama.ServiceGroups.DeviceGroup
+ description: Device group for the service group (Panorama instances).
+ type: string
+ - contextPath: Panorama.ServiceGroups.Tags
+ description: Service group tags.
+ type: String
+ - arguments:
+ - default: false
+ description: Service group name.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group of the service group (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns details for the specified service group.
+ execution: false
+ name: panorama-get-service-group
+ outputs:
+ - contextPath: Panorama.ServiceGroups.Name
+ description: Service group name.
+ type: string
+ - contextPath: Panorama.ServiceGroups.Services
+ description: Service group related services.
+ type: string
+ - contextPath: Panorama.ServiceGroups.DeviceGroup
+ description: Device group for the service group (Panorama instances).
+ type: string
+ - contextPath: Panorama.ServiceGroups.Tags
+ description: Service group tags.
+ type: String
+ - arguments:
+ - default: false
+ description: Service group name.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: Service group related services.
+ isArray: true
+ name: services
+ required: true
+ secret: false
+ - default: false
+ description: The device group in which to create the service group (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ - default: false
+ description: Tags for the new service group.
+ isArray: true
+ name: tags
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a service group.
+ execution: false
+ name: panorama-create-service-group
+ outputs:
+ - contextPath: Panorama.ServiceGroups.Name
+ description: Service group name.
+ type: string
+ - contextPath: Panorama.ServiceGroups.Services
+ description: Service group related services.
+ type: string
+ - contextPath: Panorama.ServiceGroups.DeviceGroup
+ description: Device group for the service group (Panorama instances).
+ type: string
+ - contextPath: Panorama.ServiceGroups.Tags
+ description: Service group tags.
+ type: String
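+ # Example (hypothetical values; the listed services must already exist):
+ #   !panorama-create-service-group name="web-services" services="svc-https-8443,svc-http-8080"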
+ - arguments:
+ - default: false
+ description: Name of the service group to delete.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group of the service group to delete (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes a service group.
+ execution: false
+ name: panorama-delete-service-group
+ outputs:
+ - contextPath: Panorama.ServiceGroups.Name
+ description: Name of the deleted service group.
+ type: string
+ - contextPath: Panorama.ServiceGroups.DeviceGroup
+ description: Device group for the service group (Panorama instances).
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the service group to edit.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: Services to add to the service group. Only existing Services objects
+ can be added.
+ isArray: true
+ name: services_to_add
+ required: false
+ secret: false
+ - default: false
+ description: Services to remove from the service group. Only existing Services
+ objects can be removed.
+ isArray: false
+ name: services_to_remove
+ required: false
+ secret: false
+ - default: false
+ description: Tags of the service group to edit.
+ isArray: true
+ name: tags
+ required: false
+ secret: false
+ deprecated: false
+ description: Edits a service group.
+ execution: false
+ name: panorama-edit-service-group
+ outputs:
+ - contextPath: Panorama.ServiceGroups.Name
+ description: Service group name.
+ type: string
+ - contextPath: Panorama.ServiceGroups.Services
+ description: Service group related services.
+ type: string
+ - contextPath: Panorama.ServiceGroups.DeviceGroup
+ description: Device group for the service group (Panorama instances).
+ type: string
+ - contextPath: Panorama.ServiceGroups.Tags
+ description: Service group tags.
+ type: String
+ - arguments:
+ - default: false
+ description: Custom URL category name.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group of the custom URL category (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns information for a custom URL category.
+ execution: false
+ name: panorama-get-custom-url-category
+ outputs:
+ - contextPath: Panorama.CustomURLCategory.Name
+ description: Custom URL category name.
+ type: string
+ - contextPath: Panorama.CustomURLCategory.Description
+ description: Custom URL category description.
+ type: string
+ - contextPath: Panorama.CustomURLCategory.Sites
+ description: Custom URL category list of sites.
+ type: string
+ - contextPath: Panorama.CustomURLCategory.DeviceGroup
+ description: Device group for the Custom URL Category (Panorama instances).
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the custom URL category to create.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: Description of the custom URL category to create.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: List of sites for the custom URL category.
+ isArray: true
+ name: sites
+ required: false
+ secret: false
+ - default: false
+ description: The device group in which to create the custom URL category (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a custom URL category.
+ execution: false
+ name: panorama-create-custom-url-category
+ outputs:
+ - contextPath: Panorama.CustomURLCategory.Name
+ description: Custom URL category name.
+ type: string
+ - contextPath: Panorama.CustomURLCategory.Description
+ description: Custom URL category description.
+ type: string
+ - contextPath: Panorama.CustomURLCategory.Sites
+ description: Custom URL category list of sites.
+ type: string
+ - contextPath: Panorama.CustomURLCategory.DeviceGroup
+ description: Device group for the Custom URL Category (Panorama instances).
+ type: string
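+ # Example (hypothetical values): sites is an array argument, so a CSV list is expected:
+ #   !panorama-create-custom-url-category name="blocked-sites" sites="example.com,example.org"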
+ - arguments:
+ - default: false
+ description: Name of the custom URL category to delete.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group of the custom URL category to delete (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes a custom URL category.
+ execution: false
+ name: panorama-delete-custom-url-category
+ outputs:
+ - contextPath: Panorama.CustomURLCategory.Name
+ description: Name of the deleted custom URL category.
+ type: string
+ - contextPath: Panorama.CustomURLCategory.DeviceGroup
+ description: Device group for the Custom URL Category (Panorama instances).
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the custom URL category in which to add or remove sites.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: CSV list of sites to add to the custom URL category.
+ isArray: true
+ name: sites
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether to add or remove sites. Can be "add" or "remove".
+ isArray: true
+ name: action
+ predefined:
+ - add
+ - remove
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds sites to, or removes sites from, a custom URL category.
+ execution: false
+ name: panorama-edit-custom-url-category
+ outputs:
+ - contextPath: Panorama.CustomURLCategory.Name
+ description: Custom URL category name.
+ type: string
+ - contextPath: Panorama.CustomURLCategory.Description
+ description: Custom URL category description.
+ type: string
+ - contextPath: Panorama.CustomURLCategory.Sites
+ description: Custom URL category list of sites.
+ type: string
+ - contextPath: Panorama.CustomURLCategory.DeviceGroup
+ description: Device group for the Custom URL Category (Panorama instances).
+ type: string
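+ # Example (hypothetical values): appending a site to an existing category:
+ #   !panorama-edit-custom-url-category name="blocked-sites" action="add" sites="example.net"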
+ - arguments:
+ - default: false
+ description: URL to check.
+ isArray: true
+ name: url
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets a URL category from URL Filtering.
+ execution: false
+ name: panorama-get-url-category
+ outputs:
+ - contextPath: Panorama.URLFilter.URL
+ description: URL.
+ type: string
+ - contextPath: Panorama.URLFilter.Category
+ description: URL category.
+ type: string
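+ # Example (hypothetical URL):
+ #   !panorama-get-url-category url="https://example.com"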
+ - arguments:
+ - default: false
+ description: URL Filter name.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group of the URL filter (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns information for a URL filtering rule.
+ execution: false
+ name: panorama-get-url-filter
+ outputs:
+ - contextPath: Panorama.URLFilter.Name
+ description: URL Filter name.
+ type: string
+ - contextPath: Panorama.URLFilter.Category.Name
+ description: URL Filter category name.
+ type: string
+ - contextPath: Panorama.URLFilter.Category.Action
+ description: Action for the URL category.
+ type: string
+ - contextPath: Panorama.URLFilter.OverrideBlockList
+ description: URL Filter override block list.
+ type: string
+ - contextPath: Panorama.URLFilter.OverrideAllowList
+ description: URL Filter override allow list.
+ type: string
+ - contextPath: Panorama.URLFilter.Description
+ description: URL Filter description.
+ type: string
+ - contextPath: Panorama.URLFilter.DeviceGroup
+ description: Device group for the URL Filter (Panorama instances).
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the URL filter to create.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: URL categories.
+ isArray: true
+ name: url_category
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Action for the URL categories. Can be "allow", "block", "alert",
+ "continue", or "override".
+ isArray: false
+ name: action
+ predefined:
+ - allow
+ - block
+ - alert
+ - continue
+ - override
+ required: true
+ secret: false
+ - default: false
+ description: CSV list of URLs to exclude from the allow list.
+ isArray: true
+ name: override_allow_list
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of URLs to exclude from the blocked list.
+ isArray: true
+ name: override_block_list
+ required: false
+ secret: false
+ - default: false
+ description: URL Filter description.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The device group in which to create the URL filter (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a URL filtering rule.
+ execution: false
+ name: panorama-create-url-filter
+ outputs:
+ - contextPath: Panorama.URLFilter.Name
+ description: URL Filter name.
+ type: string
+ - contextPath: Panorama.URLFilter.Category.Name
+ description: URL Filter category name.
+ type: string
+ - contextPath: Panorama.URLFilter.Category.Action
+ description: Action for the URL category.
+ type: string
+ - contextPath: Panorama.URLFilter.OverrideAllowList
+ description: URL Filter override allow list.
+ type: string
+ - contextPath: Panorama.URLFilter.OverrideBlockList
+ description: URL Filter override block list.
+ type: string
+ - contextPath: Panorama.URLFilter.Description
+ description: URL Filter description.
+ type: string
+ - contextPath: Panorama.URLFilter.DeviceGroup
+ description: Device group for the URL Filter (Panorama instances).
+ type: string
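+ # Example (hypothetical values): blocking a URL category:
+ #   !panorama-create-url-filter name="block-gambling" url_category="gambling" action="block"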
+ - arguments:
+ - default: false
+ description: Name of the URL filter to edit.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Element to change. Can be "override_allow_list", "override_block_list", or "description".
+ isArray: false
+ name: element_to_change
+ predefined:
+ - override_allow_list
+ - override_block_list
+ - description
+ required: true
+ secret: false
+ - default: false
+ description: Element value. Limited to one value.
+ isArray: false
+ name: element_value
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: add
+ description: Whether to add or remove an element from the Allow List or Block List fields. Default is "add".
+ isArray: false
+ name: add_remove_element
+ predefined:
+ - add
+ - remove
+ required: false
+ secret: false
+ deprecated: false
+ description: Edits a URL filtering rule.
+ execution: false
+ name: panorama-edit-url-filter
+ outputs:
+ - contextPath: Panorama.URLFilter.Name
+ description: URL Filter name.
+ type: string
+ - contextPath: Panorama.URLFilter.Description
+ description: URL Filter description.
+ type: string
+ - contextPath: Panorama.URLFilter.Category.Name
+ description: URL Filter category.
+ type: string
+ - contextPath: Panorama.URLFilter.Action
+ description: Action for the URL category.
+ type: string
+ - contextPath: Panorama.URLFilter.OverrideAllowList
+ description: Allow Overrides for the URL category.
+ type: string
+ - contextPath: Panorama.URLFilter.OverrideBlockList
+ description: Block Overrides for the URL category.
+ type: string
+ - contextPath: Panorama.URLFilter.DeviceGroup
+ description: Device group for the URL Filter (Panorama instances).
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the URL filter rule to delete.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group of the URL filter to delete (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes a URL filtering rule.
+ execution: false
+ name: panorama-delete-url-filter
+ outputs:
+ - contextPath: Panorama.URLFilter.Name
+ description: URL filter rule name.
+ type: string
+ - contextPath: Panorama.URLFilter.DeviceGroup
+ description: Device group for the URL Filter (Panorama instances).
+ type: string
+ - arguments:
+ - default: false
+ description: The device group for which to return external dynamic lists (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of external dynamic lists.
+ execution: false
+ name: panorama-list-edls
+ outputs:
+ - contextPath: Panorama.EDL.Name
+ description: Name of the EDL.
+ type: string
+ - contextPath: Panorama.EDL.Type
+ description: The type of EDL.
+ type: string
+ - contextPath: Panorama.EDL.URL
+ description: URL in which the EDL is stored.
+ type: string
+ - contextPath: Panorama.EDL.Description
+ description: Description of the EDL.
+ type: string
+ - contextPath: Panorama.EDL.CertificateProfile
+ description: EDL certificate profile.
+ type: string
+ - contextPath: Panorama.EDL.Recurring
+ description: Time interval at which the EDL is pulled and updated.
+ type: string
+ - contextPath: Panorama.EDL.DeviceGroup
+ description: Device group for the EDL (Panorama instances).
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the EDL.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group of the EDL (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns information for an external dynamic list.
+ execution: false
+ name: panorama-get-edl
+ outputs:
+ - contextPath: Panorama.EDL.Name
+ description: Name of the EDL.
+ type: string
+ - contextPath: Panorama.EDL.Type
+ description: The type of EDL.
+ type: string
+ - contextPath: Panorama.EDL.URL
+ description: URL in which the EDL is stored.
+ type: string
+ - contextPath: Panorama.EDL.Description
+ description: Description of the EDL.
+ type: string
+ - contextPath: Panorama.EDL.CertificateProfile
+ description: EDL certificate profile.
+ type: string
+ - contextPath: Panorama.EDL.Recurring
+ description: Time interval at which the EDL is pulled and updated.
+ type: string
+ - contextPath: Panorama.EDL.DeviceGroup
+ description: Device group for the EDL (Panorama instances).
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the EDL.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: URL from which to pull the EDL.
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The type of EDL.
+ isArray: false
+ name: type
+ predefined:
+ - ip
+ - url
+ - domain
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Time interval for pulling and updating the EDL.
+ isArray: false
+ name: recurring
+ predefined:
+ - five-minute
+ - hourly
+ required: true
+ secret: false
+ - default: false
+ description: Certificate Profile name for the URL that was previously uploaded to PAN-OS.
+ isArray: false
+ name: certificate_profile
+ required: false
+ secret: false
+ - default: false
+ description: Description of the EDL.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The device group in which to create the EDL (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates an external dynamic list.
+ execution: false
+ name: panorama-create-edl
+ outputs:
+ - contextPath: Panorama.EDL.Name
+ description: Name of the EDL.
+ type: string
+ - contextPath: Panorama.EDL.Type
+ description: Type of the EDL.
+ type: string
+ - contextPath: Panorama.EDL.URL
+ description: URL in which the EDL is stored.
+ type: string
+ - contextPath: Panorama.EDL.Description
+ description: Description of the EDL.
+ type: string
+ - contextPath: Panorama.EDL.CertificateProfile
+ description: EDL certificate profile.
+ type: string
+ - contextPath: Panorama.EDL.Recurring
+ description: Time interval at which the EDL is pulled and updated.
+ type: string
+ - contextPath: Panorama.EDL.DeviceGroup
+ description: Device group for the EDL (Panorama instances).
+ type: string
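+ # Example (hypothetical values):
+ #   !panorama-create-edl name="bad-ips" url="https://example.com/feed.txt" type="ip" recurring="hourly"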
+ - arguments:
+ - default: false
+ description: Name of the external dynamic list to edit.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The element to change ("url", "recurring", "certificate_profile", "description").
+ isArray: false
+ name: element_to_change
+ predefined:
+ - url
+ - recurring
+ - certificate_profile
+ - description
+ required: true
+ secret: false
+ - default: false
+ description: The element value.
+ isArray: false
+ name: element_value
+ required: true
+ secret: false
+ deprecated: false
+ description: Modifies an element of an external dynamic list.
+ execution: false
+ name: panorama-edit-edl
+ outputs:
+ - contextPath: Panorama.EDL.Name
+ description: Name of the EDL.
+ type: string
+ - contextPath: Panorama.EDL.URL
+ description: URL where the EDL is stored.
+ type: string
+ - contextPath: Panorama.EDL.Description
+ description: Description of the EDL.
+ type: string
+ - contextPath: Panorama.EDL.CertificateProfile
+ description: EDL certificate profile.
+ type: string
+ - contextPath: Panorama.EDL.Recurring
+ description: Time interval at which the EDL is pulled and updated.
+ type: string
+ - contextPath: Panorama.EDL.DeviceGroup
+ description: Device group for the EDL (Panorama instances).
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the EDL to delete.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group of the EDL to delete (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes an external dynamic list.
+ execution: false
+ name: panorama-delete-edl
+ outputs:
+ - contextPath: Panorama.EDL.Name
+ description: Name of the EDL that was deleted.
+ type: string
+ - contextPath: Panorama.EDL.DeviceGroup
+ description: Device group for the EDL (Panorama instances).
+ type: string
+ - arguments:
+ - default: false
+ description: Name of the EDL to refresh.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The device group of the EDL (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Refreshes the specified external dynamic list.
+ execution: false
+ name: panorama-refresh-edl
+ - arguments:
+ - default: false
+ description: Name of the rule to create.
+ isArray: false
+ name: rulename
+ required: false
+ secret: false
+ - default: false
+ description: Description of the rule to create.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Action for the rule. Can be "allow", "deny", or "drop".
+ isArray: false
+ name: action
+ predefined:
+ - allow
+ - deny
+ - drop
+ required: true
+ secret: false
+ - default: false
+ description: Source address. Can be an address object or an address group.
+ isArray: false
+ name: source
+ required: false
+ secret: false
+ - default: false
+ description: Destination address. Can be an address object or an address group.
+ isArray: false
+ name: destination
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether to negate the source (address, address group). Can be "Yes"
+ or "No".
+ isArray: false
+ name: negate_source
+ predefined:
+ - 'Yes'
+ - 'No'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether to negate the destination (address, address group). Can
+ be "Yes" or "No".
+ isArray: false
+ name: negate_destination
+ predefined:
+ - 'Yes'
+ - 'No'
+ required: false
+ secret: false
+ - default: false
+ description: Service for the rule (service object) to create.
+ isArray: false
+ name: service
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'No'
+ description: Whether to disable the rule. Can be "Yes" or "No" (default is "No").
+ isArray: false
+ name: disable
+ predefined:
+ - 'Yes'
+ - 'No'
+ required: false
+ secret: false
+ - default: false
+ description: Application for the rule to create.
+ isArray: false
+ name: application
+ required: false
+ secret: false
+ - default: false
+ defaultValue: any
+ description: Source user for the rule to create.
+ isArray: false
+ name: source_user
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Pre rule or Post rule (Panorama instances).
+ isArray: false
+ name: pre_post
+ predefined:
+ - pre-rulebase
+ - post-rulebase
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a target firewall for the rule (Panorama instances).
+ isArray: false
+ name: target
+ required: false
+ secret: false
+ - default: false
+ description: Log forwarding profile.
+ isArray: false
+ name: log_forwarding
+ required: false
+ secret: false
+ - default: false
+ description: The device group in which to create the rule (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ - default: false
+ description: Tags for the new rule.
+ isArray: true
+ name: tags
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a policy rule.
+ execution: true
+ name: panorama-create-rule
+ outputs:
+ - contextPath: Panorama.SecurityRule.Name
+ description: Rule name.
+ type: string
+ - contextPath: Panorama.SecurityRule.Description
+ description: Rule description.
+ type: string
+ - contextPath: Panorama.SecurityRule.Action
+ description: Action for the rule.
+ type: string
+ - contextPath: Panorama.SecurityRule.Source
+ description: Source address.
+ type: string
+ - contextPath: Panorama.SecurityRule.Destination
+ description: Destination address.
+ type: string
+ - contextPath: Panorama.SecurityRule.NegateSource
+ description: Whether the source is negated (address, address group).
+ type: boolean
+ - contextPath: Panorama.SecurityRule.NegateDestination
+ description: Whether the destination is negated (address, address group).
+ type: boolean
+ - contextPath: Panorama.SecurityRule.Service
+ description: Service for the rule.
+ type: string
+ - contextPath: Panorama.SecurityRule.Disabled
+ description: Whether the rule is disabled.
+ type: string
+ - contextPath: Panorama.SecurityRule.Application
+ description: Application for the rule.
+ type: string
+ - contextPath: Panorama.SecurityRule.Target
+ description: Target firewall (Panorama instances).
+ type: string
+ - contextPath: Panorama.SecurityRule.LogForwarding
+ description: Log forwarding profile (Panorama instances).
+ type: string
+ - contextPath: Panorama.SecurityRule.DeviceGroup
+ description: Device group for the rule (Panorama instances).
+ type: string
+ - contextPath: Panorama.SecurityRule.Tags
+ description: Rule tags.
+ type: String
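+ # Example (hypothetical values): a minimal allow rule on a Panorama instance:
+ #   !panorama-create-rule rulename="allow-dns" action="allow" application="dns" pre_post="pre-rulebase" device-group="branch-firewalls"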
+ - arguments:
+ - default: false
+ description: Name of the custom block policy rule to create.
+ isArray: false
+ name: rulename
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Object type to block in the policy rule. Can be "ip", "address-group",
+ "application", "url-category", or "edl".
+ isArray: false
+ name: object_type
+ predefined:
+ - ip
+ - address-group
+ - application
+ - url-category
+ - edl
+ required: true
+ secret: false
+ - default: false
+ description: Object value.
+ isArray: false
+ name: object_value
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: both
+ description: Direction to block. Can be "to", "from", or "both". Default is
+ "both". This argument is not applicable to the "custom-url-category" object_type.
+ isArray: false
+ name: direction
+ predefined:
+ - to
+ - from
+ - both
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Pre rule or Post rule (Panorama instances).
+ isArray: false
+ name: pre_post
+ predefined:
+ - pre-rulebase
+ - post-rulebase
+ required: false
+ secret: false
+ - default: false
+ description: Specifies a target firewall for the rule (Panorama instances).
+ isArray: false
+ name: target
+ required: false
+ secret: false
+ - default: false
+ description: Log forwarding profile.
+ isArray: false
+ name: log_forwarding
+ required: false
+ secret: false
+ - default: false
+ description: The device group in which to create the rule (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ - default: false
+ description: Tags for the custom block policy rule.
+ isArray: true
+ name: tags
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a custom block policy rule.
+ execution: true
+ name: panorama-custom-block-rule
+ outputs:
+ - contextPath: Panorama.SecurityRule.Name
+ description: Rule name.
+ type: string
+ - contextPath: Panorama.SecurityRule.Object
+ description: Blocked object.
+ type: string
+ - contextPath: Panorama.SecurityRule.Direction
+ description: Direction blocked.
+ type: string
+ - contextPath: Panorama.SecurityRule.Target
+ description: Target firewall (Panorama instances).
+ type: string
+ - contextPath: Panorama.SecurityRule.LogForwarding
+ description: Log forwarding profile (Panorama instances).
+ type: string
+ - contextPath: Panorama.SecurityRule.DeviceGroup
+ description: Device group for the rule (Panorama instances).
+ type: string
+ - contextPath: Panorama.SecurityRule.Tags
+ description: Rule tags.
+ type: String
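+ # Example (hypothetical values): blocking traffic to and from a single IP:
+ #   !panorama-custom-block-rule object_type="ip" object_value="192.0.2.1" direction="both"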
+ - arguments:
+ - default: false
+ description: Name of the rule to move.
+ isArray: false
+ name: rulename
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Where to move the rule. Can be "before", "after", "top", or "bottom".
+ If you specify "before" or "after", you need to supply the "dst" argument.
+ isArray: false
+ name: where
+ predefined:
+ - before
+ - after
+ - top
+ - bottom
+ required: true
+ secret: false
+ - default: false
+ description: Destination rule relative to the rule you are moving. Relevant
+ only if you specify "before" or "after" for the "where" argument.
+ isArray: false
+ name: dst
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Rule location. Mandatory for Panorama instances.
+ isArray: false
+ name: pre_post
+ predefined:
+ - pre-rulebase
+ - post-rulebase
+ required: false
+ secret: false
+ - default: false
+ description: The device group of the rule (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Changes the location of a policy rule.
+ execution: true
+ name: panorama-move-rule
+ outputs:
+ - contextPath: Panorama.SecurityRule.Name
+ description: Rule name.
+ type: string
+ - contextPath: Panorama.SecurityRule.DeviceGroup
+ description: Device group for the rule (Panorama instances).
+ type: string
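+ # Example (hypothetical rule names): "before" and "after" are relative to dst:
+ #   !panorama-move-rule rulename="allow-dns" where="before" dst="deny-all" pre_post="pre-rulebase"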
+ - arguments:
+ - default: false
+ description: Name of the rule to edit.
+ isArray: false
+ name: rulename
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Parameter in the security rule to change. Can be 'source', 'destination',
+ 'application', 'action', 'category', 'description', 'disabled', 'target',
+ 'log-forwarding', or 'tag'.
+ isArray: false
+ name: element_to_change
+ predefined:
+ - source
+ - destination
+ - application
+ - action
+ - category
+ - description
+ - disabled
+ - target
+ - log-forwarding
+ - tag
+ required: true
+ secret: false
+ - default: false
+ description: New value for the parameter.
+ isArray: false
+ name: element_value
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Pre rule or Post rule (Panorama instances).
+ isArray: false
+ name: pre_post
+ predefined:
+ - pre-rulebase
+ - post-rulebase
+ required: false
+ secret: false
+ deprecated: false
+ description: Edits a policy rule.
+ execution: true
+ name: panorama-edit-rule
+ outputs:
+ - contextPath: Panorama.SecurityRule.Name
+ description: Rule name.
+ type: string
+ - contextPath: Panorama.SecurityRule.Description
+ description: Rule description.
+ type: string
+ - contextPath: Panorama.SecurityRule.Action
+ description: Action for the rule.
+ type: string
+ - contextPath: Panorama.SecurityRule.Source
+ description: Source address.
+ type: string
+ - contextPath: Panorama.SecurityRule.Destination
+ description: Destination address.
+ type: string
+ - contextPath: Panorama.SecurityRule.NegateSource
+ description: Whether the source is negated (address, address group).
+ type: boolean
+ - contextPath: Panorama.SecurityRule.NegateDestination
+ description: Whether the destination is negated (address, address group).
+ type: boolean
+ - contextPath: Panorama.SecurityRule.Service
+ description: Service for the rule.
+ type: string
+ - contextPath: Panorama.SecurityRule.Disabled
+ description: Whether the rule is disabled.
+ type: string
+ - contextPath: Panorama.SecurityRule.Application
+ description: Application for the rule.
+ type: string
+ - contextPath: Panorama.SecurityRule.Target
+ description: Target firewall (Panorama instances).
+ type: string
+ - contextPath: Panorama.SecurityRule.DeviceGroup
+ description: Device group for the rule (Panorama instances).
+ type: string
+ - contextPath: Panorama.SecurityRule.Tags
+ description: Tags for the rule.
+ type: String
+ - arguments:
+ - default: false
+ description: Name of the rule to delete.
+ isArray: false
+ name: rulename
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Pre rule or Post rule (Panorama instances).
+ isArray: false
+ name: pre_post
+ predefined:
+ - pre-rulebase
+ - post-rulebase
+ required: false
+ secret: false
+ - default: false
+ description: The device group of the rule to delete (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes a policy rule.
+ execution: true
+ name: panorama-delete-rule
+ outputs:
+ - contextPath: Panorama.SecurityRule.Name
+ description: Rule name.
+ type: string
+ - contextPath: Panorama.SecurityRule.DeviceGroup
+ description: Device group for the rule (Panorama instances).
+ type: string
+ - deprecated: false
+ description: Returns a list of predefined applications.
+ execution: false
+ name: panorama-list-applications
+ outputs:
+ - contextPath: Panorama.Applications.Name
+ description: Application name.
+ type: string
+ - contextPath: Panorama.Applications.Id
+ description: Application ID.
+ type: number
+ - contextPath: Panorama.Applications.Category
+ description: Application category.
+ type: string
+ - contextPath: Panorama.Applications.SubCategory
+ description: Application sub-category.
+ type: string
+ - contextPath: Panorama.Applications.Technology
+ description: Application technology.
+ type: string
+ - contextPath: Panorama.Applications.Risk
+ description: Application risk (1 to 5).
+ type: number
+ - contextPath: Panorama.Applications.Description
+ description: Application description.
+ type: string
+ - arguments:
+ - default: false
+ description: Job ID to check.
+ isArray: false
+ name: job_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns commit status for a configuration.
+ execution: false
+ name: panorama-commit-status
+ outputs:
+ - contextPath: Panorama.Commit.JobID
+ description: Job ID of the configuration to be committed.
+ type: number
+ - contextPath: Panorama.Commit.Status
+ description: Commit status.
+ type: string
+ - contextPath: Panorama.Commit.Details
+ description: Job ID details.
+ type: string
+ - arguments:
+ - default: false
+ description: Job ID to check.
+ isArray: false
+ name: job_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the push status for a configuration.
+ execution: false
+ name: panorama-push-status
+ outputs:
+ - contextPath: Panorama.Push.DeviceGroup
+ description: Device group to which the policies were pushed.
+ type: string
+ - contextPath: Panorama.Push.JobID
+ description: Job ID of the configuration to be pushed.
+ type: number
+ - contextPath: Panorama.Push.Status
+ description: Push status.
+ type: string
+ - contextPath: Panorama.Push.Details
+ description: Job ID details.
+ type: string
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Type of Packet Capture.
+ isArray: false
+ name: pcapType
+ predefined:
+ - application-pcap
+ - filter-pcap
+ - threat-pcap
+ - dlp-pcap
+ required: true
+ secret: false
+ - default: false
+ description: The file name for the PCAP type ('dlp-pcap', 'filter-pcap', or
+ 'application-pcap').
+ isArray: false
+ name: from
+ required: false
+ secret: false
+ - default: false
+ description: The new name for the PCAP file after downloading. If this argument
+ is not specified, the file name is the PCAP file name set in the firewall.
+ isArray: false
+ name: localName
+ required: false
+ secret: false
+ - default: false
+ description: Serial number for the request. For further information, see the
+ Panorama XML API Documentation.
+ isArray: false
+ name: serialNo
+ required: false
+ secret: false
+ - default: false
+ description: The search time for the request. For further information, see the
+ Panorama XML API Documentation.
+ isArray: false
+ name: searchTime
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the PCAP for the request. For further information, see
+ the Panorama XML API Documentation.
+ isArray: false
+ name: pcapID
+ required: false
+ secret: false
+ - default: false
+ description: Password for Panorama, needed for the 'dlp-pcap' PCAP type only.
+ isArray: false
+ name: password
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns information for a Panorama PCAP file. The recommended maximum
+ file size is 5 MB. If the limit is exceeded, you might need to SSH into the
+ firewall and run the scp export command to export the PCAP file. For more information,
+ see the Palo Alto Networks documentation.
+ execution: false
+ name: panorama-get-pcap
+ outputs:
+ - contextPath: File.Size
+ description: File size.
+ type: number
+ - contextPath: File.Name
+ description: File name.
+ type: string
+ - contextPath: File.Type
+ description: File type.
+ type: string
+ - contextPath: File.Info
+ description: File info.
+ type: string
+ - contextPath: File.Extension
+ description: File extension.
+ type: string
+ - contextPath: File.EntryID
+ description: File EntryID.
+ type: string
+ - contextPath: File.MD5
+ description: MD5 hash of the file.
+ type: string
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file.
+ type: string
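+ # Example (hypothetical values): a threat PCAP fetch might combine the ID-related
+ # arguments, e.g.:
+ #   !panorama-get-pcap pcapType="threat-pcap" pcapID="1200000000" serialNo="001606000000"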
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Type of Packet Capture.
+ isArray: false
+ name: pcapType
+ predefined:
+ - application-pcap
+ - filter-pcap
+ - threat-pcap
+ - dlp-pcap
+ required: true
+ secret: false
+ - default: false
+ description: Password for Panorama. Relevant for the 'dlp-pcap' PCAP type.
+ isArray: false
+ name: password
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of all PCAP files by PCAP type.
+ execution: false
+ name: panorama-list-pcaps
+ - arguments:
+ - default: false
+ description: Tag for which to register IP addresses.
+ isArray: false
+ name: tag
+ required: true
+ secret: false
+ - default: false
+ description: IP addresses to register.
+ isArray: true
+ name: IPs
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: Whether the IP addresses remain registered to the tag after the
+ device reboots ('true': persistent, 'false': non-persistent). Default is 'true'.
+ isArray: false
+ name: persistent
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Registers IP addresses to a tag.
+ execution: false
+ name: panorama-register-ip-tag
+ outputs:
+ - contextPath: Panorama.DynamicTags.Tag
+ description: Name of the tag.
+ type: string
+ - contextPath: Panorama.DynamicTags.IPs
+ description: Registered IP addresses.
+ type: string
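+ # Example (hypothetical values): registering two IPs to a tag used by dynamic address groups:
+ #   !panorama-register-ip-tag tag="quarantine" IPs="192.0.2.10,192.0.2.11"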
+ - arguments:
+ - default: false
+ description: Tag for which to unregister IP addresses.
+ isArray: false
+ name: tag
+ required: true
+ secret: false
+ - default: false
+ description: IP addresses to unregister.
+ isArray: true
+ name: IPs
+ required: true
+ secret: false
+ deprecated: false
+ description: Unregisters IP addresses from a tag.
+ execution: false
+ name: panorama-unregister-ip-tag
+ - arguments:
+ - default: false
+ description: Specifies the match criteria for the logs. This is similar to the
+ query provided in the web interface under the Monitor tab, when viewing the
+ logs.
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: The number of logs to retrieve. The default is 100. Maximum is
+ 5000.
+ isArray: false
+ name: number_of_logs
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: backward
+ description: |-
+ Whether logs are shown oldest first (forward) or newest
+ first (backward). Default is backward.
+ isArray: false
+ name: direction
+ predefined:
+ - backward
+ - forward
+ required: false
+ secret: false
+ - default: false
+ description: Source address for the query.
+ isArray: false
+ name: source
+ required: false
+ secret: false
+ - default: false
+ description: Destination address for the query.
+ isArray: false
+ name: destination
+ required: false
+ secret: false
+ - default: false
+ description: Date and time after which logs were received, in the format YYYY/MM/DD
+ HH:MM:SS.
+ isArray: false
+ name: receive_time
+ required: false
+ secret: false
+ - default: false
+ description: Application for the query.
+ isArray: false
+ name: application
+ required: false
+ secret: false
+ - default: false
+ description: Destination port for the query.
+ isArray: false
+ name: to_port
+ required: false
+ secret: false
+ - default: false
+ description: Action for the query.
+ isArray: false
+ name: action
+ required: false
+ secret: false
+ deprecated: true
+ description: Deprecated. Use the panorama-query-logs command instead.
+ execution: false
+ name: panorama-query-traffic-logs
+ outputs:
+ - contextPath: Panorama.TrafficLogs.JobID
+ description: Job ID of the traffic logs query.
+ type: Number
+ - contextPath: Panorama.TrafficLogs.Status
+ description: Status of the traffic logs query.
+ type: String
+ - arguments:
+ - default: false
+ description: Job ID of the query.
+ isArray: false
+ name: job_id
+ required: true
+ secret: false
+ deprecated: true
+ description: Deprecated. Use the panorama-check-logs-status command instead.
+ execution: false
+ name: panorama-check-traffic-logs-status
+ outputs:
+ - contextPath: Panorama.TrafficLogs.JobID
+ description: Job ID of the traffic logs query.
+ type: Number
+ - contextPath: Panorama.TrafficLogs.Status
+ description: Status of the traffic logs query.
+ type: String
+ - arguments:
+ - default: false
+ description: Job ID of the query.
+ isArray: false
+ name: job_id
+ required: true
+ secret: false
+ deprecated: true
+ description: Deprecated. Use the panorama-get-logs command instead.
+ execution: false
+ name: panorama-get-traffic-logs
+ outputs:
+ - contextPath: Panorama.TrafficLogs.JobID
+ description: Job ID of the traffic logs query.
+ type: Number
+ - contextPath: Panorama.TrafficLogs.Status
+ description: Status of the traffic logs query.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.Action
+ description: Action of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.ActionSource
+ description: Action source of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.Application
+ description: Application of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.Category
+ description: Category of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.DeviceName
+ description: Device name of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.Destination
+ description: Destination of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.DestinationPort
+ description: Destination port of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.FromZone
+ description: From zone of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.Protocol
+ description: Protocol of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.ReceiveTime
+ description: Receive time of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.Rule
+ description: Rule of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.SessionEndReason
+ description: Session end reason of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.Source
+ description: Source of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.SourcePort
+ description: Source port of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.StartTime
+ description: Start time of the traffic log.
+ type: String
+ - contextPath: Panorama.TrafficLogs.Logs.ToZone
+ description: To zone of the traffic log.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Rules location. Can be 'pre-rulebase' or 'post-rulebase'. Mandatory
+ for Panorama instances.
+ isArray: false
+ name: pre_post
+ predefined:
+ - pre-rulebase
+ - post-rulebase
+ required: false
+ secret: false
+ - default: false
+      description: The device group for which to return rules (Panorama instances).
+ isArray: false
+ name: device-group
+ required: false
+ secret: false
+ - default: false
+      description: Tag by which to filter the rules.
+ isArray: false
+ name: tag
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of predefined Security Rules.
+ execution: false
+ name: panorama-list-rules
+ outputs:
+ - contextPath: Panorama.SecurityRule.Name
+ description: Rule name.
+ type: String
+ - contextPath: Panorama.SecurityRule.Action
+ description: Action for the rule.
+ type: String
+ - contextPath: Panorama.SecurityRule.Location
+ description: Rule location.
+ type: String
+ - contextPath: Panorama.SecurityRule.Category
+ description: Rule category.
+ type: String
+ - contextPath: Panorama.SecurityRule.Application
+ description: Application for the rule.
+ type: String
+ - contextPath: Panorama.SecurityRule.Destination
+ description: Destination address.
+ type: String
+ - contextPath: Panorama.SecurityRule.From
+      description: From zone of the rule.
+ type: String
+ - contextPath: Panorama.SecurityRule.Service
+ description: Service for the rule.
+ type: String
+ - contextPath: Panorama.SecurityRule.To
+      description: To zone of the rule.
+ type: String
+ - contextPath: Panorama.SecurityRule.Source
+ description: Source address.
+ type: String
+ - contextPath: Panorama.SecurityRule.DeviceGroup
+ description: Device group for the rule (Panorama instances).
+      type: String
+    - contextPath: Panorama.SecurityRule.Tags
+ description: Rule tags.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The log type. Can be "threat", "traffic", "wildfire", "url", or
+ "data".
+ isArray: false
+ name: log-type
+ predefined:
+ - threat
+ - traffic
+ - wildfire
+ - url
+ - data
+ required: true
+ secret: false
+ - default: false
+ description: The query string by which to match criteria for the logs. This
+ is similar to the query provided in the web interface under the Monitor tab
+ when viewing the logs.
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: |-
+        The time the log was generated; returns logs generated at this time
+        and prior to it, e.g., "2019/08/11 01:10:44".
+ isArray: false
+ name: time-generated
+ required: false
+ secret: false
+ - default: false
+ description: Source address.
+ isArray: true
+ name: addr-src
+ required: false
+ secret: false
+ - default: false
+ description: Destination address.
+ isArray: true
+ name: addr-dst
+ required: false
+ secret: false
+ - default: false
+ description: Source zone.
+ isArray: true
+ name: zone-src
+ required: false
+ secret: false
+ - default: false
+      description: Destination zone.
+ isArray: true
+ name: zone-dst
+ required: false
+ secret: false
+ - default: false
+ description: Rule action.
+ isArray: true
+ name: action
+ required: false
+ secret: false
+ - default: false
+ description: Destination port.
+ isArray: true
+ name: port-dst
+ required: false
+ secret: false
+ - default: false
+      description: Rule name, e.g., "Allow all outbound".
+ isArray: true
+ name: rule
+ required: false
+ secret: false
+ - default: false
+      description: URL, e.g., "safebrowsing.googleapis.com".
+ isArray: true
+ name: url
+ required: false
+ secret: false
+ - default: false
+ description: File hash (for WildFire logs only).
+ isArray: true
+ name: filedigest
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '100'
+ description: Maximum number of logs to retrieve. If empty, the default is 100.
+ The maximum is 5,000.
+ isArray: false
+ name: number_of_logs
+ required: false
+ secret: false
+ deprecated: false
+ description: Query logs in Panorama.
+ execution: false
+ name: panorama-query-logs
+ outputs:
+ - contextPath: Panorama.Monitor.JobID
+ description: Job ID of the logs query.
+ type: String
+ - contextPath: Panorama.Monitor.Status
+ description: Status of the logs query.
+ type: String
+ - contextPath: Panorama.Monitor.Message
+ description: Message of the logs query.
+ type: String
+ - arguments:
+ - default: false
+ description: Job ID of the query.
+ isArray: false
+ name: job_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks the status of a logs query.
+ execution: false
+ name: panorama-check-logs-status
+ outputs:
+ - contextPath: Panorama.Monitor.JobID
+ description: Job ID of the logs query.
+ type: String
+ - contextPath: Panorama.Monitor.Status
+ description: Status of the logs query.
+ type: String
+ - arguments:
+ - default: false
+ description: Job ID of the query.
+ isArray: false
+ name: job_id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: 'true'
+      description: Whether to ignore auto-extract for the War Room entry. If "true",
+        the entry is not auto-extracted. If "false", the entry is auto-extracted.
+        Default is "true".
+ isArray: false
+ name: ignore_auto_extract
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves the data of a logs query.
+ execution: false
+ name: panorama-get-logs
+ outputs:
+ - contextPath: Panorama.Monitor.Logs.Action
+ description: Action taken for the session. Can be "alert", "allow", "deny",
+ "drop", "drop-all-packets", "reset-client", "reset-server", "reset-both",
+ or "block-url".
+ type: String
+ - contextPath: Panorama.Monitor.Logs.Application
+ description: Application associated with the session.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.Category
+ description: The URL category of the URL subtype. For WildFire subtype, it is
+        the verdict on the file, and can be either "malicious", "phishing", "grayware",
+ or "benign". For other subtypes, the value is "any".
+ type: String
+ - contextPath: Panorama.Monitor.Logs.DeviceName
+ description: The hostname of the firewall on which the session was logged.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.DestinationAddress
+ description: Original session destination IP address.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.DestinationUser
+ description: Username of the user to which the session was destined.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.DestinationCountry
+ description: Destination country or internal region for private addresses. Maximum
+ length is 32 bytes.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.DestinationPort
+ description: Destination port utilized by the session.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.FileDigest
+      description: Only for the WildFire subtype; all other types do not use this
+ field. The filedigest string shows the binary hash of the file sent to be
+ analyzed by the WildFire service.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.FileName
+ description: |-
+ File name or file type when the subtype is file.
+ File name when the subtype is virus.
+ File name when the subtype is wildfire-virus.
+ File name when the subtype is wildfire.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.FileType
+ description: |-
+        Only for the WildFire subtype; all other types do not use this field.
+ Specifies the type of file that the firewall forwarded for WildFire analysis.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.FromZone
+ description: The zone from which the session was sourced.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.URLOrFilename
+ description: |-
+ The actual URL when the subtype is url.
+ File name or file type when the subtype is file.
+ File name when the subtype is virus.
+ File name when the subtype is wildfire-virus.
+ File name when the subtype is wildfire.
+ URL or file name when the subtype is vulnerability (if applicable).
+ type: String
+ - contextPath: Panorama.Monitor.Logs.NATDestinationIP
+      description: If destination NAT was performed, the post-NAT destination IP address.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.NATDestinationPort
+ description: Post-NAT destination port.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.NATSourceIP
+      description: If source NAT was performed, the post-NAT source IP address.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.NATSourcePort
+ description: Post-NAT source port.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.PCAPid
+ description: |-
+        The packet capture (pcap) ID is a 64-bit unsigned integer used to
+        correlate threat pcap files with extended pcaps taken as part of
+        that flow. All threat logs contain either a pcap_id of 0 (no associated
+        pcap) or an ID referencing the extended pcap file.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.IPProtocol
+ description: IP protocol associated with the session.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.Recipient
+ description: |-
+        Only for the WildFire subtype; all other types do not use this field.
+ Specifies the name of the receiver of an email that WildFire determined to be malicious when analyzing an email link forwarded by the firewall.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.Rule
+ description: Name of the rule that the session matched.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.RuleID
+ description: ID of the rule that the session matched.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.ReceiveTime
+ description: Time the log was received at the management plane.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.Sender
+ description: |-
+ Only for the WildFire subtype; all other types do not use this field.
+ Specifies the name of the sender of an email that WildFire determined to be malicious when analyzing an email link forwarded by the firewall.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.SessionID
+ description: An internal numerical identifier applied to each session.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.DeviceSN
+ description: The serial number of the firewall on which the session was logged.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.Severity
+ description: |-
+ Severity associated with the threat. Can be "informational", "low",
+ "medium", "high", or "critical".
+ type: String
+ - contextPath: Panorama.Monitor.Logs.SourceAddress
+ description: Original session source IP address.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.SourceCountry
+ description: |-
+ Source country or internal region for private addresses. Maximum
+ length is 32 bytes.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.SourceUser
+ description: Username of the user who initiated the session.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.SourcePort
+ description: Source port utilized by the session.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.ThreatCategory
+ description: |-
+ Describes threat categories used to classify different types of
+ threat signatures.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.Name
+ description: |-
+ Palo Alto Networks identifier for the threat. It is a description
+        string followed by a 64-bit numerical identifier.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.ID
+ description: Palo Alto Networks ID for the threat.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.ToZone
+ description: The zone to which the session was destined.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.TimeGenerated
+ description: Time that the log was generated on the dataplane.
+ type: String
+ - contextPath: Panorama.Monitor.Logs.URLCategoryList
+ description: |-
+ A list of the URL filtering categories that the firewall used to
+ enforce the policy.
+ type: String
+ dockerimage: demisto/python3:3.7.3.221
+ isfetch: false
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- palo_alto_firewall_test_pb
+- palo_alto_panorama_test_pb
+fromversion: 3.0.0
diff --git a/Integrations/Panorama/Panorama_description.md b/Integrations/Panorama/Panorama_description.md
new file mode 100644
index 000000000000..d2975e358250
--- /dev/null
+++ b/Integrations/Panorama/Panorama_description.md
@@ -0,0 +1,17 @@
+The integration uses the Panorama XML API.
+To obtain an API key, run the following REST command and copy the key:
+https://[PanoramaIP]/api/?type=keygen&user=[user]&password=[password]
+
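+For example, a minimal sketch of retrieving the key with Python (the placeholder
+host and credentials are illustrative):
+
+```python
+import requests
+import xml.etree.ElementTree as ET
+
+# The keygen endpoint returns XML of the form:
+# <response status="success"><result><key>...</key></result></response>
+res = requests.get(
+    'https://PANORAMA_IP/api/',
+    params={'type': 'keygen', 'user': 'USER', 'password': 'PASSWORD'},
+    verify=False,  # only if Panorama uses a self-signed certificate
+)
+api_key = ET.fromstring(res.text).find('./result/key').text
+print(api_key)
+```
+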
+For more information, visit the [Palo Alto Networks documentation](https://www.paloaltonetworks.com/documentation).
+
+---
+You need to create a separate integration instance for Palo Alto Networks Firewall and Palo Alto Networks Panorama. Unless specified otherwise, all commands are valid for both Firewall and Panorama.
+
+---
+### Firewall: Configure the vsys
+- The vsys is located in the Firewall URL; e.g., https://[server]#device::[vsys]::device/setup
+
+### Panorama: Configure a device group
+- Access the Panorama UI.
+- Go to Panorama --> Device Groups.
+- Choose a device group name.
\ No newline at end of file
diff --git a/Integrations/Panorama/Panorama_image.png b/Integrations/Panorama/Panorama_image.png
new file mode 100644
index 000000000000..ff44f2d6baac
Binary files /dev/null and b/Integrations/Panorama/Panorama_image.png differ
diff --git a/Integrations/Panorama/Panorama_test.py b/Integrations/Panorama/Panorama_test.py
new file mode 100644
index 000000000000..7122b2e34ea9
--- /dev/null
+++ b/Integrations/Panorama/Panorama_test.py
@@ -0,0 +1,160 @@
+import pytest
+
+import demistomock as demisto
+
+integration_params = {
+ 'port': '22',
+ 'vsys': 'vsys1',
+ 'server': '1.1.1.1'
+}
+
+
+@pytest.fixture(autouse=True)
+def set_params(mocker):
+ mocker.patch.object(demisto, 'params', return_value=integration_params)
+
+
+def test_add_argument_list():
+ from Panorama import add_argument_list
+ list_argument = ["foo", "bar"]
+
+ response_with_member = add_argument_list(list_argument, "test", True)
+    expected_with_member = '<test><member>foo</member><member>bar</member></test>'
+ assert response_with_member == expected_with_member
+
+ response_with_member_field_name = add_argument_list(list_argument, "member", True)
+    expected_with_member_field_name = '<member>foo</member><member>bar</member>'
+ assert response_with_member_field_name == expected_with_member_field_name
+
+
+def test_add_argument():
+ from Panorama import add_argument
+ argument = "foo"
+
+ response_with_member = add_argument(argument, "test", True)
+    expected_with_member = '<test><member>foo</member></test>'
+ assert response_with_member == expected_with_member
+
+ response_without_member = add_argument(argument, "test", False)
+    expected_without_member = '<test>foo</test>'
+ assert response_without_member == expected_without_member
+
+
+def test_add_argument_yes_no():
+ from Panorama import add_argument_yes_no
+ arg = 'No'
+ field = 'test'
+ option = True
+
+ response_option_true = add_argument_yes_no(arg, field, option)
+    expected_option_true = '<option><test>no</test></option>'
+ assert response_option_true == expected_option_true
+
+ option = False
+ response_option_false = add_argument_yes_no(arg, field, option)
+    expected_option_false = '<test>no</test>'
+ assert response_option_false == expected_option_false
+
+
+def test_add_argument_target():
+ from Panorama import add_argument_target
+ response = add_argument_target('foo', 'bar')
+    expected = '<bar><devices><entry name="foo"/></devices></bar>'
+ assert response == expected
+
+
+def test_prettify_addresses_arr():
+ from Panorama import prettify_addresses_arr
+ addresses_arr = [{'@name': 'my_name', 'fqdn': 'a.com'},
+ {'@name': 'my_name2', 'fqdn': 'b.com'}]
+ response = prettify_addresses_arr(addresses_arr)
+ expected = [{'Name': 'my_name', 'FQDN': 'a.com'},
+ {'Name': 'my_name2', 'FQDN': 'b.com'}]
+ assert response == expected
+
+
+def test_prettify_address():
+ from Panorama import prettify_address
+ address = {'@name': 'my_name', 'ip-netmask': '1.1.1.1', 'description': 'lala'}
+ response = prettify_address(address)
+ expected = {'Name': 'my_name', 'IP_Netmask': '1.1.1.1', 'Description': 'lala'}
+ assert response == expected
+
+
+def test_prettify_address_group():
+ from Panorama import prettify_address_group
+ address_group_static = {'@name': 'foo', 'static': {'member': 'address object'}}
+ response_static = prettify_address_group(address_group_static)
+ expected_address_group_static = {'Name': 'foo', 'Type': 'static', 'Addresses': 'address object'}
+ assert response_static == expected_address_group_static
+
+ address_group_dynamic = {'@name': 'foo', 'dynamic': {'filter': '1.1.1.1 and 2.2.2.2'}}
+ response_dynamic = prettify_address_group(address_group_dynamic)
+ expected_address_group_dynamic = {'Name': 'foo', 'Type': 'dynamic', 'Match': '1.1.1.1 and 2.2.2.2'}
+ assert response_dynamic == expected_address_group_dynamic
+
+
+def test_prettify_service():
+ from Panorama import prettify_service
+ service = {'@name': 'service_name', 'description': 'foo', 'protocol': {'tcp': {'port': '443'}}}
+ response = prettify_service(service)
+ expected = {'Name': 'service_name', 'Description': 'foo', 'Protocol': 'tcp', 'DestinationPort': '443'}
+ assert response == expected
+
+
+def test_prettify_service_group():
+ from Panorama import prettify_service_group
+ service_group = {'@name': 'sg', 'members': {'member': ['service1', 'service2']}}
+ response = prettify_service_group(service_group)
+ expected = {'Name': 'sg', 'Services': ['service1', 'service2']}
+ assert response == expected
+
+
+def test_prettify_custom_url_category():
+ from Panorama import prettify_custom_url_category
+ custom_url_category = {'@name': 'foo', 'list': {'member': ['a', 'b', 'c']}}
+ response = prettify_custom_url_category(custom_url_category)
+ expected = {'Name': 'foo', 'Sites': ['a', 'b', 'c']}
+ assert response == expected
+
+
+def test_prettify_edl():
+ from Panorama import prettify_edl
+ edl = {'@name': 'edl_name', 'type': {'my_type': {'url': 'abc.com', 'description': 'my_desc'}}}
+ response = prettify_edl(edl)
+ expected = {'Name': 'edl_name', 'Type': 'my_type', 'URL': 'abc.com', 'Description': 'my_desc'}
+ assert response == expected
+
+
+def test_build_traffic_logs_query():
+ # (addr.src in 192.168.1.222) and (app eq netbios-dg) and (action eq allow) and (port.dst eq 138)
+ from Panorama import build_traffic_logs_query
+ source = '192.168.1.222'
+ application = 'netbios-dg'
+ action = 'allow'
+ to_port = '138'
+ response = build_traffic_logs_query(source, None, None, application, to_port, action)
+ expected = '(addr.src in 192.168.1.222) and (app eq netbios-dg) and (port.dst eq 138) and (action eq allow)'
+ assert response == expected
+
+
+def test_prettify_traffic_logs():
+ from Panorama import prettify_traffic_logs
+ traffic_logs = [{'action': 'my_action1', 'category': 'my_category1', 'rule': 'my_rule1'},
+ {'action': 'my_action2', 'category': 'my_category2', 'rule': 'my_rule2'}]
+ response = prettify_traffic_logs(traffic_logs)
+ expected = [{'Action': 'my_action1', 'Category': 'my_category1', 'Rule': 'my_rule1'},
+ {'Action': 'my_action2', 'Category': 'my_category2', 'Rule': 'my_rule2'}]
+ assert response == expected
+
+
+def test_prettify_logs():
+ from Panorama import prettify_logs
+ traffic_logs = [{'action': 'my_action1', 'category': 'my_category1', 'rule': 'my_rule1', 'natdport': '100'},
+ {'action': 'my_action2', 'category': 'my_category2', 'rule': 'my_rule2', 'natdport': '101'}]
+ response = prettify_logs(traffic_logs)
+ expected = [{'Action': 'my_action1', 'CategoryOrVerdict': 'my_category1', 'Rule': 'my_rule1',
+ 'NATDestinationPort': '100'},
+ {'Action': 'my_action2', 'CategoryOrVerdict': 'my_category2', 'Rule': 'my_rule2',
+ 'NATDestinationPort': '101'}]
+ assert response == expected
diff --git a/Integrations/Panorama/Pipfile b/Integrations/Panorama/Pipfile
new file mode 100644
index 000000000000..3f4c5a84e70a
--- /dev/null
+++ b/Integrations/Panorama/Pipfile
@@ -0,0 +1,19 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+flake8 = "*"
+
+[packages]
+requests = "*"
+pancloud = "*"
+crypto = "*"
+cryptography = "*"
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/Panorama/Pipfile.lock b/Integrations/Panorama/Pipfile.lock
new file mode 100644
index 000000000000..27fb5db65dfd
--- /dev/null
+++ b/Integrations/Panorama/Pipfile.lock
@@ -0,0 +1,387 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "044778e20682923f75aa6dcac76202d8c6b19a4e0bf72738fa9e081affe78db6"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "arrow": {
+ "hashes": [
+ "sha256:3397e5448952e18e1295bf047014659effa5ae8da6a5371d37ff0ddc46fa6872",
+ "sha256:6f54d9f016c0b7811fac9fb8c2c7fa7421d80c54dbdd75ffb12913c55db60b8a"
+ ],
+ "version": "==0.13.1"
+ },
+ "asn1crypto": {
+ "hashes": [
+ "sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87",
+ "sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
+ ],
+ "version": "==0.24.0"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5",
+ "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"
+ ],
+ "version": "==2019.3.9"
+ },
+ "cffi": {
+ "hashes": [
+ "sha256:041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774",
+ "sha256:046ef9a22f5d3eed06334d01b1e836977eeef500d9b78e9ef693f9380ad0b83d",
+ "sha256:066bc4c7895c91812eff46f4b1c285220947d4aa46fa0a2651ff85f2afae9c90",
+ "sha256:066c7ff148ae33040c01058662d6752fd73fbc8e64787229ea8498c7d7f4041b",
+ "sha256:2444d0c61f03dcd26dbf7600cf64354376ee579acad77aef459e34efcb438c63",
+ "sha256:300832850b8f7967e278870c5d51e3819b9aad8f0a2c8dbe39ab11f119237f45",
+ "sha256:34c77afe85b6b9e967bd8154e3855e847b70ca42043db6ad17f26899a3df1b25",
+ "sha256:46de5fa00f7ac09f020729148ff632819649b3e05a007d286242c4882f7b1dc3",
+ "sha256:4aa8ee7ba27c472d429b980c51e714a24f47ca296d53f4d7868075b175866f4b",
+ "sha256:4d0004eb4351e35ed950c14c11e734182591465a33e960a4ab5e8d4f04d72647",
+ "sha256:4e3d3f31a1e202b0f5a35ba3bc4eb41e2fc2b11c1eff38b362de710bcffb5016",
+ "sha256:50bec6d35e6b1aaeb17f7c4e2b9374ebf95a8975d57863546fa83e8d31bdb8c4",
+ "sha256:55cad9a6df1e2a1d62063f79d0881a414a906a6962bc160ac968cc03ed3efcfb",
+ "sha256:5662ad4e4e84f1eaa8efce5da695c5d2e229c563f9d5ce5b0113f71321bcf753",
+ "sha256:59b4dc008f98fc6ee2bb4fd7fc786a8d70000d058c2bbe2698275bc53a8d3fa7",
+ "sha256:73e1ffefe05e4ccd7bcea61af76f36077b914f92b76f95ccf00b0c1b9186f3f9",
+ "sha256:a1f0fd46eba2d71ce1589f7e50a9e2ffaeb739fb2c11e8192aa2b45d5f6cc41f",
+ "sha256:a2e85dc204556657661051ff4bab75a84e968669765c8a2cd425918699c3d0e8",
+ "sha256:a5457d47dfff24882a21492e5815f891c0ca35fefae8aa742c6c263dac16ef1f",
+ "sha256:a8dccd61d52a8dae4a825cdbb7735da530179fea472903eb871a5513b5abbfdc",
+ "sha256:ae61af521ed676cf16ae94f30fe202781a38d7178b6b4ab622e4eec8cefaff42",
+ "sha256:b012a5edb48288f77a63dba0840c92d0504aa215612da4541b7b42d849bc83a3",
+ "sha256:d2c5cfa536227f57f97c92ac30c8109688ace8fa4ac086d19d0af47d134e2909",
+ "sha256:d42b5796e20aacc9d15e66befb7a345454eef794fdb0737d1af593447c6c8f45",
+ "sha256:dee54f5d30d775f525894d67b1495625dd9322945e7fee00731952e0368ff42d",
+ "sha256:e070535507bd6aa07124258171be2ee8dfc19119c28ca94c9dfb7efd23564512",
+ "sha256:e1ff2748c84d97b065cc95429814cdba39bcbd77c9c85c89344b317dc0d9cbff",
+ "sha256:ed851c75d1e0e043cbf5ca9a8e1b13c4c90f3fbd863dacb01c0808e2b5204201"
+ ],
+ "version": "==1.12.3"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "crypto": {
+ "hashes": [
+ "sha256:8f2ee9756a0265c18845ac097ae447c75cfbde158abe1361b7491619f866a9bd",
+ "sha256:985120aa86f71545388199f96a2a0e00f7ccfe5ecd14c56355eb399e1a63d164"
+ ],
+ "index": "pypi",
+ "version": "==1.4.1"
+ },
+ "cryptography": {
+ "hashes": [
+ "sha256:066f815f1fe46020877c5983a7e747ae140f517f1b09030ec098503575265ce1",
+ "sha256:210210d9df0afba9e000636e97810117dc55b7157c903a55716bb73e3ae07705",
+ "sha256:26c821cbeb683facb966045e2064303029d572a87ee69ca5a1bf54bf55f93ca6",
+ "sha256:2afb83308dc5c5255149ff7d3fb9964f7c9ee3d59b603ec18ccf5b0a8852e2b1",
+ "sha256:2db34e5c45988f36f7a08a7ab2b69638994a8923853dec2d4af121f689c66dc8",
+ "sha256:409c4653e0f719fa78febcb71ac417076ae5e20160aec7270c91d009837b9151",
+ "sha256:45a4f4cf4f4e6a55c8128f8b76b4c057027b27d4c67e3fe157fa02f27e37830d",
+ "sha256:48eab46ef38faf1031e58dfcc9c3e71756a1108f4c9c966150b605d4a1a7f659",
+ "sha256:6b9e0ae298ab20d371fc26e2129fd683cfc0cfde4d157c6341722de645146537",
+ "sha256:6c4778afe50f413707f604828c1ad1ff81fadf6c110cb669579dea7e2e98a75e",
+ "sha256:8c33fb99025d353c9520141f8bc989c2134a1f76bac6369cea060812f5b5c2bb",
+ "sha256:9873a1760a274b620a135054b756f9f218fa61ca030e42df31b409f0fb738b6c",
+ "sha256:9b069768c627f3f5623b1cbd3248c5e7e92aec62f4c98827059eed7053138cc9",
+ "sha256:9e4ce27a507e4886efbd3c32d120db5089b906979a4debf1d5939ec01b9dd6c5",
+ "sha256:acb424eaca214cb08735f1a744eceb97d014de6530c1ea23beb86d9c6f13c2ad",
+ "sha256:c8181c7d77388fe26ab8418bb088b1a1ef5fde058c6926790c8a0a3d94075a4a",
+ "sha256:d4afbb0840f489b60f5a580a41a1b9c3622e08ecb5eec8614d4fb4cd914c4460",
+ "sha256:d9ed28030797c00f4bc43c86bf819266c76a5ea61d006cd4078a93ebf7da6bfd",
+ "sha256:e603aa7bb52e4e8ed4119a58a03b60323918467ef209e6ff9db3ac382e5cf2c6"
+ ],
+ "index": "pypi",
+ "version": "==2.6.1"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "jmespath": {
+ "hashes": [
+ "sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6",
+ "sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c"
+ ],
+ "version": "==0.9.4"
+ },
+ "naked": {
+ "hashes": [
+ "sha256:12b76b8a14595d07039422f1d2219ca8fbef8b237f9cdf5d8e947c03e148677e",
+ "sha256:19de9961f4edb29e75cf837e8e031d6b52fbba4f0033515893d26f69c74b3b1f"
+ ],
+ "version": "==0.1.31"
+ },
+ "pancloud": {
+ "hashes": [
+ "sha256:374ca770405f9bfda69489ad9cd1ef3d716287f584771566cf9ff6d22f189a4e"
+ ],
+ "index": "pypi",
+ "version": "==1.5.1"
+ },
+ "pycparser": {
+ "hashes": [
+ "sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
+ ],
+ "version": "==2.19"
+ },
+ "python-dateutil": {
+ "hashes": [
+ "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb",
+ "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"
+ ],
+ "version": "==2.8.0"
+ },
+ "pyyaml": {
+ "hashes": [
+ "sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c",
+ "sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95",
+ "sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2",
+ "sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4",
+ "sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad",
+ "sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba",
+ "sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1",
+ "sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e",
+ "sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673",
+ "sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13",
+ "sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19"
+ ],
+ "version": "==5.1"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
+ "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
+ ],
+ "index": "pypi",
+ "version": "==2.21.0"
+ },
+ "shellescape": {
+ "hashes": [
+ "sha256:3ff2aeb6ce2c5a4e6059fe4a2a745a824f5a3834fe8365a39c5ea691073cfdb6",
+ "sha256:e618b2bc13f2553315ca1669995dc10fcc2cae5f1e0fda49035ef02d56f0b358"
+ ],
+ "version": "==3.4.1"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "tinydb": {
+ "hashes": [
+ "sha256:260b1f69168a24518af63f0270c72dc026476607016a0105aef6a966d8d2fbdc",
+ "sha256:a05c4c81e6e867c4f8a2e51c5236d0d897019aa5e9296f5947455b0bdd3c519d"
+ ],
+ "version": "==3.13.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4",
+ "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb"
+ ],
+ "version": "==1.24.3"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661",
+ "sha256:a796a115208f5c03b18f332f7c11729812c8c3ded6c46319c59b53efd3819da8"
+ ],
+ "index": "pypi",
+ "version": "==3.7.7"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:1349c6f7c2a0f7539f5f2ace51a9a8e4a37086ce4de6f78f5f53fb041d0a3cd5",
+ "sha256:f09911f6eb114e5592abe635aded8bf3d2c3144ebcfcaf81ee32e7af7b7d1870"
+ ],
+ "version": "==4.3.18"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:2112d2ca570bb7c3e53ea1a35cd5df42bb0fd10c45f0fb97178679c3c03d64c7",
+ "sha256:c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a"
+ ],
+ "markers": "python_version > '2.7'",
+ "version": "==7.0.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:25a1bc1d148c9a640211872b4ff859878d422bccb59c9965e04eed468a0aa180",
+ "sha256:964cedd2b27c492fbf0b7f58b3284a09cf7f99b0f715941fb24a439b3af1bd1a"
+ ],
+ "version": "==0.11.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:1a8aa4fa958f8f451ac5441f3ac130d9fc86ea38780dd2715e6d5c5882700b24",
+ "sha256:b8bf138592384bd4e87338cb0f256bf5f615398a649d4bd83915f0e4047a5ca6"
+ ],
+ "index": "pypi",
+ "version": "==4.5.0"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:132eae51d6ef3ff4a8c47c393a4ef5ebf0d1aecc96880eb5d6c8ceab7017cc9b",
+ "sha256:18141c1484ab8784006c839be8b985cfc82a2e9725837b0ecfa0203f71c4e39d",
+ "sha256:2baf617f5bbbfe73fd8846463f5aeafc912b5ee247f410700245d68525ec584a",
+ "sha256:3d90063f2cbbe39177e9b4d888e45777012652d6110156845b828908c51ae462",
+ "sha256:4304b2218b842d610aa1a1d87e1dc9559597969acc62ce717ee4dfeaa44d7eee",
+ "sha256:4983ede548ffc3541bae49a82675996497348e55bafd1554dc4e4a5d6eda541a",
+ "sha256:5315f4509c1476718a4825f45a203b82d7fdf2a6f5f0c8f166435975b1c9f7d4",
+ "sha256:6cdfb1b49d5345f7c2b90d638822d16ba62dc82f7616e9b4caa10b72f3f16649",
+ "sha256:7b325f12635598c604690efd7a0197d0b94b7d7778498e76e0710cd582fd1c7a",
+ "sha256:8d3b0e3b8626615826f9a626548057c5275a9733512b137984a68ba1598d3d2f",
+ "sha256:8f8631160c79f53081bd23446525db0bc4c5616f78d04021e6e434b286493fd7",
+ "sha256:912de10965f3dc89da23936f1cc4ed60764f712e5fa603a09dd904f88c996760",
+ "sha256:b010c07b975fe853c65d7bbe9d4ac62f1c69086750a574f6292597763781ba18",
+ "sha256:c908c10505904c48081a5415a1e295d8403e353e0c14c42b6d67f8f97fae6616",
+ "sha256:c94dd3807c0c0610f7c76f078119f4ea48235a953512752b9175f9f98f5ae2bd",
+ "sha256:ce65dee7594a84c466e79d7fb7d3303e7295d16a83c22c7c4037071b059e2c21",
+ "sha256:eaa9cfcb221a8a4c2889be6f93da141ac777eb8819f077e1d09fb12d00a09a93",
+ "sha256:f3376bc31bad66d46d44b4e6522c5c21976bf9bca4ef5987bb2bf727f4506cbb",
+ "sha256:f9202fa138544e13a4ec1a6792c35834250a85958fde1251b6a22e07d1260ae7"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.3.5"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/PerceptionPoint/PerceptionPoint.py b/Integrations/PerceptionPoint/PerceptionPoint.py
new file mode 100644
index 000000000000..08f318bdee35
--- /dev/null
+++ b/Integrations/PerceptionPoint/PerceptionPoint.py
@@ -0,0 +1,181 @@
+import demistomock as demisto
+from CommonServerPython import *
+
+''' IMPORTS'''
+import requests
+import json
+from collections import defaultdict
+
+''' INTEGRATION PARAMS '''
+URL = 'http://api.perception-point.io/api/v1/{endpoint}' # disable-secrets-detection
+INCIDENTS_ENDPOINT = 'scans/incidents/'
+RELEASE_ENDPOINT = 'quarantine/release/{id_}'
+
+USER_PARAMS = demisto.params()
+SECURED = not USER_PARAMS.get('insecure', False)
+PP_TOKEN = USER_PARAMS.get('pp_token', None)
+if PP_TOKEN is None:
+ return_error('Perception Point token is mandatory. '
+ 'Please enter your token or contact PerceptionPoint support for assistance')
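+# The "Number of API loops" parameter bounds pagination; fall back to one loop
+# on missing or invalid input.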
+try:
+ API_MAX_LOOPS = int(USER_PARAMS.get('api_loops', 1))
+except Exception:
+ API_MAX_LOOPS = 1
+HEADER = {'Authorization': f'Token {PP_TOKEN}'}
+
+''' CONSTANTS '''
+RELEASE = 'release'
+LIST = 'list'
+API_ACTIONS_DICT = {RELEASE: RELEASE_ENDPOINT,
+ LIST: INCIDENTS_ENDPOINT}
+SPAM = 'SPM'
+BLOCKED = 'BLK'
+MALICIOUS = 'MAL'
+
+API_CURSOR_ARG = '_cursor'
+
+VERBOSE_VERDICT_PARAM = 'verbose_verdict[]'
+
+FETCH_INCIDENTS_TYPE = [{'demisto_param': 'fetch_malicious',
+ 'req_pname': VERBOSE_VERDICT_PARAM,
+ 'req_pval': MALICIOUS},
+ {'demisto_param': 'fetch_blocked',
+ 'req_pname': VERBOSE_VERDICT_PARAM,
+ 'req_pval': BLOCKED},
+ {'demisto_param': 'fetch_spam',
+ 'req_pname': VERBOSE_VERDICT_PARAM,
+ 'req_pval': SPAM}]
+
+''' HELPER FUNCTIONS '''
+
+
+def build_fetch_incident_types(fetch_blocked, fetch_malicious, fetch_spam):
+ fetch_type_dict = defaultdict(list) # type: ignore
+ fetch_select = {
+ 'fetch_blocked': fetch_blocked,
+ 'fetch_malicious': fetch_malicious,
+ 'fetch_spam': fetch_spam
+ }
+ for darg in FETCH_INCIDENTS_TYPE:
+ darg_input = fetch_select.get(darg['demisto_param'])
+ if darg_input:
+ fetch_type_dict[darg['req_pname']].append(darg.get('req_pval', darg_input))
+ return dict(fetch_type_dict)
+
+
+def create_incident(record):
+ record.pop('Attachment', None)
+ record['RawJSON'] = json.dumps(record)
+ return record
+
+
+def collect_incidents(params):
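+    # Page through the incidents endpoint, following 'next' links for at most
+    # API_MAX_LOOPS pages, and convert each record into an incident dict.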
+ list_url = build_request_url(LIST)
+ api_res = get_pp_api_result(list_url, params)
+ num_of_results = api_res.get('count')
+ incidents = [] # type: list
+ api_loops = 0
+ while num_of_results and api_loops < API_MAX_LOOPS:
+ incidents += map(create_incident, api_res.get('results'))
+ if api_res.get('next'):
+ api_res = get_pp_api_result(api_res.get('next'), {})
+ num_of_results = api_res.get('count')
+ api_loops += 1
+ return incidents
+
+
+def report_incidents(incidents_list):
+ demisto.incidents(incidents_list)
+
+
+def get_pp_api_result(url, params):
+ try:
+ res = requests.get(url=url,
+ params=params,
+ headers=HEADER,
+ verify=SECURED)
+ res.raise_for_status()
+ try:
+ res_content = res.json()
+ except Exception:
+ res_content = {}
+ return res_content
+ except requests.exceptions.HTTPError as err:
+ if 400 <= res.status_code < 500:
+ return_error('Invalid token')
+ else:
+ return_error(err)
+ except Exception as err:
+ return_error(err)
+
+
+def build_request_url(api_action):
+ return URL.format(endpoint=API_ACTIONS_DICT.get(api_action))
+
+
+def command_fetch_incidents():
+ try:
+ fetch_blocked = USER_PARAMS.get('fetch_blocked')
+ fetch_spam = USER_PARAMS.get('fetch_spam')
+ fetch_malicious = USER_PARAMS.get('fetch_malicious')
+ req_args = build_fetch_incident_types(fetch_blocked, fetch_malicious, fetch_spam)
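+        # Resume from the last fetched scan ID (stored in lastRun) so each run
+        # only pulls incidents that are new since the previous fetch.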
+ last_run_id = int(demisto.getLastRun().get('scan_id', 0))
+ req_args[API_CURSOR_ARG] = last_run_id
+ incidents_list = collect_incidents(req_args)
+ report_incidents(incidents_list)
+ if incidents_list:
+ last_run_id = max(last_run_id, int(incidents_list[-1].get('Scan Id')))
+ demisto.setLastRun({'scan_id': int(last_run_id)})
+ except Exception as err:
+ return_error(f'An error occurred while trying to fetch new incidents. '
+ f'Please contact PerceptionPoint support for more info. {err}')
+
+
+def release_email_and_get_message(scan_id_to_release):
+    release_url = build_request_url(RELEASE).format(id_=scan_id_to_release)
+    get_pp_api_result(release_url, {})
+    return f'Email with id {scan_id_to_release} was released successfully!'
+
+
+def command_release_email():
+ try:
+ scan_id_to_release = demisto.args().get('scan_id')
+ entry = {
+ 'Type': entryTypes['note'],
+ 'ReadableContentsFormat': formats['markdown']
+ }
+ email_release_response = release_email_and_get_message(scan_id_to_release)
+ entry.update({'Contents': email_release_response,
+ 'ContentsFormat': formats['text'],
+ 'EntryContext': {'PP.Released': scan_id_to_release}}
+ )
+ demisto.results(entry)
+ except Exception as err:
+ return_error(f'An error occurred while trying to release email. '
+ f'Please contact PerceptionPoint support for more info\n. {err}')
+
+
+def test_command():
+ list_url = build_request_url(LIST)
+ if get_pp_api_result(list_url, {}):
+ demisto.results('ok')
+
+
+''' COMMAND CLASSIFIER'''
+try:
+ handle_proxy()
+ if demisto.command() == 'test-module':
+ test_command()
+ if demisto.command() == 'fetch-incidents':
+ command_fetch_incidents()
+ if demisto.command() == 'pp-release-email':
+ command_release_email()
+except Exception as e:
+    message = f'Unexpected error: {e}\n'
+    LOG(message)
+ LOG.print_log()
+ return_error(message)
diff --git a/Integrations/PerceptionPoint/PerceptionPoint.yml b/Integrations/PerceptionPoint/PerceptionPoint.yml
new file mode 100644
index 000000000000..a92794e901b5
--- /dev/null
+++ b/Integrations/PerceptionPoint/PerceptionPoint.yml
@@ -0,0 +1,73 @@
+commonfields:
+ id: PerceptionPoint
+ version: -1
+name: PerceptionPoint
+display: PerceptionPoint
+category: Email Gateway
+description: Loads incidents from Perception Point and releases falsely quarantined
+ emails.
+configuration:
+- display: Token to use Perception Point's API
+ name: pp_token
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Number of API loops
+ name: api_loops
+ defaultvalue: "1"
+ type: 0
+ required: false
+- display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Fetch blocked incidents
+ name: fetch_blocked
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Fetch spam incidents
+ name: fetch_spam
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Fetch malicious incidents
+ name: fetch_malicious
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+- display: Trust any certificate (insecure)
+ name: insecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Use system proxy
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+script:
+ script: '-'
+ type: python
+ commands:
+ - name: pp-release-email
+ arguments:
+ - name: scan_id
+ required: true
+ description: The PP scan ID of the email.
+ outputs:
+ - contextPath: PP.Released
+ description: The scan ID of the released email.
+ type: number
+ description: Re-sends an email that was falsely quarantined, using the scan ID.
+ dockerimage: demisto/python3:3.7.2.214
+ isfetch: true
+ runonce: false
+tests:
+- PerceptionPoint Test
diff --git a/Integrations/PerceptionPoint/PerceptionPoint_description.md b/Integrations/PerceptionPoint/PerceptionPoint_description.md
new file mode 100644
index 000000000000..6f34fe0daa04
--- /dev/null
+++ b/Integrations/PerceptionPoint/PerceptionPoint_description.md
@@ -0,0 +1,8 @@
+## Get your PerceptionPoint API token
+To get an API token, contact PerceptionPoint support.
+
+## API token use cases
+- View and manage your incidents list. This list is updated automatically in the Incidents dashboard.
+- Release emails from quarantine and resend them to their recipients by passing the scan ID as an argument.
+
+To set the number of results returned, specify the "Number of API loops" parameter. Each loop returns a maximum of 20 items.
+
diff --git a/Integrations/PerceptionPoint/PerceptionPoint_image.png b/Integrations/PerceptionPoint/PerceptionPoint_image.png
new file mode 100644
index 000000000000..4fa7396fd605
Binary files /dev/null and b/Integrations/PerceptionPoint/PerceptionPoint_image.png differ
diff --git a/Integrations/Perch/CHANGELOG.md b/Integrations/Perch/CHANGELOG.md
new file mode 100644
index 000000000000..c937db9bac75
--- /dev/null
+++ b/Integrations/Perch/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+-
\ No newline at end of file
diff --git a/Integrations/Perch/Perch.py b/Integrations/Perch/Perch.py
new file mode 100644
index 000000000000..d13b9c491670
--- /dev/null
+++ b/Integrations/Perch/Perch.py
@@ -0,0 +1,493 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import requests
+import json
+import collections
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+USERNAME = demisto.params().get('credentials').get('identifier')
+PASSWORD = demisto.params().get('credentials').get('password')
+API_KEY = demisto.params().get('api-key')
+FETCH_TIME = int(demisto.params().get('fetch_time', '7'))
+SERVER = demisto.params()['url'][:-1] if (demisto.params()['url'] and demisto.params()['url'].endswith('/')) else \
+ demisto.params()['url']
+USE_SSL = not demisto.params().get('insecure', False)
+BASE_URL = SERVER + '/v1'
+
+# Remove proxy if not set to true in params
+handle_proxy()
+
+TLP_MAP = {
+ 'WHITE': 0,
+ 'GREEN': 1,
+ 'AMBER': 2,
+ 'RED': 3
+}
+
+CONFIDENCE_MAP = {
+ 'LOW': 0,
+ 'MEDIUM': 1,
+ 'HIGH': 2
+}
+
+OBSERVABLE_TYPES_MAP = {
+ 'IP': 0,
+ 'Domain': 1,
+ 'URL': 2,
+ 'REGEX': 3,
+ 'File Hash': 4
+}
+
+''' HELPER FUNCTIONS '''
+
+
+# Allows nested keys to be accessible
+def makehash():
+ return collections.defaultdict(makehash)
+
+
+def http_request(method, url_suffix, params=None, data=None, headers=None):
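+    # Thin wrapper around requests.request: exits via return_error on connection
+    # failures, 403s, and any status other than 200/201; otherwise returns the
+    # parsed JSON body.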
+ try:
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ headers=headers
+ )
+ if res.status_code == 403:
+ return_error('Connection forbidden. Please verify your API key is valid.')
+ elif res.status_code not in {200, 201}:
+ return_error(f'Error in API call to Perch Integration [{res.status_code}] - {res.reason}')
+
+ except requests.exceptions.ConnectionError as error:
+ return_error(f"Failed to establish a new connection: {type(error)}")
+
+ try:
+ response = res.json()
+ except Exception as e:
+ return_error(f'Failed to parse JSON response: {str(e)}')
+ return response
+
+
+def find_key_by_value(val, dic_map):
+ for key, value in dic_map.items():
+ if value == val:
+ return key
+
+
+def format_alerts(alert):
+ hr = makehash() # type: dict
+ ec = makehash() # type: dict
+ if alert.get('id'):
+ hr['ID'] = alert.get('id')
+ ec['ID'] = alert.get('id')
+ if alert.get('sensor_id'):
+ hr['Sensor ID'] = alert.get('sensor_id')
+ ec['SensorID'] = alert.get('sensor_id')
+ if alert.get('observable_id'):
+ hr['Observable ID'] = alert.get('observable_id')
+ ec['ObservableID'] = alert.get('observable_id')
+ if alert.get('indicator_id'):
+ hr['Indicator ID'] = alert.get('indicator_id')
+ ec['IndicatorID'] = alert.get('indicator_id')
+ if alert.get('status'):
+ hr['Status'] = alert.get('status')
+ ec['Status'] = alert.get('status')
+ if alert.get('ts'):
+ hr['Timestamp'] = alert.get('ts')
+ ec['TS'] = alert.get('ts')
+ if alert.get('title'):
+ hr['Title'] = alert.get('title')
+ ec['Title'] = alert.get('title')
+ if alert.get('protocol'):
+ hr['Protocol'] = alert.get('protocol')
+ ec['Protocol'] = alert.get('protocol')
+ if alert.get('src_ip'):
+ hr['Source IP'] = alert.get('src_ip')
+ ec['SrcIP'] = alert.get('src_ip')
+ if alert.get('src_port'):
+ hr['Source Port'] = alert.get('src_port')
+ ec['SrcPort'] = alert.get('src_port')
+ if alert.get('src_geo_ip'):
+ src_geo = alert['src_geo_ip']
+ if src_geo.get('latitude'):
+ hr['Source Geo']['Latitude'] = src_geo.get('latitude')
+ ec['SrcGeo']['Latitude'] = src_geo.get('latitude')
+ if src_geo.get('longitude'):
+ hr['Source Geo']['Longitude'] = src_geo.get('longitude')
+ ec['SrcGeo']['Longitude'] = src_geo.get('longitude')
+ if src_geo.get('country_name'):
+ hr['Source Geo']['Country Name'] = src_geo.get('country_name')
+ ec['SrcGeo']['Country'] = src_geo.get('country_name')
+ if alert.get('dest_ip'):
+ hr['Destination IP'] = alert.get('dest_ip')
+ ec['DestIP'] = alert.get('dest_ip')
+ if alert.get('dest_port'):
+ hr['Destination Port'] = alert.get('dest_port')
+ ec['DestPort'] = alert.get('dest_port')
+ if alert.get('dest_geo_ip'):
+ dest_geo = alert['dest_geo_ip']
+ if dest_geo.get('latitude'):
+ hr['Destination Geo']['Latitude'] = dest_geo.get('latitude')
+ ec['DestGeo']['Latitude'] = dest_geo.get('latitude')
+ if dest_geo.get('longitude'):
+ hr['Destination Geo']['Longitude'] = dest_geo.get('longitude')
+ ec['DestGeo']['Longitude'] = dest_geo.get('longitude')
+ if dest_geo.get('country_name'):
+ hr['Destination Geo']['Country Name'] = dest_geo.get('country_name')
+ ec['DestGeo']['Country'] = dest_geo.get('country_name')
+ return hr, ec
+
+
+def alerts_params(args):
+    # Forward every supported filter argument that was supplied, as-is
+    supported_params = [
+        'page', 'page_size', 'closed', 'closed_at', 'community_id', 'created_at',
+        'dest_ip', 'dest_port', 'full_url', 'id', 'indicator_id', 'indicator_loaded',
+        'observable_id', 'protocol', 'sensor_id', 'sensor_name', 'soc_status',
+        'src_ip', 'src_port', 'status', 'status_updated_at', 'team_id', 'title', 'ts',
+        'closed_at__gte', 'closed_at__lte', 'created_at__gte', 'created_at__lte',
+        'status_updated_at__gte', 'status_updated_at__lte', 'status_updated_at__gt',
+        'status_updated_at__lt', 'ordering'
+    ]
+    return {key: args.get(key) for key in supported_params if args.get(key)}
+
+
+def indicator_params(args):
+ params = []
+ param = {}
+ observables = []
+ communities = []
+ if args.get('communities'):
+ community = {
+ 'id': args.get('communities')
+ }
+ communities.append(community)
+
+ param['communities'] = communities
+ if args.get('type'):
+ observable = {
+ 'type': OBSERVABLE_TYPES_MAP[args.get('type')],
+ 'details': {
+ 'value': args.get('value')
+ }
+ }
+ observables.append(observable)
+ param['observables'] = observables
+ if args.get('title'):
+ param['title'] = args.get('title')
+ if args.get('description'):
+ param['description'] = args.get('description')
+ if args.get('tlp'):
+ param['tlp'] = TLP_MAP[args.get('tlp')] # type: ignore
+ if args.get('confidence'):
+ param['confidence'] = CONFIDENCE_MAP[args.get('confidence')] # type: ignore
+ if args.get('operator'):
+ param['operator'] = args.get('operator')
+ if args.get('first_sighting'):
+ param['first_sighting'] = args.get('first_sighting')
+ if args.get('email_summary'):
+ param['email_summary'] = args.get('email_summary')
+ params.append(param)
+
+ return params
+
+
+def authenticate():
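+    # Exchange the configured username/password (plus the x-api-key header) for a
+    # bearer access token, and return headers ready for authenticated requests.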
+ headers = {'Content-Type': 'application/json', 'x-api-key': API_KEY}
+ req_body = json.dumps({'username': USERNAME, 'password': PASSWORD})
+ url = '/auth/access_token'
+ res_body = http_request('POST', url, data=req_body, headers=headers)
+ headers['Authorization'] = 'Bearer ' + res_body['access_token']
+ return headers
+
+
+def format_indicator(indicator):
+ hr = makehash() # type: dict
+ ec = makehash() # type: dict
+ if indicator.get('id'):
+ hr['ID'] = indicator.get('id')
+ ec['ID'] = indicator.get('id')
+ if indicator.get('confidence'):
+ hr['Confidence'] = find_key_by_value(indicator.get('confidence'), CONFIDENCE_MAP)
+ ec['Confidence'] = find_key_by_value(indicator.get('confidence'), CONFIDENCE_MAP)
+ if indicator.get('created_at'):
+ hr['Created At'] = indicator.get('created_at')
+ ec['CreatedAt'] = indicator.get('created_at')
+ if indicator.get('created_by'):
+ hr['Created By'] = indicator.get('created_by')
+ ec['CreatedBy'] = indicator.get('created_by')
+ if indicator.get('description'):
+ hr['Description'] = indicator.get('description')
+ ec['Description'] = indicator.get('description')
+ if indicator.get('email_summary'):
+ hr['Email Summary'] = indicator.get('email_summary')
+ ec['EmailSummary'] = indicator.get('email_summary')
+ if indicator.get('title'):
+ hr['Title'] = indicator.get('title')
+ ec['Title'] = indicator.get('title')
+ if indicator.get('first_sighting'):
+ hr['First Sighting'] = indicator.get('first_sighting')
+ ec['FirstSighting'] = indicator.get('first_sighting')
+ if indicator.get('perch_id'):
+ hr['Perch ID'] = indicator.get('perch_id')
+ ec['PerchID'] = indicator.get('perch_id')
+ if indicator.get('team'):
+ hr['Team'] = indicator.get('team')
+ ec['Team'] = indicator.get('team')
+ if indicator.get('tlp'):
+ hr['TLP'] = find_key_by_value(indicator.get('tlp'), TLP_MAP)
+ ec['TLP'] = find_key_by_value(indicator.get('tlp'), TLP_MAP)
+ if indicator.get('updated_at'):
+ hr['Updated At'] = indicator.get('updated_at')
+ ec['UpdatedAt'] = indicator.get('updated_at')
+ if indicator.get('operator'):
+ hr['Operator'] = indicator.get('operator')
+ ec['Operator'] = indicator.get('operator')
+ return hr, ec
+
+
+def item_to_incident(item):
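+    """
+    Converts a Perch alert to a Demisto incident.
+    :param item: The alert object
+    :return: The incident dict, with the raw alert preserved in rawJSON
+    """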
+ incident = {'name': 'Perch Incident: ' + item.get('title'),
+ 'occurred': item.get('created_at'),
+ 'rawJSON': json.dumps(item)}
+ return incident
+
+
+'''COMMAND FUNCTIONS'''
+
+
+def search_alerts_command():
+ headers = authenticate()
+ args = demisto.args()
+ params = alerts_params(args)
+ url = '/alerts'
+ res = http_request('GET', url, headers=headers, params=params)
+ res_results = res.get('results')
+ hr = ''
+ ec = {
+ "Perch": {
+ "Alert": []
+ }
+ } # type: dict
+ for alert in res_results:
+ alert_hr, alert_ec = format_alerts(alert)
+ ec['Perch']['Alert'].append(alert_ec)
+ hr += tableToMarkdown(f'{alert_ec.get("Title")}', alert_hr)
+ if len(res_results) == 0:
+ demisto.results('No results were found')
+ else:
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': res_results,
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
+
+def list_communities_command():
+ headers = authenticate()
+ args = demisto.args()
+ params = alerts_params(args)
+ url = '/communities'
+ res = http_request('GET', url, headers=headers, params=params)
+ res_results = res.get('results')
+ hr = tableToMarkdown('Communities Found', res_results, headerTransform=string_to_table_header, removeNull=True)
+ ec = {
+ "Perch": {
+ "Community": []
+ }
+ } # type: dict
+ for alert in res_results:
+ ec['Perch']['Community'].append(createContext(alert, keyTransform=string_to_context_key, removeNull=True))
+ if len(res_results) == 0:
+ demisto.results('No communities were found')
+ else:
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': res_results,
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
+
+def get_community_command():
+ headers = authenticate()
+ args = demisto.args()
+ params = alerts_params(args)
+ community_id = args.get('id')
+ url = f'/communities/{community_id}'
+ res = http_request('GET', url, headers=headers, params=params)
+ if len(res) > 0:
+ hr = tableToMarkdown('Communities Found', res, headerTransform=string_to_table_header, removeNull=True)
+ ec = {
+ "Perch": {
+ "Community": createContext(res, keyTransform=string_to_context_key, removeNull=True)
+ }
+ } # type: dict
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': res,
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results('No communities were found')
+
+
+def create_indicator_command():
+ headers = authenticate()
+ args = demisto.args()
+ raw_data = indicator_params(args)
+ data = json.dumps(raw_data)
+ url = '/indicators'
+ res = http_request('POST', url, headers=headers, data=data)
+ indicator_hr, indicator_ec = format_indicator(res[0])
+ hr = ''
+ ec = {
+ "Perch": {
+ "Indicator": []
+ }
+ } # type: dict
+ ec['Perch']['Indicator'].append(indicator_ec)
+ hr += tableToMarkdown(f'{indicator_hr.get("Title")}', indicator_hr)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': res,
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
+
+def fetch_alerts(last_run, headers):
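+    """
+    Fetches alerts created since the last run. On the first run, the window
+    defaults to FETCH_TIME days back. Alerts are sorted by creation time, so the
+    returned timestamp reflects the newest incident ingested.
+    :param last_run: The last run object, holding the last fetch timestamp
+    :param headers: Authenticated request headers
+    :return: The new last fetch timestamp and the list of incidents
+    """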
+ last_fetch = last_run.get('time')
+ url = '/alerts'
+ res = http_request('GET', url, headers=headers)
+ items = res.get('results')
+ items.sort(key=lambda r: r['created_at'])
+ if last_fetch is None:
+ last_fetch_raw = datetime.now() - timedelta(days=FETCH_TIME)
+ last_fetch = date_to_timestamp(last_fetch_raw, '%Y-%m-%dT%H:%M:%S.%fZ')
+ incidents = []
+ for item in items:
+ incident = item_to_incident(item)
+ incident_date = date_to_timestamp(incident['occurred'], '%Y-%m-%dT%H:%M:%S.%fZ')
+ if incident_date > last_fetch:
+ incidents.append(incident)
+ last_fetch = incident_date
+ return last_fetch, incidents
+
+
+def fetch_alerts_command():
+ last_run = demisto.getLastRun()
+ headers = authenticate()
+ last_fetch, incidents = fetch_alerts(last_run, headers)
+ demisto.setLastRun({'time': last_fetch})
+ demisto.incidents(incidents)
+
+
+def test_module():
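+    """
+    Tests connectivity by authenticating; if fetching incidents is enabled for
+    the instance, also simulates a fetch from a fixed timestamp.
+    """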
+ try:
+ headers = authenticate()
+ if demisto.params().get('isFetch'):
+ last_run = {'time': 1561017202}
+ fetch_alerts(last_run, headers)
+ demisto.results('ok')
+ except Exception as err:
+ return_error(str(err))
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+demisto.info(f'Command being called is {demisto.command()}')
+
+try:
+ if demisto.command() == 'perch-search-alerts':
+ search_alerts_command()
+ elif demisto.command() == 'perch-get-community':
+ get_community_command()
+ elif demisto.command() == 'perch-list-communities':
+ list_communities_command()
+ elif demisto.command() == 'perch-create-indicator':
+ create_indicator_command()
+ elif demisto.command() == 'fetch-incidents':
+ fetch_alerts_command()
+ elif demisto.command() == 'test-module':
+ test_module()
+
+
+# Log exceptions
+except Exception as e:
+ LOG(str(e))
+ LOG.print_log()
+ raise
diff --git a/Integrations/Perch/Perch.yml b/Integrations/Perch/Perch.yml
new file mode 100644
index 000000000000..65c37e59f175
--- /dev/null
+++ b/Integrations/Perch/Perch.yml
@@ -0,0 +1,427 @@
+category: Network Security
+commonfields:
+ id: Perch
+ version: -1
+configuration:
+- defaultvalue: https://api.perch.rocks/
+ display: Server URL (e.g., https://api.perch.rocks/)
+ name: url
+ required: true
+ type: 0
+- display: API Token
+ name: api-key
+ required: true
+ type: 4
+- defaultvalue: 'true'
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ required: false
+ type: 8
+- defaultvalue: '7'
+ display: Previous days to fetch
+ name: fetch_time
+ required: false
+ type: 0
+- display: Credentials
+ name: credentials
+ required: false
+ type: 9
+- defaultvalue: ""
+ display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- defaultvalue: ""
+ display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+description: Perch is a co-managed threat detection and response platform.
+display: Perch
+name: Perch
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Page of results to return.
+ isArray: false
+ name: page
+ required: false
+ secret: false
+ - default: false
+ description: Number of results to return per page.
+ isArray: false
+ name: page_size
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether the alert is closed.
+ isArray: false
+ name: closed
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Time that the alert was closed.
+ isArray: false
+ name: closed_at
+ required: false
+ secret: false
+ - default: false
+ description: Community ID that generated the alert.
+ isArray: false
+ name: community_id
+ required: false
+ secret: false
+ - default: false
+ description: Time that the alert was created.
+ isArray: false
+ name: created_at
+ required: false
+ secret: false
+ - default: false
+ description: Destination IP address.
+ isArray: false
+ name: dest_ip
+ required: false
+ secret: false
+ - default: false
+ description: Destination port.
+ isArray: false
+ name: dest_port
+ required: false
+ secret: false
+ - default: false
+ description: Full URL of the alert.
+ isArray: false
+ name: full_url
+ required: false
+ secret: false
+ - default: false
+ description: ID of the alert.
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ - default: false
+ description: ID of the indicator.
+ isArray: false
+ name: indicator_id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether the indicator is loaded.
+ isArray: false
+ name: indicator_loaded
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Observable ID.
+ isArray: false
+ name: observable_id
+ required: false
+ secret: false
+ - default: false
+      description: Protocol affected by the alert.
+ isArray: false
+ name: protocol
+ required: false
+ secret: false
+ - default: false
+ description: ID of the sensor that generated the alert.
+ isArray: false
+ name: sensor_id
+ required: false
+ secret: false
+ - default: false
+ description: Name of the sensor that generated the alert.
+ isArray: false
+ name: sensor_name
+ required: false
+ secret: false
+ - default: false
+ description: Status in the SOC.
+ isArray: false
+ name: soc_status
+ required: false
+ secret: false
+ - default: false
+ description: Source IP address.
+ isArray: false
+ name: src_ip
+ required: false
+ secret: false
+ - default: false
+ description: Source port.
+ isArray: false
+ name: src_port
+ required: false
+ secret: false
+ - default: false
+ description: Status of the alert.
+ isArray: false
+ name: status
+ required: false
+ secret: false
+ - default: false
+ description: Time that the status was last updated.
+ isArray: false
+ name: status_updated_at
+ required: false
+ secret: false
+ - default: false
+ description: ID of the team that generated the alert.
+ isArray: false
+ name: team_id
+ required: false
+ secret: false
+ - default: false
+ description: Title of the alert.
+ isArray: false
+ name: title
+ required: false
+ secret: false
+ - default: false
+ description: Timestamp of the alert.
+ isArray: false
+ name: ts
+ required: false
+ secret: false
+ - default: false
+ description: Order of the returned alerts.
+ isArray: false
+ name: ordering
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for alerts in Perch.
+ execution: false
+ name: perch-search-alerts
+ outputs:
+ - contextPath: Perch.Alert.DestPort
+ description: Destination port of the alert.
+ type: Number
+ - contextPath: Perch.Alert.SrcPort
+ description: Source port of the alert.
+ type: Number
+ - contextPath: Perch.Alert.DestIP
+ description: Destination IP of the alert.
+      type: String
+ - contextPath: Perch.Alert.IndicatorID
+ description: Indicator ID of the alert.
+ type: Number
+ - contextPath: Perch.Alert.SrcIP
+ description: IP address of the source.
+ type: String
+ - contextPath: Perch.Alert.SrcGeo.Country
+ description: Country of the threat.
+ type: String
+ - contextPath: Perch.Alert.SrcGeo.Latitude
+ description: Latitude of the detected threat.
+ type: Number
+ - contextPath: Perch.Alert.SrcGeo.Longitude
+ description: Longitude of the detected threat.
+ type: Number
+ - contextPath: Perch.Alert.SensorID
+ description: ID of the sensor that reported the threat.
+ type: Number
+ - contextPath: Perch.Alert.Title
+ description: Title of the alert.
+ type: String
+ - contextPath: Perch.Alert.Protocol
+ description: Protocol on which the alert was detected.
+ type: String
+ - contextPath: Perch.Alert.ID
+ description: ID of the alert.
+ type: Number
+ - contextPath: Perch.Alert.ObservableID
+ description: ID of the observable event.
+ type: Number
+ - contextPath: Perch.Alert.TS
+ description: Timestamp of the alert.
+ type: Date
+ - arguments:
+ - default: false
+ description: ID of the community.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets community information by ID.
+ execution: false
+ name: perch-get-community
+ outputs:
+ - contextPath: Perch.Community.Allsectors
+ description: Indicates if the community covers all sectors.
+ type: Boolean
+ - contextPath: Perch.Community.Credentialreq
+ description: Credentials required to interact with the community.
+ type: Number
+ - contextPath: Perch.Community.Desc
+ description: Description of the community.
+ type: String
+ - contextPath: Perch.Community.Id
+ description: ID of the community.
+ type: Number
+ - contextPath: Perch.Community.Name
+ description: Name of the community.
+ type: String
+ - contextPath: Perch.Community.Poweredby
+ description: Organization providing the feed.
+ type: String
+ - contextPath: Perch.Community.Selectablefeeds
+ description: Whether the feeds are selectable.
+ type: Boolean
+ - deprecated: false
+ description: Lists all communities.
+ execution: false
+ name: perch-list-communities
+ outputs:
+ - contextPath: Perch.Community.Allsectors
+ description: Indicates if the community covers all sectors.
+ type: Boolean
+ - contextPath: Perch.Community.Credentialreq
+ description: Credentials required to interact with the community.
+ type: Number
+ - contextPath: Perch.Community.Desc
+ description: Description of the community.
+ type: String
+ - contextPath: Perch.Community.Id
+ description: ID of the community.
+ type: Number
+ - contextPath: Perch.Community.Name
+ description: Name of the community.
+ type: String
+ - contextPath: Perch.Community.Poweredby
+ description: Organization providing the feed.
+ type: String
+ - contextPath: Perch.Community.Selectablefeeds
+ description: Whether the feeds are selectable.
+ type: Boolean
+ - arguments:
+ - default: false
+ description: Communities to report the indicator to.
+ isArray: false
+ name: communities
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Confidence of the findings.
+ isArray: false
+ name: confidence
+ predefined:
+ - LOW
+ - MEDIUM
+ - HIGH
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Type of indicator.
+ isArray: false
+ name: type
+ predefined:
+ - IP
+ - Domain
+ - URL
+ - REGEX
+ - File Hash
+ required: true
+ secret: false
+ - default: false
+ description: The value of the indicator.
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ - default: false
+ description: The title of the indicator.
+ isArray: false
+ name: title
+ required: true
+ secret: false
+ - default: false
+ description: Description of the indicator.
+ isArray: false
+ name: description
+ required: true
+ secret: false
+ - default: false
+      description: TLP of the indicator.
+ isArray: false
+ name: tlp
+ required: true
+ secret: false
+ - default: false
+ description: Operator of the indicator.
+ isArray: false
+ name: operator
+ required: false
+ secret: false
+ - default: false
+ description: When the indicator was first sighted.
+ isArray: false
+ name: first_sighting
+ required: false
+ secret: false
+ - default: false
+      description: Sends an email with a summary of the indicator.
+ isArray: false
+ name: email_summary
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates an indicator in Perch.
+ execution: false
+ name: perch-create-indicator
+ outputs:
+ - contextPath: Perch.Indicator.Confidence
+ description: Confidence of the indicator.
+ type: Unknown
+ - contextPath: Perch.Indicator.UpdatedAt
+ description: Date and time that the indicator was last updated.
+ type: Date
+ - contextPath: Perch.Indicator.TLP
+      description: TLP of the indicator.
+ type: String
+ - contextPath: Perch.Indicator.Title
+ description: Title of the indicator.
+ type: String
+ - contextPath: Perch.Indicator.ID
+ description: ID of the indicator.
+ type: Number
+ - contextPath: Perch.Indicator.CreatedAt
+ description: Date that the indicator was created.
+ type: Date
+ - contextPath: Perch.Indicator.Team
+ description: ID of the team.
+ type: Number
+ - contextPath: Perch.Indicator.PerchID
+      description: The Perch ID of the indicator.
+ type: String
+ - contextPath: Perch.Indicator.CreatedBy
+      description: ID of the user that created the indicator.
+ type: Number
+ dockerimage: demisto/python3:3.7.2.214
+ subtype: python3
+ isfetch: true
+ runonce: false
+ script: ''
+ type: python
+tests:
+- Perch-Test
diff --git a/Integrations/Perch/Perch_image.png b/Integrations/Perch/Perch_image.png
new file mode 100644
index 000000000000..4775016110ff
Binary files /dev/null and b/Integrations/Perch/Perch_image.png differ
diff --git a/Integrations/Perch/perch-description.md b/Integrations/Perch/perch-description.md
new file mode 100644
index 000000000000..1fb068942a2a
--- /dev/null
+++ b/Integrations/Perch/perch-description.md
@@ -0,0 +1,7 @@
+Perch Security is a crowd-sourced threat intelligence feed that also provides network security monitoring.
+
+When you configure an instance of the Perch Security integration in Demisto, you need to enter your Perch Key.
+
+## Generate your Perch key
+1. Log in to your Perch environment and navigate to the [Integrations menu](https://app.perchsecurity.com/settings/integrations).
+2. Generate a **Key**.
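+
+For reference, below is a minimal sketch of the token exchange the integration performs. The server URL, key, and credentials are placeholders; the exact path prefix depends on the Server URL configured for the instance.
+
+```python
+import requests
+
+# Hypothetical values - substitute your configured server URL, key, and credentials.
+BASE = 'https://api.perch.rocks'
+res = requests.post(
+    BASE + '/auth/access_token',
+    headers={'Content-Type': 'application/json', 'x-api-key': '<YOUR_PERCH_KEY>'},
+    json={'username': '<username>', 'password': '<password>'},
+)
+# The returned access token is sent as a Bearer token on subsequent API calls.
+headers = {'Authorization': 'Bearer ' + res.json()['access_token']}
+```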
diff --git a/Integrations/PhishLabsIOC/CHANGELOG.md b/Integrations/PhishLabsIOC/CHANGELOG.md
new file mode 100644
index 000000000000..170d9f868879
--- /dev/null
+++ b/Integrations/PhishLabsIOC/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.8.0] - 2019-08-06
+ - Fixed an issue with the **updatedAt** field.
\ No newline at end of file
diff --git a/Integrations/PhishLabsIOC/PhishLabsIOC.py b/Integrations/PhishLabsIOC/PhishLabsIOC.py
new file mode 100644
index 000000000000..1815260d433e
--- /dev/null
+++ b/Integrations/PhishLabsIOC/PhishLabsIOC.py
@@ -0,0 +1,670 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+
+import json
+import requests
+from typing import Callable
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' TYPES '''
+
+Response = requests.models.Response
+
+
+''' GLOBALS/PARAMS '''
+
+USERNAME: str = demisto.params().get('credentials', {}).get('identifier')
+PASSWORD: str = demisto.params().get('credentials', {}).get('password')
+SERVER: str = (demisto.params().get('url')[:-1]
+ if (demisto.params().get('url') and demisto.params().get('url').endswith('/'))
+ else demisto.params().get('url'))
+USE_SSL: bool = not demisto.params().get('insecure', False)
+BASE_URL: str = str(SERVER) + '/api/v1/'
+HEADERS: dict = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
+NONE_DATE: str = '0001-01-01T00:00:00Z'
+
+FETCH_TIME: str = demisto.params().get('fetch_time', '').strip()
+FETCH_LIMIT: str = demisto.params().get('fetch_limit', '10')
+RAISE_EXCEPTION_ON_ERROR: bool = False
+
+
+''' HELPER FUNCTIONS '''
+
+
+@logger
+def http_request(method: str, path: str, params: dict = None, data: dict = None) -> dict:
+ """
+ Sends an HTTP request using the provided arguments
+ :param method: HTTP method
+ :param path: URL path
+ :param params: URL query params
+ :param data: Request body
+ :return: JSON response
+ """
+    params = params if params is not None else {}
+    data = data if data is not None else {}
+
+ try:
+ res: Response = requests.request(
+ method,
+ BASE_URL + path,
+ auth=(USERNAME, PASSWORD),
+ verify=USE_SSL,
+ params=params,
+ data=json.dumps(data),
+ headers=HEADERS)
+ except requests.exceptions.SSLError:
+ ssl_error = 'Could not connect to PhishLabs IOC Feed: Could not verify certificate.'
+ if RAISE_EXCEPTION_ON_ERROR:
+ raise Exception(ssl_error)
+ return return_error(ssl_error)
+ except (requests.exceptions.ConnectionError, requests.exceptions.Timeout,
+ requests.exceptions.TooManyRedirects, requests.exceptions.RequestException) as e:
+ connection_error = 'Could not connect to PhishLabs IOC Feed: {}'.format(str(e))
+ if RAISE_EXCEPTION_ON_ERROR:
+ raise Exception(connection_error)
+ return return_error(connection_error)
+
+    if res.status_code < 200 or res.status_code >= 300:
+ status: int = res.status_code
+ message: str = res.reason
+ try:
+ error_json: dict = res.json()
+ message = error_json.get('error', '')
+ except Exception:
+ pass
+ error_message: str = ('Error in API call to PhishLabs IOC API, status code: {}'.format(status))
+ if status == 401:
+ error_message = 'Could not connect to PhishLabs IOC Feed: Wrong credentials'
+ if message:
+            error_message += ', reason: ' + message
+ if RAISE_EXCEPTION_ON_ERROR:
+ raise Exception(error_message)
+ else:
+ return return_error(error_message)
+ try:
+ return res.json()
+ except Exception:
+ error_message = 'Failed parsing the response from PhishLabs IOC API: {}'.format(res.content)
+ if RAISE_EXCEPTION_ON_ERROR:
+ raise Exception(error_message)
+ else:
+ return return_error(error_message)
+
+
+@logger
+def populate_context(dbot_scores: list, domain_entries: list, file_entries: list,
+ url_entries: list, email_entries: list = None) -> dict:
+ """
+ Populate the context object with entries as tuples -
+ the first element contains global objects and the second contains PhishLabs objects
+ :param dbot_scores: Indicator DBotScore
+ :param domain_entries: Domain indicators
+ :param file_entries: File indicators
+ :param url_entries: URL indicators
+ :param email_entries: Email indicators
+ :return: The context object
+ """
+ context: dict = {}
+ if url_entries:
+ context[outputPaths['url']] = createContext(list(map(lambda u: u[0], url_entries)))
+ context['PhishLabs.URL(val.ID && val.ID === obj.ID)'] = createContext(list(map(lambda u: u[1], url_entries)),
+ removeNull=True)
+ if domain_entries:
+ context[outputPaths['domain']] = createContext(list(map(lambda d: d[0], domain_entries)))
+ context['PhishLabs.Domain(val.ID && val.ID === obj.ID)'] = createContext(list(map(lambda d: d[1],
+ domain_entries)),
+ removeNull=True)
+ if file_entries:
+ context[outputPaths['file']] = createContext(list(map(lambda f: f[0], file_entries)))
+ context['PhishLabs.File(val.ID && val.ID === obj.ID)'] = createContext(list(map(lambda f: f[1], file_entries)),
+ removeNull=True)
+ if email_entries:
+ context['Email'] = createContext(list(map(lambda e: e[0], email_entries)))
+ context['PhishLabs.Email(val.ID && val.ID === obj.ID)'] = createContext(list(map(lambda e: e[1],
+ email_entries)),
+ removeNull=True)
+ if dbot_scores:
+ context[outputPaths['dbotscore']] = dbot_scores
+ return context
+
+
+@logger
+def get_file_properties(indicator: dict) -> tuple:
+ """
+ Extract the file properties from the indicator attributes.
+ Example:
+ Indicator: {
+ "attributes": [
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "xyz",
+ "name": "md5",
+ "value": "c8092abd8d581750c0530fa1fc8d8318" # guardrails-disable-line
+ },
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "abc",
+ "name": "filetype",
+ "value": "application/zip"
+ },
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "qwe",
+ "name": "name",
+ "value": "Baycc.zip"
+ }
+ ],
+ "createdAt": "2019-05-14T13:03:45Z",
+ "falsePositive": false,
+ "id": "def",
+ "type": "Attachment",
+ "updatedAt": "0001-01-01T00:00:00Z",
+ "value": "c8092abd8d581750c0530fa1fc8d8318" # guardrails-disable-line
+ }
+ Return values: c8092abd8d581750c0530fa1fc8d8318, Baycc.zip, application/zip
+ :param indicator: The file indicator
+ :return: File MD5, name and type
+ """
+ file_name_attribute: list = list(filter(lambda a: a.get('name') == 'name', indicator.get('attributes', [])))
+ file_name: str = file_name_attribute[0].get('value') if file_name_attribute else ''
+ file_type_attribute: list = list(filter(lambda a: a.get('name') == 'filetype', indicator.get('attributes', [])))
+ file_type: str = file_type_attribute[0].get('value') if file_type_attribute else ''
+ file_md5_attribute: list = list(filter(lambda a: a.get('name') == 'md5', indicator.get('attributes', [])))
+ file_md5: str = file_md5_attribute[0].get('value') if file_md5_attribute else ''
+
+ return file_md5, file_name, file_type
+
+
+@logger
+def get_email_properties(indicator: dict) -> tuple:
+ """
+ Extract the email properties from the indicator attributes.
+ Example:
+ Indicator:
+ {
+ "attributes":
+ [
+ {
+ "createdAt": "2019-05-13T16:54:18Z",
+ "id": "abc",
+ "name": "email-body",
+ "value": "\r\n\r\n-----Original Message-----\r\nFrom: A \r\nSent:
+                Monday, May 13, 2019 12:22 PM\r\nTo:"
+ },
+ {
+ "createdAt": "2019-05-13T16:54:18Z",
+ "id": "def",
+ "name": "from",
+ "value": "someuser@contoso.com"
+ },
+ {
+ "createdAt": "2019-05-13T16:54:18Z",
+ "id": "cf3182ca-92ec-43b6-8aaa-429802a99fe5",
+ "name": "to",
+ "value": "example@gmail.com"
+ }
+ ],
+ "createdAt": "2019-05-13T16:54:18Z",
+ "falsePositive": false,
+ "id": "ghi",
+ "type": "E-mail",
+ "updatedAt": "0001-01-01T00:00:00Z",
+ "value": "FW: Task"
+ }
+    Return values: the email body, example@gmail.com, someuser@contoso.com
+ :param indicator: The email indicator
+ :return: Email body, To and From
+ """
+ email_to_attribute: list = list(filter(lambda a: a.get('name') == 'to', indicator.get('attributes', [])))
+ email_to: str = email_to_attribute[0].get('value') if email_to_attribute else ''
+ email_from_attribute: list = list(filter(lambda a: a.get('name') == 'from', indicator.get('attributes', [])))
+ email_from: str = email_from_attribute[0].get('value') if email_from_attribute else ''
+ email_body_attribute: list = list(filter(lambda a: a.get('name') == 'email-body', indicator.get('attributes', [])))
+ email_body: str = email_body_attribute[0].get('value') if email_body_attribute else ''
+
+ return email_body, email_to, email_from
+
+
+@logger
+def create_domain_context(indicator: dict, classification: str) -> dict:
+ """
+ Create a domain context object
+ :param indicator: The domain indicator
+ :param classification: The indicator classification
+ :return: The domain context object
+ """
+ domain_object = {
+ 'Name': indicator.get('value')
+ }
+
+ if classification == 'Malicious':
+ domain_object['Malicious'] = {
+ 'Vendor': 'PhishLabs',
+ 'Description': 'Domain in PhishLabs feed'
+ }
+
+ return domain_object
+
+
+@logger
+def create_url_context(indicator: dict, classification: str) -> dict:
+ """
+ Create a URL context object
+ :param indicator: The URL indicator
+ :param classification: The indicator classification
+ :return: The URL context object
+ """
+
+ url_object: dict = {
+ 'Data': indicator.get('value')
+ }
+
+ if classification == 'Malicious':
+ url_object['Malicious'] = {
+ 'Vendor': 'PhishLabs',
+ 'Description': 'URL in PhishLabs feed'
+ }
+
+ return url_object
+
+
+@logger
+def create_phishlabs_object(indicator: dict) -> dict:
+ """
+ Create the context object for the PhishLabs path
+ :param indicator: The indicator
+ :return: The context object
+ """
+ return {
+ 'ID': indicator.get('id'),
+ 'CreatedAt': indicator.get('createdAt'),
+ 'UpdatedAt': indicator['updatedAt'] if indicator.get('updatedAt', NONE_DATE) != NONE_DATE else '',
+ 'Attribute': [{
+ 'Name': a.get('name'),
+ 'Type': a.get('type'),
+ 'Value': a.get('value'),
+ 'CreatedAt': a.get('createdAt')
+ } for a in indicator.get('attributes', [])]
+ }
+
+
+@logger
+def create_indicator_content(indicator: dict) -> dict:
+ """
+ Create content for the human readable object
+ :param indicator: The indicator
+ :return: The object to return to the War Room
+ """
+ return {
+ 'ID': indicator.get('id'),
+ 'Indicator': indicator.get('value'),
+ 'Type': indicator.get('type'),
+ 'CreatedAt': indicator.get('createdAt'),
+ 'UpdatedAt': indicator['updatedAt'] if indicator.get('updatedAt', NONE_DATE) != NONE_DATE else '',
+ 'FalsePositive': indicator.get('falsePositive')
+ }
+
+
+''' COMMANDS'''
+
+
+def test_module():
+ """
+ Performs basic get request to get item samples
+ """
+ get_global_feed_request(limit='1')
+ demisto.results('ok')
+
+
+def get_global_feed_command():
+ """
+ Gets the global feed data using the provided arguments
+ """
+ indicator_headers: list = ['Indicator', 'Type', 'CreatedAt', 'UpdatedAt', 'FalsePositive']
+ contents: list = []
+ url_entries: list = []
+ domain_entries: list = []
+ file_entries: list = []
+ dbot_scores: list = []
+ context: dict = {}
+
+ since: str = demisto.args().get('since')
+ limit: str = demisto.args().get('limit')
+ indicator: list = argToList(demisto.args().get('indicator_type', []))
+ remove_protocol: str = demisto.args().get('remove_protocol')
+ remove_query: str = demisto.args().get('remove_query')
+ false_positive: str = demisto.args().get('false_positive')
+
+ feed: dict = get_global_feed_request(since, limit, indicator, remove_protocol, remove_query, false_positive)
+ results: list = feed.get('data', []) if feed else []
+
+ if results:
+ if not isinstance(results, list):
+ results = [results]
+ for result in results:
+ contents.append(create_indicator_content(result))
+ indicator_false_positive = result.get('falsePositive', False)
+ indicator_type: str = result.get('type')
+ phishlabs_object: dict = create_phishlabs_object(result)
+
+ dbot_score: dict = {
+ 'Indicator': result.get('value'),
+ 'Vendor': 'PhishLabs',
+ 'Score': 3 if not indicator_false_positive else 1
+ }
+
+ if indicator_type == 'URL':
+ context_object = create_url_context(result, 'Malicious' if not indicator_false_positive else 'Good')
+ phishlabs_object['Data'] = result.get('value')
+ dbot_score['type'] = 'url'
+ url_entries.append((context_object, phishlabs_object))
+
+ elif indicator_type == 'Domain':
+ context_object = create_domain_context(result, 'Malicious' if not indicator_false_positive else 'Good')
+ phishlabs_object['Name'] = result.get('value')
+ dbot_score['type'] = 'domain'
+ domain_entries.append((context_object, phishlabs_object))
+
+ elif indicator_type == 'Attachment':
+ file_md5, file_name, file_type = get_file_properties(result)
+
+ context_object = {
+ 'Name': file_name,
+ 'Type': file_type,
+ 'MD5': file_md5
+ }
+
+ phishlabs_object['Name'] = file_name
+ phishlabs_object['Type'] = file_type
+ phishlabs_object['MD5'] = file_md5
+
+ file_entries.append((context_object, phishlabs_object))
+ dbot_score['type'] = 'file'
+
+ dbot_scores.append(dbot_score)
+
+ context = populate_context(dbot_scores, domain_entries, file_entries, url_entries)
+ human_readable: str = tableToMarkdown('PhishLabs Global Feed', contents, headers=indicator_headers,
+ removeNull=True, headerTransform=pascalToSpace)
+ else:
+ human_readable = 'No indicators found'
+
+ return_outputs(human_readable, context, feed)
+
+
+@logger
+def get_global_feed_request(since: str = None, limit: str = None, indicator: list = None,
+ remove_protocol: str = None, remove_query: str = None, false_positive: str = None) -> dict:
+ """
+ Sends a request to PhishLabs global feed with the provided arguments
+ :param since: Data updated within this duration of time from now
+ :param limit: Limit the number of rows to return
+ :param indicator: Indicator type filter
+ :param remove_protocol: Removes the protocol part from indicators when the rule can be applied.
+ :param remove_query: Removes the query string part from indicators when the rules can be applied.
+ :param false_positive: Filter by indicators that are false positives.
+ :return: Global feed indicators
+ """
+ path: str = 'globalfeed'
+ params: dict = {}
+
+ if since:
+ params['since'] = since
+ if limit:
+ params['limit'] = int(limit)
+ if indicator:
+ params['indicator'] = indicator
+ if remove_protocol:
+ params['remove_protocol'] = remove_protocol
+ if remove_query:
+ params['remove_query'] = remove_query
+ if false_positive:
+ params['false_positive'] = false_positive
+
+ response = http_request('GET', path, params)
+
+ return response
+
+
+def get_incident_indicators_command():
+ """
+ Gets the indicators for the specified incident
+ """
+ indicator_headers: list = ['Indicator', 'Type', 'CreatedAt', 'UpdatedAt', 'FalsePositive']
+ attribute_headers: list = ['Name', 'Type', 'Value', 'CreatedAt']
+ url_entries: list = []
+ domain_entries: list = []
+ file_entries: list = []
+ email_entries: list = []
+ dbot_scores: list = []
+ context: dict = {}
+
+ incident_id: str = demisto.args()['incident_id']
+ since: str = demisto.args().get('since')
+ limit: str = demisto.args().get('limit')
+ indicator: list = argToList(demisto.args().get('indicator_type', []))
+ classification: str = demisto.args().get('indicators_classification', 'Suspicious')
+ remove_protocol: str = demisto.args().get('remove_protocol')
+ remove_query: str = demisto.args().get('remove_query')
+
+ human_readable: str = '## Indicators for incident ' + incident_id + '\n'
+
+ feed: dict = get_feed_request(since, indicator=indicator, remove_protocol=remove_protocol, remove_query=remove_query)
+ results: list = feed.get('data', []) if feed else []
+
+ if results:
+ if not isinstance(results, list):
+ results = [results]
+ results = list(filter(lambda f: f.get('referenceId', '') == incident_id, results))
+ if results:
+ indicators = results[0].get('indicators', [])
+ if limit:
+ indicators = indicators[:int(limit)]
+ for result in indicators:
+ human_readable += tableToMarkdown('Indicator', create_indicator_content(result),
+ headers=indicator_headers,
+ removeNull=True, headerTransform=pascalToSpace)
+ phishlabs_object = create_phishlabs_object(result)
+
+ if phishlabs_object.get('Attribute'):
+ human_readable += tableToMarkdown('Attributes', phishlabs_object['Attribute'],
+ headers=attribute_headers,
+ removeNull=True, headerTransform=pascalToSpace)
+ else:
+ human_readable += 'No attributes for this indicator\n'
+
+ indicator_type: str = result.get('type')
+
+ dbot_score: dict = {
+ 'Indicator': result.get('value'),
+ 'Vendor': 'PhishLabs',
+ 'Score': 3 if classification == 'Malicious' else 2
+ }
+
+ if indicator_type == 'URL':
+ context_object = create_url_context(result, classification)
+ phishlabs_object['Data'] = result.get('value')
+ dbot_score['type'] = 'url'
+ url_entries.append((context_object, phishlabs_object))
+
+ elif indicator_type == 'Domain':
+ context_object = create_domain_context(result, classification)
+ phishlabs_object['Name'] = result.get('value')
+ dbot_score['type'] = 'domain'
+ domain_entries.append((context_object, phishlabs_object))
+
+ elif indicator_type == 'Attachment':
+ file_md5, file_name, file_type = get_file_properties(result)
+
+ context_object = {
+ 'Name': file_name,
+ 'Type': file_type,
+ 'MD5': file_md5
+ }
+
+ phishlabs_object['Name'] = file_name
+ phishlabs_object['Type'] = file_type
+ phishlabs_object['MD5'] = file_md5
+
+ file_entries.append((context_object, phishlabs_object))
+ dbot_score['type'] = 'file'
+
+ elif indicator_type == 'E-mail':
+ email_body, email_to, email_from = get_email_properties(result)
+
+ context_object = {
+ 'To': email_to,
+ 'From': email_from,
+ 'Body': email_body,
+ 'Subject': result.get('value')
+ }
+
+                    phishlabs_object['To'] = email_to
+                    phishlabs_object['From'] = email_from
+ phishlabs_object['Body'] = email_body
+ phishlabs_object['Subject'] = result.get('value')
+
+ email_entries.append((context_object, phishlabs_object))
+
+ if indicator_type != 'E-mail':
+                    # E-mail indicators have no matching DBotScore type, so no score is added for them
+ dbot_scores.append(dbot_score)
+
+ context = populate_context(dbot_scores, domain_entries, file_entries, url_entries, email_entries)
+ else:
+ human_readable = 'Incident not found'
+ else:
+ human_readable = 'No incidents found'
+
+ return_outputs(human_readable, context, feed)
+
+
+@logger
+def get_feed_request(since: str = None, limit: str = None, indicator: list = None,
+ remove_protocol: str = None, remove_query: str = None,
+ offset: str = None, sort: bool = False) -> dict:
+ """
+ Sends a request to PhishLabs user feed with the provided arguments
+ :param since: Data updated within this duration of time from now
+ :param limit: Limit the number of rows to return
+ :param indicator: Indicator type filter
+ :param remove_protocol: Removes the protocol part from indicators when the rule can be applied.
+ :param remove_query: Removes the query string part from indicators when the rules can be applied.
+ :param offset: Number of incidents to skip
+ :param sort: If true, the incidents will be sorted by their creation time in ascending order.
+ :return: User feed
+ """
+ path: str = 'feed'
+ params: dict = {}
+
+ if since:
+ params['since'] = since
+ if limit:
+ params['limit'] = int(limit)
+ if offset:
+ params['offset'] = int(offset)
+ if indicator:
+ params['indicator'] = indicator
+ if remove_query:
+ params['remove_query'] = remove_query
+ if remove_protocol:
+ params['remove_protocol'] = remove_protocol
+
+ if sort:
+ params['sort'] = 'created_at'
+ params['direction'] = 'asc'
+
+ response = http_request('GET', path, params)
+
+ return response
+
+
+def fetch_incidents():
+ """
+ Fetches incidents from the PhishLabs user feed.
+ :return: Demisto incidents
+ """
+ last_run: dict = demisto.getLastRun()
+ last_fetch: str = last_run.get('time', '') if last_run else ''
+ last_offset: str = last_run.get('offset', '0') if last_run else '0'
+
+ incidents: list = []
+ count: int = 1
+ limit = int(FETCH_LIMIT)
+ feed: dict = get_feed_request(since=FETCH_TIME)
+ last_fetch_time: datetime = (datetime.strptime(last_fetch, '%Y-%m-%dT%H:%M:%SZ') if last_fetch
+ else datetime.strptime(NONE_DATE, '%Y-%m-%dT%H:%M:%SZ'))
+ max_time: datetime = last_fetch_time
+ offset = int(last_offset)
+ results: list = feed.get('data', []) if feed else []
+
+ if results:
+ results = sorted(results, key=lambda r: datetime.strptime(r.get('createdAt', NONE_DATE), '%Y-%m-%dT%H:%M:%SZ'))
+ results = results[offset:]
+ if not isinstance(results, list):
+ results = [results]
+
+ for result in results:
+ if count > limit:
+ break
+ incident_time: datetime = datetime.strptime(result.get('createdAt', NONE_DATE), '%Y-%m-%dT%H:%M:%SZ')
+ if last_fetch_time and incident_time <= last_fetch_time:
+ continue
+
+ incident: dict = {
+ 'name': 'PhishLabs IOC Incident ' + result.get('referenceId', ''),
+ 'occurred': datetime.strftime(incident_time, '%Y-%m-%dT%H:%M:%SZ'),
+ 'rawJSON': json.dumps(result)
+ }
+ incidents.append(incident)
+ if max_time < incident_time:
+ max_time = incident_time
+ count += 1
+
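+        # Advance the stored offset by the number of incidents ingested in this
+        # run, so results already consumed are skipped on the next fetch.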
+ offset += count - 1
+
+ demisto.setLastRun({'time': datetime.strftime(max_time, '%Y-%m-%dT%H:%M:%SZ'), 'offset': str(offset)})
+ demisto.incidents(incidents)
+
+
+''' MAIN'''
+
+
+def main():
+ """
+ Main function
+ """
+ global RAISE_EXCEPTION_ON_ERROR
+ LOG('Command being called is {}'.format(demisto.command()))
+ handle_proxy()
+ command_dict = {
+ 'test-module': test_module,
+ 'fetch-incidents': fetch_incidents,
+ 'phishlabs-global-feed': get_global_feed_command,
+ 'phishlabs-get-incident-indicators': get_incident_indicators_command
+ }
+ try:
+ command_func: Callable = command_dict[demisto.command()]
+ if demisto.command() == 'fetch-incidents':
+ RAISE_EXCEPTION_ON_ERROR = True
+ command_func()
+
+ except Exception as e:
+ if RAISE_EXCEPTION_ON_ERROR:
+ LOG(str(e))
+ LOG.print_log()
+ raise
+ else:
+ return_error(str(e))
+
+
+if __name__ in ['__main__', '__builtin__', 'builtins']:
+ main()
diff --git a/Integrations/PhishLabsIOC/PhishLabsIOC.yml b/Integrations/PhishLabsIOC/PhishLabsIOC.yml
new file mode 100644
index 000000000000..ed3b8a56632f
--- /dev/null
+++ b/Integrations/PhishLabsIOC/PhishLabsIOC.yml
@@ -0,0 +1,461 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: PhishLabs IOC
+ version: -1
+configuration:
+- defaultvalue: https://ioc.phishlabs.com
+ display: Server URL (e.g., https://ioc.phishlabs.com)
+ name: url
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- defaultvalue: 1h
+ display: Fetch for this time period, e.g., "1d", "1h", "10m". The default is 1h.
+ name: fetch_time
+ required: false
+ type: 0
+- defaultvalue: '10'
+ display: Number of incidents to fetch each time
+ name: fetch_limit
+ required: false
+ type: 0
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+description: Get live feeds of IOC data from PhishLabs.
+display: PhishLabs IOC
+name: PhishLabs IOC
+script:
+ commands:
+ - arguments:
+ - default: false
+ defaultValue: 1h
+ description: Duration (from now) for which to pull updated data, for
+ example, "1d", "1h" or "10m".
+ isArray: false
+ name: since
+ required: false
+ secret: false
+ - default: false
+ description: Maximum number of results to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Filter the data by indicator type.
+ isArray: false
+ name: indicator_type
+ predefined:
+ - Domain
+ - Attachment
+ - URL
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Removes the protocol part from indicators, when the rule can be
+ applied.
+ isArray: false
+ name: remove_protocol
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Removes the query string part from indicators, when the rules can
+ be applied.
+ isArray: false
+ name: remove_query
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+      description: Whether the indicator is a false positive.
+ isArray: false
+ name: false_positive
+ predefined:
+ - 'false'
+ - 'true'
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves the global IOC feed from PhishLabs.
+ execution: false
+ name: phishlabs-global-feed
+ outputs:
+ - contextPath: URL.Data
+ description: URL address.
+ type: String
+ - contextPath: URL.Malicious.Vendor
+ description: Vendor reporting the malicious status.
+ type: String
+ - contextPath: URL.Malicious.Description
+ description: Description of the malicious URL.
+ type: String
+ - contextPath: PhishLabs.URL.Data
+ description: URL address.
+ type: String
+ - contextPath: PhishLabs.URL.ID
+ description: URL PhishLabs ID.
+ type: String
+ - contextPath: PhishLabs.URL.CreatedAt
+ description: URL creation time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.URL.UpdatedAt
+ description: URL update time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.URL.Attribute.Name
+ description: URL attribute name.
+ type: String
+ - contextPath: PhishLabs.URL.Attribute.Value
+ description: URL attribute value.
+ type: String
+ - contextPath: PhishLabs.URL.Attribute.CreatedAt
+ description: URL attribute creation time.
+ type: Date
+ - contextPath: PhishLabs.URL.FalsePositive
+ description: Whether this URL is a false positive.
+ type: Boolean
+ - contextPath: Domain.Name
+ description: Domain name.
+ type: String
+ - contextPath: Domain.Malicious.Vendor
+ description: Vendor reporting the malicious status.
+ type: String
+ - contextPath: Domain.Malicious.Description
+ description: Description of the malicious domain.
+ type: String
+ - contextPath: PhishLabs.Domain.Name
+ description: Domain name.
+ type: String
+ - contextPath: PhishLabs.Domain.ID
+ description: Domain PhishLabs ID.
+ type: String
+ - contextPath: PhishLabs.Domain.CreatedAt
+ description: Domain creation time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.Domain.UpdatedAt
+ description: Domain update time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.Domain.Attribute.Name
+ description: Domain attribute name.
+ type: String
+ - contextPath: PhishLabs.Domain.Attribute.Value
+ description: Domain attribute value.
+ type: String
+ - contextPath: PhishLabs.Domain.Attribute.CreatedAt
+ description: Domain attribute creation time.
+ type: Date
+ - contextPath: PhishLabs.Domain.FalsePositive
+ description: Whether this domain is a false positive.
+ type: Boolean
+ - contextPath: File.Name
+ description: Full filename.
+ type: String
+ - contextPath: File.MD5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: File.Type
+ description: File type.
+ type: String
+ - contextPath: PhishLabs.File.ID
+ description: File PhishLabs ID.
+ type: String
+ - contextPath: PhishLabs.File.Name
+ description: Full filename.
+ type: String
+ - contextPath: PhishLabs.File.MD5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: PhishLabs.File.Type
+ description: File type.
+ type: String
+ - contextPath: PhishLabs.File.CreatedAt
+ description: File creation time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.File.UpdatedAt
+ description: File update time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.File.Attribute.Name
+ description: File attribute name.
+ type: String
+ - contextPath: PhishLabs.File.Attribute.Value
+ description: File attribute value.
+ type: String
+ - contextPath: PhishLabs.File.Attribute.CreatedAt
+ description: File attribute creation time.
+ type: Date
+ - contextPath: PhishLabs.File.FalsePositive
+ description: Whether this file is a false positive.
+ type: Boolean
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - arguments:
+ - default: true
+ description: PhishLabs incident reference ID, for example, "INC123456".
+ isArray: false
+ name: incident_id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: 1h
+ description: Duration (from now) for which to pull updated data, for
+ example, "1d", "1h" or "10m".
+ isArray: false
+ name: since
+ required: false
+ secret: false
+ - default: false
+ description: Maximum number of results to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Filter the data by indicator type.
+ isArray: false
+ name: indicator_type
+ predefined:
+ - E-mail
+ - Domain
+ - URL
+ - Attachment
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: Suspicious
+ description: How to classify indicators from the feed.
+ isArray: false
+ name: indicators_classification
+ predefined:
+ - Malicious
+ - Suspicious
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Removes the protocol part from indicators, when the rule can be
+ applied.
+ isArray: false
+ name: remove_protocol
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Removes the query string part from indicators, when the rules can
+ be applied.
+ isArray: false
+ name: remove_query
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+    description: Retrieves indicators from a specified PhishLabs incident. To fetch incidents in Demisto, enable fetching incidents.
+ execution: false
+ name: phishlabs-get-incident-indicators
+ outputs:
+ - contextPath: URL.Data
+ description: URL address.
+ type: String
+ - contextPath: URL.Malicious.Vendor
+ description: Vendor reporting the malicious status.
+ type: String
+ - contextPath: URL.Malicious.Description
+ description: Description of the malicious URL.
+ type: String
+ - contextPath: PhishLabs.URL.Data
+ description: URL address.
+ type: String
+ - contextPath: PhishLabs.URL.CreatedAt
+      description: URL creation time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.URL.UpdatedAt
+ description: URL update time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.URL.Attribute.Name
+ description: URL attribute name.
+ type: String
+ - contextPath: PhishLabs.URL.Attribute.Value
+ description: URL attribute value.
+ type: String
+ - contextPath: PhishLabs.URL.Attribute.CreatedAt
+ description: URL attribute creation time.
+ type: Date
+ - contextPath: PhishLabs.URL.FalsePositive
+ description: Whether this URL is a false positive.
+ type: Boolean
+ - contextPath: Domain.Name
+ description: Domain name.
+ type: String
+ - contextPath: Domain.Malicious.Vendor
+ description: Vendor reporting the malicious status.
+ type: String
+ - contextPath: Domain.Malicious.Description
+ description: Description of the malicious domain.
+ type: String
+ - contextPath: PhishLabs.Domain.Name
+      description: Domain name.
+ type: String
+ - contextPath: PhishLabs.Domain.CreatedAt
+ description: Domain creation time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.Domain.UpdatedAt
+ description: Domain update time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.Domain.Attribute.Name
+ description: Domain attribute name.
+ type: String
+ - contextPath: PhishLabs.Domain.Attribute.Value
+ description: Domain attribute value.
+ type: String
+ - contextPath: PhishLabs.Domain.Attribute.CreatedAt
+ description: Domain attribute creation time.
+ type: Date
+ - contextPath: PhishLabs.Domain.FalsePositive
+ description: Whether this domain is a false positive.
+ type: Boolean
+ - contextPath: Email.To
+ description: Recipient of the email.
+ type: String
+ - contextPath: Email.From
+ description: Sender of the email.
+ type: String
+ - contextPath: Email.Body
+ description: Body of the email.
+ type: String
+ - contextPath: Email.Subject
+ description: Subject of the email.
+ type: String
+ - contextPath: PhishLabs.Email.ID
+ description: Email PhishLabs ID.
+ type: String
+ - contextPath: PhishLabs.Email.To
+ description: Recipient of the email.
+ type: String
+ - contextPath: PhishLabs.Email.From
+ description: Sender of the email.
+ type: String
+ - contextPath: PhishLabs.Email.Body
+ description: Body of the email.
+ type: String
+ - contextPath: PhishLabs.Email.Subject
+ description: Subject of the email.
+ type: String
+ - contextPath: PhishLabs.Email.CreatedAt
+ description: Email creation time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.Email.UpdatedAt
+ description: Email update time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.Email.Attribute.Name
+ description: Email attribute name.
+ type: String
+ - contextPath: PhishLabs.Email.Attribute.Value
+ description: Email attribute value.
+ type: String
+ - contextPath: PhishLabs.Email.Attribute.CreatedAt
+ description: Email attribute creation time.
+ type: Date
+ - contextPath: File.Name
+ description: Full filename.
+ type: String
+ - contextPath: File.MD5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: File.Type
+ description: File type.
+ type: String
+ - contextPath: PhishLabs.File.ID
+ description: File PhishLabs ID.
+ type: String
+ - contextPath: PhishLabs.File.Name
+ description: Full filename.
+ type: String
+ - contextPath: PhishLabs.File.MD5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: PhishLabs.File.Type
+ description: File type.
+ type: String
+ - contextPath: PhishLabs.File.CreatedAt
+ description: File creation time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.File.UpdatedAt
+ description: File update time, in PhishLabs.
+ type: Date
+ - contextPath: PhishLabs.File.Attribute.Name
+ description: File attribute name.
+ type: String
+ - contextPath: PhishLabs.File.Attribute.Value
+ description: File attribute value.
+ type: String
+ - contextPath: PhishLabs.File.Attribute.CreatedAt
+ description: File attribute creation time.
+ type: Date
+ - contextPath: PhishLabs.File.FalsePositive
+ description: Whether this file is a false positive.
+ type: Boolean
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ dockerimage: demisto/python3:3.7.3.221
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- PhishLabsIOC TestPlaybook
diff --git a/Integrations/PhishLabsIOC/PhishLabsIOC_description.md b/Integrations/PhishLabsIOC/PhishLabsIOC_description.md
new file mode 100644
index 000000000000..bdd71129a271
--- /dev/null
+++ b/Integrations/PhishLabsIOC/PhishLabsIOC_description.md
@@ -0,0 +1,10 @@
+The IOC feed in PhishLabs is divided into two endpoints:
+### Global Feed
+This is the PhishLabs global database for malicious indicators.
+This feed consists of indicators that are classified as malicious by PhishLabs -
+URLs, domains, and attachments (MD5 hashes). All the indicators from this feed are classified as malicious in Demisto.
+To populate indicators from PhishLabs in Demisto, use the **PhishLabsPopulateIndicators** script/playbooks.
+
+### User Feed
+This feed is exclusive to the user and consists of emails that were sent to PhishLabs and classified as malicious. For each malicious email, an incident is created that contains the email details and the extracted indicators. These indicators are not necessarily malicious themselves; in Demisto, the user can choose whether to classify them as malicious or suspicious. Incidents can be fetched by enabling fetch incidents in the integration configuration.
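+
+For reference, here is a minimal sketch of pulling the Global Feed directly with basic authentication, mirroring what the integration's `phishlabs-global-feed` command does; the credentials and parameter values are illustrative only.
+
+```python
+import requests
+
+res = requests.get(
+    'https://ioc.phishlabs.com/api/v1/globalfeed',
+    auth=('<username>', '<password>'),      # your PhishLabs credentials
+    params={'since': '1h', 'limit': 10},    # indicators updated in the last hour
+    headers={'Accept': 'application/json'},
+)
+indicators = res.json().get('data', [])
+```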
diff --git a/Integrations/PhishLabsIOC/PhishLabsIOC_image.png b/Integrations/PhishLabsIOC/PhishLabsIOC_image.png
new file mode 100644
index 000000000000..2dd80189dd01
Binary files /dev/null and b/Integrations/PhishLabsIOC/PhishLabsIOC_image.png differ
diff --git a/Integrations/PhishLabsIOC/PhishLabsIOC_test.py b/Integrations/PhishLabsIOC/PhishLabsIOC_test.py
new file mode 100644
index 000000000000..770994a0989e
--- /dev/null
+++ b/Integrations/PhishLabsIOC/PhishLabsIOC_test.py
@@ -0,0 +1,324 @@
+from CommonServerPython import *
+
+
+def test_create_indicator_no_date():
+ from PhishLabsIOC import create_indicator_content
+
+ files_json = """
+ {
+ "attributes": [
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "xyz",
+ "name": "md5",
+ "value": "c8092abd8d581750c0530fa1fc8d8318"
+ },
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "abc",
+ "name": "filetype",
+ "value": "application/zip"
+ },
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "qwe",
+ "name": "name",
+ "value": "Baycc.zip"
+ }
+ ],
+ "createdAt": "2019-05-14T13:03:45Z",
+ "falsePositive": false,
+ "id": "def",
+ "type": "Attachment",
+ "value": "c8092abd8d581750c0530fa1fc8d8318"
+ } """
+
+ result = {
+ 'ID': 'def',
+ 'Indicator': 'c8092abd8d581750c0530fa1fc8d8318',
+ 'Type': 'Attachment',
+ 'CreatedAt': '2019-05-14T13:03:45Z',
+ 'UpdatedAt': '',
+ 'FalsePositive': False,
+ }
+
+ indicator = json.loads(files_json)
+
+ actual = create_indicator_content(indicator)
+
+ assert actual == result
+
+
+def test_create_indicator_with_none_date():
+ from PhishLabsIOC import create_indicator_content
+
+ files_json = """
+ {
+ "attributes": [
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "xyz",
+ "name": "md5",
+ "value": "c8092abd8d581750c0530fa1fc8d8318"
+ },
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "abc",
+ "name": "filetype",
+ "value": "application/zip"
+ },
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "qwe",
+ "name": "name",
+ "value": "Baycc.zip"
+ }
+ ],
+ "createdAt": "2019-05-14T13:03:45Z",
+ "updatedAt": "0001-01-01T00:00:00Z",
+ "falsePositive": false,
+ "id": "def",
+ "type": "Attachment",
+ "value": "c8092abd8d581750c0530fa1fc8d8318"
+ } """
+
+ result = {
+ 'ID': 'def',
+ 'Indicator': 'c8092abd8d581750c0530fa1fc8d8318',
+ 'Type': 'Attachment',
+ 'CreatedAt': '2019-05-14T13:03:45Z',
+ 'UpdatedAt': '',
+ 'FalsePositive': False,
+ }
+
+ indicator = json.loads(files_json)
+
+ actual = create_indicator_content(indicator)
+
+ assert actual == result
+
+
+def test_create_indicator_with_date():
+ from PhishLabsIOC import create_indicator_content
+
+ files_json = """
+ {
+ "attributes": [
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "xyz",
+ "name": "md5",
+ "value": "c8092abd8d581750c0530fa1fc8d8318"
+ },
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "abc",
+ "name": "filetype",
+ "value": "application/zip"
+ },
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "qwe",
+ "name": "name",
+ "value": "Baycc.zip"
+ }
+ ],
+ "createdAt": "2019-05-14T13:03:45Z",
+ "updatedAt": "2019-05-14T13:03:45Z",
+ "falsePositive": false,
+ "id": "def",
+ "type": "Attachment",
+ "value": "c8092abd8d581750c0530fa1fc8d8318"
+ } """
+
+ result = {
+ 'ID': 'def',
+ 'Indicator': 'c8092abd8d581750c0530fa1fc8d8318',
+ 'Type': 'Attachment',
+ 'CreatedAt': '2019-05-14T13:03:45Z',
+ 'UpdatedAt': '2019-05-14T13:03:45Z',
+ 'FalsePositive': False,
+ }
+
+ indicator = json.loads(files_json)
+
+ actual = create_indicator_content(indicator)
+
+ assert actual == result
+
+
+def test_populate_context_files():
+ from PhishLabsIOC import populate_context, get_file_properties, create_phishlabs_object
+ files_json = """
+ {
+ "attributes": [
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "xyz",
+ "name": "md5",
+ "value": "c8092abd8d581750c0530fa1fc8d8318"
+ },
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "abc",
+ "name": "filetype",
+ "value": "application/zip"
+ },
+ {
+ "createdAt": "2019-05-14T13:03:45Z",
+ "id": "qwe",
+ "name": "name",
+ "value": "Baycc.zip"
+ }
+ ],
+ "createdAt": "2019-05-14T13:03:45Z",
+ "falsePositive": false,
+ "id": "def",
+ "type": "Attachment",
+ "updatedAt": "0001-01-01T00:00:00Z",
+ "value": "c8092abd8d581750c0530fa1fc8d8318"
+ } """
+ file = json.loads(files_json)
+ file_md5, file_name, file_type = get_file_properties(file)
+
+ phishlabs_entry = create_phishlabs_object(file)
+
+ phishlabs_entry['Name'] = file_name
+ phishlabs_entry['Type'] = file_type
+ phishlabs_entry['MD5'] = file_md5
+
+ phishlabs_result = [{
+ 'ID': 'def',
+ 'CreatedAt': '2019-05-14T13:03:45Z',
+ 'Name': 'Baycc.zip',
+ 'Type': 'application/zip',
+ 'MD5': 'c8092abd8d581750c0530fa1fc8d8318',
+ 'Attribute': [
+ {
+ 'CreatedAt': '2019-05-14T13:03:45Z',
+ 'Type': None,
+ 'Name': 'md5',
+ 'Value': 'c8092abd8d581750c0530fa1fc8d8318'
+ },
+ {
+ 'CreatedAt': '2019-05-14T13:03:45Z',
+ 'Type': None,
+ 'Name': 'filetype',
+ 'Value': 'application/zip'
+ },
+ {
+ 'CreatedAt': '2019-05-14T13:03:45Z',
+ 'Type': None,
+ 'Name': 'name',
+ 'Value': 'Baycc.zip'
+ }
+ ]
+ }]
+
+ global_entry = {
+ 'Name': file_name,
+ 'Type': file_type,
+ 'MD5': file_md5
+ }
+
+ global_result = [{
+ 'Name': 'Baycc.zip',
+ 'Type': 'application/zip',
+ 'MD5': 'c8092abd8d581750c0530fa1fc8d8318'
+ }]
+
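+    # populate_context takes per-indicator-type lists of (global_entry,
+    # phishlabs_entry) pairs; only the files list is populated in this test.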
+ context = populate_context([], [], [(global_entry, phishlabs_entry)], [])
+
+ assert len(context.keys()) == 2
+ assert context[outputPaths['file']] == global_result
+ assert context['PhishLabs.File(val.ID && val.ID === obj.ID)'] == phishlabs_result
+
+
+def test_populate_context_emails():
+ from PhishLabsIOC import populate_context, get_email_properties, create_phishlabs_object
+ emails_json = """
+ {
+ "attributes":[
+ {
+ "createdAt":"2019-05-13T16:54:18Z",
+ "id":"abc",
+ "name":"email-body",
+ "value":"-----Original Message-----From: A Sent: Monday, May 13, 2019 12:22 PMTo:"
+ },
+ {
+ "createdAt":"2019-05-13T16:54:18Z",
+ "id":"def",
+ "name":"from",
+ "value":"someuser@contoso.com"
+ },
+ {
+ "createdAt":"2019-05-13T16:54:18Z",
+ "id":"cf3182ca-92ec-43b6-8aaa-429802a99fe5",
+ "name":"to",
+ "value":"example@gmail.com"
+ }
+ ],
+ "createdAt":"2019-05-13T16:54:18Z",
+ "falsePositive":false,
+ "id":"ghi",
+ "type":"E-mail",
+ "updatedAt":"0001-01-01T00:00:00Z",
+ "value":"FW: Task"
+ } """
+ email = json.loads(emails_json)
+ email_body, email_to, email_from = get_email_properties(email)
+
+ phishlabs_entry = create_phishlabs_object(email)
+
+    phishlabs_entry['To'] = email_to
+    phishlabs_entry['From'] = email_from
+ phishlabs_entry['Body'] = email_body
+ phishlabs_entry['Subject'] = email.get('value')
+
+ phishlabs_result = [{
+ 'ID': 'ghi',
+ 'CreatedAt': '2019-05-13T16:54:18Z',
+        'To': 'example@gmail.com',
+        'From': 'someuser@contoso.com',
+ 'Body': '-----Original Message-----From: A Sent: Monday, May 13, 2019 12:22 PMTo:',
+ 'Subject': 'FW: Task',
+ 'Attribute':
+ [{
+ 'CreatedAt': '2019-05-13T16:54:18Z',
+ 'Type': None,
+ 'Name': 'email-body',
+ 'Value': '-----Original Message-----From: A Sent: Monday, May 13, 2019 12:22 PMTo:'
+ },
+ {
+ 'CreatedAt': '2019-05-13T16:54:18Z',
+ 'Type': None,
+ 'Name': 'from',
+ 'Value': 'someuser@contoso.com'
+ },
+ {
+ 'CreatedAt': '2019-05-13T16:54:18Z',
+ 'Type': None,
+ 'Name': 'to',
+ 'Value': 'example@gmail.com'
+ }]
+ }]
+
+ global_entry = {
+ 'To': email_to,
+ 'From': email_from,
+ 'Body': email_body,
+ 'Subject': email.get('value')
+ }
+
+ global_result = [{
+ 'To': 'example@gmail.com',
+ 'From': 'someuser@contoso.com',
+ 'Body': '-----Original Message-----From: A Sent: Monday, May 13, 2019 12:22 PMTo:',
+ 'Subject': 'FW: Task'
+ }]
+
+ context = populate_context([], [], [], [], [(global_entry, phishlabs_entry)])
+
+ assert len(context.keys()) == 2
+ assert context['Email'] == global_result
+ assert context['PhishLabs.Email(val.ID && val.ID === obj.ID)'] == phishlabs_result
diff --git a/Integrations/PhishLabsIOC/Pipfile b/Integrations/PhishLabsIOC/Pipfile
new file mode 100644
index 000000000000..735bc94d8669
--- /dev/null
+++ b/Integrations/PhishLabsIOC/Pipfile
@@ -0,0 +1,15 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+
+[packages]
+requests = "*"
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/PhishLabsIOC/Pipfile.lock b/Integrations/PhishLabsIOC/Pipfile.lock
new file mode 100644
index 000000000000..1251dec7b78f
--- /dev/null
+++ b/Integrations/PhishLabsIOC/Pipfile.lock
@@ -0,0 +1,207 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "cdf6bcc5b30aeac3202a405ab2cfb3dad24912c0a5319b3825e2ecb2e6a7b60d"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5",
+ "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"
+ ],
+ "version": "==2019.3.9"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
+ "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
+ ],
+ "index": "pypi",
+ "version": "==2.21.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4",
+ "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb"
+ ],
+ "version": "==1.24.3"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:c40744b6bc5162bbb39c1257fe298b7a393861d50978b565f3ccd9cb9de0182a",
+ "sha256:f57abacd059dc3bd666258d1efb0377510a89777fda3e3274e3c01f7c03ae22d"
+ ],
+ "version": "==4.3.20"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:2112d2ca570bb7c3e53ea1a35cd5df42bb0fd10c45f0fb97178679c3c03d64c7",
+ "sha256:c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a"
+ ],
+ "markers": "python_version > '2.7'",
+ "version": "==7.0.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:25a1bc1d148c9a640211872b4ff859878d422bccb59c9965e04eed468a0aa180",
+ "sha256:964cedd2b27c492fbf0b7f58b3284a09cf7f99b0f715941fb24a439b3af1bd1a"
+ ],
+ "version": "==0.11.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:1a8aa4fa958f8f451ac5441f3ac130d9fc86ea38780dd2715e6d5c5882700b24",
+ "sha256:b8bf138592384bd4e87338cb0f256bf5f615398a649d4bd83915f0e4047a5ca6"
+ ],
+ "index": "pypi",
+ "version": "==4.5.0"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:132eae51d6ef3ff4a8c47c393a4ef5ebf0d1aecc96880eb5d6c8ceab7017cc9b",
+ "sha256:18141c1484ab8784006c839be8b985cfc82a2e9725837b0ecfa0203f71c4e39d",
+ "sha256:2baf617f5bbbfe73fd8846463f5aeafc912b5ee247f410700245d68525ec584a",
+ "sha256:3d90063f2cbbe39177e9b4d888e45777012652d6110156845b828908c51ae462",
+ "sha256:4304b2218b842d610aa1a1d87e1dc9559597969acc62ce717ee4dfeaa44d7eee",
+ "sha256:4983ede548ffc3541bae49a82675996497348e55bafd1554dc4e4a5d6eda541a",
+ "sha256:5315f4509c1476718a4825f45a203b82d7fdf2a6f5f0c8f166435975b1c9f7d4",
+ "sha256:6cdfb1b49d5345f7c2b90d638822d16ba62dc82f7616e9b4caa10b72f3f16649",
+ "sha256:7b325f12635598c604690efd7a0197d0b94b7d7778498e76e0710cd582fd1c7a",
+ "sha256:8d3b0e3b8626615826f9a626548057c5275a9733512b137984a68ba1598d3d2f",
+ "sha256:8f8631160c79f53081bd23446525db0bc4c5616f78d04021e6e434b286493fd7",
+ "sha256:912de10965f3dc89da23936f1cc4ed60764f712e5fa603a09dd904f88c996760",
+ "sha256:b010c07b975fe853c65d7bbe9d4ac62f1c69086750a574f6292597763781ba18",
+ "sha256:c908c10505904c48081a5415a1e295d8403e353e0c14c42b6d67f8f97fae6616",
+ "sha256:c94dd3807c0c0610f7c76f078119f4ea48235a953512752b9175f9f98f5ae2bd",
+ "sha256:ce65dee7594a84c466e79d7fb7d3303e7295d16a83c22c7c4037071b059e2c21",
+ "sha256:eaa9cfcb221a8a4c2889be6f93da141ac777eb8819f077e1d09fb12d00a09a93",
+ "sha256:f3376bc31bad66d46d44b4e6522c5c21976bf9bca4ef5987bb2bf727f4506cbb",
+ "sha256:f9202fa138544e13a4ec1a6792c35834250a85958fde1251b6a22e07d1260ae7"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.3.5"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/ProofpointTAP_v2/CHANGELOG.md b/Integrations/ProofpointTAP_v2/CHANGELOG.md
new file mode 100644
index 000000000000..8dc683fca951
--- /dev/null
+++ b/Integrations/ProofpointTAP_v2/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+Fixed the **fetch-incidents** function for cases where the time range since last_fetch is greater than 1 hour.
+
+## [19.8.0] - 2019-08-06
+ - Modified the fetch range for the first fetch to 1 hour (the Proofpoint TAP API maximum).
diff --git a/Integrations/ProofpointTAP_v2/Pipfile b/Integrations/ProofpointTAP_v2/Pipfile
new file mode 100644
index 000000000000..03feaeb15cfc
--- /dev/null
+++ b/Integrations/ProofpointTAP_v2/Pipfile
@@ -0,0 +1,14 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[packages]
+requests = "*"
+requests-mock = "*"
+mock = "*"
+pytest = "*"
+pytest-mock = "*"
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/ProofpointTAP_v2/Pipfile.lock b/Integrations/ProofpointTAP_v2/Pipfile.lock
new file mode 100644
index 000000000000..7dff8551aee4
--- /dev/null
+++ b/Integrations/ProofpointTAP_v2/Pipfile.lock
@@ -0,0 +1,166 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "430863e1854e0800544d8ad52572df57a23ba3a9b62d84f59b0ce24627e2897f"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7",
+ "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db"
+ ],
+ "version": "==0.18"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "index": "pypi",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:3ad685ff8512bf6dc5a8b82ebf73543999b657eded8c11803d9ba6b648986f4d",
+ "sha256:8bb43d1f51ecef60d81854af61a3a880555a14643691cc4b64a6ee269c78f09a"
+ ],
+ "version": "==7.1.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
+ "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
+ ],
+ "version": "==19.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a",
+ "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03"
+ ],
+ "version": "==2.4.0"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6ef6d06de77ce2961156013e9dff62f1b2688aa04d0dc244299fe7d67e09370d",
+ "sha256:a736fed91c12681a7b34617c8fcefe39ea04599ca72c608751c31d89579a3f77"
+ ],
+ "index": "pypi",
+ "version": "==5.0.1"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "index": "pypi",
+ "version": "==2.22.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:8c1019c6aad13642199fbe458275ad6a84907634cc9f0989877ccc4a2840139d",
+ "sha256:ca943a7e809cc12257001ccfb99e3563da9af99d52f261725e96dfe0f9275bc3"
+ ],
+ "version": "==0.5.1"
+ }
+ },
+ "develop": {}
+}
diff --git a/Integrations/ProofpointTAP_v2/ProofpointTAP_v2.py b/Integrations/ProofpointTAP_v2/ProofpointTAP_v2.py
new file mode 100644
index 000000000000..b1c169acc933
--- /dev/null
+++ b/Integrations/ProofpointTAP_v2/ProofpointTAP_v2.py
@@ -0,0 +1,316 @@
+import demistomock as demisto
+
+from CommonServerPython import *
+
+''' IMPORTS '''
+
+from datetime import datetime, timedelta
+import json
+import requests
+import urllib3
+
+# Disable insecure warnings
+urllib3.disable_warnings()
+
+ALL_EVENTS = "All"
+ISSUES_EVENTS = "Issues"
+BLOCKED_CLICKS = "Blocked Clicks"
+PERMITTED_CLICKS = "Permitted Clicks"
+BLOCKED_MESSAGES = "Blocked Messages"
+DELIVERED_MESSAGES = "Delivered Messages"
+
+DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
+
+""" Helper functions """
+
+
+def get_now():
+ """ A wrapper function of datetime.now
+ helps handle tests
+
+ Returns:
+ datetime: time right now
+ """
+ return datetime.now()
+
+
+def get_fetch_times(last_fetch):
+ """ Get list of every hour since last_fetch
+ Args:
+ last_fetch (datetime or str): last_fetch time
+
+ Returns:
+ List[str]: list of str represents every hour since last_fetch
+ """
+ now = get_now()
+ times = list()
+ time_format = "%Y-%m-%dT%H:%M:%SZ"
+ if isinstance(last_fetch, str):
+ times.append(last_fetch)
+ last_fetch = datetime.strptime(last_fetch, time_format)
+ elif isinstance(last_fetch, datetime):
+ times.append(last_fetch.strftime(time_format))
+ while now - last_fetch > timedelta(minutes=59):
+ last_fetch += timedelta(minutes=59)
+ times.append(last_fetch.strftime(time_format))
+ return times
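+
+# Example (illustrative): with last_fetch two hours in the past, get_fetch_times
+# returns three timestamps at 59-minute steps; fetch_incidents then pairs
+# consecutive timestamps into query intervals of under one hour each.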
+
+
+class Client:
+ def __init__(self, proofpoint_url, api_version, verify, service_principal, secret, proxies):
+ self.base_url = "{}/{}/siem".format(proofpoint_url, api_version)
+ self.verify = verify
+ self.service_principal = service_principal
+ self.secret = secret
+ self.proxies = proxies
+
+ def http_request(self, method, url_suffix, params=None, data=None):
+ full_url = self.base_url + url_suffix
+ res = requests.request(
+ method,
+ full_url,
+ verify=self.verify,
+ params=params,
+ json=data,
+ auth=(self.service_principal, self.secret),
+ proxies=self.proxies
+ )
+
+ if res.status_code not in [200, 204]:
+ raise ValueError('Error in API call to Proofpoint TAP [%d]. Reason: %s' % (res.status_code, res.text))
+
+ try:
+ return res.json()
+ except Exception:
+ raise ValueError(
+ "Failed to parse http response to JSON format. Original response body: \n{}".format(res.text))
+
+ def get_events(self, interval=None, since_time=None, since_seconds=None, threat_type=None, threat_status=None,
+ event_type_filter="All"):
+
+ if not interval and not since_time and not since_seconds:
+ raise ValueError("Required to pass interval or sinceTime or sinceSeconds.")
+
+ query_params = {
+ "format": "json"
+ }
+
+ if interval:
+ query_params["interval"] = interval
+
+ if since_time:
+ query_params["sinceTime"] = since_time
+
+ if since_seconds:
+ query_params["sinceSeconds"] = since_seconds
+
+ if threat_status:
+ query_params["threatStatus"] = threat_status
+
+ if threat_type:
+ query_params["threatType"] = threat_type
+
+ url_route = {
+ "All": "/all",
+ "Issues": "/issues",
+ "Blocked Clicks": "/clicks/blocked",
+ "Permitted Clicks": "/clicks/permitted",
+ "Blocked Messages": "/messages/blocked",
+ "Delivered Messages": "/messages/delivered"
+ }[event_type_filter]
+
+ events = self.http_request("GET", url_route, params=query_params)
+
+ return events
+
+
+def test_module(client, first_fetch_time, event_type_filter):
+ """
+ Performs basic get request to get item samples
+ """
+ since_time, _ = parse_date_range(first_fetch_time, date_format=DATE_FORMAT, utc=True)
+ client.get_events(since_time=since_time, event_type_filter=event_type_filter)
+
+ # test was successful
+ return 'ok'
+
+
+def get_events_command(client, args):
+ interval = args.get("interval")
+ threat_type = argToList(args.get("threatType"))
+ threat_status = args.get("threatStatus")
+ since_time = args.get("sinceTime")
+ since_seconds = int(args.get("sinceSeconds")) if args.get("sinceSeconds") else None
+ event_type_filter = args.get("eventTypes")
+
+ raw_events = client.get_events(interval, since_time, since_seconds, threat_type, threat_status, event_type_filter)
+
+ return (
+ tableToMarkdown("Proofpoint Events", raw_events),
+ {
+ 'Proofpoint.MessagesDelivered(val.GUID == obj.GUID)': raw_events.get("messagesDelivered"),
+ 'Proofpoint.MessagesBlocked(val.GUID == obj.GUID)': raw_events.get("messagesBlocked"),
+ 'Proofpoint.ClicksBlocked(val.GUID == obj.GUID)': raw_events.get("clicksBlocked"),
+ 'Proofpoint.ClicksPermitted(val.GUID == obj.GUID)': raw_events.get("clicksPermitted")
+ },
+ raw_events
+ )
+
+
+def fetch_incidents(client, last_run, first_fetch_time, event_type_filter, threat_type, threat_status, limit=50):
+ # Get the last fetch time, if exists
+ last_fetch = last_run.get('last_fetch')
+
+ # Handle first time fetch, fetch incidents retroactively
+ if not last_fetch:
+ last_fetch, _ = parse_date_range(first_fetch_time, date_format=DATE_FORMAT, utc=True)
+ incidents: list = []
+ fetch_times = get_fetch_times(last_fetch)
+ fetch_time_count = len(fetch_times)
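+    # Each pair of consecutive timestamps forms a query interval of under one
+    # hour (the TAP API maximum); the final timestamp is paired with "now".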
+ for index, fetch_time in enumerate(fetch_times):
+ if index < fetch_time_count - 1:
+ raw_events = client.get_events(interval=fetch_time + "/" + fetch_times[index + 1],
+ event_type_filter=event_type_filter,
+ threat_status=threat_status, threat_type=threat_type)
+ else:
+ raw_events = client.get_events(interval=fetch_time + "/" + get_now().strftime(DATE_FORMAT),
+ event_type_filter=event_type_filter,
+ threat_status=threat_status, threat_type=threat_type)
+
+ message_delivered = raw_events.get("messagesDelivered", [])
+ for raw_event in message_delivered:
+ raw_event["type"] = "messages delivered"
+ event_guid = raw_events.get("GUID", "")
+ incident = {
+ "name": "Proofpoint - Message Delivered - {}".format(event_guid),
+ "rawJSON": json.dumps(raw_event)
+ }
+ last_event_fetch = raw_event["messageTime"]
+
+ threat_info_map = raw_event.get("threatsInfoMap", [])
+ for threat in threat_info_map:
+ if threat["threatTime"] > last_fetch:
+ last_event_fetch = last_event_fetch if last_event_fetch > threat["threatTime"] else threat[
+ "threatTime"]
+ incident['occurred'] = last_event_fetch
+ incidents.append(incident)
+
+ message_blocked = raw_events.get("messagesBlocked", [])
+ for raw_event in message_blocked:
+ raw_event["type"] = "messages blocked"
+ event_guid = raw_events.get("GUID", "")
+ incident = {
+ "name": "Proofpoint - Message Blocked - {}".format(event_guid),
+ "rawJSON": json.dumps(raw_event)
+ }
+ last_event_fetch = raw_event["messageTime"]
+
+ threat_info_map = raw_event.get("threatsInfoMap", [])
+ for threat in threat_info_map:
+ if threat["threatTime"] > last_fetch:
+ last_fetch = threat["threatTime"]
+ last_event_fetch = last_event_fetch if last_event_fetch > threat["threatTime"] else threat[
+ "threatTime"]
+
+ incident['occurred'] = last_event_fetch
+ incidents.append(incident)
+
+ clicks_permitted = raw_events.get("clicksPermitted", [])
+ for raw_event in clicks_permitted:
+ raw_event["type"] = "clicks permitted"
+ event_guid = raw_events.get("GUID", "")
+ incident = {
+ "name": "Proofpoint - Click Permitted - {}".format(event_guid),
+ "rawJSON": json.dumps(raw_event),
+ "occurred": raw_event["clickTime"] if raw_event["clickTime"] > raw_event["threatTime"] else raw_event[
+ "threatTime"]
+ }
+ incidents.append(incident)
+
+ clicks_blocked = raw_events.get("clicksBlocked", [])
+ for raw_event in clicks_blocked:
+ raw_event["type"] = "clicks blocked"
+ event_guid = raw_events.get("GUID", "")
+ incident = {
+ "name": "Proofpoint - Click Blocked - {}".format(event_guid),
+ "rawJSON": json.dumps(raw_event),
+ "occurred": raw_event["clickTime"] if raw_event["clickTime"] > raw_event["threatTime"] else raw_event[
+ "threatTime"]
+ }
+ incidents.append(incident)
+
+ # limit incidents to the limit given
+ incidents.sort(key=lambda a: a.get('occurred'))
+ if len(incidents) > limit:
+ incidents = incidents[:limit]
+
+    # Advance last_fetch one second past the newest incident, cutting the
+    # milliseconds if they exist; keep the previous last_fetch when nothing
+    # was fetched, to avoid an IndexError on an empty list
+    if incidents:
+        last_fetch = incidents[-1].get('occurred')
+        last_fetch = last_fetch[:-5] + 'Z' if last_fetch[-5] == '.' else last_fetch
+        last_fetch_datetime = datetime.strptime(last_fetch, DATE_FORMAT)
+        last_fetch = (last_fetch_datetime + timedelta(seconds=1)).strftime(DATE_FORMAT)
+    next_run = {'last_fetch': last_fetch}
+ return next_run, incidents
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+
+def main():
+ """
+ PARSE AND VALIDATE INTEGRATION PARAMS
+ """
+ service_principal = demisto.params().get('credentials').get('identifier')
+ secret = demisto.params().get('credentials').get('password')
+
+    # Remove trailing slash to prevent wrong URL path to service
+    url = demisto.params()['url']
+    server_url = url[:-1] if url.endswith('/') else url
+ api_version = demisto.params().get('api_version')
+
+ verify_certificate = not demisto.params().get('insecure', False)
+
+    # How far back in time to retrieve incidents on the first fetch
+ fetch_time = demisto.params().get('fetch_time', '3 days')
+
+ threat_status = argToList(demisto.params().get('threat_status'))
+
+ threat_type = argToList(demisto.params().get('threat_type'))
+
+ event_type_filter = demisto.params().get('events_type')
+
+ fetch_limit = 50
+ # Remove proxy if not set to true in params
+ proxies = handle_proxy()
+
+ LOG('Command being called is %s' % (demisto.command()))
+
+ try:
+ client = Client(server_url, api_version, verify_certificate, service_principal, secret, proxies)
+
+ if demisto.command() == 'test-module':
+ results = test_module(client, fetch_time, event_type_filter)
+ return_outputs(results, None)
+
+ elif demisto.command() == 'fetch-incidents':
+ next_run, incidents = fetch_incidents(
+ client=client,
+ last_run=demisto.getLastRun(),
+ first_fetch_time=fetch_time,
+ event_type_filter=event_type_filter,
+ threat_status=threat_status,
+ threat_type=threat_type,
+ limit=fetch_limit
+ )
+ demisto.setLastRun(next_run)
+ demisto.incidents(incidents)
+
+ elif demisto.command() == 'proofpoint-get-events':
+ return_outputs(*get_events_command(client, demisto.args()))
+
+ except Exception as e:
+ return_error(str(e))
+
+
+if __name__ in ['__main__', 'builtin', 'builtins']:
+ main()
diff --git a/Integrations/ProofpointTAP_v2/ProofpointTAP_v2.yml b/Integrations/ProofpointTAP_v2/ProofpointTAP_v2.yml
new file mode 100644
index 000000000000..f66e241b9c15
--- /dev/null
+++ b/Integrations/ProofpointTAP_v2/ProofpointTAP_v2.yml
@@ -0,0 +1,169 @@
+category: Email Gateway
+commonfields:
+ id: Proofpoint TAP v2
+ version: -1
+configuration:
+- defaultvalue: https://tap-api-v2.proofpoint.com
+ display: Server URL (e.g., https://tap-api-v2.proofpoint.com)
+ name: url
+ required: true
+ type: 0
+- display: Service Principal (the Password refers to Secret)
+ name: credentials
+ required: true
+ type: 9
+- defaultvalue: v2
+ display: API Version
+ name: api_version
+ options:
+ - v1
+ - v2
+ required: false
+ type: 15
+- defaultvalue: ''
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: ''
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: A string specifying which threat type to return. If empty, all threat types
+ are returned. Can be "url", "attachment", or "messageText".
+ name: threat_type
+ options:
+ - url
+ - attachment
+ - messageText
+ required: false
+ type: 16
+- display: A string specifying which threat statuses to return. If empty, will return
+ "active" and "cleared" threats.
+ name: threat_status
+ options:
+ - active
+ - cleared
+ - falsePositive
+ required: false
+ type: 16
+- defaultvalue: All
+ display: Events to fetch
+ name: events_type
+ options:
+ - All
+ - Issues
+ - Blocked Clicks
+ - Permitted Clicks
+ - Blocked Messages
+ - Delivered Messages
+ required: false
+ type: 15
+- defaultvalue: 1 hour
+  display: First fetch time range (<number> <time unit>, e.g., 1 hour, 30 minutes).
+    The Proofpoint TAP API supports fetching a maximum of 1 hour back.
+ name: fetch_time
+ required: false
+ type: 0
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+description: Use the Proofpoint Targeted Attack Protection (TAP) integration to protect
+ against and provide additional visibility into phishing and other malicious email
+ attacks.
+display: Proofpoint TAP v2
+name: Proofpoint TAP v2
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: 'A string containing an ISO8601-formatted interval. If this interval
+ overlaps with previous requests for data, records from the previous request
+ might be duplicated. The minimum interval is thirty seconds. The maximum interval
+ is one hour. Examples: 2016-05-01T12:00:00Z/2016-05-01T13:00:00Z - an hour
+ interval, beginning at noon UTC on 05-01-2016 PT30M/2016-05-01T12:30:00Z -
+ the thirty minutes beginning at noon UTC on 05-01-2016 and ending at 12:30pm
+ UTC 2016-05-01T05:00:00-0700/PT30M - the same interval as above, but using
+ -0700 as the time zone'
+ isArray: false
+ name: interval
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'A string specifying which threat type to return. If empty, all
+ threat types are returned. The following values are accepted: url,attachment,
+ messageText'
+ isArray: false
+ name: threatType
+ predefined:
+ - url
+ - attachment
+ - messageText
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: A string specifying which threat statuses to return. If empty,
+ active and cleared threats are returned. Can be "active", "cleared", "falsePositive".
+ isArray: false
+ name: threatStatus
+ predefined:
+ - active
+ - cleared
+ - falsePositive
+ required: false
+ secret: false
+ - default: false
+ description: 'A string containing an ISO8601 date. It represents the start of
+ the data retrieval period. The end of the period is determined by the current
+ API server time rounded to the nearest minute. If JSON output is selected,
+ the end time is included in the returned result. Example: 2016-05-01T12:00:00Z'
+ isArray: false
+ name: sinceTime
+ required: false
+ secret: false
+ - default: false
+ description: An integer representing a time window (in seconds) from the current
+ API server time. The start of the window is the current API server time, rounded
+ to the nearest minute, less the number of seconds provided. The end of the
+ window is the current API server time rounded to the nearest minute. If JSON
+ output is selected, the end time is included in the returned result.
+ isArray: false
+ name: sinceSeconds
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: All
+ description: Event types to return.
+ isArray: false
+ name: eventTypes
+ predefined:
+ - All
+ - Issues
+ - Delivered Messages
+ - Blocked Messages
+ - Permitted Clicks
+ - Blocked Clicks
+ required: false
+ secret: false
+ deprecated: false
+ description: Fetches events for all clicks and messages relating to known threats
+ within the specified time period. Details as per clicks/blocked.
+ execution: false
+ name: proofpoint-get-events
+ dockerimage: demisto/python3:3.7.4.977
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- No test - no instance
diff --git a/Integrations/ProofpointTAP_v2/ProofpointTAP_v2_description.md b/Integrations/ProofpointTAP_v2/ProofpointTAP_v2_description.md
new file mode 100644
index 000000000000..e8e597b75b19
--- /dev/null
+++ b/Integrations/ProofpointTAP_v2/ProofpointTAP_v2_description.md
@@ -0,0 +1,5 @@
+## Configure an API account
+To configure an instance of the integration in Demisto, you need to supply your Service Principal and Service Secret. When you configure the integration instance, enter the Service Principal in the Service Principal field, and the Service Secret in the Password field.
+1. Log in to your Proofpoint TAP environment.
+2. Navigate to **Connect Applications > Service Credentials**.
+
diff --git a/Integrations/ProofpointTAP_v2/ProofpointTAP_v2_image.png b/Integrations/ProofpointTAP_v2/ProofpointTAP_v2_image.png
new file mode 100644
index 000000000000..c6b38472e746
Binary files /dev/null and b/Integrations/ProofpointTAP_v2/ProofpointTAP_v2_image.png differ
diff --git a/Integrations/ProofpointTAP_v2/ProofpointTAP_v2_test.py b/Integrations/ProofpointTAP_v2/ProofpointTAP_v2_test.py
new file mode 100644
index 000000000000..242f7c84a030
--- /dev/null
+++ b/Integrations/ProofpointTAP_v2/ProofpointTAP_v2_test.py
@@ -0,0 +1,314 @@
+import json
+from unittest.mock import patch
+
+from ProofpointTAP_v2 import fetch_incidents, Client, ALL_EVENTS, ISSUES_EVENTS, get_events_command
+from datetime import datetime
+
+MOCK_URL = "http://123-fake-api.com"
+MOCK_DELIVERED_MESSAGE = {
+ "GUID": "1111",
+ "QID": "r2FNwRHF004109",
+ "ccAddresses": [
+ "bruce.wayne@university-of-education.zz"
+ ],
+ "clusterId": "pharmtech_hosted",
+ "fromAddress": "badguy@evil.zz",
+ "headerCC": "\"Bruce Wayne\" ",
+ "headerFrom": "\"A. Badguy\" ",
+ "headerReplyTo": None,
+ "headerTo": "\"Clark Kent\" ; \"Diana Prince\" ",
+ "impostorScore": 0,
+ "malwareScore": 100,
+ "messageID": "1111@evil.zz",
+ "threatsInfoMap": [
+ {
+ "campaignId": "46e01b8a-c899-404d-bcd9-189bb393d1a7",
+ "classification": "MALWARE",
+ "threat": "2fab740f143fc1aa4c1cd0146d334c5593b1428f6d062b2c406e5efe8abe95ca",
+ "threatId": "2fab740f143fc1aa4c1cd0146d334c5593b1428f6d062b2c406e5efe8abe95ca",
+ "threatStatus": "active",
+ "threatTime": "2010-01-30T00:00:40.000Z",
+ "threatType": "ATTACHMENT",
+ "threatUrl": "https://threatinsight.proofpoint.com/43fc1aa4c1cd0146d334c5593b1428f6d062b2c406e5efe8abe95ca"
+ },
+ {
+ "campaignId": "46e01b8a-c899-404d-bcd9-189bb393d1a7",
+ "classification": "MALWARE",
+ "threat": "badsite.zz",
+ "threatId": "3ba97fc852c66a7ba761450edfdfb9f4ffab74715b591294f78b5e37a76481aa",
+ "threatTime": "2010-01-30T00:00:30.000Z",
+ "threatType": "URL",
+ "threatUrl": "https://threatinsight.proofpoint.com/a7ba761450edfdfb9f4ffab74715b591294f78b5e37a76481aa"
+ }
+ ],
+ "messageTime": "2010-01-30T00:00:59.000Z",
+ "modulesRun": [
+ "pdr",
+ "sandbox",
+ "spam",
+ "urldefense"
+ ],
+ "phishScore": 46,
+ "policyRoutes": [
+ "default_inbound",
+ "executives"
+ ],
+ "quarantineFolder": "Attachment Defense",
+ "quarantineRule": "module.sandbox.threat",
+ "recipient": [
+ "clark.kent@pharmtech.zz",
+ "diana.prince@pharmtech.zz"
+ ],
+ "replyToAddress": None,
+ "sender": "e99d7ed5580193f36a51f597bc2c0210@evil.zz",
+ "senderIP": "192.0.2.255",
+ "spamScore": 4,
+ "subject": "Please find a totally safe invoice attached."
+}
+
+MOCK_BLOCKED_MESSAGE = {
+ "GUID": "2222",
+ "QID": "r2FNwRHF004109",
+ "ccAddresses": [
+ "bruce.wayne@university-of-education.zz"
+ ],
+ "clusterId": "pharmtech_hosted",
+ "fromAddress": "badguy@evil.zz",
+ "headerCC": "\"Bruce Wayne\" ",
+ "headerFrom": "\"A. Badguy\" ",
+ "headerReplyTo": None,
+ "headerTo": "\"Clark Kent\" ; \"Diana Prince\" ",
+ "impostorScore": 0,
+ "malwareScore": 100,
+ "messageID": "2222@evil.zz",
+ "threatsInfoMap": [
+ {
+ "campaignId": "46e01b8a-c899-404d-bcd9-189bb393d1a7",
+ "classification": "MALWARE",
+ "threat": "2fab740f143fc1aa4c1cd0146d334c5593b1428f6d062b2c406e5efe8abe95ca",
+ "threatId": "2fab740f143fc1aa4c1cd0146d334c5593b1428f6d062b2c406e5efe8abe95ca",
+ "threatStatus": "active",
+ "threatTime": "2010-01-25T00:00:40.000Z",
+ "threatType": "ATTACHMENT",
+ "threatUrl": "https://threatinsight.proofpoint.com/43fc1aa4c1cd0146d334c5593b1428f6d062b2c406e5efe8abe95ca"
+ },
+ {
+ "campaignId": "46e01b8a-c899-404d-bcd9-189bb393d1a7",
+ "classification": "MALWARE",
+ "threat": "badsite.zz",
+ "threatId": "3ba97fc852c66a7ba761450edfdfb9f4ffab74715b591294f78b5e37a76481aa",
+ "threatTime": "2010-01-25T00:00:30.000Z",
+ "threatType": "URL",
+ "threatUrl": "https://threatinsight.proofpoint.com/a7ba761450edfdfb9f4ffab74715b591294f78b5e37a76481aa"
+ }
+ ],
+ "messageTime": "2010-01-25T00:00:10.000Z",
+ "modulesRun": [
+ "pdr",
+ "sandbox",
+ "spam",
+ "urldefense"
+ ],
+ "phishScore": 46,
+ "policyRoutes": [
+ "default_inbound",
+ "executives"
+ ],
+ "quarantineFolder": "Attachment Defense",
+ "quarantineRule": "module.sandbox.threat",
+ "recipient": [
+ "clark.kent@pharmtech.zz",
+ "diana.prince@pharmtech.zz"
+ ],
+ "replyToAddress": None,
+ "sender": "e99d7ed5580193f36a51f597bc2c0210@evil.zz",
+ "senderIP": "192.0.2.255",
+ "spamScore": 4,
+ "subject": "Please find a totally safe invoice attached."
+}
+
+MOCK_PERMITTED_CLICK = {
+ "campaignId": "46e01b8a-c899-404d-bcd9-189bb393d1a7",
+ "classification": "MALWARE",
+ "clickIP": "192.0.2.1",
+ "clickTime": "2010-01-11T00:00:20.000Z",
+ "messageID": "3333",
+ "recipient": "bruce.wayne@pharmtech.zz",
+ "sender": "9facbf452def2d7efc5b5c48cdb837fa@badguy.zz",
+ "senderIP": "192.0.2.255",
+ "threatID": "61f7622167144dba5e3ae4480eeee78b23d66f7dfed970cfc3d086cc0dabdf50",
+ "threatTime": "2010-01-11T00:00:10.000Z",
+ "threatURL": "https://threatinsight.proofpoint.com/#/f7622167144dba5e3ae4480eeee78b23d66f7dfed970cfc3d086cc0dabdf50",
+ "url": "http://badguy.zz/",
+ "userAgent": "Mozilla/5.0(WindowsNT6.1;WOW64;rv:27.0)Gecko/20100101Firefox/27.0"
+}
+
+MOCK_BLOCKED_CLICK = {
+ "campaignId": "46e01b8a-c899-404d-bcd9-189bb393d1a7",
+ "classification": "MALWARE",
+ "clickIP": "192.0.2.2",
+ "clickTime": "2010-01-22T00:00:10.000Z",
+ "messageID": "4444",
+ "recipient": "bruce.wayne@pharmtech.zz",
+ "sender": "9facbf452def2d7efc5b5c48cdb837fa@badguy.zz",
+ "senderIP": "192.0.2.255",
+ "threatID": "61f7622167144dba5e3ae4480eeee78b23d66f7dfed970cfc3d086cc0dabdf50",
+ "threatTime": "2010-01-22T00:00:20.000Z",
+ "threatURL": "https://threatinsight.proofpoint.com/#/f7622167144dba5e3ae4480eeee78b23d66f7dfed970cfc3d086cc0dabdf50",
+ "url": "http://badguy.zz/",
+ "userAgent": "Mozilla/5.0(WindowsNT6.1;WOW64;rv:27.0)Gecko/20100101Firefox/27.0"
+}
+
+MOCK_ISSUES = {
+ "messagesDelivered": [
+ MOCK_DELIVERED_MESSAGE
+ ],
+ "clicksPermitted": [
+ MOCK_PERMITTED_CLICK
+ ]
+}
+
+MOCK_ALL_EVENTS = {
+ "messagesDelivered": [
+ MOCK_DELIVERED_MESSAGE
+ ],
+ "clicksPermitted": [
+ MOCK_PERMITTED_CLICK
+ ],
+ "clicksBlocked": [
+ MOCK_BLOCKED_CLICK
+ ],
+ "messagesBlocked": [
+ MOCK_BLOCKED_MESSAGE
+ ]
+}
+
+
+def get_mocked_time():
+ return datetime.strptime("2010-01-01T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ")
+
+
+def test_command(requests_mock):
+ requests_mock.get(MOCK_URL + "/v2/siem/issues?format=json&sinceSeconds=100&threatType=url&threatType=attachment",
+ json=MOCK_ISSUES)
+
+ client = Client(
+ proofpoint_url=MOCK_URL,
+ api_version="v2",
+ service_principal="user1",
+ secret="123",
+ verify=False,
+ proxies=None
+ )
+
+ args = {
+ "threatType": "url,attachment",
+ "sinceSeconds": "100",
+ "eventTypes": ISSUES_EVENTS
+ }
+ _, outputs, _ = get_events_command(client, args)
+
+ assert len(outputs["Proofpoint.MessagesDelivered(val.GUID == obj.GUID)"]) == 1
+ assert len(outputs["Proofpoint.ClicksPermitted(val.GUID == obj.GUID)"]) == 1
+
+
+def return_self(return_date):
+ return return_date
+
+
+@patch('ProofpointTAP_v2.parse_date_range')
+@patch("ProofpointTAP_v2.get_now", get_mocked_time)
+def test_first_fetch_incidents(mocked_parse_date_range, requests_mock):
+ mock_date = "2010-01-01T00:00:00Z"
+ mocked_parse_date_range.return_value = (mock_date, "never mind")
+ requests_mock.get(
+ MOCK_URL + '/v2/siem/all?format=json&interval=2010-01-01T00%3A00%3A00Z%2F2010-01-01T00%3A00%3A00Z',
+ json=MOCK_ALL_EVENTS)
+
+ client = Client(
+ proofpoint_url=MOCK_URL,
+ api_version="v2",
+ service_principal="user1",
+ secret="123",
+ verify=False,
+ proxies=None
+ )
+
+ next_run, incidents = fetch_incidents(
+ client=client,
+ last_run={},
+ first_fetch_time="3 month",
+ event_type_filter=ALL_EVENTS,
+ threat_status="",
+ threat_type=""
+ )
+
+ assert len(incidents) == 4
+ assert json.loads(incidents[3]['rawJSON'])["messageID"] == "1111@evil.zz"
+
+
+@patch("ProofpointTAP_v2.get_now", get_mocked_time)
+def test_next_fetch(requests_mock):
+ mock_date = "2010-01-01T00:00:00Z"
+ requests_mock.get(MOCK_URL + '/v2/siem/all?format=json&interval=2010-01-01T00%3A00%3A00Z%'
+ '2F2010-01-01T00%3A00%3A00Z&threatStatus=active&threatStatus=cleared',
+ json=MOCK_ALL_EVENTS)
+
+ client = Client(
+ proofpoint_url=MOCK_URL,
+ api_version="v2",
+ service_principal="user1",
+ secret="123",
+ verify=False,
+ proxies=None
+ )
+
+ next_run, incidents = fetch_incidents(
+ client=client,
+ last_run={"last_fetch": mock_date},
+ first_fetch_time="3 month",
+ event_type_filter=ALL_EVENTS,
+ threat_status=["active", "cleared"],
+ threat_type="",
+ limit=50
+ )
+
+ assert len(incidents) == 4
+ assert json.loads(incidents[3]['rawJSON'])["messageID"] == "1111@evil.zz"
+
+
+def test_fetch_limit(requests_mock):
+ mock_date = "2010-01-01T00:00:00Z"
+ requests_mock.get(MOCK_URL + '/v2/siem/all', json=MOCK_ALL_EVENTS)
+
+ client = Client(
+ proofpoint_url=MOCK_URL,
+ api_version="v2",
+ service_principal="user1",
+ secret="123",
+ verify=False,
+ proxies=None
+ )
+
+ next_run, incidents = fetch_incidents(
+ client=client,
+ last_run={"last_fetch": mock_date},
+ first_fetch_time="3 month",
+ event_type_filter=ALL_EVENTS,
+ threat_status=["active", "cleared"],
+ threat_type="",
+ limit=3
+ )
+
+ assert len(incidents) == 3
+ assert next_run.get('last_fetch') == '2010-01-11T00:00:21Z'
+
+
+def test_get_fetch_times():
+ from datetime import datetime, timedelta
+ from ProofpointTAP_v2 import get_fetch_times
+
+ now = datetime.now()
+ before_two_hours = now - timedelta(hours=2)
+ times = get_fetch_times(before_two_hours)
+ assert len(times) == 3
diff --git a/Integrations/Pwned-V2/CHANGELOG.md b/Integrations/Pwned-V2/CHANGELOG.md
new file mode 100644
index 000000000000..30c0553456a7
--- /dev/null
+++ b/Integrations/Pwned-V2/CHANGELOG.md
@@ -0,0 +1,11 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+ - Added handling for cases where the rate limit is exceeded.
+ - Added the *max_retry_time* integration parameter, which defines the maximum time per request.
+
+
+## [19.8.2] - 2019-08-22
+#### New Integration
+Uses the Have I Been Pwned? service to check whether email addresses or domains were compromised in previous breaches.
diff --git a/Integrations/Pwned-V2/Pwned-V2.py b/Integrations/Pwned-V2/Pwned-V2.py
new file mode 100644
index 000000000000..4b15c57d64f6
--- /dev/null
+++ b/Integrations/Pwned-V2/Pwned-V2.py
@@ -0,0 +1,271 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import re
+import time
+from datetime import datetime, timedelta
+
+import requests
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+VENDOR = 'Have I Been Pwned? V2'
+MAX_RETRY_ALLOWED = demisto.params().get('max_retry_time', -1)
+API_KEY = demisto.params().get('api_key')
+USE_SSL = not demisto.params().get('insecure', False)
+
+BASE_URL = 'https://haveibeenpwned.com/api/v3'
+HEADERS = {
+ 'hibp-api-key': API_KEY,
+ 'user-agent': 'DBOT-API',
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
+
+DEFAULT_DBOT_SCORE_EMAIL = 2 if demisto.params().get('default_dbot_score_email') == 'SUSPICIOUS' else 3
+DEFAULT_DBOT_SCORE_DOMAIN = 2 if demisto.params().get('default_dbot_score_domain') == 'SUSPICIOUS' else 3
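+# DBot reputation scale: 0 = Unknown, 1 = Good, 2 = Suspicious, 3 = Bad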
+
+SAMPLE_TEST_SUFFIX = '/breaches?domain=demisto.com'
+PWNED_EMAIL_SUFFIX = '/breachedaccount/'
+PWNED_DOMAIN_SUFFIX = '/breaches?domain='
+PWNED_PASTE_SUFFIX = '/pasteaccount/'
+EMAIL_TRUNCATE_VERIFIED_SUFFIX = '?truncateResponse=false&includeUnverified=true'
+DOMAIN_TRUNCATE_VERIFIED_SUFFIX = '&truncateResponse=false&includeUnverified=true'
+
+RETRIES_END_TIME = datetime.min
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, params=None, data=None):
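+    # HIBP rate limiting: an HTTP 429 response states how many seconds to
+    # wait; sleep that long and retry, erroring out once RETRIES_END_TIME
+    # would be exceeded.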
+ while True:
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ headers=HEADERS
+ )
+
+ if res.status_code != 429:
+ # Rate limit response code
+ break
+
+ if datetime.now() > RETRIES_END_TIME:
+            return_error('Max retry time exceeded.')
+
+ wait_regex = re.search(r'\d+', res.json()['message'])
+ if wait_regex:
+ wait_amount = wait_regex.group()
+
+ if datetime.now() + timedelta(seconds=int(wait_amount)) > RETRIES_END_TIME:
+                return_error('Max retry time exceeded.')
+
+ time.sleep(int(wait_amount))
+
+ if res.status_code == 404:
+ return None
+    if res.status_code != 200:
+ return_error('Error in API call to Pwned Integration [%d] - %s' % (res.status_code, res.reason))
+ return None
+
+ return res.json()
+
+
+def html_description_to_human_readable(breach_description):
+ """
+
+ Args:
+ breach_description: Description of breach from API response
+
+    Returns: The description string, with HTML links converted to Markdown
+        links for better readability in the War Room
+
+ """
+    html_link_pattern = re.compile('<a href="(.+?)"(.*?)>(.+?)</a>')
+ patterns_found = html_link_pattern.findall(breach_description)
+ for link in patterns_found:
+ html_actual_address = link[0]
+ html_readable_name = link[2]
+ link_from_desc = '[' + html_readable_name + ']' + '(' + html_actual_address + ')'
+ breach_description = re.sub(html_link_pattern, link_from_desc, breach_description, count=1)
+ return breach_description
+
+
+def data_to_markdown(query_type, query_arg, api_res, api_paste_res=None):
+ records_found = False
+
+ md = '### Have I Been Pwned query for ' + query_type.lower() + ': *' + query_arg + '*\n'
+
+ if api_res:
+ records_found = True
+ for breach in api_res:
+ verified_breach = 'Verified' if breach['IsVerified'] else 'Unverified'
+ md += '#### ' + breach['Title'] + ' (' + breach['Domain'] + '): ' + str(breach['PwnCount']) + \
+ ' records breached [' + verified_breach + ' breach]\n'
+ md += 'Date: **' + breach['BreachDate'] + '**\n\n'
+ md += html_description_to_human_readable(breach['Description']) + '\n'
+ md += 'Data breached: **' + ','.join(breach['DataClasses']) + '**\n'
+
+ if api_paste_res:
+ records_found = True
+ pastes_list = []
+ for paste_breach in api_paste_res:
+ paste_entry = \
+ {
+ 'Source': paste_breach['Source'],
+ 'Title': paste_breach['Title'],
+ 'ID': paste_breach['Id'],
+ 'Date': '',
+ 'Amount of emails in paste': str(paste_breach['EmailCount'])
+ }
+
+ if paste_breach['Date']:
+ paste_entry['Date'] = paste_breach['Date'].split('T')[0]
+
+ pastes_list.append(paste_entry)
+
+ md += tableToMarkdown('The email address was found in the following "Pastes":',
+ pastes_list,
+ ['ID', 'Title', 'Date', 'Source', 'Amount of emails in paste'])
+
+ if not records_found:
+ md += 'No records found'
+
+ return md
+
+
+def create_dbot_score_dictionary(indicator_value, indicator_type, dbot_score):
+ return {
+ 'Indicator': indicator_value,
+ 'Type': indicator_type,
+ 'Vendor': VENDOR,
+ 'Score': dbot_score
+ }
+
+
+def create_context_entry(context_type, context_main_value, comp_sites, comp_pastes, malicious_score):
+    context_dict = dict()  # type: dict
+
+ if context_type == 'email':
+ context_dict['Address'] = context_main_value
+ else:
+ context_dict['Name'] = context_main_value
+
+ context_dict['Pwned-V2'] = {
+ 'Compromised': {
+ 'Vendor': VENDOR,
+ 'Reporters': ', '.join(comp_sites + comp_pastes)
+ }
+ }
+
+ if malicious_score == 3:
+ context_dict['Malicious'] = add_malicious_to_context(context_type)
+
+ return context_dict
+
+
+def add_malicious_to_context(malicious_type):
+ return {
+ 'Vendor': VENDOR,
+ 'Description': 'The ' + malicious_type + ' has been compromised'
+ }
+
+
+def email_to_entry_context(email, api_email_res, api_paste_res):
+ dbot_score = 0
+ comp_email = dict() # type: dict
+ comp_sites = sorted([item['Title'] for item in api_email_res])
+ comp_pastes = sorted(set(item['Source'] for item in api_paste_res))
+
+ if len(comp_sites) > 0:
+ dbot_score = DEFAULT_DBOT_SCORE_EMAIL
+ email_context = create_context_entry('email', email, comp_sites, comp_pastes, DEFAULT_DBOT_SCORE_EMAIL)
+ comp_email[outputPaths['email']] = email_context
+
+ comp_email['DBotScore'] = create_dbot_score_dictionary(email, 'email', dbot_score)
+
+ return comp_email
+
+
+def domain_to_entry_context(domain, api_res):
+ comp_sites = [item['Title'] for item in api_res]
+ comp_sites = sorted(comp_sites)
+ comp_domain = dict() # type: dict
+ dbot_score = 0
+
+ if len(comp_sites) > 0:
+ dbot_score = DEFAULT_DBOT_SCORE_DOMAIN
+ domain_context = create_context_entry('domain', domain, comp_sites, [], DEFAULT_DBOT_SCORE_DOMAIN)
+ comp_domain[outputPaths['domain']] = domain_context
+
+ comp_domain['DBotScore'] = create_dbot_score_dictionary(domain, 'domain', dbot_score)
+
+ return comp_domain
+
+
+def set_retry_end_time():
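+    # A max_retry_time of -1 (parameter unset) leaves RETRIES_END_TIME at
+    # datetime.min, so the first rate-limit response fails immediately;
+    # otherwise an absolute retry deadline is set for http_request.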
+ global RETRIES_END_TIME
+ if MAX_RETRY_ALLOWED != -1:
+ RETRIES_END_TIME = datetime.now() + timedelta(seconds=int(MAX_RETRY_ALLOWED))
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ http_request('GET', SAMPLE_TEST_SUFFIX)
+ demisto.results('ok')
+
+
+def pwned_email_command():
+ email = demisto.args().get('email')
+ email_suffix = PWNED_EMAIL_SUFFIX + email + EMAIL_TRUNCATE_VERIFIED_SUFFIX
+ paste_suffix = PWNED_PASTE_SUFFIX + email
+ pwned_email(email, email_suffix, paste_suffix)
+
+
+def pwned_email(email, email_suffix, paste_suffix):
+ api_email_res = http_request('GET', url_suffix=email_suffix)
+ api_paste_res = http_request('GET', url_suffix=paste_suffix)
+
+ md = data_to_markdown('Email', email, api_email_res, api_paste_res)
+ ec = email_to_entry_context(email, api_email_res or [], api_paste_res or [])
+ return_outputs(md, ec, api_email_res)
+
+
+def pwned_domain_command():
+ domain = demisto.args().get('domain')
+ suffix = PWNED_DOMAIN_SUFFIX + domain + DOMAIN_TRUNCATE_VERIFIED_SUFFIX
+ pwned_domain(domain, suffix)
+
+
+def pwned_domain(domain, suffix):
+ api_res = http_request('GET', url_suffix=suffix)
+
+ md = data_to_markdown('Domain', domain, api_res)
+ ec = domain_to_entry_context(domain, api_res or [])
+ return_outputs(md, ec, api_res)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('Command being called is %s' % (demisto.command()))
+
+try:
+ handle_proxy()
+ set_retry_end_time()
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() in ['pwned-email', 'email']:
+ pwned_email_command()
+ elif demisto.command() in ['pwned-domain', 'domain']:
+ pwned_domain_command()
+
+# Log exceptions
+except Exception as e:
+ return_error(str(e))
diff --git a/Integrations/Pwned-V2/Pwned-V2.yml b/Integrations/Pwned-V2/Pwned-V2.yml
new file mode 100644
index 000000000000..21fd31c7f84e
--- /dev/null
+++ b/Integrations/Pwned-V2/Pwned-V2.yml
@@ -0,0 +1,207 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: Have I Been Pwned? V2
+ version: -1
+configuration:
+- display: API Key
+ name: api_key
+ required: true
+ type: 4
+- defaultvalue: '30'
+ display: Maximum time per request (in seconds)
+ name: max_retry_time
+ required: false
+ type: 0
+- defaultvalue: SUSPICIOUS
+ display: 'Email Severity: The DBot reputation for compromised emails (SUSPICIOUS
+ or MALICIOUS)'
+ name: default_dbot_score_email
+ required: false
+ type: 0
+- defaultvalue: SUSPICIOUS
+ display: 'Domain Severity: The DBot reputation for compromised domains (SUSPICIOUS
+ or MALICIOUS)'
+ name: default_dbot_score_domain
+ required: false
+ type: 0
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Uses the Have I Been Pwned? service to check whether email addresses
+ or domains were compromised in previous breaches.
+display: Have I Been Pwned? V2
+name: Have I Been Pwned? V2
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: The email address to check.
+ isArray: false
+ name: email
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks if an email address was compromised.
+ execution: false
+ name: pwned-email
+ outputs:
+ - contextPath: Account.Email.Pwned-V2.Compromised.Vendor
+ description: For compromised email addresses, the vendor that made the decision.
+ type: String
+ - contextPath: Account.Email.Pwned-V2.Compromised.Reporters
+ description: For compromised email addresses, the reporters for the vendor to
+ make the compromised decision.
+ type: String
+ - contextPath: Account.Email.Address
+ description: The email address.
+ type: String
+ - contextPath: Email.Malicious.Vendor
+ description: For malicious email addresses, the vendor that made the decision.
+ type: String
+ - contextPath: Email.Malicious.Description
+ description: For malicious email addresses, the reason that the vendor made
+ the decision.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - arguments:
+ - default: true
+ description: The domain to check.
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks if a domain was compromised.
+ execution: false
+ name: pwned-domain
+ outputs:
+ - contextPath: Domain.Pwned-V2.Compromised.Vendor
+ description: For compromised domains, the vendor that made the decision.
+ type: String
+ - contextPath: Domain.Pwned-V2.Compromised.Reporters
+ description: For compromised domains, the reporters for the vendor to make the
+ compromised decision.
+ type: String
+ - contextPath: Domain.Name
+ description: Domain name.
+ type: String
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision.
+ type: String
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the reason that the vendor made the decision.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - arguments:
+ - default: true
+ description: The email address to check.
+ isArray: false
+ name: email
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks if an email address was compromised.
+ execution: false
+ name: email
+ outputs:
+ - contextPath: Account.Email.Pwned-V2.Compromised.Vendor
+ description: For compromised email addresses, the vendor that made the decision.
+ type: String
+ - contextPath: Account.Email.Pwned-V2.Compromised.Reporters
+    description: For compromised email addresses, the reporters that the vendor
+      used to make the compromised decision.
+ type: String
+ - contextPath: Account.Email.Address
+ description: The email address.
+ type: String
+ - contextPath: Email.Malicious.Vendor
+ description: For malicious email addresses, the vendor that made the decision.
+ type: String
+ - contextPath: Email.Malicious.Description
+ description: For malicious email addresses, the reason that the vendor made
+ the decision.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ - arguments:
+ - default: true
+ description: The domain to check.
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks if a domain was compromised.
+ execution: false
+ name: domain
+ outputs:
+ - contextPath: Domain.Pwned-V2.Compromised.Vendor
+ description: For compromised domains, the vendor that made the decision.
+ type: String
+ - contextPath: Domain.Pwned-V2.Compromised.Reporters
+    description: For compromised domains, the reporters that the vendor used to
+      make the compromised decision.
+ type: String
+ - contextPath: Domain.Name
+ description: The domain name.
+ type: String
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision.
+ type: String
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the reason that the vendor made the decision.
+ type: String
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: Number
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
diff --git a/Integrations/Pwned-V2/Pwned-V2_description.md b/Integrations/Pwned-V2/Pwned-V2_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/Pwned-V2/Pwned-V2_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/Pwned-V2/Pwned-V2_image.png b/Integrations/Pwned-V2/Pwned-V2_image.png
new file mode 100644
index 000000000000..1beed5a4fb67
Binary files /dev/null and b/Integrations/Pwned-V2/Pwned-V2_image.png differ
diff --git a/Integrations/QRadar/CHANGELOG.md b/Integrations/QRadar/CHANGELOG.md
new file mode 100644
index 000000000000..134e3b20638f
--- /dev/null
+++ b/Integrations/QRadar/CHANGELOG.md
@@ -0,0 +1,20 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+ - The *note_id* argument is now optional in the ***qradar-get-note*** command. If the *note_id* argument is not specified, the command will return all notes for the offense.
+ - Fixed an issue when closing an offense with the ***qradar-update-offense*** command, in which a user would specify a close reason, but an error was returned specifying that there was no close reason.
+
+## [19.9.0] - 2019-09-04
+ - Fixed an issue in which the ***qradar-get-search-results*** command failed when the root of the result contained a non-ASCII character.
+ - Fixed an issue in which the ***qradar-offense-by-id*** command failed if an SEC header was missing when trying to get an offense type.
+
+## [19.8.2] - 2019-08-22
+ - Fixed an issue in which users would receive an error message for missing SEC headers.
+ - Fixed an issue in which the ***qradar-get-search-results*** command would fail if the root of the result contained a non-ASCII character.
+
+## [19.8.0] - 2019-08-06
+ - Fixed an issue in which the fetch incidents function would fail when there were non-ASCII characters in the data.
+ - Fixed an issue in which the fetch incidents function would ignore the filter if the maximum number of offenses set in the instance configuration were fetched in a single fetch.
+ - Improved error messages for fetch-incidents.
+ - Added the *Required Permissions* information in the detailed description section.
diff --git a/Integrations/QRadar/QRadar.py b/Integrations/QRadar/QRadar.py
new file mode 100644
index 000000000000..f05bae2b1f09
--- /dev/null
+++ b/Integrations/QRadar/QRadar.py
@@ -0,0 +1,1077 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import os
+import json
+import requests
+import traceback
+from requests.exceptions import HTTPError
+from copy import deepcopy
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+SERVER = demisto.params()['server'][:-1] if demisto.params()['server'].endswith('/') else demisto.params()['server']
+CREDENTIALS = demisto.params().get('credentials')
+USERNAME = CREDENTIALS['identifier'] if CREDENTIALS else ''
+PASSWORD = CREDENTIALS['password'] if CREDENTIALS else ''
+TOKEN = demisto.params().get('token')
+USE_SSL = not demisto.params().get('insecure', False)
+AUTH_HEADERS = {'Content-Type': 'application/json'}
+if TOKEN:
+ AUTH_HEADERS['SEC'] = str(TOKEN)
+OFFENSES_PER_CALL = int(demisto.params().get('offensesPerCall', 50))
+OFFENSES_PER_CALL = min(OFFENSES_PER_CALL, 50)  # cap the page size at 50 offenses per call
+
+if not TOKEN and not (USERNAME and PASSWORD):
+ raise Exception('Either credentials or auth token should be provided.')
+
+if not demisto.params()['proxy']:
+    # pop() with a default avoids a KeyError when a proxy variable is not set in the environment
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+''' Header names transformation maps '''
+# Format: {'OldName': 'NewName'}
+
+OFFENSES_NAMES_MAP = {
+ 'follow_up': 'Followup',
+ 'id': 'ID',
+ 'description': 'Description',
+ 'source_address_ids': 'SourceAddress',
+ 'local_destination_address_ids': 'DestinationAddress',
+ 'remote_destination_count': 'RemoteDestinationCount',
+ 'start_time': 'StartTime',
+ 'event_count': 'EventCount',
+ 'magnitude': 'Magnitude',
+ 'last_updated_time': 'LastUpdatedTime',
+ 'offense_type': 'OffenseType'
+}
+
+SINGLE_OFFENSE_NAMES_MAP = {
+ 'credibility': 'Credibility',
+ 'relevance': 'Relevance',
+ 'severity': 'Severity',
+ 'assigned_to': 'AssignedTo',
+ 'destination_networks': 'DestinationHostname',
+ 'status': 'Status',
+ 'closing_user': 'ClosingUser',
+ 'closing_reason_id': 'ClosingReason',
+ 'close_time': 'CloseTime',
+ 'categories': 'Categories',
+ 'follow_up': 'Followup',
+ 'id': 'ID',
+ 'description': 'Description',
+ 'source_address_ids': 'SourceAddress',
+ 'local_destination_address_ids': 'DestinationAddress',
+ 'remote_destination_count': 'RemoteDestinationCount',
+ 'start_time': 'StartTime',
+ 'event_count': 'EventCount',
+ 'flow_count': 'FlowCount',
+ 'offense_source': 'OffenseSource',
+ 'magnitude': 'Magnitude',
+ 'last_updated_time': 'LastUpdatedTime',
+ 'offense_type': 'OffenseType',
+ 'protected': 'Protected'
+}
+
+SEARCH_ID_NAMES_MAP = {
+ 'search_id': 'ID',
+ 'status': 'Status'
+}
+
+ASSET_PROPERTIES_NAMES_MAP = {
+ 'Unified Name': 'Name',
+ 'CVSS Collateral Damage Potential': 'AggregatedCVSSScore',
+ 'Weight': 'Weight'
+}
+ASSET_PROPERTIES_ENDPOINT_NAMES_MAP = {
+ 'Primary OS ID': 'OS'
+}
+
+FULL_ASSET_PROPERTIES_NAMES_MAP = {
+ 'Compliance Notes': 'ComplianceNotes',
+ 'Compliance Plan': 'CompliancePlan',
+ 'CVSS Collateral Damage Potential': 'CollateralDamagePotential',
+ 'Location': 'Location',
+ 'Switch ID': 'SwitchID',
+ 'Switch Port ID': 'SwitchPort',
+ 'Group Name': 'GroupName',
+ 'Vulnerabilities': 'Vulnerabilities'
+}
+
+REFERENCE_NAMES_MAP = {
+ 'number_of_elements': 'NumberOfElements',
+ 'name': 'Name',
+ 'creation_time': 'CreationTime',
+ 'element_type': 'ElementType',
+ 'time_to_live': 'TimeToLive',
+ 'timeout_type': 'TimeoutType',
+ 'data': 'Data',
+ 'last_seen': 'LastSeen',
+ 'source': 'Source',
+ 'value': 'Value',
+ 'first_seen': 'FirstSeen'
+}
+
+DEVICE_MAP = {
+ 'asset_scanner_ids': 'AssetScannerIDs',
+ 'custom_properties': 'CustomProperties',
+ 'deleted': 'Deleted',
+ 'description': 'Description',
+ 'event_collector_ids': 'EventCollectorIDs',
+ 'flow_collector_ids': 'FlowCollectorIDs',
+ 'flow_source_ids': 'FlowSourceIDs',
+ 'id': 'ID',
+ 'log_source_ids': 'LogSourceIDs',
+ 'log_source_group_ids': 'LogSourceGroupIDs',
+ 'name': 'Name',
+ 'qvm_scanner_ids': 'QVMScannerIDs',
+ 'tenant_id': 'TenantID'
+}
+
+''' Utility methods '''
+
+
+# Filters recursively null values from dictionary
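+# Example (illustrative): filter_dict_null({'a': None, 'b': 1, 'c': []}) -> {'b': 1}
+# (None values and empty lists are both dropped)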
+def filter_dict_null(d):
+ if isinstance(d, dict):
+ return dict((k, filter_dict_null(v)) for k, v in d.items() if filter_dict_null(v) is not None)
+ elif isinstance(d, list):
+ if len(d) > 0:
+ return list(map(filter_dict_null, d))
+ return None
+ return d
+
+
+# Converts unicode elements of obj (incl. dictionary and list) to string recursively
+def unicode_to_str_recur(obj):
+ if isinstance(obj, dict):
+ obj = {unicode_to_str_recur(k): unicode_to_str_recur(v) for k, v in obj.iteritems()}
+ elif isinstance(obj, list):
+ obj = map(unicode_to_str_recur, obj)
+ elif isinstance(obj, unicode):
+ obj = obj.encode('utf-8')
+ return obj
+
+
+# Converts to an str
+def convert_to_str(obj):
+ if isinstance(obj, unicode):
+ return obj.encode('utf-8')
+ try:
+ return str(obj)
+ except ValueError:
+ return obj
+
+
+# Filters recursively from dictionary (d1) all keys that do not appear in d2
+def filter_dict_non_intersection_key_to_value(d1, d2):
+ if isinstance(d1, list):
+ return map(lambda x: filter_dict_non_intersection_key_to_value(x, d2), d1)
+ elif isinstance(d1, dict) and isinstance(d2, dict):
+ d2values = d2.values()
+ return dict((k, v) for k, v in d1.items() if k in d2values)
+ return d1
+
+
+# Change the keys of a dictionary according to a conversion map
+# trans_map - { 'OldKey': 'NewKey', ...}
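+# Example (illustrative): replace_keys({'id': 1, 'note_text': 'hi'}, {'id': 'ID', 'note_text': 'Text'})
+# returns {'ID': 1, 'Text': 'hi'}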
+def replace_keys(src, trans_map):
+ def replace(key, trans_map):
+ if key in trans_map:
+ return trans_map[key]
+ return key
+
+ if trans_map:
+ if isinstance(src, list):
+ return map(lambda x: replace_keys(x, trans_map), src)
+ else:
+ src = {replace(k, trans_map): v for k, v in src.iteritems()}
+ return src
+
+
+# Transforms flat dictionary to comma separated values
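+# Example (illustrative): dict_values_to_comma_separated_string({'a': 1, 'b': 2}) may return '1,2'
+# (value order follows the dict's iteration order)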
+def dict_values_to_comma_separated_string(dic):
+ return ','.join(convert_to_str(v) for v in dic.itervalues())
+
+
+# Sends request to the server using the given method, url, headers and params
+def send_request(method, url, headers=AUTH_HEADERS, params=None):
+ try:
+ log_hdr = deepcopy(headers)
+ log_hdr.pop('SEC', None)
+        LOG('QRadar is attempting a {method} request to {url} with headers:\n{headers}\nparams:\n{params}'
+ .format(method=method, url=url, headers=json.dumps(log_hdr, indent=4), params=json.dumps(params, indent=4)))
+ if TOKEN:
+ res = requests.request(method, url, headers=headers, params=params, verify=USE_SSL)
+ else:
+ res = requests.request(method, url, headers=headers, params=params, verify=USE_SSL,
+ auth=(USERNAME, PASSWORD))
+ res.raise_for_status()
+ except HTTPError:
+ err_json = res.json()
+ err_msg = ''
+ if 'message' in err_json:
+ err_msg = err_msg + 'Error: {0}.\n'.format(err_json['message'])
+ elif 'http_response' in err_json:
+ err_msg = err_msg + 'Error: {0}.\n'.format(err_json['http_response'])
+ if 'code' in err_json:
+ err_msg = err_msg + 'QRadar Error Code: {0}'.format(err_json['code'])
+ raise Exception(err_msg)
+ return res.json()
+
+
+# Generic function that receives a result json, and turns it into an entryObject
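+# Example (illustrative): get_entry_for_object('QRadar Note', {'ID': 1}, raw_note, 'ID', 'QRadar.Note')
+# renders a one-row markdown table and stores {'QRadar.Note': {'ID': 1}} in the entry context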
+def get_entry_for_object(title, obj, contents, headers=None, context_key=None, human_readable=None):
+ if len(obj) == 0:
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': contents,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': "There is no output result"
+ }
+ obj = filter_dict_null(obj)
+ if headers:
+ if isinstance(headers, str):
+ headers = headers.split(',')
+ if isinstance(obj, dict):
+ headers = list(set(headers).intersection(set(obj.keys())))
+ ec = {context_key: obj} if context_key else obj
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': contents,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable if human_readable else tableToMarkdown(title, obj, headers),
+ 'EntryContext': ec
+ }
+
+
+# Converts epoch (milliseconds) to an ISO string
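+# Example (illustrative): epoch_to_ISO(1546300800000) -> '2019-01-01T00:00:00.000000Z'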
+def epoch_to_ISO(ms_passed_since_epoch):
+ if ms_passed_since_epoch >= 0:
+ return datetime.utcfromtimestamp(ms_passed_since_epoch / 1000.0).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
+ return ms_passed_since_epoch
+
+
+# Converts closing reason name to id
+def convert_closing_reason_name_to_id(closing_name, closing_reasons=None):
+ if not closing_reasons:
+ closing_reasons = get_closing_reasons(include_deleted=True, include_reserved=True)
+ for closing_reason in closing_reasons:
+ if closing_reason['text'] == closing_name:
+ return closing_reason['id']
+ return closing_name
+
+
+# Converts closing reason id to name
+def convert_closing_reason_id_to_name(closing_id, closing_reasons=None):
+ if not closing_reasons:
+ closing_reasons = get_closing_reasons(include_deleted=True, include_reserved=True)
+ for closing_reason in closing_reasons:
+ if closing_reason['id'] == closing_id:
+ return closing_reason['text']
+ return closing_id
+
+
+# Converts offense type id to name
+def convert_offense_type_id_to_name(offense_type_id, offense_types=None):
+ if not offense_types:
+ offense_types = get_offense_types()
+ if offense_types:
+ for o_type in offense_types:
+ if o_type['id'] == offense_type_id:
+ return o_type['name']
+ return offense_type_id
+
+
+''' Request/Response methods '''
+
+
+# Returns the result of an offenses request
+def get_offenses(_range, _filter='', _fields=''):
+ full_url = '{0}/api/siem/offenses'.format(SERVER)
+ params = {'filter': _filter} if _filter else {}
+ headers = dict(AUTH_HEADERS)
+ if _fields:
+ params['fields'] = _fields
+ if _range:
+ headers['Range'] = 'items={0}'.format(_range)
+ return send_request('GET', full_url, headers, params)
+
+
+# Returns the result of a single offense request
+def get_offense_by_id(offense_id, _filter='', _fields=''):
+ full_url = '{0}/api/siem/offenses/{1}'.format(SERVER, offense_id)
+ params = {"filter": _filter} if _filter else {}
+ headers = dict(AUTH_HEADERS)
+ if _fields:
+ params['fields'] = _fields
+ return send_request('GET', full_url, headers, params)
+
+
+# Updates a single offense and returns the updated offense
+def update_offense(offense_id):
+ url = '{0}/api/siem/offenses/{1}'.format(SERVER, offense_id)
+ return send_request('POST', url, params=demisto.args())
+
+
+# Posts a search in QRadar and returns the search object
+def search(args):
+ url = '{0}/api/ariel/searches'.format(SERVER)
+ return send_request('POST', url, AUTH_HEADERS, params=args)
+
+
+# Returns a search object (doesn't contain the result)
+def get_search(search_id):
+ url = '{0}/api/ariel/searches/{1}'.format(SERVER, convert_to_str(search_id))
+ return send_request('GET', url, AUTH_HEADERS)
+
+
+# Returns a search result
+def get_search_results(search_id, _range=''):
+ url = '{0}/api/ariel/searches/{1}/results'.format(SERVER, convert_to_str(search_id))
+ headers = dict(AUTH_HEADERS)
+ if _range:
+ headers['Range'] = 'items={0}'.format(_range)
+ return send_request('GET', url, headers)
+
+
+# Returns the result of an assets request
+def get_assets(_range='', _filter='', _fields=''):
+ url = '{0}/api/asset_model/assets'.format(SERVER)
+ params = {"filter": _filter} if _filter else {}
+ headers = dict(AUTH_HEADERS)
+ if _fields:
+ params['fields'] = _fields
+ if _range:
+ headers['Range'] = 'items={0}'.format(_range)
+ return send_request('GET', url, headers, params)
+
+
+# Returns the result of a closing reasons request
+def get_closing_reasons(_range='', _filter='', _fields='', include_deleted=False, include_reserved=False):
+ url = '{0}/api/siem/offense_closing_reasons'.format(SERVER)
+ params = {}
+ if _filter:
+ params['filter'] = _filter
+ if include_deleted:
+ params['include_deleted'] = include_deleted
+ if include_reserved:
+ params['include_reserved'] = include_reserved
+    headers = dict(AUTH_HEADERS)  # copy so that a Range header does not leak into the shared AUTH_HEADERS
+ if _range:
+ headers['Range'] = 'items={0}'.format(_range)
+ return send_request('GET', url, headers, params)
+
+
+# Returns the result of an offense types request
+def get_offense_types():
+ url = '{0}/api/siem/offense_types'.format(SERVER)
+    # Due to a bug in QRadar, this function does not work if a username/password was not provided
+ if USERNAME and PASSWORD:
+ return send_request('GET', url)
+ return {}
+
+
+# Returns the result of a get note request
+def get_note(offense_id, note_id, fields):
+ if note_id:
+ url = '{0}/api/siem/offenses/{1}/notes/{2}'.format(SERVER, offense_id, note_id)
+ else:
+ url = '{0}/api/siem/offenses/{1}/notes'.format(SERVER, offense_id)
+ params = {'fields': fields} if fields else {}
+ return send_request('GET', url, AUTH_HEADERS, params=params)
+
+
+# Creates a note and returns the note as a result
+def create_note(offense_id, note_text, fields):
+ url = '{0}/api/siem/offenses/{1}/notes'.format(SERVER, offense_id)
+ params = {'fields': fields} if fields else {}
+ params['note_text'] = note_text
+ return send_request('POST', url, AUTH_HEADERS, params=params)
+
+
+# Returns the result of a reference request
+def get_reference_by_name(ref_name, _range='', _filter='', _fields=''):
+ url = '{0}/api/reference_data/sets/{1}'.format(SERVER, ref_name)
+ params = {'filter': _filter} if _filter else {}
+ headers = dict(AUTH_HEADERS)
+ if _fields:
+ params['fields'] = _fields
+ if _range:
+ headers['Range'] = 'items={0}'.format(_range)
+ return send_request('GET', url, headers, params=params)
+
+
+def create_reference_set(ref_name, element_type, timeout_type, time_to_live):
+ url = '{0}/api/reference_data/sets'.format(SERVER)
+ params = {'name': ref_name, 'element_type': element_type}
+ if timeout_type:
+ params['timeout_type'] = timeout_type
+ if time_to_live:
+ params['time_to_live'] = time_to_live
+ return send_request('POST', url, params=params)
+
+
+def delete_reference_set(ref_name):
+ url = '{0}/api/reference_data/sets/{1}'.format(SERVER, ref_name)
+ return send_request('DELETE', url)
+
+
+def update_reference_set_value(ref_name, value, source=None):
+ url = '{0}/api/reference_data/sets/{1}'.format(SERVER, ref_name)
+ params = {'name': ref_name, 'value': value}
+ if source:
+ params['source'] = source
+ return send_request('POST', url, params=params)
+
+
+def delete_reference_set_value(ref_name, value):
+ url = '{0}/api/reference_data/sets/{1}/{2}'.format(SERVER, ref_name, value)
+ params = {'name': ref_name, 'value': value}
+ return send_request('DELETE', url, params=params)
+
+
+def get_devices(_range='', _filter='', _fields=''):
+ url = '{0}/api/config/domain_management/domains'.format(SERVER)
+ params = {'filter': _filter} if _filter else {}
+ headers = dict(AUTH_HEADERS)
+ if _fields:
+ params['fields'] = _fields
+ if _range:
+ headers['Range'] = 'items={0}'.format(_range)
+ return send_request('GET', url, headers, params=params)
+
+
+def get_domains_by_id(domain_id, _fields=''):
+ url = '{0}/api/config/domain_management/domains/{1}'.format(SERVER, domain_id)
+ headers = dict(AUTH_HEADERS)
+ params = {'fields': _fields} if _fields else {}
+ return send_request('GET', url, headers, params=params)
+
+
+''' Command methods '''
+
+
+def test_module():
+ get_offenses('0-0')
+    # If an error is encountered, send_request raises an exception, which is handled by the command selector
+ return 'ok'
+
+
+def fetch_incidents():
+ query = demisto.params().get('query')
+ last_run = demisto.getLastRun()
+ offense_id = last_run['id'] if last_run and 'id' in last_run else 0
+ if last_run and offense_id == 0:
+ start_time = last_run['startTime'] if 'startTime' in last_run else '0'
+ fetch_query = 'start_time>{0}{1}'.format(start_time, ' AND ({0})'.format(query) if query else '')
+ else:
+ fetch_query = 'id>{0} {1}'.format(offense_id, 'AND ({0})'.format(query) if query else '')
+    # QRadar returns offenses sorted descending by id, and there is no way to change the sorting.
+    # If we get `offensesPerCall` offenses, we (probably) have more than that, so we look for the
+    # end of the list by doubling the page position until a probe comes back empty, then binary
+    # search back to find the end of the list, and finally return `offensesPerCall` from the end.
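+    # Illustrative walk-through (assumed numbers): with OFFENSES_PER_CALL = 50 and 120 matching
+    # offenses, the first fetch returns a full page, find_last_page_pos probes positions 50, 100
+    # and 200 (doubling) until 200 comes back empty, binary-searches between 100 and 200 down to
+    # the last populated position (119), and the final fetch requests the range 70-119.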
+ raw_offenses = get_offenses(_range='0-{0}'.format(OFFENSES_PER_CALL), _filter=fetch_query)
+ if len(raw_offenses) >= OFFENSES_PER_CALL:
+ last_offense_pos = find_last_page_pos(fetch_query)
+ raw_offenses = get_offenses(_range='{0}-{1}'.format(last_offense_pos - OFFENSES_PER_CALL + 1, last_offense_pos),
+ _filter=fetch_query)
+ raw_offenses = unicode_to_str_recur(raw_offenses)
+ incidents = []
+ enrich_offense_res_with_source_and_destination_address(raw_offenses)
+ for offense in raw_offenses:
+ offense_id = max(offense_id, offense['id'])
+ incidents.append(create_incident_from_offense(offense))
+ demisto.setLastRun({'id': offense_id})
+ return incidents
+
+
+# Finds the last page position for QRadar query that receives a range parameter
+def find_last_page_pos(fetch_query):
+    # Make sure it wasn't a fluke that we got exactly OFFENSES_PER_CALL results
+ if len(get_offenses(_range='{0}-{0}'.format(OFFENSES_PER_CALL), _filter=fetch_query)) == 0:
+ return OFFENSES_PER_CALL - 1
+ # Search up until we don't have any more results
+ pos = OFFENSES_PER_CALL * 2
+ while len(get_offenses(_range='{0}-{0}'.format(pos), _filter=fetch_query)) == 1:
+ pos = pos * 2
+    # Binary search the gap from the last step
+ high = pos
+ low = pos / 2
+ while high > low + 1:
+ pos = (high + low) / 2
+ if len(get_offenses(_range='{0}-{0}'.format(pos), _filter=fetch_query)) == 1:
+ # we still have results, raise the bar
+ low = pos
+ else:
+ # we're too high, lower the bar
+ high = pos
+ # low holds the last pos of the list
+ return low
+
+
+# Creates a single incident from an offense
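+# Example (illustrative): an offense {'id': 7, 'description': 'Scan detected', ...} becomes an
+# incident named '7 Scan detected' with one label per offense field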
+def create_incident_from_offense(offense):
+    occurred = epoch_to_ISO(offense['start_time'])
+    labels = []
+    for key in offense.keys():
+        labels.append({'type': key, 'value': convert_to_str(offense[key])})
+    return {
+        'name': '{0} {1}'.format(offense['id'], offense['description']),
+        'labels': labels,
+        'rawJSON': json.dumps(offense),
+        'occurred': occurred
+ }
+
+
+def get_offenses_command():
+ raw_offenses = get_offenses(demisto.args().get('range'), demisto.args().get('filter'), demisto.args().get('fields'))
+ offenses = deepcopy(raw_offenses)
+ enrich_offense_result(offenses)
+ offenses = filter_dict_non_intersection_key_to_value(replace_keys(offenses, OFFENSES_NAMES_MAP), OFFENSES_NAMES_MAP)
+
+ # prepare for printing:
+ headers = demisto.args().get('headers')
+ if not headers:
+ offenses_names_map_cpy = dict(OFFENSES_NAMES_MAP)
+ offenses_names_map_cpy.pop('id', None)
+ offenses_names_map_cpy.pop('description', None)
+ headers = 'ID,Description,' + dict_values_to_comma_separated_string(offenses_names_map_cpy)
+
+ return get_entry_for_object('QRadar offenses', offenses, raw_offenses, headers, 'QRadar.Offense(val.ID === obj.ID)')
+
+
+# Enriches the values of a given offense result (full_enrichment adds more enrichment options)
+def enrich_offense_result(response, full_enrichment=False):
+ enrich_offense_res_with_source_and_destination_address(response)
+ if isinstance(response, list):
+ type_dict = get_offense_types()
+ closing_reason_dict = get_closing_reasons(include_deleted=True, include_reserved=True)
+ for offense in response:
+ enrich_single_offense_result(offense, full_enrichment, type_dict, closing_reason_dict)
+ else:
+ enrich_single_offense_result(response, full_enrichment)
+
+ return response
+
+
+# Converts epoch times to ISO strings and the offense_type id to its name; if full_enrichment,
+# also converts closing_reason_id to the closing reason name
+def enrich_single_offense_result(offense, full_enrichment, type_dict=None, closing_reason_dict=None):
+ enrich_offense_times(offense)
+ if 'offense_type' in offense:
+ offense['offense_type'] = convert_offense_type_id_to_name(offense['offense_type'], type_dict)
+ if full_enrichment and 'closing_reason_id' in offense:
+ offense['closing_reason_id'] = convert_closing_reason_id_to_name(offense['closing_reason_id'],
+ closing_reason_dict)
+
+
+# Enriches offense result dictionary with source and destination addresses
+def enrich_offense_res_with_source_and_destination_address(response):
+ src_adrs, dst_adrs = extract_source_and_destination_addresses_ids(response)
+ # This command might encounter HTML error page in certain cases instead of JSON result. Fallback: cancel the
+ # enrichment
+ try:
+ if src_adrs:
+ enrich_source_addresses_dict(src_adrs)
+ if dst_adrs:
+ enrich_destination_addresses_dict(dst_adrs)
+ if isinstance(response, list):
+ for offense in response:
+ enrich_single_offense_res_with_source_and_destination_address(offense, src_adrs, dst_adrs)
+ else:
+ enrich_single_offense_res_with_source_and_destination_address(response, src_adrs, dst_adrs)
+ except ValueError:
+ pass
+ return response
+
+
+# Helper method: Extracts all source and destination addresses ids from an offense result
+def extract_source_and_destination_addresses_ids(response):
+ src_ids = {} # type: dict
+ dst_ids = {} # type: dict
+ if isinstance(response, list):
+ for offense in response:
+ populate_src_and_dst_dicts_with_single_offense(offense, src_ids, dst_ids)
+ else:
+ populate_src_and_dst_dicts_with_single_offense(response, src_ids, dst_ids)
+
+ return src_ids, dst_ids
+
+
+# Helper method: Populates source and destination id dictionaries with the id key/values
+def populate_src_and_dst_dicts_with_single_offense(offense, src_ids, dst_ids):
+ if 'source_address_ids' in offense and isinstance(offense['source_address_ids'], list):
+ for source_id in offense['source_address_ids']:
+ src_ids[source_id] = source_id
+ if 'local_destination_address_ids' in offense and isinstance(offense['local_destination_address_ids'], list):
+ for destination_id in offense['local_destination_address_ids']:
+ dst_ids[destination_id] = destination_id
+ return None
+
+
+# Helper method: Enriches the source addresses ids dictionary with the source addresses values corresponding to the ids
+def enrich_source_addresses_dict(src_adrs):
+ src_ids_str = dict_values_to_comma_separated_string(src_adrs)
+ source_url = '{0}/api/siem/source_addresses?filter=id in ({1})'.format(SERVER, src_ids_str)
+ src_res = send_request('GET', source_url, AUTH_HEADERS)
+ for src_adr in src_res:
+ src_adrs[src_adr['id']] = convert_to_str(src_adr['source_ip'])
+ return src_adrs
+
+
+# Helper method: Enriches the destination addresses ids dictionary with the destination addresses
+# values corresponding to the ids
+def enrich_destination_addresses_dict(dst_adrs):
+ dst_ids_str = dict_values_to_comma_separated_string(dst_adrs)
+ destination_url = '{0}/api/siem/local_destination_addresses?filter=id in ({1})'.format(SERVER, dst_ids_str)
+ dst_res = send_request('GET', destination_url, AUTH_HEADERS)
+ for dst_adr in dst_res:
+ dst_adrs[dst_adr['id']] = convert_to_str(dst_adr['local_destination_ip'])
+ return dst_adrs
+
+
+# Helper method: For a single offense replaces the source and destination ids with the actual addresses
+def enrich_single_offense_res_with_source_and_destination_address(offense, src_adrs, dst_adrs):
+ if isinstance(offense.get('source_address_ids'), list):
+ for i in range(len(offense['source_address_ids'])):
+ offense['source_address_ids'][i] = src_adrs[offense['source_address_ids'][i]]
+ if isinstance(offense.get('local_destination_address_ids'), list):
+ for i in range(len(offense['local_destination_address_ids'])):
+ offense['local_destination_address_ids'][i] = dst_adrs[offense['local_destination_address_ids'][i]]
+
+ return None
+
+
+# Helper method: For a single offense replaces the epoch times with ISO string
+def enrich_offense_times(offense):
+ if 'start_time' in offense:
+ offense['start_time'] = epoch_to_ISO(offense['start_time'])
+ if 'last_updated_time' in offense:
+ offense['last_updated_time'] = epoch_to_ISO(offense['last_updated_time'])
+ if offense.get('close_time'):
+ offense['close_time'] = epoch_to_ISO(offense['close_time'])
+
+ return None
+
+
+def get_offense_by_id_command():
+ offense_id = demisto.args().get('offense_id')
+ raw_offense = get_offense_by_id(offense_id, demisto.args().get('filter'), demisto.args().get('fields'))
+ offense = deepcopy(raw_offense)
+ enrich_offense_result(offense, full_enrichment=True)
+ offense = filter_dict_non_intersection_key_to_value(replace_keys(offense, SINGLE_OFFENSE_NAMES_MAP),
+ SINGLE_OFFENSE_NAMES_MAP)
+ return get_entry_for_object('QRadar Offenses', offense, raw_offense, demisto.args().get('headers'),
+ 'QRadar.Offense(val.ID === obj.ID)')
+
+
+def update_offense_command():
+ args = demisto.args()
+ if 'closing_reason_name' in args:
+ args['closing_reason_id'] = convert_closing_reason_name_to_id(args.get('closing_reason_name'))
+ elif 'CLOSED' == args.get('status') and not args.get('closing_reason_id'):
+ raise ValueError(
+ 'Invalid input - must provide closing reason name or id (may use "qradar-get-closing-reasons" command to '
+ 'get them) to close offense')
+ offense_id = args.get('offense_id')
+ raw_offense = update_offense(offense_id)
+ offense = deepcopy(raw_offense)
+ enrich_offense_result(offense, full_enrichment=True)
+ offense = filter_dict_non_intersection_key_to_value(replace_keys(offense, SINGLE_OFFENSE_NAMES_MAP),
+ SINGLE_OFFENSE_NAMES_MAP)
+ return get_entry_for_object('QRadar Offense', offense, raw_offense, demisto.args().get('headers'),
+ 'QRadar.Offense(val.ID === obj.ID)')
+
+
+def search_command():
+ raw_search = search(demisto.args())
+ search_res = deepcopy(raw_search)
+ search_res = filter_dict_non_intersection_key_to_value(replace_keys(search_res, SEARCH_ID_NAMES_MAP),
+ SEARCH_ID_NAMES_MAP)
+ return get_entry_for_object('QRadar Search', search_res, raw_search, demisto.args().get('headers'),
+ 'QRadar.Search(val.ID === obj.ID)')
+
+
+def get_search_command():
+ search_id = demisto.args().get('search_id')
+ raw_search = get_search(search_id)
+ search = deepcopy(raw_search)
+ search = filter_dict_non_intersection_key_to_value(replace_keys(search, SEARCH_ID_NAMES_MAP), SEARCH_ID_NAMES_MAP)
+ return get_entry_for_object('QRadar Search Info', search, raw_search, demisto.args().get('headers'),
+ 'QRadar.Search(val.ID === "{0}")'.format(search_id))
+
+
+def get_search_results_command():
+ search_id = demisto.args().get('search_id')
+ raw_search_results = get_search_results(search_id, demisto.args().get('range'))
+ result_key = raw_search_results.keys()[0]
+ title = 'QRadar Search Results from {}'.format(convert_to_str(result_key))
+ context_key = demisto.args().get('output_path') if demisto.args().get(
+ 'output_path') else 'QRadar.Search(val.ID === "{0}").Result.{1}'.format(search_id, result_key)
+ context_obj = unicode_to_str_recur(raw_search_results[result_key])
+ human_readable = tableToMarkdown(title, context_obj, None).replace('\t', '\n')
+ return get_entry_for_object(title, context_obj, raw_search_results, demisto.args().get('headers'), context_key,
+ human_readable=human_readable)
+
+
+def get_assets_command():
+ raw_assets = get_assets(demisto.args().get('range'), demisto.args().get('filter'), demisto.args().get('fields'))
+ assets_result, human_readable_res = create_assets_result(deepcopy(raw_assets))
+ return get_entry_for_assets('QRadar Assets', assets_result, raw_assets, human_readable_res,
+ demisto.args().get('headers'))
+
+
+def get_asset_by_id_command():
+ _filter = "id=" + convert_to_str(demisto.args().get('asset_id'))
+ raw_asset = get_assets(_filter=_filter)
+ asset_result, human_readable_res = create_assets_result(deepcopy(raw_asset), full_values=True)
+ return get_entry_for_assets('QRadar Asset', asset_result, raw_asset, human_readable_res,
+ demisto.args().get('headers'))
+
+
+# Specific implementation for assets commands, that turns asset result to entryObject
+def get_entry_for_assets(title, obj, contents, human_readable_obj, headers=None):
+ if len(obj) == 0:
+ return "There is no output result"
+ obj = filter_dict_null(obj)
+ human_readable_obj = filter_dict_null(human_readable_obj)
+ if headers:
+ if isinstance(headers, str):
+ headers = headers.split(',')
+ headers = list(filter(lambda x: x in headers, list_entry) for list_entry in human_readable_obj)
+ human_readable_md = ''
+ for k, h_obj in human_readable_obj.iteritems():
+ human_readable_md = human_readable_md + tableToMarkdown(k, h_obj, headers)
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': contents,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': "### {0}\n{1}".format(title, human_readable_md),
+ 'EntryContext': obj
+ }
+
+
+def create_assets_result(assets, full_values=False):
+ trans_assets = {}
+ human_readable_trans_assets = {}
+ endpoint_dict = create_empty_endpoint_dict(full_values)
+ for asset in assets:
+ asset_key = 'QRadar.Asset(val.ID === "{0}")'.format(asset['id'])
+ human_readable_key = 'Asset(ID:{0})'.format(asset['id'])
+ populated_asset = create_single_asset_result_and_enrich_endpoint_dict(asset, endpoint_dict, full_values)
+ trans_assets[asset_key] = populated_asset
+ human_readable_trans_assets[human_readable_key] = transform_single_asset_to_hr(populated_asset)
+ # Adding endpoints context items
+ trans_assets['Endpoint'] = endpoint_dict
+ human_readable_trans_assets['Endpoint'] = endpoint_dict
+ return trans_assets, human_readable_trans_assets
+
+
+def transform_single_asset_to_hr(asset):
+ """
+ Prepares asset for human readable
+ """
+ hr_asset = []
+ for k, v in asset.iteritems():
+ if isinstance(v, dict):
+ hr_item = v
+ hr_item['Property Name'] = k
+ hr_asset.append(hr_item)
+ return hr_asset
+
+
+def create_single_asset_result_and_enrich_endpoint_dict(asset, endpoint_dict, full_values):
+ asset_dict = {'ID': asset['id']}
+ for interface in asset['interfaces']:
+ if full_values:
+ endpoint_dict['MACAddress'].append(interface['mac_address'])
+ for ip_address in interface['ip_addresses']:
+ endpoint_dict['IPAddress'].append(ip_address['value'])
+ if full_values:
+ domain_name = get_domain_name(asset['domain_id'])
+ endpoint_dict['Domain'].append(domain_name)
+ # Adding values found in properties of the asset
+ enrich_dict_using_asset_properties(asset, asset_dict, endpoint_dict, full_values)
+ return asset_dict
+
+
+def enrich_dict_using_asset_properties(asset, asset_dict, endpoint_dict, full_values):
+ for prop in asset['properties']:
+ if prop['name'] in ASSET_PROPERTIES_NAMES_MAP:
+ asset_dict[ASSET_PROPERTIES_NAMES_MAP[prop['name']]] = {'Value': prop['value'],
+ 'LastUser': prop['last_reported_by']}
+ elif prop['name'] in ASSET_PROPERTIES_ENDPOINT_NAMES_MAP:
+ endpoint_dict[ASSET_PROPERTIES_ENDPOINT_NAMES_MAP[prop['name']]] = prop['value']
+ elif full_values:
+ if prop['name'] in FULL_ASSET_PROPERTIES_NAMES_MAP:
+ asset_dict[FULL_ASSET_PROPERTIES_NAMES_MAP[prop['name']]] = {'Value': prop['value'],
+ 'LastUser': prop['last_reported_by']}
+ return None
+
+
+# Creates an empty endpoint dictionary (for use in other methods)
+def create_empty_endpoint_dict(full_values):
+ endpoint_dict = {'IPAddress': [], 'OS': []} # type: dict
+ if full_values:
+ endpoint_dict['MACAddress'] = []
+ endpoint_dict['Domain'] = []
+ return endpoint_dict
+
+
+# Retrieves domain name using domain id
+def get_domain_name(domain_id):
+ try:
+ query_param = {
+ 'query_expression': "SELECT DOMAINNAME({0}) AS 'Domain name' FROM events GROUP BY 'Domain name'".format(
+ domain_id)}
+ search_id = search(query_param)['search_id']
+ return get_search_results(search_id)['events'][0]['Domain name']
+ except Exception as e:
+ demisto.results({
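+            # entry type 11 is assumed to render as a warning entry rather than a hard error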
+ 'Type': 11,
+ 'Contents': 'No Domain name was found.{error}'.format(error=str(e)),
+ 'ContentsFormat': formats['text']
+ })
+ return domain_id
+
+
+def get_closing_reasons_command():
+ args = demisto.args()
+ closing_reasons_map = {
+ 'id': 'ID',
+ 'text': 'Name',
+ 'is_reserved': 'IsReserved',
+ 'is_deleted': 'IsDeleted'
+ }
+ raw_closing_reasons = get_closing_reasons(args.get('range'), args.get('filter'), args.get('fields'),
+ args.get('include_deleted'), args.get('include_reserved'))
+ closing_reasons = replace_keys(raw_closing_reasons, closing_reasons_map)
+
+ # prepare for printing:
+ closing_reasons_map.pop('id', None)
+ closing_reasons_map.pop('text', None)
+ headers = 'ID,Name,' + dict_values_to_comma_separated_string(closing_reasons_map)
+
+ return get_entry_for_object('Offense Closing Reasons', closing_reasons, raw_closing_reasons,
+ context_key='QRadar.Offense.ClosingReasons', headers=headers)
+
+
+def get_note_command():
+ raw_note = get_note(demisto.args().get('offense_id'), demisto.args().get('note_id'), demisto.args().get('fields'))
+ note_names_map = {
+ 'id': 'ID',
+ 'note_text': 'Text',
+ 'create_time': 'CreateTime',
+ 'username': 'CreatedBy'
+ }
+ notes = replace_keys(raw_note, note_names_map)
+ if not isinstance(notes, list):
+ notes = [notes]
+ for note in notes:
+ if 'CreateTime' in note:
+ note['CreateTime'] = epoch_to_ISO(note['CreateTime'])
+ return get_entry_for_object('QRadar note for offense: {0}'.format(str(demisto.args().get('offense_id'))), notes,
+ raw_note, demisto.args().get('headers'),
+ 'QRadar.Note(val.ID === "{0}")'.format(demisto.args().get('note_id')))
+
+
+def create_note_command():
+ raw_note = create_note(demisto.args().get('offense_id'), demisto.args().get('note_text'),
+ demisto.args().get('fields'))
+ note_names_map = {
+ 'id': 'ID',
+ 'note_text': 'Text',
+ 'create_time': 'CreateTime',
+ 'username': 'CreatedBy'
+ }
+ note = replace_keys(raw_note, note_names_map)
+ note['CreateTime'] = epoch_to_ISO(note['CreateTime'])
+ return get_entry_for_object('QRadar Note', note, raw_note, demisto.args().get('headers'), 'QRadar.Note')
+
+
+def get_reference_by_name_command():
+ raw_ref = get_reference_by_name(demisto.args().get('ref_name'))
+ ref = replace_keys(raw_ref, REFERENCE_NAMES_MAP)
+    convert_date_elements = (demisto.args().get('date_value') == 'True'
+                             and ref['ElementType'] == 'DATE')
+ enrich_reference_set_result(ref, convert_date_elements)
+ return get_entry_for_reference_set(ref)
+
+
+def enrich_reference_set_result(ref, convert_date_elements=False):
+ if 'Data' in ref:
+ ref['Data'] = replace_keys(ref['Data'], REFERENCE_NAMES_MAP)
+ for item in ref['Data']:
+ item['FirstSeen'] = epoch_to_ISO(item['FirstSeen'])
+ item['LastSeen'] = epoch_to_ISO(item['LastSeen'])
+ if convert_date_elements:
+ try:
+ item['Value'] = epoch_to_ISO(int(item['Value']))
+ except ValueError:
+ pass
+ if 'CreationTime' in ref:
+ ref['CreationTime'] = epoch_to_ISO(ref['CreationTime'])
+ return ref
+
+
+def get_entry_for_reference_set(ref, title='QRadar References'):
+ ref_cpy = deepcopy(ref)
+ data = ref_cpy.pop('Data', None)
+ ec_key = 'QRadar.Reference(val.Name === obj.Name)'
+ entry = get_entry_for_object(title, ref_cpy, ref, demisto.args().get('headers'), ec_key)
+ # Add another table for the data values
+ if data:
+ entry['HumanReadable'] = entry['HumanReadable'] + tableToMarkdown("Reference Items", data)
+ entry['EntryContext'][ec_key]['Data'] = data
+ return entry
+
+
+def create_reference_set_command():
+ args = demisto.args()
+ raw_ref = create_reference_set(args.get('ref_name'), args.get('element_type'), args.get('timeout_type'),
+ args.get('time_to_live'))
+ ref = replace_keys(raw_ref, REFERENCE_NAMES_MAP)
+ enrich_reference_set_result(ref)
+ return get_entry_for_reference_set(ref)
+
+
+def delete_reference_set_command():
+ ref_name = demisto.args().get('ref_name')
+ raw_ref = delete_reference_set(ref_name)
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': raw_ref,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': "Reference Data Deletion Task for '{0}' was initiated. Reference set '{0}' should be deleted "
+ "shortly.".format(ref_name)
+ }
+
+
+def update_reference_set_value_command():
+ args = demisto.args()
+ if args.get('date_value') == 'True':
+ value = date_to_timestamp(args.get('value'), date_format="%Y-%m-%dT%H:%M:%S.%f000Z")
+ else:
+ value = args.get('value')
+ raw_ref = update_reference_set_value(args.get('ref_name'), value, args.get('source'))
+ ref = replace_keys(raw_ref, REFERENCE_NAMES_MAP)
+ enrich_reference_set_result(ref)
+ return get_entry_for_reference_set(ref, title='Element value was updated successfully in reference set:')
+
+
+def delete_reference_set_value_command():
+ args = demisto.args()
+ if args.get('date_value') == 'True':
+ value = date_to_timestamp(args.get('value'), date_format="%Y-%m-%dT%H:%M:%S.%f000Z")
+ else:
+ value = args.get('value')
+ raw_ref = delete_reference_set_value(args.get('ref_name'), value)
+ ref = replace_keys(raw_ref, REFERENCE_NAMES_MAP)
+ enrich_reference_set_result(ref)
+    return get_entry_for_reference_set(ref, title='Element value was deleted successfully from reference set:')
+
+
+def get_domains_command():
+ args = demisto.args()
+ raw_domains = get_devices(args.get('range'), args.get('filter'), args.get('fields'))
+ domains = []
+
+ for raw_domain in raw_domains:
+ domain = replace_keys(raw_domain, DEVICE_MAP)
+ domains.append(domain)
+ if len(domains) == 0:
+ return demisto.results('No Domains Found')
+ else:
+ ec = {'QRadar.Domains': createContext(domains, removeNull=True)}
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': domains,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Domains Found', domains),
+ 'EntryContext': ec
+ }
+
+
+def get_domains_by_id_command():
+ args = demisto.args()
+ raw_domains = get_domains_by_id(args.get('id'), args.get('fields'))
+ formatted_domain = replace_keys(raw_domains, DEVICE_MAP)
+
+ if len(formatted_domain) == 0:
+ return demisto.results('No Domain Found')
+ else:
+ ec = {'QRadar.Domains': createContext(formatted_domain, removeNull=True)}
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': raw_domains,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Domains Found', formatted_domain, removeNull=True),
+ 'EntryContext': ec
+ }
+
+
+# Command selector
+try:
+ LOG('Command being called is {command}'.format(command=demisto.command()))
+ if demisto.command() == 'test-module':
+ demisto.results(test_module())
+ elif demisto.command() == 'fetch-incidents':
+ demisto.incidents(fetch_incidents())
+ elif demisto.command() in ['qradar-offenses', 'qr-offenses']:
+ demisto.results(get_offenses_command())
+ elif demisto.command() == 'qradar-offense-by-id':
+ demisto.results(get_offense_by_id_command())
+ elif demisto.command() in ['qradar-update-offense', 'qr-update-offense']:
+ demisto.results(update_offense_command())
+ elif demisto.command() in ['qradar-searches', 'qr-searches']:
+ demisto.results(search_command())
+ elif demisto.command() in ['qradar-get-search', 'qr-get-search']:
+ demisto.results(get_search_command())
+ elif demisto.command() in ['qradar-get-search-results', 'qr-get-search-results']:
+ demisto.results(get_search_results_command())
+ elif demisto.command() in ['qradar-get-assets', 'qr-get-assets']:
+ demisto.results(get_assets_command())
+ elif demisto.command() == 'qradar-get-asset-by-id':
+ demisto.results(get_asset_by_id_command())
+ elif demisto.command() == 'qradar-get-closing-reasons':
+ demisto.results(get_closing_reasons_command())
+ elif demisto.command() == 'qradar-get-note':
+ demisto.results(get_note_command())
+ elif demisto.command() == 'qradar-create-note':
+ demisto.results(create_note_command())
+ elif demisto.command() == 'qradar-get-reference-by-name':
+ demisto.results(get_reference_by_name_command())
+ elif demisto.command() == 'qradar-create-reference-set':
+ demisto.results(create_reference_set_command())
+ elif demisto.command() == 'qradar-delete-reference-set':
+ demisto.results(delete_reference_set_command())
+ elif demisto.command() in ('qradar-create-reference-set-value', 'qradar-update-reference-set-value'):
+ demisto.results(update_reference_set_value_command())
+ elif demisto.command() == 'qradar-delete-reference-set-value':
+ demisto.results(delete_reference_set_value_command())
+ elif demisto.command() == 'qradar-get-domains':
+ demisto.results(get_domains_command())
+ elif demisto.command() == 'qradar-get-domain-by-id':
+ demisto.results(get_domains_by_id_command())
+except Exception as e:
+ message = e.message if hasattr(e, 'message') else convert_to_str(e)
+ error = 'Error has occurred in the QRadar Integration: {error}\n {message}'.format(error=type(e), message=message)
+ LOG(traceback.format_exc())
+ if demisto.command() == 'fetch-incidents':
+ LOG(error)
+ LOG.print_log()
+ raise Exception(error)
+ else:
+ return_error(error)
diff --git a/Integrations/QRadar/QRadar.yml b/Integrations/QRadar/QRadar.yml
new file mode 100644
index 000000000000..948bb1d173f1
--- /dev/null
+++ b/Integrations/QRadar/QRadar.yml
@@ -0,0 +1,1509 @@
+category: Analytics & SIEM
+commonfields:
+ id: QRadar
+ version: -1
+configuration:
+- display: Server URL (e.g. https://192.168.0.1)
+ name: server
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: false
+ type: 9
+- display: Authentication token
+ name: token
+ required: false
+ type: 4
+- display: Query to fetch offenses
+ name: query
+ required: false
+ type: 0
+- defaultvalue: '50'
+ display: Number of offenses to pull per API call
+ name: offensesPerCall
+ required: false
+ type: 0
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+description: Fetch offenses as incidents and search QRadar
+display: IBM QRadar
+name: QRadar
+script:
+ commands:
+ - arguments:
+ - default: false
+    description: 'Query to filter offenses. For reference, see: https://www.ibm.com/support/knowledgecenter/en/SS42VS_7.3.1/com.ibm.qradar.doc/c_rest_api_filtering.html'
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ - default: false
+    description: 'Use this parameter to specify which fields to return in the response;
+      fields that are not named are excluded. Specify subfields in brackets, and separate
+      multiple fields in the same object with commas. The filter uses QRadar''s field
+      names. For reference, see: https://www.ibm.com/support/knowledgecenter/SSKMKU/com.ibm.qradar.doc_cloud/9.1--siem-offenses-GET.html'
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: 'Range of results to return. e.g.: 0-20'
+ isArray: false
+ name: range
+ required: false
+ secret: false
+ - default: false
+    description: Table headers to use for the human readable output (if none are
+      provided, all table headers are shown)
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: false
+ description: Gets offenses from QRadar
+ execution: false
+ name: qradar-offenses
+ outputs:
+ - contextPath: QRadar.Offense.Followup
+ description: Offense followup.
+ type: boolean
+ - contextPath: QRadar.Offense.ID
+ description: The ID of the offense.
+ type: number
+ - contextPath: QRadar.Offense.Description
+ description: The description of the offense.
+ type: string
+ - contextPath: QRadar.Offense.SourceAddress
+ description: The source addresses that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.DestinationAddress
+ description: The local destination addresses that are associated with the offense.
+      If your offense has a remote destination, you will need to use the QRadarFullSearch
+      playbook with the following query - SELECT destinationip FROM events WHERE
+ inOffense() GROUP BY destinationip
+ type: Unknown
+ - contextPath: QRadar.Offense.RemoteDestinationCount
+    description: The number of remote destinations that are associated with the offense.
+      If this value is greater than 0, the offense has a remote destination, and you
+      will need to use the QRadarFullSearch playbook with the following query -
+      SELECT destinationip FROM events WHERE inOffense() GROUP BY destinationip
+ type: Unknown
+ - contextPath: QRadar.Offense.StartTime
+ description: The time (ISO) when the offense was started.
+ type: date
+ - contextPath: QRadar.Offense.EventCount
+ description: The number of events that are associated with the offense.
+ type: number
+ - contextPath: QRadar.Offense.Magnitude
+ description: The magnitude of the offense.
+ type: number
+ - contextPath: QRadar.Offense.LastUpdatedTime
+ description: The time (ISO) when the offense was last updated.
+ type: date
+ - contextPath: QRadar.Offense.OffenseType
+    description: The offense type (due to API limitations, if a username and password
+      were not provided, this value will be the ID of the offense type)
+ type: string
+ - arguments:
+ - default: true
+ description: Offense ID
+ isArray: false
+ name: offense_id
+ required: true
+ secret: false
+ - default: false
+    description: 'Query to filter the offense. For reference, see: https://www.ibm.com/support/knowledgecenter/en/SS42VS_7.3.1/com.ibm.qradar.doc/c_rest_api_filtering.html'
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ - default: false
+    description: 'Use this parameter to specify which fields to return in the response;
+      fields that are not named are excluded. Specify subfields in brackets, and separate
+      multiple fields in the same object with commas. The filter uses QRadar''s field
+      names. For reference, see: https://www.ibm.com/support/knowledgecenter/SSKMKU/com.ibm.qradar.doc_cloud/9.1--siem-offenses-offense_id-GET.html'
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ - default: false
+    description: Table headers to use for the human readable output (if none are
+      provided, all table headers are shown)
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: false
+  description: Gets the offense with the matching offense ID from QRadar
+ execution: false
+ name: qradar-offense-by-id
+ outputs:
+ - contextPath: QRadar.Offense.Credibility
+ description: The credibility of the offense
+ type: number
+ - contextPath: QRadar.Offense.Relevance
+ description: The relevance of the offense
+ type: number
+ - contextPath: QRadar.Offense.Severity
+ description: The severity of the offense
+ type: number
+ - contextPath: QRadar.Offense.SourceAddress
+ description: The source addresses that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.DestinationAddress
+ description: The local destination addresses that are associated with the offense.
+      If your offense has a remote destination, you will need to use the QRadarFullSearch
+      playbook with the following query - SELECT destinationip FROM events WHERE
+ inOffense() GROUP BY destinationip
+ type: Unknown
+ - contextPath: QRadar.Offense.RemoteDestinationCount
+    description: The number of remote destinations that are associated with the offense.
+      If this value is greater than 0, the offense has a remote destination, and you
+      will need to use the QRadarFullSearch playbook with the following query -
+      SELECT destinationip FROM events WHERE inOffense() GROUP BY destinationip
+ type: Unknown
+ - contextPath: QRadar.Offense.AssignedTo
+ description: The user the offense is assigned to.
+ type: string
+ - contextPath: QRadar.Offense.StartTime
+ description: The time (ISO) when the offense was started.
+ type: date
+ - contextPath: QRadar.Offense.ID
+ description: The ID of the offense.
+ type: int
+ - contextPath: QRadar.Offense.DestinationHostname
+    description: Destination hostname
+ type: Unknown
+ - contextPath: QRadar.Offense.Description
+ description: The description of the offense.
+ type: string
+ - contextPath: QRadar.Offense.EventCount
+ description: The number of events that are associated with the offense.
+ type: number
+ - contextPath: QRadar.Offense.OffenseSource
+ description: The source of the offense.
+ type: string
+ - contextPath: QRadar.Offense.Status
+ description: The status of the offense. One of "OPEN", "HIDDEN", or "CLOSED".
+ type: string
+ - contextPath: QRadar.Offense.Magnitude
+ description: The magnitude of the offense.
+ type: number
+ - contextPath: QRadar.Offense.ClosingUser
+ description: The user that closed the offense
+ type: string
+ - contextPath: QRadar.Offense.ClosingReason
+ description: The offense closing reason.
+ type: string
+ - contextPath: QRadar.Offense.CloseTime
+ description: The time when the offense was closed.
+ type: date
+ - contextPath: QRadar.Offense.LastUpdatedTime
+ description: The time (ISO) when the offense was last updated.
+ type: date
+ - contextPath: QRadar.Offense.Categories
+ description: Event categories that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.FlowCount
+ description: The number of flows that are associated with the offense.
+ type: number
+  - contextPath: QRadar.Offense.Followup
+ description: Offense followup.
+ type: boolean
+ - contextPath: QRadar.Offense.OffenseType
+    description: The offense type (due to API limitations, if a username and password
+      were not provided, this value will be the ID of the offense type)
+ type: string
+ - contextPath: QRadar.Offense.Protected
+    description: Whether the offense is protected
+ type: boolean
+ - arguments:
+ - default: true
+    description: The query expression in AQL (for more information about the Ariel
+      Query Language, see "https://www.ibm.com/support/knowledgecenter/en/SS42VS_7.3.0/com.ibm.qradar.doc/c_aql_intro.html")
+ isArray: false
+ name: query_expression
+ required: true
+ secret: false
+ - default: false
+    description: Table headers to use for the human readable output (if none are
+      provided, all table headers are shown)
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: false
+  description: Searches in QRadar using AQL. It is highly recommended to use the
+    'QRadarFullSearch' playbook instead of this command, as it executes the search
+    and returns the results.
+ execution: false
+ name: qradar-searches
+ outputs:
+ - contextPath: QRadar.Search.ID
+ description: Search ID
+ type: number
+ - contextPath: QRadar.Search.Status
+ description: The status of the search.
+ type: string
+ - arguments:
+ - default: true
+    description: The search ID
+ isArray: false
+ name: search_id
+ required: true
+ secret: false
+ - default: false
+    description: Table headers to use for the human readable output (if none are
+      provided, all table headers are shown)
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: false
+  description: Gets the ID and status of a specific search
+ execution: false
+ name: qradar-get-search
+ outputs:
+ - contextPath: QRadar.Search.ID
+ description: Search ID
+ type: number
+ - contextPath: QRadar.Search.Status
+ description: The status of the search.
+ type: string
+ - arguments:
+ - default: true
+    description: The search ID
+ isArray: false
+ name: search_id
+ required: true
+ secret: false
+ - default: false
+ description: 'Range of results to return. e.g.: 0-20'
+ isArray: false
+ name: range
+ required: false
+ secret: false
+ - default: false
+    description: Table headers to use for the human readable output (if none are
+      provided, all table headers are shown)
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ - default: false
+ description: Replaces the default context output path for the query result (QRadar.Search.Result).
+ e.g. for output_path=QRadar.Correlations the result will be under the key
+ "QRadar.Correlations" in the context data.
+ isArray: false
+ name: output_path
+ required: false
+ secret: false
+ deprecated: false
+ description: Gets search results
+ execution: false
+ name: qradar-get-search-results
+ outputs:
+ - contextPath: QRadar.Search.Result
+ description: The result of the search
+ type: Unknown
+ - arguments:
+ - default: true
+ description: The ID of the offense to update
+ isArray: false
+ name: offense_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Set to true to protect the offense
+ isArray: false
+ name: protected
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Set to true to set the follow up flag on the offense
+ isArray: false
+ name: follow_up
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The new status for the offense
+ isArray: false
+ name: status
+ predefined:
+ - OPEN
+ - HIDDEN
+ - CLOSED
+ required: false
+ secret: false
+ - default: false
+ description: 'The ID of a closing reason. You must provide a valid closing_reason_id
+ when you close an offense. The default closing reasons are: (1) False-Positive,
+ Tuned (2) Non-Issues (3) Policy Violation'
+ isArray: false
+ name: closing_reason_id
+ required: false
+ secret: false
+ - default: false
+ description: 'The name of a closing reason. You must provide a valid closing_reason_name
+ when you close an offense. The default closing reasons are: (1) False-Positive,
+ Tuned (2) Non-Issues (3) Policy Violation'
+ isArray: false
+ name: closing_reason_name
+ required: false
+ secret: false
+ - default: false
+ description: A user to assign the offense to
+ isArray: false
+ name: assigned_to
+ required: false
+ secret: false
+ - default: false
+ description: Table headers
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ - default: false
+ description: Use this parameter to specify which fields you would like to get
+ back in the response. Fields that are not named are excluded. Specify subfields
+ in brackets, and separate multiple fields in the same object with commas.
+ For reference, see https://www.ibm.com/support/knowledgecenter/SSKMKU/com.ibm.qradar.doc_cloud/9.1--siem-offenses-offense_id-POST.html
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ deprecated: false
+ description: Updates an offense.
+ execution: false
+ name: qradar-update-offense
+ outputs:
+ - contextPath: QRadar.Offense.Credibility
+ description: The credibility of the offense
+ type: number
+ - contextPath: QRadar.Offense.Relevance
+ description: The relevance of the offense
+ type: number
+ - contextPath: QRadar.Offense.Severity
+ description: The severity of the offense
+ type: number
+ - contextPath: QRadar.Offense.SourceAddress
+ description: The source addresses that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.DestinationAddress
+ description: The destination addresses that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.AssignedTo
+ description: The user the offense is assigned to.
+ type: string
+ - contextPath: QRadar.Offense.StartTime
+ description: The time (ISO) when the offense was started.
+ type: date
+ - contextPath: QRadar.Offense.ID
+ description: The ID of the offense.
+ type: int
+ - contextPath: QRadar.Offense.DestinationHostname
+ description: Destination hostname
+ type: Unknown
+ - contextPath: QRadar.Offense.Description
+ description: The description of the offense.
+ type: string
+ - contextPath: QRadar.Offense.EventCount
+ description: The number of events that are associated with the offense.
+ type: number
+ - contextPath: QRadar.Offense.OffenseSource
+ description: The source of the offense.
+ type: string
+ - contextPath: QRadar.Offense.Status
+ description: The status of the offense. One of "OPEN", "HIDDEN", or "CLOSED".
+ type: string
+ - contextPath: QRadar.Offense.Magnitude
+ description: The magnitude of the offense.
+ type: number
+ - contextPath: QRadar.Offense.ClosingUser
+ description: The user that closed the offense
+ type: string
+ - contextPath: QRadar.Offense.ClosingReason
+ description: The offense closing reason.
+ type: string
+ - contextPath: QRadar.Offense.CloseTime
+ description: The time when the offense was closed.
+ type: date
+ - contextPath: QRadar.Offense.LastUpdatedTime
+ description: The time (ISO) when the offense was last updated.
+ type: date
+ - contextPath: QRadar.Offense.Categories
+ description: Event categories that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.FlowCount
+ description: The number of flows that are associated with the offense.
+ type: number
+ - contextPath: QRadar.Offense.FollowUp
+ description: Offense followup.
+ type: boolean
+ - contextPath: QRadar.Offense.OffenseType
+ description: A number that represents the offense type.
+ type: string
+ - contextPath: QRadar.Offense.Protected
+ description: Whether the offense is protected.
+ type: boolean
+ - arguments:
+ - default: false
+ description: 'Query to filter assets. For reference, see: https://www.ibm.com/support/knowledgecenter/en/SS42VS_7.3.1/com.ibm.qradar.doc/c_rest_api_filtering.html'
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ - default: false
+ description: 'Use this parameter to specify which fields you would like to get
+ back in the response; fields that are not named are excluded. Specify subfields
+ in brackets, and separate multiple fields in the same object with commas. The
+ filter uses QRadar''s field names. For reference, see: https://www.ibm.com/support/knowledgecenter/SSKMKU/com.ibm.qradar.doc_cloud/9.1--asset_model-assets-GET.html'
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: 'Range of results to return. e.g.: 0-20'
+ isArray: false
+ name: range
+ required: false
+ secret: false
+ - default: false
+ description: Table headers to use in the human readable output (if none are
+ provided, all table headers are shown)
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: false
+ description: Lists all assets found in the model.
+ execution: false
+ name: qradar-get-assets
+ outputs:
+ - contextPath: QRadar.Assets.ID
+ description: The ID of the asset
+ type: number
+ - contextPath: Endpoint.IPAddress
+ description: IP address of the asset
+ type: Unknown
+ - contextPath: QRadar.Assets.Name.Value
+ description: Name of the asset
+ type: string
+ - contextPath: Endpoint.OS
+ description: Asset OS
+ type: number
+ - contextPath: QRadar.Assets.AggregatedCVSSScore.Value
+ description: The aggregated CVSS score
+ type: number
+ - contextPath: QRadar.Assets.AggregatedCVSSScore.LastUser
+ description: Last user who updated the Aggregated CVSS Score
+ type: string
+ - contextPath: QRadar.Assets.Weight.Value
+ description: Asset weight
+ type: number
+ - contextPath: QRadar.Assets.Weight.LastUser
+ description: Last user who updated the weight
+ type: string
+ - contextPath: QRadar.Assets.Name.LastUser
+ description: Last user who updated the name
+ type: string
+ - arguments:
+ - default: true
+ description: The ID of the requested asset.
+ isArray: false
+ name: asset_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves an asset by ID.
+ execution: false
+ name: qradar-get-asset-by-id
+ outputs:
+ - contextPath: QRadar.Assets.ID
+ description: The ID of the asset.
+ type: number
+ - contextPath: Endpoint.MACAddress
+ description: Asset MAC address.
+ type: Unknown
+ - contextPath: Endpoint.IPAddress
+ description: The IP address of the asset (from ip_addresses - value).
+ type: Unknown
+ - contextPath: QRadar.Assets.ComplianceNotes.Value
+ description: Compliance notes
+ type: string
+ - contextPath: QRadar.Assets.CompliancePlan.Value
+ description: Compliance plan
+ type: string
+ - contextPath: QRadar.Assets.CollateralDamagePotential.Value
+ description: Collateral damage potential
+ type: Unknown
+ - contextPath: QRadar.Assets.AggregatedCVSSScore.Value
+ description: The aggregated CVSS score
+ type: number
+ - contextPath: QRadar.Assets.Name.Value
+ description: Name of the asset
+ type: string
+ - contextPath: QRadar.Assets.GroupName
+ description: Name of the asset's group
+ type: string
+ - contextPath: Endpoint.Domain
+ description: DNS name
+ type: Unknown
+ - contextPath: Endpoint.OS
+ description: Asset OS
+ type: Unknown
+ - contextPath: QRadar.Assets.Weight.Value
+ description: Asset weight
+ type: number
+ - contextPath: QRadar.Assets.Vulnerabilities.Value
+ description: Vulnerabilities
+ type: Unknown
+ - contextPath: QRadar.Assets.Location
+ description: The asset location.
+ type: string
+ - contextPath: QRadar.Assets.Description
+ description: The asset description.
+ type: string
+ - contextPath: QRadar.Assets.SwitchID
+ description: Switch ID
+ type: number
+ - contextPath: QRadar.Assets.SwitchPort
+ description: Switch port.
+ type: number
+ - contextPath: QRadar.Assets.Name.LastUser
+ description: Last user who updated the name
+ type: string
+ - contextPath: QRadar.Assets.AggregatedCVSSScore.LastUser
+ description: Last user who updated the Aggregated CVSS Score
+ type: string
+ - contextPath: QRadar.Assets.Weight.LastUser
+ description: Last user who updated the weight
+ type: string
+ - contextPath: QRadar.Assets.ComplianceNotes.LastUser
+ description: Last user who updated the compliance notes
+ type: string
+ - contextPath: QRadar.Assets.CompliancePlan.LastUser
+ description: Last user who updated the compliance plan
+ type: string
+ - contextPath: QRadar.Assets.CollateralDamagePotential.LastUser
+ description: Last user who updated the collateral damage potential
+ type: string
+ - contextPath: QRadar.Assets.Vulnerabilities.LastUser
+ description: Last user who updated the vulnerabilities
+ type: string
+ - arguments:
+ - default: true
+ description: The query expression in AQL (for more information about Ariel
+ Query Language, see https://www.ibm.com/support/knowledgecenter/en/SS42VS_7.3.0/com.ibm.qradar.doc/c_aql_intro.html)
+ isArray: false
+ name: query_expression
+ required: true
+ secret: false
+ - default: false
+ description: Table headers
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: true
+ description: Searches in QRadar
+ execution: false
+ name: qr-searches
+ outputs:
+ - contextPath: QRadar.Search.ID
+ description: Search ID
+ type: number
+ - contextPath: QRadar.Search.State
+ description: The state of the search.
+ type: string
+ - arguments:
+ - default: true
+ description: The search ID.
+ isArray: false
+ name: search_id
+ required: true
+ secret: false
+ - default: false
+ description: Table headers
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: true
+ description: Gets the state of a search, by search ID.
+ execution: false
+ name: qr-get-search
+ outputs:
+ - contextPath: QRadar.Search.ID
+ description: Search ID
+ type: number
+ - contextPath: QRadar.Search.State
+ description: The state of the search.
+ type: string
+ - arguments:
+ - default: true
+ description: The search ID.
+ isArray: false
+ name: search_id
+ required: true
+ secret: false
+ - default: false
+ description: Range of results to return (e.g., 0-20)
+ isArray: false
+ name: range
+ required: false
+ secret: false
+ - default: false
+ description: Table headers
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: true
+ description: Gets search results
+ execution: false
+ name: qr-get-search-results
+ outputs:
+ - contextPath: QRadar.Search.Result
+ description: The result of the search
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The ID of a closing reason. You must provide a valid closing_reason_id
+ when you close an offense
+ isArray: false
+ name: closing_reason_id
+ required: false
+ secret: false
+ - default: true
+ description: The ID of the offense to update
+ isArray: false
+ name: offense_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Set to true to protect the offense
+ isArray: false
+ name: protected
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Set to true to set the follow up flag on the offense
+ isArray: false
+ name: follow_up
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The new status for the offense
+ isArray: false
+ name: status
+ predefined:
+ - OPEN
+ - HIDDEN
+ - CLOSED
+ required: false
+ secret: false
+ - default: false
+ description: 'The name of a closing reason. You must provide a valid closing_reason_name
+ when you close an offense. The default closing reasons are: (1) False-Positive,
+ Tuned (2) Non-Issues (3) Policy Violation'
+ isArray: false
+ name: closing_reason_name
+ required: false
+ secret: false
+ - default: false
+ description: A user to assign the offense to
+ isArray: false
+ name: assigned_to
+ required: false
+ secret: false
+ - default: false
+ description: Table headers
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ - default: false
+ description: Use this parameter to specify which fields you would like to get
+ back in the response. Fields that are not named are excluded. Specify subfields
+ in brackets, and separate multiple fields in the same object with commas
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ deprecated: true
+ description: Update an offense
+ execution: false
+ name: qr-update-offense
+ outputs:
+ - contextPath: QRadar.Offense.Followup
+ description: Offense followup.
+ type: Unknown
+ - contextPath: QRadar.Offense.ID
+ description: The ID of the offense.
+ type: number
+ - contextPath: QRadar.Offense.Description
+ description: The description of the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.SourceAddress
+ description: The source addresses that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.DestinationAddress
+ description: The destination addresses that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.StartTime
+ description: The time (ISO) when the offense was started.
+ type: Unknown
+ - contextPath: QRadar.Offense.EventCount
+ description: The number of events that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.FlowCount
+ description: The number of flows that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.OffenseSource
+ description: The source of the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.Magnitude
+ description: The magnitude of the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.LastUpdatedTime
+ description: The time (ISO) when the offense was last updated.
+ type: Unknown
+ - contextPath: QRadar.Offense.OffenseType
+ description: A number that represents the offense type.
+ type: Unknown
+ - contextPath: QRadar.Offense.Protected
+ description: Whether the offense is protected.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: Range of results to return (e.g., 0-20)
+ isArray: false
+ name: range
+ required: false
+ secret: false
+ - default: false
+ description: Fields to include in the results
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: Query to filter assets
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ - default: false
+ description: Table headers
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: true
+ description: List all assets found in the model
+ execution: false
+ name: qr-get-assets
+ outputs:
+ - contextPath: QRadar.Assets.ID
+ description: The ID of the asset
+ type: number
+ - contextPath: Endpoint.IPAddress
+ description: IP address of the asset
+ type: Unknown
+ - contextPath: QRadar.Assets.Name.Value
+ description: Name of the asset
+ type: Unknown
+ - contextPath: Endpoint.OS
+ description: Asset OS
+ type: Unknown
+ - contextPath: QRadar.Assets.AggregatedCVSSScore.Value
+ description: The aggregated CVSS score
+ type: Unknown
+ - contextPath: QRadar.Assets.Weight.Value
+ description: Asset weight
+ type: Unknown
+ - contextPath: QRadar.Assets.Name.LastUser
+ description: Last user who updated the name
+ type: Unknown
+ - contextPath: QRadar.Assets.AggregatedCVSSScore.LastUser
+ description: Last user who updated the Aggregated CVSS Score
+ type: Unknown
+ - contextPath: QRadar.Assets.Weight.LastUser
+ description: Last user who updated the weight
+ type: Unknown
+ - arguments:
+ - default: false
+ description: Query to filter offenses
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ - default: false
+ description: Fields to include in the results
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: Range of results to return (e.g., 0-20)
+ isArray: false
+ name: range
+ required: false
+ secret: false
+ - default: false
+ description: Table headers
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: true
+ description: Gets offenses from QRadar
+ execution: false
+ name: qr-offenses
+ outputs:
+ - contextPath: QRadar.Offense.Followup
+ description: Offense followup.
+ type: Unknown
+ - contextPath: QRadar.Offense.ID
+ description: The ID of the offense.
+ type: number
+ - contextPath: QRadar.Offense.Description
+ description: The description of the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.SourceAddress
+ description: The source addresses that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.DestinationAddress
+ description: The destination addresses that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.StartTime
+ description: The time (ISO) when the offense was started.
+ type: Unknown
+ - contextPath: QRadar.Offense.EventCount
+ description: The number of events that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.FlowCount
+ description: The number of flows that are associated with the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.OffenseSource
+ description: The source of the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.Magnitude
+ description: The magnitude of the offense.
+ type: Unknown
+ - contextPath: QRadar.Offense.LastUpdatedTime
+ description: The time (ISO) when the offense was last updated.
+ type: Unknown
+ - contextPath: QRadar.Offense.OffenseType
+ description: A number that represents the offense type
+ type: Unknown
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: If true, reserved closing reasons are included in the response
+ isArray: false
+ name: include_reserved
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: If true, deleted closing reasons are included in the response
+ isArray: false
+ name: include_deleted
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: 'Query to filter results. For reference, see: https://www.ibm.com/support/knowledgecenter/en/SS42VS_7.3.1/com.ibm.qradar.doc/c_rest_api_filtering.html'
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ - default: false
+ description: 'Use this parameter to specify which fields you would like to get
+ back in the response; fields that are not named are excluded. Specify subfields
+ in brackets, and separate multiple fields in the same object with commas. The
+ filter uses QRadar''s field names. For reference, see: https://www.ibm.com/support/knowledgecenter/SSKMKU/com.ibm.qradar.doc_cloud/9.1--siem-offense_closing_reasons-GET.html'
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: 'Range of results to return. e.g.: 0-20'
+ isArray: false
+ name: range
+ required: false
+ secret: false
+ deprecated: false
+ description: Gets the offense closing reasons.
+ execution: false
+ name: qradar-get-closing-reasons
+ outputs:
+ - contextPath: QRadar.Offense.ClosingReasons.ID
+ description: Closing reason ID
+ type: number
+ - contextPath: QRadar.Offense.ClosingReasons.Name
+ description: Closing reason name
+ type: string
+ - arguments:
+ - default: true
+ description: The offense ID to add the note to
+ isArray: false
+ name: offense_id
+ required: true
+ secret: false
+ - default: false
+ description: The note text
+ isArray: false
+ name: note_text
+ required: true
+ secret: false
+ - default: false
+ description: 'Use this parameter to specify which fields you would like to get
+ back in the response; fields that are not named are excluded. Specify subfields
+ in brackets, and separate multiple fields in the same object with commas. The
+ filter uses QRadar''s field names. For reference, see: https://www.ibm.com/support/knowledgecenter/SSKMKU/com.ibm.qradar.doc_cloud/9.1--siem-offenses-offense_id-notes-POST.html'
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: Table headers to use in the human readable output (if none are
+ provided, all table headers are shown)
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a note on an offense.
+ execution: false
+ name: qradar-create-note
+ outputs:
+ - contextPath: QRadar.Note.ID
+ description: Note ID
+ type: number
+ - contextPath: QRadar.Note.Text
+ description: Note text
+ type: string
+ - contextPath: QRadar.Note.CreateTime
+ description: The creation time of the note
+ type: date
+ - contextPath: QRadar.Note.CreatedBy
+ description: The user who created the note
+ type: string
+ - arguments:
+ - default: true
+ description: The offense ID to retrieve the note from
+ isArray: false
+ name: offense_id
+ required: true
+ secret: false
+ - default: false
+ description: The note ID
+ isArray: false
+ name: note_id
+ required: false
+ secret: false
+ - default: false
+ description: 'Use this parameter to specify which fields you would like to get
+ back in the response; fields that are not named are excluded. Specify subfields
+ in brackets, and separate multiple fields in the same object with commas. The
+ filter uses QRadar''s field names. For reference, see: https://www.ibm.com/support/knowledgecenter/SSKMKU/com.ibm.qradar.doc_cloud/9.1--siem-offenses-offense_id-notes-note_id-GET.html'
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: Table headers to use in the human readable output (if none are
+ provided, all table headers are shown)
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a note for an offense.
+ execution: false
+ name: qradar-get-note
+ outputs:
+ - contextPath: QRadar.Note.ID
+ description: Note ID
+ type: number
+ - contextPath: QRadar.Note.Text
+ description: Note text
+ type: string
+ - contextPath: QRadar.Note.CreateTime
+ description: The creation time of the note
+ type: date
+ - contextPath: QRadar.Note.CreatedBy
+ description: The user who created the note
+ type: string
+ - arguments:
+ - default: true
+ description: The name of the requested reference set.
+ isArray: false
+ name: ref_name
+ required: true
+ secret: false
+ - default: false
+ description: Table headers to use in the human readable output (if none are
+ provided, all table headers are shown)
+ isArray: false
+ name: headers
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: If set to True, the command will try to convert the data values
+ to ISO-8601 strings.
+ isArray: false
+ name: date_value
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves information about a reference set, including the data
+ it contains. This feature is supported from version 8.1 and upward.
+ execution: false
+ name: qradar-get-reference-by-name
+ outputs:
+ - contextPath: QRadar.Reference.Name
+ description: The name of the reference set.
+ type: string
+ - contextPath: QRadar.Reference.CreationTime
+ description: The creation time (ISO) of the reference.
+ type: date
+ - contextPath: QRadar.Reference.ElementType
+ description: Reference element type.
+ type: string
+ - contextPath: QRadar.Reference.NumberOfElements
+ description: Number of elements.
+ type: number
+ - contextPath: QRadar.Reference.TimeToLive
+ description: Reference time to live.
+ type: string
+ - contextPath: QRadar.Reference.TimeoutType
+ description: 'Reference timeout type. One of: UNKNOWN, FIRST_SEEN, LAST_SEEN'
+ type: string
+ - contextPath: QRadar.Reference.Data
+ description: Reference set items
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The name of the reference set to create.
+ isArray: false
+ name: ref_name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The element type for the values allowed in the reference set.
+ The allowed values are: ALN (alphanumeric), ALNIC (alphanumeric ignore case),
+ IP (IP address), NUM (numeric), PORT (port number) or DATE. Note that date
+ values need to be represented in milliseconds since the Unix Epoch January
+ 1st 1970.'
+ isArray: false
+ name: element_type
+ predefined:
+ - ALN
+ - ALNIC
+ - IP
+ - NUM
+ - PORT
+ - DATE
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The allowed values are FIRST_SEEN, LAST_SEEN, and UNKNOWN. The
+ default value is UNKNOWN.
+ isArray: false
+ name: timeout_type
+ predefined:
+ - FIRST_SEEN
+ - LAST_SEEN
+ - UNKNOWN
+ required: false
+ secret: false
+ - default: false
+ description: 'The time to live interval, for example: "1 month" or "5 minutes"'
+ isArray: false
+ name: time_to_live
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new reference set. If the provided name is already in
+ use, this command will fail.
+ execution: false
+ name: qradar-create-reference-set
+ outputs:
+ - contextPath: QRadar.Reference.CreationTime
+ description: Creation time of the reference set.
+ type: date
+ - contextPath: QRadar.Reference.ElementType
+ description: 'The element type for the values allowed in the reference set.
+ The allowed values are: ALN (alphanumeric), ALNIC (alphanumeric ignore case),
+ IP (IP address), NUM (numeric), PORT (port number) or DATE.'
+ type: string
+ - contextPath: QRadar.Reference.Name
+ description: Name of the reference set.
+ type: string
+ - contextPath: QRadar.Reference.NumberOfElements
+ description: Number of elements in the created reference set.
+ type: number
+ - contextPath: QRadar.Reference.TimeoutType
+ description: Timeout type of the reference. The allowed values are FIRST_SEEN,
+ LAST_SEEN and UNKNOWN.
+ type: string
+ - arguments:
+ - default: true
+ description: The name of the reference set to delete.
+ isArray: false
+ name: ref_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes a reference set corresponding to the name provided.
+ execution: false
+ name: qradar-delete-reference-set
+ - arguments:
+ - default: false
+ description: The name of the reference set to add or update a value in.
+ isArray: false
+ name: ref_name
+ required: true
+ secret: false
+ - default: false
+ description: 'The value to add or update in the reference set. Note: Date values
+ must be represented as epoch timestamps in reference sets (milliseconds since
+ the Unix Epoch, January 1st 1970). If ''date_value'' is set to ''True'', the argument
+ will be converted from a date in the format ''%Y-%m-%dT%H:%M:%S.%f000Z'' (e.g.
+ ''2018-11-06T08:56:41.000000Z'') to epoch.'
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ - default: false
+ description: An indication of where the data originated. The default value is
+ 'reference data api'.
+ isArray: false
+ name: source
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: 'If set to True, the ''value'' argument will be converted from a date in the format
+ ''%Y-%m-%dT%H:%M:%S.%f000Z'' (e.g. ''2018-11-06T08:56:41.000000Z'') to epoch.'
+ isArray: false
+ name: date_value
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds or updates a value in a reference set.
+ execution: false
+ name: qradar-create-reference-set-value
+ outputs:
+ - contextPath: QRadar.Reference.Name
+ description: The name of the reference set.
+ type: string
+ - contextPath: QRadar.Reference.CreationTime
+ description: The creation time (ISO) of the reference.
+ type: date
+ - contextPath: QRadar.Reference.ElementType
+ description: Reference element type.
+ type: string
+ - contextPath: QRadar.Reference.NumberOfElements
+ description: Number of elements.
+ type: number
+ - contextPath: QRadar.Reference.TimeoutType
+ description: 'Reference timeout type. One of: UNKNOWN, FIRST_SEEN, LAST_SEEN'
+ type: string
+ - arguments:
+ - default: false
+ description: The name of the reference set to add or update a value in.
+ isArray: false
+ name: ref_name
+ required: true
+ secret: false
+ - default: false
+ description: 'The value to add or update in the reference set. Note: Date values
+ must be represented in milliseconds since the Unix Epoch January 1st 1970.'
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ - default: false
+ description: An indication of where the data originated. The default value is
+ 'reference data api'.
+ isArray: false
+ name: source
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: 'If set to True, the ''value'' argument will be converted from a date in the format
+ ''%Y-%m-%dT%H:%M:%S.%f000Z'' (e.g. ''2018-11-06T08:56:41.000000Z'') to epoch.'
+ isArray: false
+ name: date_value
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds or updates a value in a reference set.
+ execution: false
+ name: qradar-update-reference-set-value
+ outputs:
+ - contextPath: QRadar.Reference.Name
+ description: The name of the reference set.
+ type: string
+ - contextPath: QRadar.Reference.CreationTime
+ description: The creation time (ISO) of the reference.
+ type: date
+ - contextPath: QRadar.Reference.ElementType
+ description: Reference element type.
+ type: string
+ - contextPath: QRadar.Reference.NumberOfElements
+ description: Number of elements.
+ type: number
+ - contextPath: QRadar.Reference.TimeoutType
+ description: 'Reference timeout type. One of: UNKNOWN, FIRST_SEEN, LAST_SEEN'
+ type: string
+ - arguments:
+ - default: false
+ description: The name of the reference set to remove a value from.
+ isArray: false
+ name: ref_name
+ required: true
+ secret: false
+ - default: false
+ description: The value to remove from the reference set.
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: 'If set to True, the ''value'' argument will be converted from a date in the format
+ ''%Y-%m-%dT%H:%M:%S.%f000Z'' (e.g. ''2018-11-06T08:56:41.000000Z'') to epoch.'
+ isArray: false
+ name: date_value
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes a value from a reference set.
+ execution: false
+ name: qradar-delete-reference-set-value
+ outputs:
+ - contextPath: QRadar.Reference.Name
+ description: The name of the reference set.
+ type: string
+ - contextPath: QRadar.Reference.CreationTime
+ description: The creation time (ISO) of the reference.
+ type: date
+ - contextPath: QRadar.Reference.ElementType
+ description: Reference element type.
+ type: string
+ - contextPath: QRadar.Reference.NumberOfElements
+ description: Number of elements.
+ type: number
+ - contextPath: QRadar.Reference.TimeoutType
+ description: 'Reference timeout type. One of: UNKNOWN, FIRST_SEEN, LAST_SEEN'
+ type: string
+ - arguments:
+ - default: false
+ description: 'Use this parameter to specify which fields you would like to get
+ back in the response; fields that are not named are excluded. Specify subfields
+ in brackets, and separate multiple fields in the same object with commas. The
+ filter uses QRadar''s field names. For reference, see: https://www.ibm.com/support/knowledgecenter/SSKMKU/com.ibm.qradar.doc_cloud/9.1--siem-offenses-offense_id-notes-note_id-GET.html'
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: Range of results to return (e.g., 0-20)
+ isArray: false
+ name: range
+ required: false
+ secret: false
+ - default: false
+ description: Query to filter domains
+ isArray: false
+ name: filter
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves all domains.
+ execution: false
+ name: qradar-get-domains
+ outputs:
+ - contextPath: QRadar.Domains.AssetScannerIDs
+ description: Array of Asset Scanner IDs.
+ type: Number
+ - contextPath: QRadar.Domains.CustomProperties
+ description: Custom properties of the domain.
+ type: String
+ - contextPath: QRadar.Domains.Deleted
+ description: Indicates if the domain is deleted.
+ type: Boolean
+ - contextPath: QRadar.Domains.Description
+ description: Description of the domain.
+ type: String
+ - contextPath: QRadar.Domains.EventCollectorIDs
+ description: Array of Event Collector IDs.
+ type: Number
+ - contextPath: QRadar.Domains.FlowCollectorIDs
+ description: Array of Flow Collector IDs.
+ type: Number
+ - contextPath: QRadar.Domains.FlowSourceIDs
+ description: Array of Flow Source IDs.
+ type: Number
+ - contextPath: QRadar.Domains.ID
+ description: ID of the domain.
+ type: Number
+ - contextPath: QRadar.Domains.LogSourceGroupIDs
+ description: Array of Log Source Group IDs.
+ type: Number
+ - contextPath: QRadar.Domains.LogSourceIDs
+ description: Array of Log Source IDs.
+ type: Number
+ - contextPath: QRadar.Domains.Name
+ description: Name of the Domain.
+ type: String
+ - contextPath: QRadar.Domains.QVMScannerIDs
+ description: Array of QVM Scanner IDs.
+ type: Number
+ - contextPath: QRadar.Domains.TenantID
+ description: ID of the Domain tenant.
+ type: Number
+ - arguments:
+ - default: false
+ description: ID of the domain
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ description: 'Use this parameter to specify which fields you would like to get
+ back in the response; fields that are not named are excluded. Specify subfields
+ in brackets, and separate multiple fields in the same object with commas. The
+ filter uses QRadar''s field names. For reference, see: https://www.ibm.com/support/knowledgecenter/SSKMKU/com.ibm.qradar.doc_cloud/9.1--siem-offenses-offense_id-notes-note_id-GET.html'
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves domain information by ID.
+ execution: false
+ name: qradar-get-domain-by-id
+ outputs:
+ - contextPath: QRadar.Domains.AssetScannerIDs
+ description: Array of Asset Scanner IDs.
+ type: Number
+ - contextPath: QRadar.Domains.CustomProperties
+ description: Custom properties of the domain.
+ type: String
+ - contextPath: QRadar.Domains.Deleted
+ description: Indicates if the domain is deleted.
+ type: Boolean
+ - contextPath: QRadar.Domains.Description
+ description: Description of the domain.
+ type: String
+ - contextPath: QRadar.Domains.EventCollectorIDs
+ description: Array of Event Collector IDs.
+ type: Number
+ - contextPath: QRadar.Domains.FlowCollectorIDs
+ description: Array of Flow Collector IDs.
+ type: Number
+ - contextPath: QRadar.Domains.FlowSourceIDs
+ description: Array of Flow Source IDs.
+ type: Number
+ - contextPath: QRadar.Domains.ID
+ description: ID of the domain.
+ type: Number
+ - contextPath: QRadar.Domains.LogSourceGroupIDs
+ description: Array of Log Source Group IDs.
+ type: Number
+ - contextPath: QRadar.Domains.LogSourceIDs
+ description: Array of Log Source IDs.
+ type: Number
+ - contextPath: QRadar.Domains.Name
+ description: Name of the Domain.
+ type: String
+ - contextPath: QRadar.Domains.QVMScannerIDs
+ description: Array of QVM Scanner IDs.
+ type: Number
+ - contextPath: QRadar.Domains.TenantID
+ description: ID of the Domain tenant.
+ type: Number
+ isfetch: true
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAHMAAAAfCAYAAADUdfLHAAAHgElEQVR4Ae3YA6xlWRaA4VW2bdu2bVdbZbRt27a7y7Zt27ZtvFrzJ1mT7Nnpvnicmb6VfPXeuzzJn3P2OltC+bd0z8eJUB4P4RcswVGcwXGsxCA8hRpIComIX/4DvsRohUmIgoZoIW5HCkhEwscsgjHQGFiIqpCIhIvZBsegnrOYg0/wKHriQbyLKTgB9VxCb0hE/MfsBfUcwXPICDFJkB6pICY5emAn1PMGJCL+YnaGen51IubBs1iPS1DcxHGMQUskNm/76yyehMS9SMzSOAU1t/AYBCnwnj2mOIc1mIl52A81W1EPgs64BDU30BQStyIxx0IdT0CQA8uhGIYq3rSbCIIi+AO3vPd3ch5TrEQySNyIxOwCdXwPQWZsxToUg6AAvsMOnMERzMM9EFTGASieguBpqOMRSNyIxJwKNXuQDYJBmI8UELwIxXnMxB9YgstQbEJRFMJRKOpBsAhqNiI5JHZFYlaFOl6CoCW2IysE7yEKPf0QSOGEPm4xW0IxE4JuUEdDSPRFvDSgW2bkRSqI2JCj5jQKQDAYAyDoiGOoCEEZe349FqEXBO2hWADBcCdcImyDmm8h0RYJ2RYd0Bh3orTYpVLNdAjyYS7SQDADfSGoj5V4Cb3sOcVvELwFRWs0h+IjCL6EmqUQIy/07ZKIA6qPtmgdQBs0Q05IPMuL9miNdsgCSQDVLWQztEBLdBVnUlV8DEFLDIOgPOYhERJjEKpCHH9A0R0FnCEqGxSzIOgDNfuX7fskLQTyYr+uyTigTdAQncMolIbEkx5QR2NIArgHRdAfr6M56oq3bfckBD3wKQSdLIygFt6GeGpCMQiCPViCJDiEzRC0hpqzy/Z+kgMCeaFPFydmWI6iAiQePPBfErMD6qAp6qIV2oi3UTAAgr74AILbnICN8DDEUwSKcRBsxCokwi7sgKAB1FwgYj4I5HkvptmAJVjm2A31zP+HxUyD+9AM+dETzcTuE9U8DUFXfAtBIydsPrwM8bSE4gVkgWI00uIqlkHQDmrOcHnNDoGwZvoxb6EixJMUz0Ad11HqnxLTpEZL3IcK/55mF0PN5xBUwTAIstjjiSG4G2UgjjKYhGwYCMXDKA/FHxAMgJq9REzjrZlBY5rk2AJ1NII4kqAWHsfn+Alf4RV0RnqIz9TGi/gMb6EdBPcFiZkSzfE8vsKP+BzPoZk9L45KaGbKQZABd+ML9ICExGL+DDWzIUiCH5ENgifQGoJkRowf9SJOISNehqI3BN9BzQKICSumWQx11IGYPBgHDWArakAcafADFL4ReDJAzCpYAQ1gLgpAzCyoGYp8WAM1U8KNOdBdx1AcgsfRF4LceC2EXZuyOIiByIGz9nc6sw9qPvuPmP3/MmYFCHwZsR9qziCPE2SZ9zlbsQC7oI69yAwxw6Dee68gCoqT9pgfsyAOQ80NrMNC93Ez2Y9ppmKe/9pwY5bDDah5B4Ls+BwpIciCFJAg0iA1JnoT8n1QRw3vJvivYrZDXhQ1RVAJg6COX7yxXR3vQEwiDIY6GkLQDerYg07Ii6qYCTV+zB+hjnsgJgOWQM1VFIMfMwqKG9iIlfg8rJj8JzasqDmCAhC0Qh9ImArhNGZAkBproWYVEgeKaa7hiueWdwYMQlqI6YllmIvJyAlx3A51tIJghve9leEPHBv+Zp3+DIsxH0OQCuL40L/i+DHNKbSGmEQIO2YrqGMYxJRCEkiYiiA3BG9DHQ9AgsYMbjcqQYJIheyojgVQRzMk8S6HMyDwvRLiNJsIaZAbzbEtxJi9Ib6wYlrQ36GOtyCxoBfUMQ8CP2ao95lHoI5LqAF/2u2CP7EUe3ER6rMg2XAZal6AwNcoQMwM6ItRWIWDuAo1wWJeRpHYilkA+6H++hkD/REFNedRLUjMYNNsGvwKdYyHmNxYAfXcwC7MgR8kJ65CzRMQ+Or+TcwaOAD1XMN6rAsh5kGki5WYFrQJrkIdU1AAEobM+BXq6QGJSUxT1DuT9iI1BEOhju9RCwUgaAt1NEUanIGaHyHw3Q11NIBgjbeB8TIqIw8Eb4YQcx/SxFpMC9oRV6COc/gKJb3BxZcfr+Mo1HEd90NiKab/2nPIijQ4GmTt6wd1tIB4Z89+ZIR4hkAdtZEH16DmDYjn6/iPaWxDfTvUcw3rMQTv4xW8g9+xApeg8I1HVwv6AHrg3uX7PkkPAdt5YW8azIaaK8iP9N4ZNiHI+9xp9l2o41sk9ja3/TWwFgojCmqegTiyYHvCxDR2n/kNoqAxdAvqucoGezF/oz2MmD9CTRTKQrDeW7M+QifcbXHV0xaCnDgEdazEdxiPS1BPPaTGcag5jefREX2wAuqpGC8xfTaw/IBz0Njix3wh/JjPQh13QHAnNICd3hnWA2Lae2uxb6wXtREE70B9Jgr+cNQIfswDSBtXMf2oedAd72EU5mFZNK3EAiLmdWIm4Xt+xgLMwcwgo3pdLMIsLMVzEHO7PXcCZ3AcK/AcimA05mIpHoI4qmMEDuAMTmEN+qI4pmMOFqISBEnxONbhFM7gMObhXtTHPMzGErSF4ENvsyElJLr+BYNjupf20vh5AAAAAElFTkSuQmCC
+tests:
+- test_Qradar
diff --git a/Integrations/QRadar/QRadar_description.md b/Integrations/QRadar/QRadar_description.md
new file mode 100644
index 000000000000..6161793962f5
--- /dev/null
+++ b/Integrations/QRadar/QRadar_description.md
@@ -0,0 +1,12 @@
+# Fetch incidents:
+You can apply additional (optional) filters to the fetch-incident query using the `Query to fetch offenses` integration parameter.
+For more information about how to use the filter syntax, see the [QRadar filter documentation](https://www.ibm.com/support/knowledgecenter/en/SS42VS_7.3.1/com.ibm.qradar.doc/c_rest_api_filtering.html) and [QRadar offense documentation](https://www.ibm.com/support/knowledgecenter/SSKMKU/com.ibm.qradar.doc_cloud/9.1--siem-offenses-offense_id-GET.html).
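+
+For example, a filter such as `status = "OPEN" and magnitude > 3` would fetch only open offenses above a given magnitude (a hypothetical illustration; check the linked documentation for the exact field names and operators).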
+
+# Required Permissions:
+* Assets - Vulnerability Management *or* Assets
+* Domains - Admin
+* Offenses (Manage Closing Reason) - Manage Offense Closing Reasons
+* Offenses (Assign Offenses to Users) - Assign Offenses to Users
+* Offenses (Read) - Offenses
+* References (Create/Update) - Admin
+* References (Read) - View Reference Data
diff --git a/Integrations/QRadar/Qradar_image.png b/Integrations/QRadar/Qradar_image.png
new file mode 100644
index 000000000000..5c55fb07e5ad
Binary files /dev/null and b/Integrations/QRadar/Qradar_image.png differ
diff --git a/Integrations/RSANetWitness_v11_1/CHANGELOG.md b/Integrations/RSANetWitness_v11_1/CHANGELOG.md
new file mode 100644
index 000000000000..3f0f8aaedc54
--- /dev/null
+++ b/Integrations/RSANetWitness_v11_1/CHANGELOG.md
@@ -0,0 +1,7 @@
+## [Unreleased]
+
+
+## [19.8.2] - 2019-08-22
+ - Added the *fetch time* parameter
+ - Improved error reporting for fetch incidents.
+ - Fixed an issue in fetch incidents for cases in which an unsupported timestamp format was received.
\ No newline at end of file
diff --git a/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1.py b/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1.py
new file mode 100644
index 000000000000..e095d14f5fa5
--- /dev/null
+++ b/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1.py
@@ -0,0 +1,995 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+"""
+
+IMPORTS
+
+"""
+from datetime import datetime, timedelta
+
+import requests
+import json
+import re
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+"""
+
+HELPERS
+
+"""
+
+
+def dict_list_to_str(dict_list):
+ """
+
+ parses a list of dictionaries into a string representation
+
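+ Example: [{'message': 'bad request'}] -> 'message: bad request'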
+ """
+ if not dict_list:
+ return ''
+ string_list = []
+ for dict_item in dict_list:  # avoid shadowing the builtin 'dict'
+ key_values = ["{}: {}".format(k, v) for k, v in dict_item.items()]
+ string_list.append(', '.join(key_values))
+ return '\n'.join(string_list)
+
+
+"""
+
+AUTHENTICATION
+
+"""
+
+
+def get_token_request(username, password):
+ """
+
+ returns a token on successful get_token request
+
+ raises an exception on:
+
+ - http request failure
+ - response status code different from 200
+ - response body does not contain valid json (ValueError)
+
+ """
+ username_password = "username={}&password={}".format(username, password)
+ url = '{}/auth/userpass'.format(BASE_PATH)
+ get_token_headers = {
+ 'Content-Type': 'application/x-www-form-urlencoded;charset=ISO-8859-1',
+ 'Accept': 'application/json; charset=UTF-8',
+ 'NetWitness-Version': VERSION
+ }
+
+ response = requests.post(url, headers=get_token_headers, data=username_password, verify=USE_SSL)
+
+ # successful get_token
+ if response.status_code == 200:
+ return response.json()
+ # bad request - NetWitness returns a common json structure for errors
+ error_lst = response.json().get('errors')
+ raise ValueError('get_token failed with status: {}\n{}'.format(response.status_code, dict_list_to_str(error_lst)))
+
+
+def get_token():
+ """
+
+ returns a token to be used in future requests to NetWitness server
+
+ raises an exception on:
+
+ - unexpected response from the server
+
+ """
+ LOG('Attempting to get token')
+ response_body = get_token_request(
+ USERNAME,
+ PASSWORD
+ )
+ LOG('Token received')
+ token = response_body.get('accessToken')
+ if not token:
+ raise ValueError('Failed to get token (unexpected response)')
+ return token
+
+
+"""
+
+GLOBAL VARS
+
+"""
+SERVER_URL = demisto.params()['server']
+BASE_PATH = '{}/rest/api'.format(SERVER_URL)
+USERNAME = demisto.params()['credentials']['identifier']
+PASSWORD = demisto.params()['credentials']['password']
+USE_SSL = not demisto.params()['insecure']
+VERSION = demisto.params()['version']
+IS_FETCH = demisto.params()['isFetch']
+FETCH_TIME = demisto.params().get('fetch_time', '1 days')
+TOKEN = None
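+# TOKEN is expected to be set via get_token() before the first request; http_request() renews it on timeout (HTTP 408)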
+DEFAULT_HEADERS = {
+ 'Content-Type': 'application/json;charset=UTF-8',
+ 'Accept': 'application/json; charset=UTF-8',
+ 'NetWitness-Version': VERSION
+}
+
+"""
+
+COMMAND HANDLERS
+
+"""
+
+
+def http_request(method, url, body=None, headers=None, url_params=None):
+ """
+ returns the http response body
+
+ uses the TOKEN global var when sending requests to the RSA NetWitness server (this enables reusing a token
+ across multiple requests and avoids unnecessary creation of new tokens)
+ catches and handles token expiration: in case of a 'request timeout' (HTTP 408) the token is renewed and the
+ request is resent once more.
+
+ """
+
+ if headers is None:
+ headers = {}
+ global TOKEN
+
+ # add token to headers
+ headers['NetWitness-Token'] = TOKEN
+
+ request_kwargs = {
+ 'headers': headers,
+ 'verify': USE_SSL
+ }
+
+ # add optional arguments if specified
+ if body is not None:
+ request_kwargs['data'] = body
+ if url_params is not None:
+ request_kwargs['params'] = url_params
+
+ LOG('Attempting {} request to {}\nWith params:{}\nWith body:\n{}'.format(method, url,
+ json.dumps(url_params, indent=4),
+ json.dumps(body, indent=4)))
+ response = requests.request(
+ method,
+ url,
+ **request_kwargs
+ )
+ # handle timeout (token expired): renew token and try again
+ if response.status_code == 408:
+ LOG('Timeout detected - renewing token')
+ TOKEN = get_token()
+ headers['NetWitness-Token'] = TOKEN
+ response = requests.request(
+ method,
+ url,
+ **request_kwargs
+ )
+ # successful request
+ if response.status_code == 200:
+ try:
+ return response.json()
+ except Exception as e:
+ LOG(str(e))
+ return None
+ # bad request - NetWitness returns a common json structure for errors; a list of error objects
+ error_lst = response.json().get('errors')
+ raise ValueError('Request failed with status: {}\n{}'.format(response.status_code, dict_list_to_str(error_lst)))
+
+
+def get_incident_request(incident_id):
+ """
+
+ returns the response body
+
+ raises an exception on:
+
+ - http request failure
+ - response status code different from 200
+ - response body does not contain valid json (ValueError)
+
+ """
+ url = '{}/incidents/{}'.format(BASE_PATH, incident_id)
+
+ response = http_request(
+ 'GET',
+ url,
+ headers=DEFAULT_HEADERS
+ )
+ return response
+
+
+def get_incident():
+ """
+
+ returns the incident's main attributes to the war room
+
+ raises an exception on:
+ - missing arguments
+ """
+ args = demisto.args()
+ incident_id = args.get('incidentId')
+ LOG('Requesting information on incident ' + incident_id)
+ # call get_incident_request(), given user arguments
+ # returns the response body on success
+ # raises an exception on failed request
+ incident = get_incident_request(
+ incident_id
+ )
+
+ md_content = create_incident_md_table(incident)
+ md_title = "## NetWitness Get Incident {}".format(incident_id)
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': incident,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': '\n'.join([md_title, md_content]),
+ 'EntryContext': {
+ "NetWitness.Incidents(obj.id==val.id)": incident
+ }
+ }
+ demisto.results(entry)
+
+
+def get_incidents_request(since=None, until=None, page_number=None, page_size=10):
+ """
+
+ returns the response body
+
+ arguments:
+ - keywords: url params
+
+ raises an exception on:
+
+ - http request failure
+ - response status code different from 200
+ - response body does not contain valid json (ValueError)
+
+ """
+
+ url_params = {
+ 'since': since,
+ 'until': until,
+ 'pageNumber': page_number,
+ 'pageSize': page_size
+ }
+ url = '{}/incidents'.format(BASE_PATH)
+
+ response = http_request(
+ 'GET',
+ url,
+ headers=DEFAULT_HEADERS,
+ url_params=url_params
+ )
+ return response
+
+
+def get_all_incidents(since=None, until=None, limit=None):
+ """
+
+ returns all/up to limit incidents in a time window
+
+ """
+
+ # if limit is None, set to infinity
+ if not limit:
+ limit = float('inf')
+ has_next = True
+ page_number = 0
+ incidents = [] # type: list
+ LOG('Requesting incidents in time frame: {s} - {u}'.format(s=since or 'not specified',
+ u=until or 'not specified'))
+ while has_next and limit > len(incidents):
+ # call get_incidents_request(), given user arguments
+ # returns the response body on success
+ # raises an exception on failed request
+ LOG('Requesting page {}'.format(page_number))
+ response_body = get_incidents_request(
+ since=since,
+ until=until,
+ page_number=page_number
+ )
+ incidents.extend(response_body.get('items'))
+ has_next = response_body.get('hasNext')
+ page_number += 1
+
+ # if the incidents list is larger than the limit - keep only the first 'limit' incidents
+ if len(incidents) > limit:
+ incidents[limit:] = []
+
+ return incidents
+
+
+def get_incidents():
+ """
+
+ returns a list of incidents in a specific time window to the war room (main attributes only)
+
+ raises an exception on:
+ - missing arguments
+ """
+ args = demisto.args()
+
+ # validate one of the following was passed - until, since
+ if not any([args.get('since'), args.get('until'), args.get('lastDays')]):
+ raise ValueError(
+ "Please provide one or both of the following parameters: since, until. Alternatively, use lastDays")
+
+ num_of_days = args.get('lastDays')
+ if num_of_days:
+ since = datetime.now() - timedelta(days=int(num_of_days))
+ # convert to ISO 8601 format and add Z suffix
+ timestamp = since.isoformat() + 'Z'
+ args['since'] = timestamp
+ args['until'] = None
+
+ limit = args.get('limit')
+ # parse limit argument to int
+ if limit:
+ limit = int(limit)
+
+ incidents = get_all_incidents(
+ since=args.get('since'),
+ until=args.get('until'),
+ limit=limit
+ )
+
+ md_content = create_incidents_list_md_table(incidents)
+ md_title = "## NetWitness Get Incidents"
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': incidents,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': '\n'.join([md_title, md_content]),
+ 'EntryContext': {
+ "NetWitness.Incidents(obj.id==val.id)": incidents
+ }
+ }
+ demisto.results(entry)
+
+
+def update_incident_request(incident_id, assignee=None, status=None):
+ """
+ returns the response body
+
+ arguments:
+ - keywords: url params
+
+ raises an exception on:
+
+ - http request failure
+ - response status code different from 200
+ - response body does not contain valid json (ValueError)
+
+ """
+ LOG('Requesting to update incident ' + incident_id)
+
+ body = {
+ 'assignee': assignee,
+ 'status': status
+ }
+ url = '{}/incidents/{}'.format(BASE_PATH, incident_id)
+ response = http_request(
+ 'PATCH',
+ url,
+ headers=DEFAULT_HEADERS,
+ body=json.dumps(body)
+ )
+ return response
+
+
+def update_incident():
+ """
+
+ returns the updated incident main attributes
+
+ raises an exception on:
+ - missing arguments
+ """
+
+ args = demisto.args()
+
+ # validate at least one of the following was passed: status, assignee.
+ if not any([args.get('status'), args.get('assignee')]):
+ raise ValueError("Please provide one or both of the following parameters: status, assignee.")
+
+ # call update_incident_request(), given user arguments
+ # returns the response body on success
+ # raises an exception on failed request
+ incident = update_incident_request(
+ args.get('incidentId'),
+ status=args.get('status'),
+ assignee=args.get('assignee')
+ )
+
+ md_content = create_incident_md_table(incident)
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': incident,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': "## NetWitness Update Incident\n" + md_content,
+ 'EntryContext': {
+ "NetWitness.Incidents(obj.id==val.id)": incident
+ }
+ }
+ demisto.results(entry)
+
+
+def delete_incident_request(incident_id):
+ """
+ returns the response body
+
+ arguments:
+ - incident_id: the id of the incident to delete
+
+ raises an exception on:
+
+ - http request failure
+ - response status code different from 204
+
+ """
+ LOG('Requesting to delete incident ' + incident_id)
+ url = '{}/incidents/{}'.format(BASE_PATH, incident_id)
+ response = http_request(
+ 'DELETE',
+ url,
+ headers=DEFAULT_HEADERS
+ )
+ return response
+
+
+def delete_incident():
+ """
+
+ returns a success message to the war room
+
+ """
+
+ args = demisto.args()
+ incident_id = args.get('incidentId')
+
+ # call delete_incident_request() function
+ # no return value on successful request
+ # raises an exception on failed request
+ delete_incident_request(
+ incident_id
+ )
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': 'Incident {} deleted successfully'.format(incident_id),
+ 'ContentsFormat': formats['text']
+ }
+ demisto.results(entry)
+
+
+def get_alerts_request(incident_id, page_number=None, page_size=None):
+ """
+ returns the response body
+
+ arguments:
+ - incident_id: the id of the incident
+
+ raises an exception on:
+
+ - http request failure
+ - response status code different from 200
+
+ """
+
+ url = '{}/incidents/{}/alerts'.format(BASE_PATH, incident_id)
+ url_params = {
+ 'pageNumber': page_number,
+ 'pageSize': page_size
+ }
+
+ response = http_request(
+ 'GET',
+ url,
+ headers=DEFAULT_HEADERS,
+ url_params=url_params
+ )
+ return response
+
+
+def get_all_alerts(incident_id):
+ """
+ returns the alerts that are associated with an incident
+
+ """
+ has_next = True
+ page_number = 0
+ alerts = [] # type: list
+
+ LOG('Requesting alerts related to incident ' + incident_id)
+ while has_next:
+ # call get_alerts_request(), given user arguments
+ # returns the response body on success
+ # raises an exception on failed request
+ LOG('Requesting page {}'.format(page_number))
+ response_body = get_alerts_request(
+ incident_id,
+ page_number=page_number
+ )
+ alerts.extend(response_body.get('items'))
+ has_next = response_body.get('hasNext')
+ page_number += 1
+
+ return alerts
+
+
+def get_alerts():
+ """
+ returns all alerts associated with an incident to the war room
+
+ """
+ args = demisto.args()
+ incident_id = args.get('incidentId')
+ alerts = get_all_alerts(
+ incident_id
+ )
+
+ alerts_parsed = []
+ for alert in alerts:
+ # add incident id for each alert
+ alert['incidentId'] = incident_id
+
+ # parse each alert to markdown representation, to display in the war room
+ parsed_alert = parse_alert_to_md_representation(alert)
+ alerts_parsed.append(parsed_alert)
+
+ md_content = '\n'.join(alerts_parsed)
+ title = '## Incident {} Alerts'.format(incident_id)
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': alerts,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': '\n'.join([title, md_content]),
+ 'EntryContext': {
+ "NetWitness.Alerts(obj.id==val.id)": alerts
+ }
+ }
+ demisto.results(entry)
+
+
+def get_timestamp(timestamp):
+ """Gets a timestamp and parse it
+
+ Args:
+ timestamp (str): timestamp
+
+ Returns:
+ datetime
+
+ Examples:
+ ("2019-08-13T09:56:02.000000Z", "2019-08-13T09:56:02.440")
+
+ """
+ new_timestamp = timestamp
+ iso_format = "%Y-%m-%dT%H:%M:%S.%fZ"
+ if not new_timestamp.endswith('Z'):  # add a 'Z' suffix if it is missing
+ new_timestamp += 'Z'
+ timestamp_min_four_position = new_timestamp[-4]
+ if timestamp_min_four_position == ':':  # if it contains no milliseconds
+ new_timestamp = new_timestamp[:-1] + '.00000Z'
+ elif timestamp_min_four_position == '.':  # if it contains only 3 millisecond digits
+ new_timestamp = new_timestamp[:-1] + '000Z'
+ try:
+ return datetime.strptime(new_timestamp, iso_format)
+ except ValueError:
+ raise ValueError("Could not parse timestamp [{}]".format(timestamp))
+
+
+def fetch_incidents():
+ """
+ fetch is limited to 100 results
+ """
+ last_run = demisto.getLastRun()
+
+ # if a last-run timestamp was recorded - use it; otherwise generate one based on the fetch time parameter
+ if last_run and last_run.get('timestamp'):
+ timestamp = last_run.get('timestamp')
+ else:
+ last_fetch, _ = parse_date_range(FETCH_TIME)
+ # convert to ISO 8601 format and add Z suffix
+ timestamp = last_fetch.isoformat() + 'Z'
+
+ LOG('Fetching incidents since {}'.format(timestamp))
+ netwitness_incidents = get_all_incidents(
+ since=timestamp,
+ limit=100
+ )
+
+ demisto_incidents = []
+ iso_format = "%Y-%m-%dT%H:%M:%S.%fZ"
+
+ last_incident_datetime = get_timestamp(timestamp)
+ last_incident_timestamp = timestamp
+
+ # set boolean flag for fetching alerts per incident
+ import_alerts = demisto.params().get('importAlerts')
+
+ for incident in netwitness_incidents:
+ incident_timestamp = incident.get('created')
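+ # an incident created exactly at the saved timestamp was already fetched in the previous run - skip it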
+ if incident_timestamp == timestamp:
+ continue
+
+ # parse timestamp to datetime format to be able to compare with last_incident_datetime
+ incident_datetime = datetime.strptime(incident_timestamp, iso_format)
+ if incident_datetime > last_incident_datetime:
+ # update last_incident_datetime
+ last_incident_datetime = incident_datetime
+ last_incident_timestamp = incident_timestamp
+
+ # add to incident object an array of all related alerts
+ if import_alerts:
+ try:
+ incident['alerts'] = get_all_alerts(incident.get('id'))
+ except ValueError:
+ LOG('Failed to fetch alerts related to incident ' + incident.get('id'))
+ demisto_incidents.append(parse_incident(incident))
+
+ demisto.incidents(demisto_incidents)
+ demisto.setLastRun({'timestamp': last_incident_timestamp})
+
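+# Illustrative last-run checkpoint stored above, e.g.
+#   {'timestamp': '2019-01-14T17:19:16.029Z'}
+# The next fetch_incidents call resumes from this value.
+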
+
+def parse_incident(netwitness_incident):
+ incident_fields = [
+ 'id',
+ 'title',
+ 'summary',
+ 'riskScore',
+ 'status',
+ 'alertCount',
+ 'created',
+ 'lastUpdated',
+ 'assignee',
+ 'sources',
+ 'categories'
+ ]
+ incident_labels = [{'type': field, 'value': json.dumps(netwitness_incident.get(field))} for field in
+ incident_fields]
+ alerts = netwitness_incident.get('alerts')
+ if alerts:
+ alerts_ids = [alert.get('id') for alert in alerts]
+ incident_labels.append({'type': 'alerts ids', 'value': ', '.join(alerts_ids)})
+ incident = {
+ 'name': netwitness_incident.get('title'),
+ 'occurred': netwitness_incident.get('created'),
+ 'severity': priority_to_severity(netwitness_incident.get('priority')),
+ 'labels': incident_labels,
+ 'rawJSON': json.dumps(netwitness_incident)
+ }
+ return incident
+
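+# Illustrative mapping performed by parse_incident (severity via
+# priority_to_severity, defined below):
+#   parse_incident({'id': 'INC-25', 'title': 'Test', 'priority': 'Critical', ...})
+#   -> {'name': 'Test', 'occurred': ..., 'severity': 4, 'labels': [...], 'rawJSON': ...}
+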
+
+"""
+
+ADDITIONAL FUNCTIONS
+
+"""
+
+
+def create_incident_md_table(incident):
+ # list of fields to be presented in 'incident details' md table, by order of appearance
+ incident_entry_fields = [
+ 'id',
+ 'title',
+ 'summary',
+ 'riskScore',
+ 'status',
+ 'alertCount',
+ 'created',
+ 'lastUpdated',
+ 'assignee',
+ 'sources',
+ 'categories'
+ ]
+
+ # list of fields to be presented in 'journal' md table, by order of appearance
+ journal_entry_fields = [
+ 'created',
+ 'author',
+ 'notes',
+ 'milestone'
+ ]
+
+ # create incident entry
+ incident_entry = {k: v for k, v in incident.items() if k in incident_entry_fields}
+
+ # if category field exists and not empty - update incident entry 'category' field with a
+ # short string representation of the categories-list as value
+ categories = incident.get('categories')
+ if categories:
+ incident_entry['categories'] = ', '.join(
+ ["{}:{}".format(category['parent'], category['name']) for category in categories])
+ else:
+ incident_entry['categories'] = ''
+
+ # if source fields exists and not empty - update incident entry 'source' field with a short string
+ # representation of the source-list as value
+ source_list = incident.get('sources')
+ if source_list:
+ incident_entry['sources'] = ', '.join(source_list)
+ else:
+ incident_entry['sources'] = ''
+
+ incident_table = tableToMarkdown(
+ 'Incident Details',
+ incident_entry,
+ headers=incident_entry_fields,
+ headerTransform=header_transformer
+ )
+
+ # if journalEntries field exists and not empty - create journal entry
+ journal = incident.get('journalEntries')
+ journal_table = ''
+ if journal:
+        journal_entry = [{k: v for k, v in entry.items() if k in journal_entry_fields} for entry in journal]
+ journal_table = tableToMarkdown(
+ 'Incident Journal',
+ journal_entry,
+ headers=journal_entry_fields,
+ headerTransform=header_transformer
+ )
+
+ md_content = '\n'.join([incident_table, journal_table])
+ return md_content
+
+
+def create_incidents_list_md_table(incidents):
+ # list of fields to be presented in 'incident details' md table, by order of appearance
+ incident_entry_fields = [
+ 'id',
+ 'title',
+ 'summary',
+ 'riskScore',
+ 'status',
+ 'alertCount',
+ 'created',
+ 'lastUpdated',
+ 'assignee',
+ 'sources',
+ 'categories'
+ ]
+
+ incidents_list = []
+ for incident in incidents:
+ # create incident entry to hold the fields to be presented in the md table
+ incident_entry = {k: v for k, v in incident.items() if k in incident_entry_fields}
+ # if category field exists and not empty - update incident entry 'category' field with a
+ # short string representation of the categories-list as value
+ categories = incident.get('categories')
+ if categories:
+ incident_entry['categories'] = ', '.join(
+ ["{}:{}".format(category['parent'], category['name']) for category in categories])
+ else:
+ incident_entry['categories'] = ''
+
+ # if source fields exists and not empty - update incident entry 'source' field with a
+ # short string representation of the source-list as value
+ source_list = incident.get('sources')
+ if source_list:
+ incident_entry['sources'] = ', '.join(source_list)
+ else:
+ incident_entry['sources'] = ''
+
+ incidents_list.append(incident_entry)
+
+ incident_table = tableToMarkdown(
+ 'Incident Details',
+ incidents_list,
+ headers=incident_entry_fields,
+ headerTransform=header_transformer
+ )
+
+ return incident_table
+
+
+def parse_alert_to_md_representation(alert):
+ # list of fields to be presented in 'alert details' md table, by order of appearance
+ alert_entry_fields = [
+ 'id',
+ 'title',
+ 'detail',
+ 'created',
+ 'source',
+ 'riskScore',
+ 'type'
+ ]
+
+ alert_entry = {k: v for k, v in alert.items() if k in alert_entry_fields}
+ alert_events = alert.get('events', [])
+
+ # add 'total events' to alert entry
+ alert_entry['totalEvents'] = len(alert_events)
+ alert_entry_fields.append('totalEvents')
+
+ alert_md_table = tableToMarkdown(
+ 'Alert Details',
+ alert_entry,
+ headers=alert_entry_fields,
+ headerTransform=header_transformer
+ )
+
+ events = []
+ for event in alert_events:
+ events.append(parse_event_to_md_representation(event))
+
+ events_md = '\n'.join(events)
+ md_content = '\n'.join([alert_md_table, events_md])
+ return md_content
+
+
+def parse_event_to_md_representation(event):
+ event_details = "### Event Details \
+ \n*Domain:* {domain} \
+ \n*Source:* {source} \
+ \n*ID:* {id} \
+ ".format(
+ domain=event.get('domain', ''),
+ source=event.get('eventSource', ''),
+ id=event.get('eventSourceId', '')
+ )
+
+ event_source = event.get('source')
+ event_destination = event.get('destination')
+
+ def parse_device(device):
+ device_entry = {
+ 'Device IP': device.get('ipAddress'),
+ 'Device Port': device.get('port'),
+ 'Device MAC': device.get('macAddress'),
+ 'DNS Hostname': device.get('dnsHostname'),
+ 'DNS Domain': device.get('dnsDomain')
+ }
+ return device_entry
+
+ def parse_user(user):
+ user_entry = {
+ 'User UserName': user.get('username'),
+ 'User Email': user.get('emailAddress'),
+ 'Active Directory UserName': user.get('adUsername'),
+ 'Active Directory Domain': user.get('adDomain')
+ }
+ return user_entry
+
+ # resource table headers in order of appearance
+ all_headers = [
+ 'Device IP',
+ 'Device Port',
+ 'Device MAC',
+ 'DNS Hostname',
+ 'DNS Domain',
+ 'User UserName',
+ 'User Email',
+ 'Active Directory UserName',
+ 'Active Directory Domain'
+ ]
+
+    def resource_md(resource, resource_type):
+        resource_entry = {}  # type: dict
+        resource = resource or {}  # an event may lack 'source' or 'destination'
+        device = resource.get('device') or {}
+        user = resource.get('user') or {}
+        resource_entry.update(parse_device(device))
+        resource_entry.update(parse_user(user))
+ # reduce headers to fields that hold actual value in resource_entry
+ headers = [field for field in all_headers if resource_entry.get(field)]
+ resource_md = tableToMarkdown(
+ resource_type,
+ resource_entry,
+ headers=headers)
+ return resource_md
+
+ source_md = resource_md(event_source, 'Source')
+ destination_md = resource_md(event_destination, 'Destination')
+
+ md_content = '\n'.join([event_details, source_md, destination_md])
+ return md_content
+
+
+def header_transformer(header):
+ """
+ e.g. input: 'someHeader' output: 'Some Header '
+
+ """
+
+ return re.sub("([a-z])([A-Z])", "\g<1> \g<2>", header).capitalize()
+
+
+def priority_to_severity(priority):
+ """
+    converts a NetWitness priority to a Demisto severity grade
+
+ input:
+ - 'Low'
+ - 'Medium'
+ - 'High'
+ - 'Critical'
+ output:
+ - 0 Unknown
+ - 1 Low
+ - 2 Medium
+ - 3 High
+ - 4 Critical
+ """
+
+ priority_grade_map = {
+ 'Low': 1,
+ 'Medium': 2,
+ 'High': 3,
+ 'Critical': 4
+ }
+
+ grade = priority_grade_map.get(priority, 0)
+ return grade
+
+
+def test_module():
+ if IS_FETCH:
+ parse_date_range(FETCH_TIME)
+
+    since = datetime.now() - timedelta(days=10)
+ timestamp = since.isoformat() + 'Z'
+
+ incidents = get_all_incidents(
+ since=timestamp,
+ until=None,
+ limit=100
+ )
+ if incidents is not None:
+ return 'ok'
+
+
+"""
+
+EXECUTION
+
+"""
+
+
+def main():
+ global TOKEN
+ TOKEN = get_token()
+ command = demisto.command()
+ try:
+ handle_proxy()
+ if command == 'test-module':
+ demisto.results(test_module())
+ elif command == 'fetch-incidents':
+ fetch_incidents()
+ elif command == 'netwitness-get-incident':
+ get_incident()
+ get_alerts()
+ elif command == 'netwitness-get-incidents':
+ get_incidents()
+ elif command == 'netwitness-update-incident':
+ update_incident()
+ elif command == 'netwitness-delete-incident':
+ delete_incident()
+ elif command == 'netwitness-get-alerts':
+ get_alerts()
+ except ValueError as e:
+ if command == 'fetch-incidents': # fetch-incidents supports only raising exceptions
+ LOG(e.message)
+ LOG.print_log()
+ raise
+ return_error(str(e))
+
+
+if __name__ in ('__builtin__', 'builtins'):
+ main()
diff --git a/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1.yml b/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1.yml
new file mode 100644
index 000000000000..d17613364519
--- /dev/null
+++ b/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1.yml
@@ -0,0 +1,623 @@
+category: Analytics & SIEM
+commonfields:
+ id: RSA NetWitness v11.1
+ version: -1
+configuration:
+- display: Server URL (e.g. https://192.168.0.1:30022)
+ name: server
+ required: true
+ type: 0
+- display: Credentials
+ name: credentials
+ required: true
+ type: 9
+- defaultvalue: '1.0'
+ display: API version
+ name: version
+ required: false
+ type: 0
+- defaultvalue: 'false'
+ display: Trust any certificate (unsecure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: 'false'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: 'false'
+  display: On 'Fetch incidents', import all alerts related to the incident
+ name: importAlerts
+ required: false
+ type: 8
+- defaultvalue: 1 days
+  display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days)
+ name: fetch_time
+ required: false
+ type: 0
+description: RSA NetWitness Platform provides log, network, and endpoint visibility
+  for real-time collection, detection, and automated response with the Demisto Enterprise
+  platform. With full session analysis, customers can extract critical data and effectively
+  run automated security-operations playbooks.
+display: RSA NetWitness v11.1
+name: RSA NetWitness v11.1
+script:
+ commands:
+ - arguments:
+ - default: false
+      description: The incident ID.
+ isArray: false
+ name: incidentId
+ required: true
+ secret: false
+ deprecated: false
+    description: Get details of a specific incident, including all alerts related
+      to the incident.
+ execution: false
+ name: netwitness-get-incident
+ outputs:
+ - contextPath: NetWitness.Incidents.id
+ description: The unique identifier of the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.title
+ description: Title of the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.summary
+ description: Summary of the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.priority
+ description: The incident priority.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.riskScore
+      description: Incident risk score, calculated from the risk scores of the associated
+        alerts. Risk score ranges from 0 (no risk) to 100 (highest risk).
+ type: Unknown
+ - contextPath: NetWitness.Incidents.status
+ description: The current status.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.alertCount
+ description: Number of alerts associated with the Incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.averageAlertRiskScore
+ description: Average risk score of the alerts associated with the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.sealed
+ description: Indicates if additional alerts can be associated with an incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.totalRemediationTaskCount
+ description: The number of total remediation tasks for the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.openRemediationTaskCount
+ description: The number of open remediation tasks for the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.created
+ description: The timestamp of when the incident is created.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.lastUpdated
+ description: The timestamp of when the incident was last updated.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.lastUpdatedBy
+ description: The NetWitness user identifier of the user who last updated the
+ incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.assignee
+ description: The NetWitness user identifier of the user currently working on
+ the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.sources
+ description: Unique set of sources for all of the Alerts in the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.ruleId
+ description: The unique identifier of the rule that created the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.firstAlertTime
+ description: The timestamp of the earliest occurring Alert in this incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.categories.id
+ description: The unique category identifier.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.categories.parent
+ description: Parent name of the category.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.categories.name
+ description: Friendly name of the category.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.id
+ description: The unique journal entry identifier.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.author
+ description: The author of this entry.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.notes
+ description: Notes and observations about the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.created
+ description: The timestamp of the journal entry created date.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.lastUpdated
+ description: The timestamp of the journal entry last updated date.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.milestone
+ description: Incident milestone classifier.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.createdBy
+ description: The NetWitness user id or name of the rule that created the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.deletedAlertCount
+      description: The number of alerts that are deleted from the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.eventCount
+ description: Number of events associated with incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.alertMeta.SourceIp
+ description: Unique source IP addresses.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.alertMeta.DestinationIp
+ description: Unique destination IP addresses.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.id
+ description: The unique alert identifier.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.incidentId
+ description: The incident id associated with the alert.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.title
+ description: The title or name of the rule that created the alert.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.detail
+ description: The details of the alert. This can be the module name or meta that
+ the module included.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.created
+ description: The timestamp of the alert created date.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.source
+ description: The source of this alert. For example, "Event Stream Analysis",
+ "Malware Analysis", etc.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.riskScore
+ description: The risk score of this alert, usually in the range 0 - 100.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.type
+ description: Type of alert, "Network", "Log", etc.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.device.ipAddress
+ description: The IP address.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.device.port
+ description: The port.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.device.macAddress
+ description: The ethernet MAC address.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.device.dnsHostname
+ description: The DNS resolved hostname.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.device.dnsDomain
+      description: The top-level domain from the DNS resolved hostname.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.user.username
+ description: The unique username.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.user.emailAddress
+ description: An email address.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.user.adUsername
+ description: An Active Directory (AD) username.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.user.adDomain
+      description: An Active Directory (AD) domain.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.device.ipAddress
+ description: The IP address.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.device.port
+ description: The port.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.device.macAddress
+ description: The ethernet MAC address.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.device.dnsHostname
+ description: The DNS resolved hostname.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.device.dnsDomain
+      description: The top-level domain from the DNS resolved hostname.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.user.username
+ description: The unique username.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.user.emailAddress
+ description: An email address.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.user.adUsername
+ description: An Active Directory (AD) username.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.user.adDomain
+      description: An Active Directory (AD) domain.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: A timestamp in ISO 8601 format (e.g. 2018-01-01T14:00:00.000Z).
+ Use to retrieve incidents created on and after this timestamp.
+ isArray: false
+ name: since
+ required: false
+ secret: false
+ - default: false
+ description: A timestamp in ISO 8601 format (e.g. 2018-01-01T14:00:00.000Z).
+ Use to retrieve incidents created on and before this timestamp.
+ isArray: false
+ name: until
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '200'
+      description: The maximum number of incidents to retrieve. Default is 200.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: Use this to retrieve incidents from the last number of days specified.
+ isArray: false
+ name: lastDays
+ required: false
+ secret: false
+ deprecated: false
+    description: 'Get a list of incidents in a specific time frame. One of the following
+      must be specified: since, until, lastDays.'
+ execution: false
+ name: netwitness-get-incidents
+ outputs:
+ - contextPath: NetWitness.Incidents.id
+ description: The unique identifier of the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.title
+ description: Title of the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.summary
+ description: Summary of the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.priority
+ description: The incident priority.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.riskScore
+      description: Incident risk score, calculated from the risk scores of the associated
+        alerts. Risk score ranges from 0 (no risk) to 100 (highest risk).
+ type: Unknown
+ - contextPath: NetWitness.Incidents.status
+ description: The current status.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.alertCount
+ description: Number of alerts associated with the Incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.averageAlertRiskScore
+ description: Average risk score of the alerts associated with the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.sealed
+ description: Indicates if additional alerts can be associated with an incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.totalRemediationTaskCount
+ description: The number of total remediation tasks for the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.openRemediationTaskCount
+ description: The number of open remediation tasks for the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.created
+ description: The timestamp of when the incident is created.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.lastUpdated
+ description: The timestamp of when the incident was last updated.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.lastUpdatedBy
+ description: The NetWitness user identifier of the user who last updated the
+ incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.assignee
+ description: The NetWitness user identifier of the user currently working on
+ the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.sources
+ description: Unique set of sources for all of the Alerts in the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.ruleId
+ description: The unique identifier of the rule that created the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.firstAlertTime
+ description: The timestamp of the earliest occurring Alert in this incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.categories.id
+ description: The unique category identifier.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.categories.parent
+ description: Parent name of the category.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.categories.name
+ description: Friendly name of the category.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.id
+ description: The unique journal entry identifier.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.author
+ description: The author of this entry.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.notes
+ description: Notes and observations about the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.created
+ description: The timestamp of the journal entry created date.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.lastUpdated
+ description: The timestamp of the journal entry last updated date.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.milestone
+ description: Incident milestone classifier.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.createdBy
+ description: The NetWitness user id or name of the rule that created the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.deletedAlertCount
+      description: The number of alerts that are deleted from the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.eventCount
+ description: Number of events associated with incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.alertMeta.SourceIp
+ description: Unique source IP addresses.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.alertMeta.DestinationIp
+ description: Unique destination IP addresses.
+ type: Unknown
+ - arguments:
+ - default: false
+      description: The incident ID.
+ isArray: false
+ name: incidentId
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: The current status.
+ isArray: false
+ name: status
+ predefined:
+ - New
+ - Assigned
+ - InProgress
+ - RemediationRequested
+ - RemediationComplete
+ - Closed
+ - ClosedFalsePositive
+ required: false
+ secret: false
+ - default: false
+ description: The NetWitness user identifier of the user currently working on
+ the incident.
+ isArray: false
+ name: assignee
+ required: false
+ secret: false
+ deprecated: false
+    description: Update a specific incident. Currently, an incident’s status and
+      assignee may be modified.
+ execution: false
+ name: netwitness-update-incident
+ outputs:
+ - contextPath: NetWitness.Incidents.id
+ description: The unique identifier of the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.title
+ description: Title of the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.summary
+ description: Summary of the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.priority
+ description: The incident priority.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.riskScore
+      description: Incident risk score, calculated from the risk scores of the associated
+        alerts. Risk score ranges from 0 (no risk) to 100 (highest risk).
+ type: Unknown
+ - contextPath: NetWitness.Incidents.status
+ description: The current status.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.alertCount
+ description: Number of alerts associated with the Incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.averageAlertRiskScore
+ description: Average risk score of the alerts associated with the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.sealed
+ description: Indicates if additional alerts can be associated with an incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.totalRemediationTaskCount
+ description: The number of total remediation tasks for the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.openRemediationTaskCount
+ description: The number of open remediation tasks for the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.created
+ description: The timestamp of when the incident is created.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.lastUpdated
+ description: The timestamp of when the incident was last updated.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.lastUpdatedBy
+ description: The NetWitness user identifier of the user who last updated the
+ incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.assignee
+ description: The NetWitness user identifier of the user currently working on
+ the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.sources
+ description: Unique set of sources for all of the Alerts in the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.ruleId
+ description: The unique identifier of the rule that created the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.firstAlertTime
+ description: The timestamp of the earliest occurring Alert in this incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.categories.id
+ description: The unique category identifier.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.categories.parent
+ description: Parent name of the category.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.categories.name
+ description: Friendly name of the category.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.id
+ description: The unique journal entry identifier.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.author
+ description: The author of this entry.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.notes
+ description: Notes and observations about the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.created
+ description: The timestamp of the journal entry created date.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.lastUpdated
+ description: The timestamp of the journal entry last updated date.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.journalEntries.milestone
+ description: Incident milestone classifier.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.createdBy
+ description: The NetWitness user id or name of the rule that created the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.deletedAlertCount
+      description: The number of alerts that are deleted from the incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.eventCount
+ description: Number of events associated with incident.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.alertMeta.SourceIp
+ description: Unique source IP addresses.
+ type: Unknown
+ - contextPath: NetWitness.Incidents.alertMeta.DestinationIp
+ description: Unique destination IP addresses.
+ type: Unknown
+ - arguments:
+ - default: false
+      description: The incident ID.
+ isArray: false
+ name: incidentId
+ required: true
+ secret: false
+ deprecated: false
+    description: Delete a specific incident by its ID.
+ execution: false
+ name: netwitness-delete-incident
+ - arguments:
+ - default: false
+      description: The incident ID.
+ isArray: false
+ name: incidentId
+ required: true
+ secret: false
+ deprecated: false
+ description: Get all the alerts related to a specific incident.
+ execution: false
+ name: netwitness-get-alerts
+ outputs:
+ - contextPath: NetWitness.Alerts.id
+ description: The unique alert identifier.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.incidentId
+ description: The incident id associated with the alert.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.title
+ description: The title or name of the rule that created the alert.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.detail
+ description: The details of the alert. This can be the module name or meta that
+ the module included.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.created
+ description: The timestamp of the alert created date.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.source
+ description: The source of this alert. For example, "Event Stream Analysis",
+ "Malware Analysis", etc.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.riskScore
+ description: The risk score of this alert, usually in the range 0 - 100.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.type
+ description: Type of alert, "Network", "Log", etc.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.device.ipAddress
+ description: The IP address.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.device.port
+ description: The port.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.device.macAddress
+ description: The ethernet MAC address.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.device.dnsHostname
+ description: The DNS resolved hostname.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.device.dnsDomain
+      description: The top-level domain from the DNS resolved hostname.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.user.username
+ description: The unique username.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.user.emailAddress
+ description: An email address.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.user.adUsername
+ description: An Active Directory (AD) username.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.source.user.adDomain
+      description: An Active Directory (AD) domain.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.device.ipAddress
+ description: The IP address.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.device.port
+ description: The port.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.device.macAddress
+ description: The ethernet MAC address.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.device.dnsHostname
+ description: The DNS resolved hostname.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.device.dnsDomain
+      description: The top-level domain from the DNS resolved hostname.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.user.username
+ description: The unique username.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.user.emailAddress
+ description: An email address.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.user.adUsername
+ description: An Active Directory (AD) username.
+ type: Unknown
+ - contextPath: NetWitness.Alerts.events.destination.user.adDomain
+      description: An Active Directory (AD) domain.
+ type: Unknown
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- RSA NetWitness Test
diff --git a/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1_description.md b/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1_image.png b/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1_image.png
new file mode 100644
index 000000000000..d164a7fcf1bc
Binary files /dev/null and b/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1_image.png differ
diff --git a/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1_test.py b/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1_test.py
new file mode 100644
index 000000000000..9e0eabeed6e7
--- /dev/null
+++ b/Integrations/RSANetWitness_v11_1/RSANetWitness_v11_1_test.py
@@ -0,0 +1,92 @@
+import demistomock as demisto
+
+
+def test_get_timestamp(mocker):
+ def mock_demisto():
+ mocked_dict = {
+ 'server': '',
+ 'credentials': {
+ 'identifier': '',
+ 'password': ''
+ },
+ 'insecure': '',
+ 'version': '',
+ 'isFetch': ''
+ }
+ mocker.patch.object(demisto, 'params', return_value=mocked_dict)
+ import RSANetWitness_v11_1
+ mocker.patch.object(RSANetWitness_v11_1, 'get_token', return_value=None)
+
+ mock_demisto()
+ from RSANetWitness_v11_1 import get_timestamp
+ stamps_to_check = {
+ "2019-08-13T09:56:02.000000Z",
+ "2019-08-13T09:56:02.440Z",
+ "2019-08-13T09:56:02Z",
+ "2019-08-13T09:56:02.000000",
+ "2019-08-13T09:56:02.440",
+ "2019-08-13T09:56:02"
+ }
+ expected = "2019-08-13 09:56:02"
+ for timestamp in stamps_to_check:
+ result = str(get_timestamp(timestamp))
+ assert expected in result, "\n\tExpected: {}\n\tResult: {}\n\tInput timestamp: {}" \
+ "".format(expected, result, timestamp)
+
+
+def test_fetch_incidents(mocker):
+ def mock_demisto():
+ mocked_dict = {
+ 'server': '',
+ 'credentials': {
+ 'identifier': '',
+ 'password': ''
+ },
+ 'insecure': '',
+ 'version': '',
+ 'isFetch': ''
+ }
+ mocker.patch.object(demisto, 'params', return_value=mocked_dict)
+ mocker.patch.object(demisto, "getLastRun", return_value={
+ "timestamp": "2018-08-13T09:56:02.000000"
+ })
+ mocker.patch.object(demisto, 'incidents')
+ incidents = [
+ {
+ "eventCount": 1,
+ "alertMeta": {
+ "SourceIp": ["8.8.8.8"],
+ "DestinationIp": ["8.8.4.4"]
+ },
+ "openRemediationTaskCount": 0,
+ "sources": [
+ "NetWitness Investigate"
+ ],
+ "id": "INC-25",
+ "journalEntries": None,
+ "ruleId": None,
+ "created": "2019-01-14T17:19:16.029Z",
+ "priority": "Critical",
+ "sealed": True,
+ "status": "Assigned",
+ "averageAlertRiskScore": 50,
+ "lastUpdated": "2019-01-30T13:50:10.148Z",
+ "lastUpdatedBy": "admin",
+ "alertCount": 1,
+ "createdBy": "admin",
+ "deletedAlertCount": 0,
+ "categories": [],
+ "assignee": None,
+ "title": "Test",
+ "summary": "Test",
+ "firstAlertTime": None,
+ "totalRemediationTaskCount": 0,
+ "riskScore": 50}
+ ]
+ mocker.patch('RSANetWitness_v11_1.get_all_incidents', return_value=incidents)
+
+ mock_demisto()
+ from RSANetWitness_v11_1 import fetch_incidents
+
+ fetch_incidents()
+ assert demisto.incidents.call_count == 1
diff --git a/Integrations/RTIR/RTIR.py b/Integrations/RTIR/RTIR.py
new file mode 100644
index 000000000000..7a3a27a1e22b
--- /dev/null
+++ b/Integrations/RTIR/RTIR.py
@@ -0,0 +1,825 @@
+from CommonServerPython import *
+
+''' IMPORTS '''
+import requests
+import os
+import json
+import re
+import urllib
+
+if not demisto.params()['proxy']:
+    # clear proxy settings; pop() avoids a KeyError when a variable is not set
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+SERVER = demisto.params()['server'][:-1] if demisto.params()['server'].endswith('/') else demisto.params()['server']
+USERNAME = demisto.params()['credentials']['identifier']
+PASSWORD = demisto.params()['credentials']['password']
+BASE_URL = SERVER + '/REST/1.0/'
+USE_SSL = not demisto.params().get('unsecure', False)
+FETCH_PRIORITY = int(demisto.params()['fetch_priority']) - 1
+FETCH_STATUS = demisto.params()['fetch_status']
+FETCH_QUEUE = demisto.params()['fetch_queue']
+CURLY_BRACKETS_REGEX = r'\{(.*?)\}' # Extracts string in curly brackets, e.g. '{string}' -> 'string'
+apostrophe = "'"
+SESSION = requests.session()
+REFERER = demisto.params().get('referer')
+HEADERS = {'Referer': REFERER} if REFERER else {}
+
+''' HELPER FUNCTIONS '''
+
+
+def ticket_to_incident(ticket):
+ incident = {
+ 'name': 'RTIR Ticket ' + str(ticket['ID']),
+ 'rawJSON': json.dumps(ticket),
+ }
+ attachments, attachments_content = get_ticket_attachments(ticket['ID'])
+ if attachments:
+ incident_attachments = []
+ for i in range(len(attachments)):
+ incident_attachments.append({
+ 'path': attachments_content[i]['FileID'],
+ 'name': attachments[i]['Name']
+ })
+
+ incident['attachment'] = incident_attachments # type: ignore
+ return incident
+
+
+def ticket_string_to_id(ticket_string):
+ '''
+ Translates 'ticket/1' to the integer 1
+ '''
+ slash_index = ticket_string.index('/')
+ ticket_id = int(ticket_string[slash_index + 1:])
+ return ticket_id
+
+
+def http_request(method, suffix_url, data=None, files=None, query=None):
+    # Sends the request and returns the raw requests.Response
+
+ url = BASE_URL + suffix_url
+ params = {'user': USERNAME, 'pass': PASSWORD}
+ if query:
+ params.update(query)
+
+ response = SESSION.request(method, url, data=data, params=params, files=files, headers=HEADERS) # type: ignore
+
+ # handle request failure
+ if response.status_code not in {200}:
+ message = parse_error_response(response)
+ return_error('Error in API call with status code {}\n{}'.format(response.status_code, message))
+
+ return response
+
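+# Illustrative call (credentials are always sent as query parameters, per the
+# params dict above; placeholders are hypothetical):
+#   http_request('GET', 'ticket/1/show')
+#   -> GET <BASE_URL>ticket/1/show?user=<USERNAME>&pass=<PASSWORD>
+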
+
+def parse_error_response(response):
+ try:
+ res = response.json()
+ msg = res.get('message')
+ if res.get('details') and res.get('details')[0].get('message'):
+ msg = msg + "\n" + json.dumps(res.get('details')[0])
+ except Exception:
+ return response.text
+ return msg
+
+
+def login():
+ data = {
+ 'user': USERNAME,
+ 'pass': PASSWORD
+ }
+ SESSION.post(SERVER, data=data) # type: ignore
+
+
+def logout():
+ suffix_url = 'logout'
+ http_request('POST', suffix_url)
+
+
+def parse_ticket_data(raw_query):
+ raw_tickets = search_ticket_request(raw_query)
+ headers = ['ID', 'Subject', 'Status', 'Priority', 'Created', 'Queue', 'Creator', 'Owner', 'InitialPriority',
+ 'FinalPriority']
+ search_context = []
+ data = raw_tickets.content.split('\n')
+ data = data[2:]
+ for line in data:
+ split_line = line.split(': ')
+ search_ticket = get_ticket_request(split_line[0]).content
+ search_ticket = search_ticket.split('\n')
+ search_ticket = search_ticket[2:]
+ id_ticket = search_ticket[0].upper()
+ search_ticket[0] = id_ticket
+
+ current_ticket_search = {}
+ for entity in search_ticket:
+ if ': ' in entity:
+ header, content = entity.split(': ', 1)
+ if 'ID' in header:
+ content = ticket_string_to_id(content)
+ if header in {'ID', 'Subject', 'Status', 'Priority', 'Created', 'Queue', 'Creator', 'Owner',
+ 'InitialPriority', 'FinalPriority'}:
+ current_ticket_search[header] = content
+
+ for key in search_ticket: # Adding ticket custom fields to outputs
+ if key.startswith('CF.'):
+ split_key = key.split(':')
+ if split_key[0]:
+ custom_field_regex = re.findall(CURLY_BRACKETS_REGEX, key)[0].replace(' ',
+ '') # Regex and removing white spaces
+ current_ticket_search[custom_field_regex] = split_key[1]
+ headers.append(custom_field_regex)
+
+ if current_ticket_search:
+ search_context.append(current_ticket_search)
+
+ return search_context
+
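+# The parsers in this module assume RT REST 1.0's plain-text reply format
+# (a sketch inferred from the parsing logic, not an exhaustive spec):
+#   RT/4.4.0 200 Ok
+#
+#   id: ticket/1
+#   Subject: Suspicious login
+#   Status: new
+#   CF.{Customer}: acme
+# The first two lines are dropped and the remainder is split on ': '.
+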
+
+''' FUNCTIONS '''
+
+
+def create_ticket_request(encoded):
+ suffix_url = 'ticket/new'
+ ticket_id = http_request('POST', suffix_url, data=encoded)
+
+ return ticket_id
+
+
+def create_ticket_attachments_request(encoded, files_data):
+ suffix_url = 'ticket/new'
+ ticket_id = http_request('POST', suffix_url, files=files_data)
+
+ return ticket_id
+
+
+def create_ticket():
+ queue = demisto.args().get('queue')
+ data = 'id: ticket/new\nQueue: {}\n'.format(queue)
+
+ subject = demisto.args().get('subject')
+ if subject:
+ data += "Subject: {}\n".format(subject)
+
+ requestor = demisto.args().get('requestor')
+ if requestor:
+ data += "Requestor: {}\n".format(requestor)
+
+ cc = demisto.args().get('cc', '')
+ if cc:
+ data += "Cc: {}\n".format(cc)
+
+ admin_cc = demisto.args().get('admin-cc', '')
+ if admin_cc:
+ data += "AdminCc: {}\n".format(admin_cc)
+
+ owner = demisto.args().get('owner')
+ if owner:
+ data += "Owner: {}\n".format(owner)
+
+ status = demisto.args().get('status')
+ if status:
+ data += "Status: {}\n".format(status)
+
+ priority = demisto.args().get('priority')
+ if priority:
+ data += "Priority: {}\n".format(priority)
+
+ initial_priority = demisto.args().get('initial-priority')
+ if initial_priority:
+ data += "Initial-priority: {}\n".format(initial_priority)
+
+ final_priority = demisto.args().get('final-priority')
+ if final_priority:
+ data += "FinalPriority: {}\n".format(final_priority)
+
+ text = demisto.args().get('text')
+ if text:
+ data += "Text: {}\n".format(unicode(text).encode('utf-8'))
+
+ customfields = demisto.args().get('customfields')
+ if customfields:
+ cf_list = customfields.split(',')
+ for cf in cf_list:
+ equal_index = cf.index('=')
+ key = 'CF-{}: '.format(cf[:equal_index])
+ value = cf[equal_index + 1:]
+ data = data + key + value + '\n'
+
+ attachments = demisto.args().get('attachment')
+ if attachments:
+ files_data = {}
+ if isinstance(attachments, list): # Given as list
+ attachments_list = attachments
+ else: # Given as string
+ attachments_list = attachments.split(',')
+ for i, file_pair in enumerate(attachments_list):
+ file = demisto.getFilePath(file_pair)
+ file_name = file['name']
+ files_data['attachment_{:d}'.format(i + 1)] = (file_name, open(file['path'], 'rb'))
+ data += 'Attachment: {}'.format(file_name)
+
+ encoded = "content=" + urllib.quote_plus(data)
+ if attachments:
+ files_data.update({'content': (None, data)}) # type: ignore
+ raw_ticket_res = create_ticket_attachments_request(encoded, files_data)
+ else:
+ raw_ticket_res = create_ticket_request(encoded)
+    # the original '== -1' check could never trigger on a string; guard on no match instead
+    ticket_id_matches = re.findall(r'\d+', raw_ticket_res.content)
+    if not ticket_id_matches:
+        return_error('Ticket creation failed')
+    ticket_id = ticket_id_matches[-1]
+
+ ticket_context = ({
+ 'ID': ticket_id,
+ 'Subject': subject,
+ 'Creator': requestor,
+ 'InitialPriority': initial_priority,
+ 'Priority': priority,
+ 'FinalPriority': final_priority,
+ 'Owner': owner
+ })
+ ec = {
+ 'RTIR.Ticket(val.ID && val.ID === obj.ID)': ticket_context
+ }
+ hr = 'Ticket {} was created successfully.'.format(ticket_id)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': raw_ticket_res.content,
+ 'ContentsFormat': formats['text'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
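+# Illustrative payload built by create_ticket (hypothetical args queue=General,
+# subject=Test): the plain-text body
+#   id: ticket/new
+#   Queue: General
+#   Subject: Test
+# is sent form-encoded as content=id%3A+ticket%2Fnew%0AQueue%3A+General%0A...
+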
+
+def get_ticket_request(ticket_id):
+ suffix_url = 'ticket/{}/show'.format(ticket_id)
+ raw_ticket = http_request('GET', suffix_url)
+
+ return raw_ticket
+
+
+def search_ticket():
+ raw_query = ''
+ ticket_id = demisto.args().get('ticket-id')
+
+ if ticket_id:
+ raw_query += 'id={}{}{}+AND+'.format(apostrophe, ticket_id, apostrophe)
+
+ subject = demisto.args().get('subject')
+ if subject:
+ raw_query += 'Subject={}{}{}+AND+'.format(apostrophe, subject, apostrophe)
+
+ status = demisto.args().get('status')
+ if status:
+ raw_query += 'Status={}{}{}+AND+'.format(apostrophe, status, apostrophe)
+
+ creator = demisto.args().get('creator')
+ if creator:
+ raw_query += 'Creator={}{}{}+AND+'.format(apostrophe, creator, apostrophe)
+
+ priority_equal_to = demisto.args().get('priority-equal-to')
+ if priority_equal_to:
+ raw_query += 'Priority={}{}{}+AND+'.format(apostrophe, priority_equal_to, apostrophe)
+
+ priority_greater_than = demisto.args().get('priority-greater-than')
+ if priority_greater_than:
+ raw_query += 'Priority>{}{}{}+AND+'.format(apostrophe, priority_greater_than, apostrophe)
+
+ created_after = demisto.args().get('created-after')
+ if created_after:
+ raw_query += 'Created>{}{}{}+AND+'.format(apostrophe, created_after, apostrophe)
+
+ created_on = demisto.args().get('created-on')
+ if created_on:
+ raw_query += 'Created={}{}{}+AND+'.format(apostrophe, created_on, apostrophe)
+
+ created_before = demisto.args().get('created-before')
+ if created_before:
+ raw_query += 'Created<{}{}{}+AND+'.format(apostrophe, created_before, apostrophe)
+
+ owner = demisto.args().get('owner')
+ if owner:
+        raw_query += 'Owner={}{}{}+AND+'.format(apostrophe, owner, apostrophe)
+
+ due = demisto.args().get('due')
+ if due:
+ raw_query += 'Due={}{}{}+AND+'.format(apostrophe, due, apostrophe)
+
+ queue = demisto.args().get('queue')
+ if queue:
+ raw_query += 'Queue={}{}{}+AND+'.format(apostrophe, queue, apostrophe)
+ raw_tickets = search_ticket_request(raw_query)
+ headers = ['ID', 'Subject', 'Status', 'Priority', 'Created', 'Queue', 'Creator', 'Owner', 'InitialPriority',
+ 'FinalPriority']
+ search_context = []
+ data = raw_tickets.content.split('\n')
+ data = data[2:]
+ for line in data:
+ split_line = line.split(': ')
+ search_ticket = get_ticket_request(split_line[0]).content
+ search_ticket = search_ticket.split('\n')
+ search_ticket = search_ticket[2:]
+ id_ticket = search_ticket[0].upper()
+ search_ticket[0] = id_ticket
+
+ current_ticket_search = {}
+ for entity in search_ticket:
+ if ': ' in entity:
+ header, content = entity.split(': ', 1)
+ if 'ID' in header:
+ content = ticket_string_to_id(content)
+ if header in {'ID', 'Subject', 'Status', 'Priority', 'Created', 'Queue', 'Creator', 'Owner',
+ 'InitialPriority', 'FinalPriority'}:
+ current_ticket_search[header] = content
+
+ for key in search_ticket: # Adding ticket custom fields to outputs
+ if key.startswith('CF.'):
+ split_key = key.split(':')
+ if split_key[0]:
+ custom_field_regex = re.findall(CURLY_BRACKETS_REGEX, key)[0].replace(' ',
+ '') # Regex and removing white spaces
+ current_ticket_search[custom_field_regex] = split_key[1]
+ headers.append(custom_field_regex)
+
+ if current_ticket_search:
+ search_context.append(current_ticket_search)
+ if search_context:
+ ec = {
+ 'RTIR.Ticket(val.ID && val.ID === obj.ID)': search_context
+ }
+ title = 'RTIR ticket search results'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': search_context,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, search_context, headers, removeNull=True),
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results('No results found.')
+
+
+def search_ticket_request(raw_query):
+ suffix_url = 'search/ticket'
+ raw_tickets = http_request('GET', suffix_url, query={'query': raw_query})
+
+ return raw_tickets
+
+
+def close_ticket_request(ticket_id, encoded):
+ suffix_url = 'ticket/{}/edit'.format(ticket_id)
+ closed_ticket = http_request('POST', suffix_url, data=encoded)
+
+ return closed_ticket
+
+
+def close_ticket():
+ ticket_id = demisto.args().get('ticket-id')
+ content = '\nStatus: resolved'
+ encoded = "content=" + urllib.quote_plus(content)
+ closed_ticket = close_ticket_request(ticket_id, encoded)
+ if '200 Ok' in closed_ticket.content:
+ ec = {
+ 'RTIR.Ticket(val.ID && val.ID === obj.ID)': {
+ 'ID': int(ticket_id),
+ 'State': 'resolved'
+ }
+ }
+ hr = 'Ticket {} was resolved successfully.'.format(ticket_id)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': hr,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+ else:
+ return_error('Failed to resolve ticket')
+
+
+def edit_ticket_request(ticket_id, encoded):
+ suffix_url = 'ticket/{}/edit'.format(ticket_id)
+ edited_ticket = http_request('POST', suffix_url, data=encoded)
+
+ return edited_ticket
+
+
+def edit_ticket():
+ arguments_given = False
+ ticket_id = demisto.args().get('ticket-id')
+ content = 'ID: ' + ticket_id
+ kwargs = {}
+ subject = demisto.args().get('subject')
+ if subject:
+ content += '\nSubject: ' + subject
+ arguments_given = True
+ kwargs['Subject'] = subject
+
+ owner = demisto.args().get('owner')
+ if owner:
+ content += '\nOwner: ' + owner
+ arguments_given = True
+ kwargs['Owner'] = owner
+
+ status = demisto.args().get('status')
+ if status:
+ content += '\nStatus: ' + status
+ arguments_given = True
+ kwargs['Status'] = status
+
+ priority = demisto.args().get('priority')
+ if priority:
+ content += '\nPriority: ' + priority
+ arguments_given = True
+ kwargs['Priority'] = int(priority)
+
+ final_priority = demisto.args().get('final-priority')
+ if final_priority:
+ content += '\nFinalPriority: ' + final_priority
+ arguments_given = True
+ kwargs['FinalPriority'] = int(final_priority)
+
+ due = demisto.args().get('due')
+ if due:
+ content += '\nDue: ' + due
+ arguments_given = True
+ kwargs['Due'] = due
+
+    customfields = demisto.args().get('customfields')
+    if customfields:
+        arguments_given = True
+        cf_list = customfields.split(',')
+        for cf in cf_list:
+            equal_index = cf.index('=')
+            key = 'CF-{}: '.format(cf[:equal_index])
+            value = cf[equal_index + 1:]
+            # every other field is added with a leading newline; do the same here
+            content += '\n' + key + value
+
+ if arguments_given:
+ encoded = "content=" + urllib.quote_plus(content)
+ edited_ticket = edit_ticket_request(ticket_id, encoded)
+ if "200 Ok" in edited_ticket.content:
+ ticket_context = ({
+ 'ID': ticket_id,
+ 'Subject': subject,
+ 'State': status,
+ 'Priority': priority,
+ 'FinalPriority': final_priority,
+ 'Owner': owner
+ })
+ ec = {
+ 'RTIR.Ticket(val.ID && val.ID === obj.ID)': ticket_context
+ }
+
+ hr = 'Ticket {} was edited successfully.'.format(ticket_id)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': hr,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+ else:
+ return_error('Failed to edit ticket')
+ else:
+ return_error('No arguments were given to edit the ticket.')
+
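+# Illustrative customfields argument handling (same convention as in
+# create_ticket): customfields="ip=1.2.3.4,scope=external" adds the lines
+#   CF-ip: 1.2.3.4
+#   CF-scope: external
+# to the ticket content.
+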
+
+def get_ticket_attachments(ticket_id):
+ suffix_url = 'ticket/{}/attachments'.format(ticket_id)
+ raw_attachments = http_request('GET', suffix_url).content
+
+ attachments = []
+ attachments_content = []
+ split_raw_attachment = raw_attachments.split('\n')
+ for i in xrange(len(split_raw_attachment)):
+ if 'Attachments' in split_raw_attachment[i]:
+ attachment_lines = split_raw_attachment[i:]
+ for line in attachment_lines:
+ if line and 'Unnamed' not in line:
+ split_line = line.split(': ')
+ if 'Attachments' in split_line:
+ starting_index = 1
+ else:
+ starting_index = 0
+ attachment_id = split_line[starting_index]
+ attachment_id = attachment_id.strip()
+ attachment_name = split_line[starting_index + 1]
+ attachment_type = attachment_name.replace('(', '').replace(')', '')
+ split_line_type = attachment_type.split(' ')
+ attachment_name = split_line_type[0]
+ attachment_type = split_line_type[1]
+ attachment_size = split_line_type[3]
+
+ attachments.append({
+ 'ID': attachment_id,
+ 'Name': attachment_name,
+ 'Type': attachment_type,
+ 'Size': attachment_size
+ })
+
+ suffix_url = 'ticket/{}/attachments/{}'.format(ticket_id, attachment_id)
+ attachment_content = http_request('GET', suffix_url).content
+ attachments_content.append(fileResult(attachment_name, attachment_content))
+ return attachments, attachments_content
+
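+# Illustrative attachments listing parsed above (a sketch of RT's format,
+# inferred from the parsing logic):
+#   Attachments: 1: report.pdf (application/pdf / 1.2k)
+#                2: log.txt (text/plain / 45b)
+# -> ID='1', Name='report.pdf', Type='application/pdf', Size='1.2k'
+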
+
+def get_ticket_attachments_command():
+ ticket_id = demisto.args().get('ticket-id')
+ attachments, attachments_content = get_ticket_attachments(ticket_id)
+ if attachments:
+ ec = {
+ 'RTIR.Ticket(val.ID && val.ID === obj.ID)': {
+ 'ID': int(ticket_id),
+ 'Attachment': attachments
+ }
+ }
+ title = 'RTIR ticket {} attachments'.format(ticket_id)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': attachments,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, attachments, removeNull=True),
+ 'EntryContext': ec
+ })
+ demisto.results(attachments_content)
+ else:
+ demisto.results('No attachments found.')
+
+
+def get_ticket_history_by_id(ticket_id, history_id):
+ """Accepts ticket ID and history ID as input and returns a dictionary of ticket history entry properties"""
+
+ suffix_url = 'ticket/{}/history/id/{}'.format(ticket_id, history_id)
+ raw_history = http_request('GET', suffix_url)
+
+ return raw_history
+
+
+def get_ticket_history(ticket_id):
+ suffix_url = 'ticket/{}/history'.format(ticket_id)
+ raw_history = http_request('GET', suffix_url)
+ history_context = []
+ headers = ['ID', 'Created', 'Creator', 'Description']
+ data = raw_history.text.split('\n')
+ data = data[4:]
+ for line in data:
+ split_line = line.split(': ')
+ current_raw_ticket_history = get_ticket_history_by_id(ticket_id, split_line[0]).content
+ current_raw_ticket_history = current_raw_ticket_history.split('\n')
+ current_raw_ticket_history = current_raw_ticket_history[4:]
+ id_ticket = current_raw_ticket_history[0].upper()
+ current_raw_ticket_history[0] = id_ticket
+ current_history_context = {}
+ for entity in current_raw_ticket_history:
+ if ': ' in entity:
+ header, content = entity.split(': ', 1)
+ if header in {'ID', 'Content', 'Created', 'Creator', 'Description', 'NewValue'}:
+ current_history_context[header] = content
+ if current_history_context:
+ history_context.append(current_history_context)
+ return history_context, headers
+
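+# Illustrative long-format history entry parsed above (a sketch inferred from
+# the headers the parser keeps):
+#   id: 12
+#   Description: Correspondence added by admin
+#   Creator: admin
+#   Created: 2019-01-14 17:19:16
+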
+
+def get_ticket_history_command():
+ ticket_id = demisto.args().get('ticket-id')
+ history_context, headers = get_ticket_history(ticket_id)
+ if history_context:
+ ec = {
+ 'RTIR.Ticket(val.ID && val.ID === obj.ID)': {
+ 'ID': int(ticket_id),
+ 'History': history_context
+ }
+ }
+ title = 'RTIR ticket {} history'.format(ticket_id)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': history_context,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, history_context, headers, removeNull=True),
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results('No results found.')
+
+
+def get_ticket():
+ ticket_id = demisto.args().get('ticket-id')
+ raw_ticket = get_ticket_request(ticket_id)
+ if not raw_ticket:
+ return_error('Failed to get ticket, possibly does not exist.')
+ ticket_context = []
+ data = raw_ticket.content.split('\n')
+ data = data[2:]
+ current_ticket = {}
+ for line in data:
+ split_line = line.split(': ')
+ if len(split_line) == 2:
+ current_ticket[split_line[0]] = split_line[1]
+ ticket = {
+ 'ID': ticket_string_to_id(current_ticket['id']),
+ 'Subject': current_ticket.get('Subject'),
+ 'State': current_ticket.get('Status'),
+ 'Creator': current_ticket.get('Creator'),
+ 'Created': current_ticket.get('Created'),
+ 'Priority': current_ticket.get('Priority'),
+ 'InitialPriority': current_ticket.get('InitialPriority'),
+ 'FinalPriority': current_ticket.get('FinalPriority'),
+ 'Queue': current_ticket.get('Queue'),
+ 'Owner': current_ticket.get('Owner')
+ }
+
+ for key in data: # Adding ticket custom fields to outputs
+ if key.startswith('CF.'):
+ split_key = key.split(':')
+ if split_key[0]:
+ custom_field_regex = re.findall(CURLY_BRACKETS_REGEX, key)[0].replace(' ',
+ '') # Regex and removing white spaces
+ ticket[custom_field_regex] = split_key[1]
+
+ if ticket:
+ ticket_context.append(ticket)
+
+ suffix_url = 'ticket/{}/links/show'.format(ticket_id)
+ raw_links = http_request('GET', suffix_url)
+ if raw_links:
+ links = []
+ for raw_link in raw_links:
+ link_id = raw_link.rsplit('/', 3)[-2]
+ links.append({
+ 'ID': link_id
+ })
+ ticket['LinkedTo'] = links
+ ec = {
+ 'RTIR.Ticket(val.ID && val.ID === obj.ID)': ticket
+ }
+ title = 'RTIR ticket {}'.format(ticket_id)
+ headers = ['ID', 'Subject', 'Status', 'Priority', 'Created', 'Queue', 'Creator', 'Owner', 'InitialPriority',
+ 'FinalPriority']
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': ticket_context,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, ticket, headers, removeNull=True),
+ 'EntryContext': ec
+ })
+
+
+def add_comment_request(ticket_id, encoded):
+ suffix_url = 'ticket/{}/comment'.format(ticket_id)
+ added_comment = http_request('POST', suffix_url, data=encoded)
+
+ return added_comment
+
+
+def add_comment_attachment(ticket_id, encoded, files_data):
+ suffix_url = 'ticket/{}/comment'.format(ticket_id)
+ comment = http_request('POST', suffix_url, files=files_data)
+
+ return comment.content
+
+
+def add_comment():
+ ticket_id = demisto.args().get('ticket-id')
+ text = demisto.args().get('text')
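+    # RT's REST API expects a form-encoded 'content' field composed of 'Key: value' lines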
+ content = 'Action: comment\n'
+ if text:
+ content += '\nText: ' + text.encode('utf-8')
+ attachments = demisto.args().get('attachment')
+ if attachments:
+ files_data = {}
+ if isinstance(attachments, list):
+ attachments_list = attachments
+ else: # Given as string
+ attachments_list = attachments.split(',')
+        for i, entry_id in enumerate(attachments_list):
+            file_info = demisto.getFilePath(entry_id)
+            file_name = file_info['name']
+            files_data['attachment_{:d}'.format(i + 1)] = (file_name, open(file_info['path'], 'rb'))
+            content += 'Attachment: {}\n'.format(file_name)
+
+ encoded = "content=" + urllib.quote_plus(content)
+ if attachments:
+ files_data.update({'content': (None, content)}) # type: ignore
+        comment = add_comment_attachment(ticket_id, files_data)
+ return_outputs('Added comment to ticket {} successfully.'.format(ticket_id), {}, comment)
+ else:
+ added_comment = add_comment_request(ticket_id, encoded)
+ if '200' in added_comment.content:
+ demisto.results('Added comment to ticket {} successfully.'.format(ticket_id))
+ else:
+ return_error('Failed to add comment')
+
+
+def add_reply_request(ticket_id, encoded):
+ suffix_url = 'ticket/{}/comment'.format(ticket_id)
+ added_reply = http_request('POST', suffix_url, data=encoded)
+
+ return added_reply
+
+
+def add_reply():
+ ticket_id = demisto.args().get('ticket-id')
+    content = 'Action: correspond\n'  # 'correspond' sends a reply to the requestor; 'comment' only adds an internal comment
+ text = demisto.args().get('text')
+ if text:
+ content += '\nText: ' + text.encode('utf-8')
+ cc = demisto.args().get('cc')
+ if cc:
+ content += '\nCc: ' + cc
+ try:
+ encoded = "content=" + urllib.quote_plus(content)
+ added_reply = add_reply_request(ticket_id, encoded)
+ if '200' in added_reply.content:
+ demisto.results('Replied successfully to ticket {}.'.format(ticket_id))
+ else:
+ return_error('Failed to reply')
+    except Exception as e:
+ demisto.error(str(e))
+ return_error('Failed to reply')
+
+
+def fetch_incidents():
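+    # Incremental fetch: only pull tickets with an ID greater than the last fetched one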
+ last_run = demisto.getLastRun()
+    last_ticket_id = (last_run.get('ticket_id') or 0) if last_run else 0
+ raw_query = 'id>{}+AND+Priority>{}+AND+Queue={}{}{}'.format(last_ticket_id, FETCH_PRIORITY, apostrophe, FETCH_QUEUE,
+ apostrophe)
+ if FETCH_STATUS:
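+        # Build an OR group of the configured statuses, e.g. +AND+(Status='new'+OR+Status='open')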
+ status_list = FETCH_STATUS.split(',')
+ status_query = '+AND+('
+ for status in status_list:
+ status_query += 'Status={}{}{}+OR+'.format(apostrophe, status, apostrophe)
+        raw_query += status_query[:-len('+OR+')] + ')'
+ tickets = parse_ticket_data(raw_query)
+ incidents = []
+ max_ticket_id = last_ticket_id
+ for ticket in tickets:
+ ticket_id = ticket['ID']
+ history_context, _ = get_ticket_history(ticket_id)
+ ticket['History'] = history_context
+ incidents.append(ticket_to_incident(ticket))
+ max_ticket_id = max(max_ticket_id, ticket_id)
+ if tickets:
+ demisto.setLastRun({'ticket_id': max_ticket_id})
+ demisto.incidents(incidents)
+
+
+''' EXECUTION CODE '''
+
+LOG('command is %s' % (demisto.command(),))
+try:
+ login()
+ if demisto.command() == 'test-module':
+ demisto.results('ok')
+
+    elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+
+ elif demisto.command() == 'rtir-create-ticket':
+ create_ticket()
+
+ elif demisto.command() == 'rtir-search-ticket':
+ search_ticket()
+
+ elif demisto.command() == 'rtir-resolve-ticket':
+ close_ticket()
+
+ elif demisto.command() == 'rtir-edit-ticket':
+ edit_ticket()
+
+ elif demisto.command() == 'rtir-ticket-history':
+ get_ticket_history_command()
+
+ elif demisto.command() == 'rtir-ticket-attachments':
+ get_ticket_attachments_command()
+
+ elif demisto.command() == 'rtir-get-ticket':
+ get_ticket()
+
+ elif demisto.command() == 'rtir-add-comment':
+ add_comment()
+
+ elif demisto.command() == 'rtir-add-reply':
+ add_reply()
+
+except Exception as e:
+    LOG(str(e))
+ LOG.print_log()
+ raise
+
+finally:
+ logout()
diff --git a/Integrations/RTIR/RTIR.yml b/Integrations/RTIR/RTIR.yml
new file mode 100644
index 000000000000..ac18724798b9
--- /dev/null
+++ b/Integrations/RTIR/RTIR.yml
@@ -0,0 +1,575 @@
+category: Case Management
+commonfields:
+ id: RTIR
+ version: -1
+configuration:
+- display: Server URL (e.g. http://192.168.0.1)
+ name: server
+ required: true
+ type: 0
+- display: Credentials
+ name: credentials
+ required: true
+ type: 9
+- defaultvalue: 'true'
+ display: Trust any certificate (not secure)
+ name: unsecure
+ required: false
+ type: 8
+- defaultvalue: 'false'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: new,open
+ display: 'Fetch incidents of the following status:'
+ name: fetch_status
+ required: false
+ type: 0
+- defaultvalue: '0'
+  display: 'Fetch incidents with priority greater than or equal to:'
+ name: fetch_priority
+ required: false
+ type: 0
+- defaultvalue: Incident Reports
+ display: Queue name to fetch incidents from
+ name: fetch_queue
+ required: false
+ type: 0
+- display: Referer request header
+ name: referer
+ required: false
+ type: 0
+description: Request Tracker for Incident Response is a ticketing system that provides
+  pre-configured queues and workflows designed for incident response teams.
+display: RTIR
+name: RTIR
+script:
+ commands:
+ - arguments:
+ - default: false
+ defaultValue: General
+    description: Queue in which to create the ticket
+ isArray: false
+ name: queue
+ required: false
+ secret: false
+ - default: false
+ description: Subject of the ticket
+ isArray: false
+ name: subject
+ required: true
+ secret: false
+ - default: false
+ description: Requestor email address
+ isArray: false
+ name: requestor
+ required: false
+ secret: false
+ - default: false
+ description: Sends a carbon-copy of this update to a comma-delimited list of
+ email addresses. These people will receive future updates.
+ isArray: false
+ name: cc
+ required: false
+ secret: false
+ - default: false
+ description: Sends a carbon-copy of this update to a comma-delimited list of
+ administrative email addresses. These people will receive future updates.
+ isArray: false
+ name: admin-cc
+ required: false
+ secret: false
+ - default: false
+    description: Ticket owner
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ - default: false
+ description: Ticket status
+ isArray: false
+ name: status
+ required: false
+ secret: false
+ - default: false
+    description: Ticket priority, given as a number in the range 0-100
+ isArray: false
+ name: priority
+ required: false
+ secret: false
+ - default: false
+ description: The ticket content
+ isArray: false
+ name: text
+ required: false
+ secret: false
+ - default: false
+    description: Ticket initial priority, given as a number in the range 0-100
+ isArray: false
+ name: initial-priority
+ required: false
+ secret: false
+ - default: false
+    description: Ticket final priority, given as a number in the range 0-100
+ isArray: false
+ name: final-priority
+ required: false
+ secret: false
+ - default: false
+ description: Attachment to add to the ticket, given as Entry ID. Comma separated
+ values supported, e.g. entryID1,entryID2
+ isArray: true
+ name: attachment
+ required: false
+ secret: false
+ - default: false
+ description: 'Ticket custom fields, given in format: field1=value1,field2=value2,
+ e.g. IP=8.8.8.8,HowReported=Email. Note: the command does not create custom
+ fields, these should be created on RTIR.'
+ isArray: false
+ name: customfields
+ required: false
+ secret: false
+ deprecated: false
+ description: Create new ticket on RTIR
+ execution: false
+ name: rtir-create-ticket
+ outputs:
+ - contextPath: RTIR.Ticket.ID
+ description: Ticket ID
+ type: number
+ - contextPath: RTIR.Ticket.InitialPriority
+    description: Ticket initial priority (0-100)
+ type: number
+ - contextPath: RTIR.Ticket.Priority
+ description: Ticket priority (0-100)
+ type: number
+ - contextPath: RTIR.Ticket.FinalPriority
+ description: Ticket final priority (0-100)
+ type: number
+ - contextPath: RTIR.Ticket.Owner
+ description: Ticket owner
+ type: string
+ - contextPath: RTIR.Ticket.Subject
+ description: Ticket subject
+ type: string
+ - contextPath: RTIR.Ticket.Creator
+    description: Ticket creator
+ type: string
+ - arguments:
+ - default: false
+    description: Ticket ID
+ isArray: false
+ name: ticket-id
+ required: false
+ secret: false
+ - default: false
+ description: Ticket subject
+ isArray: false
+ name: subject
+ required: false
+ secret: false
+ - default: false
+ description: Ticket queue (Comma separated values supported, e.g. General,Incident
+ reports,Incidents)
+ isArray: false
+ name: queue
+ required: true
+ secret: false
+ - default: false
+    description: Ticket status
+ isArray: false
+ name: status
+ required: false
+ secret: false
+ - default: false
+ description: Ticket creator
+ isArray: false
+ name: creator
+ required: false
+ secret: false
+ - default: false
+    description: Returns tickets with priority equal to this value (a number in the range 0-100)
+ isArray: false
+ name: priority-equal-to
+ required: false
+ secret: false
+ - default: false
+    description: Returns tickets with priority greater than this value (a number in the range 0-100)
+ isArray: false
+ name: priority-greater-than
+ required: false
+ secret: false
+ - default: false
+    description: Date after which the ticket was created, in the format YYYY-MM-DD,
+ e.g. 2011-02-24
+ isArray: false
+ name: created-after
+ required: false
+ secret: false
+ - default: false
+    description: Date on which the ticket was created, in the format YYYY-MM-DD,
+ e.g. 2011-02-24
+ isArray: false
+ name: created-on
+ required: false
+ secret: false
+ - default: false
+    description: Date before which the ticket was created, in the format YYYY-MM-DD,
+ e.g. 2011-02-24
+ isArray: false
+ name: created-before
+ required: false
+ secret: false
+ - default: false
+    description: Ticket owner
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ - default: false
+    description: Ticket due date, in the format YYYY-MM-DD, e.g. 2011-02-24
+ isArray: false
+ name: due
+ required: false
+ secret: false
+ deprecated: false
+ description: Search for tickets on RTIR
+ execution: false
+ name: rtir-search-ticket
+ outputs:
+ - contextPath: RTIR.Ticket.ID
+ description: Ticket ID
+ type: number
+ - contextPath: RTIR.Ticket.State
+ description: Ticket state
+ type: string
+ - contextPath: RTIR.Ticket.Creator
+ description: Ticket creator
+ type: string
+ - contextPath: RTIR.Ticket.Subject
+ description: Ticket subject
+ type: string
+ - contextPath: RTIR.Ticket.Created
+ description: Ticket creation date
+ type: date
+ - contextPath: RTIR.Ticket.Priority
+ description: Ticket priority (0-100)
+ type: number
+ - contextPath: RTIR.Ticket.InitialPriority
+ description: Ticket initial priority (0-100)
+ type: number
+ - contextPath: RTIR.Ticket.FinalPriority
+ description: Ticket final priority (0-100)
+ type: number
+ - contextPath: RTIR.Ticket.Queue
+ description: Ticket queue
+ type: string
+ - contextPath: RTIR.Ticket.Owner
+ description: Ticket owner
+ type: string
+ - contextPath: RTIR.Ticket.IP
+ description: Ticket custom field - IP address
+ type: string
+ - contextPath: RTIR.Ticket.HowReported
+ description: Ticket custom field - How ticket was reported
+ type: string
+ - contextPath: RTIR.Ticket.Customer
+ description: Ticket custom field - Customer
+ type: string
+ - arguments:
+ - default: false
+ description: Ticket ID to close
+ isArray: false
+ name: ticket-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Sets a ticket as resolved on RTIR
+ execution: false
+ name: rtir-resolve-ticket
+ outputs:
+ - contextPath: RTIR.Ticket.ID
+ description: Ticket ID
+ type: number
+ - contextPath: RTIR.Ticket.State
+ description: Ticket state
+ type: string
+ - arguments:
+ - default: false
+ description: Ticket ID
+ isArray: false
+ name: ticket-id
+ required: true
+ secret: false
+ - default: false
+ description: Ticket subject
+ isArray: false
+ name: subject
+ required: false
+ secret: false
+ - default: false
+    description: Ticket priority, a number in the range 0-100
+ isArray: false
+ name: priority
+ required: false
+ secret: false
+ - default: false
+    description: Ticket final priority, a number in the range 0-100
+ isArray: false
+ name: final-priority
+ required: false
+ secret: false
+ - default: false
+ description: Ticket owner
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ - default: false
+ description: Ticket status
+ isArray: false
+ name: status
+ required: false
+ secret: false
+ - default: false
+    description: Ticket due date, in the format YYYY-MM-DD, e.g. 2011-02-24
+ isArray: false
+ name: due
+ required: false
+ secret: false
+ - default: false
+ description: 'Ticket custom fields, given in format: field1=value1,field2=value2,
+ e.g. IP=8.8.8.8,HowReported=Email. Note: the command does not create custom
+ fields, these should be created on RTIR.'
+ isArray: false
+ name: customfields
+ required: false
+ secret: false
+ deprecated: false
+ description: Modify a ticket on RTIR
+ execution: false
+ name: rtir-edit-ticket
+ outputs:
+ - contextPath: RTIR.Ticket.ID
+ description: Ticket ID
+ type: number
+ - contextPath: RTIR.Ticket.FinalPriority
+ description: Ticket final priority (0-100)
+ type: number
+ - contextPath: RTIR.Ticket.Priority
+ description: Ticket priority (0-100)
+ type: number
+ - contextPath: RTIR.Ticket.Owner
+ description: Ticket owner
+ type: string
+ - contextPath: RTIR.Ticket.State
+ description: Ticket state
+ type: string
+ - contextPath: RTIR.Ticket.Subject
+ description: Ticket subject
+ type: string
+ - arguments:
+ - default: false
+ description: Ticket ID to get history of
+ isArray: false
+ name: ticket-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets a list of all the history items for a given ticket.
+ execution: false
+ name: rtir-ticket-history
+ outputs:
+ - contextPath: RTIR.Ticket.ID
+ description: Ticket ID
+ type: number
+ - contextPath: RTIR.Ticket.History.Content
+ description: Ticket history content
+ type: string
+ - contextPath: RTIR.Ticket.History.Created
+ description: Ticket history creation date
+ type: date
+ - contextPath: RTIR.Ticket.History.Creator
+ description: Ticket history creator
+ type: string
+ - contextPath: RTIR.Ticket.History.Description
+ description: Ticket history description
+ type: string
+ - contextPath: RTIR.Ticket.History.NewValue
+ description: Value updated in history transaction
+ type: string
+ - arguments:
+ - default: false
+ description: Ticket ID to get
+ isArray: false
+ name: ticket-id
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets the data for a single ticket.
+ execution: false
+ name: rtir-get-ticket
+ outputs:
+ - contextPath: RTIR.Ticket.LinkedTo.ID
+ description: Linked ticket ID
+ type: number
+ - contextPath: RTIR.Ticket.ID
+ description: Ticket ID
+ type: number
+ - contextPath: RTIR.Ticket.State
+ description: Ticket state
+ type: string
+ - contextPath: RTIR.Ticket.Creator
+ description: Ticket creator
+ type: string
+ - contextPath: RTIR.Ticket.Subject
+ description: Ticket subject
+ type: string
+ - contextPath: RTIR.Ticket.Created
+ description: Ticket creation date
+ type: date
+ - contextPath: RTIR.Ticket.Priority
+ description: Ticket priority (0-100)
+ type: number
+ - contextPath: RTIR.Ticket.InitialPriority
+ description: Ticket initial priority (0-100)
+ type: number
+ - contextPath: RTIR.Ticket.FinalPriority
+ description: Ticket final priority (0-100)
+ type: number
+ - contextPath: RTIR.Ticket.Queue
+ description: Ticket queue
+ type: string
+ - contextPath: RTIR.Ticket.Owner
+ description: Ticket owner
+ type: string
+ - contextPath: RTIR.Ticket.IP
+ description: Ticket custom field - IP address
+ type: string
+ - contextPath: RTIR.Ticket.HowReported
+ description: Ticket custom field - How ticket was reported
+ type: string
+ - contextPath: RTIR.Ticket.Customer
+ description: Ticket custom field - Customer
+ type: string
+ - arguments:
+ - default: false
+ description: Ticket ID to get attachments of
+ isArray: false
+ name: ticket-id
+ required: true
+ secret: false
+ deprecated: false
+  description: Gets the details and data content of all attachments related to the
+    ticket.
+ execution: false
+ name: rtir-ticket-attachments
+ outputs:
+ - contextPath: RTIR.Ticket.ID
+ description: Ticket ID
+ type: number
+ - contextPath: RTIR.Ticket.Attachment.ID
+ description: Attachment ID
+ type: number
+ - contextPath: RTIR.Ticket.Attachment.Name
+ description: Attachment file name
+ type: string
+ - contextPath: RTIR.Ticket.Attachment.Size
+ description: Attachment file size
+ type: string
+ - contextPath: RTIR.Ticket.Attachment.Type
+ description: Attachment file type
+ type: string
+ - contextPath: File.EntryID
+ description: Demisto entry ID of attachment
+ type: string
+ - contextPath: File.Size
+ description: File size
+ type: number
+ - contextPath: File.SHA1
+ description: File SHA1
+ type: string
+ - contextPath: File.SHA256
+ description: File SHA256
+ type: string
+ - contextPath: File.Name
+ description: File name
+ type: string
+ - contextPath: File.SSDeep
+ description: File SSDeep
+ type: string
+ - contextPath: File.Info
+ description: File type information
+ type: string
+ - contextPath: File.Type
+ description: File type
+ type: string
+ - contextPath: File.MD5
+ description: File MD5
+ type: string
+ - contextPath: File.Extension
+ description: File extension
+ type: string
+ - arguments:
+ - default: false
+ description: Ticket ID to add comment to
+ isArray: false
+ name: ticket-id
+ required: true
+ secret: false
+ - default: false
+ description: Comment text
+ isArray: false
+ name: text
+ required: true
+ secret: false
+ - default: false
+ description: Attachment to add to the ticket, given as Entry ID. Comma separated
+ values supported, e.g. entryID1,entryID2
+ isArray: true
+ name: attachment
+ required: false
+ secret: false
+ deprecated: false
+ description: Add a comment to an existing ticket.
+ execution: false
+ name: rtir-add-comment
+ - arguments:
+ - default: false
+    description: ID of the ticket from which to send the reply
+ isArray: false
+ name: ticket-id
+ required: true
+ secret: false
+ - default: false
+    description: Reply text
+ isArray: false
+ name: text
+ required: true
+ secret: false
+ - default: false
+    description: Email addresses to send a carbon copy of the reply to
+ isArray: true
+ name: cc
+ required: false
+ secret: false
+ deprecated: false
+  description: Send a reply to the user from an existing ticket.
+ execution: false
+ name: rtir-add-reply
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- RTIR Test
diff --git a/Integrations/RTIR/RTIR_description.md b/Integrations/RTIR/RTIR_description.md
new file mode 100644
index 000000000000..170f4c39aab6
--- /dev/null
+++ b/Integrations/RTIR/RTIR_description.md
@@ -0,0 +1,3 @@
+Additional Parameters:
+
+* Referer (Optional) - Adds a referer header to the requests sent by the integration.
diff --git a/Integrations/RTIR/RTIR_image.png b/Integrations/RTIR/RTIR_image.png
new file mode 100644
index 000000000000..234a0ce12701
Binary files /dev/null and b/Integrations/RTIR/RTIR_image.png differ
diff --git a/Integrations/Rapid7_Nexpose/Pipfile b/Integrations/Rapid7_Nexpose/Pipfile
new file mode 100644
index 000000000000..34636f5de76a
--- /dev/null
+++ b/Integrations/Rapid7_Nexpose/Pipfile
@@ -0,0 +1,15 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+
+[packages]
+requests = "*"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/Rapid7_Nexpose/Pipfile.lock b/Integrations/Rapid7_Nexpose/Pipfile.lock
new file mode 100644
index 000000000000..ed5e480f2f24
--- /dev/null
+++ b/Integrations/Rapid7_Nexpose/Pipfile.lock
@@ -0,0 +1,240 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "73c4b534031f8318ec6903fbaa068b0bf0e2be5f86c3cfb708683f542b13df2e"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:0ef2bf9f07c3150929b25e8e61b5198c27b0dca195e156f0e4d5bdd89185ca1a",
+ "sha256:fc9b582dba0366e63540982c3944a9230cbc6f303641c51483fa547dcc22393a"
+ ],
+ "version": "==1.6.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==1.5"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:27594cf4fc279f321974061ac69164aaebd2749af962ac8686b20503ac0bcf2d",
+ "sha256:9d51fe0a382f05b6b117c5e601fc219fede4a8c71703324af3f7d883aef476a3"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==3.7.3"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265",
+ "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.0"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:18c796c2cd35eb1a1d3f012a214a542790a1aed95e29768bdcb9f2197eccbd0b",
+ "sha256:96151fca2c6e736503981896495d344781b60d18bfda78dc11b290c6125ebdb6"
+ ],
+ "version": "==4.3.15"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33",
+ "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39",
+ "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019",
+ "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088",
+ "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b",
+ "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e",
+ "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6",
+ "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b",
+ "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5",
+ "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff",
+ "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd",
+ "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7",
+ "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff",
+ "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d",
+ "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2",
+ "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35",
+ "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4",
+ "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514",
+ "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252",
+ "sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109",
+ "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f",
+ "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c",
+ "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92",
+ "sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577",
+ "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d",
+ "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d",
+ "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f",
+ "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a",
+ "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"
+ ],
+ "version": "==1.3.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1",
+ "sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==2.0.0"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742",
+ "sha256:5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.3"
+ },
+ "pbr": {
+ "hashes": [
+ "sha256:8257baf496c8522437e8a6cfe0f15e00aedc6c0e0e7c9d55eeeeab31e0853843",
+ "sha256:8c361cc353d988e4f5b998555c88098b9d5964c2e11acf7b0d21925a66bb5824"
+ ],
+ "version": "==5.1.3"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f",
+ "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746"
+ ],
+ "version": "==0.9.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:02c2b6d268695a8b64ad61847f92e611e6afcff33fd26c3a2125370c4662905d",
+ "sha256:ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93"
+ ],
+ "index": "pypi",
+ "version": "==1.9.4"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:592eaa2c33fae68c7d75aacf042efc9f77b27c08a6224a4f59beab8d9a420523",
+ "sha256:ad3ad5c450284819ecde191a654c09b0ec72257a2c711b9633d677c71c9850c4"
+ ],
+ "index": "pypi",
+ "version": "==4.3.1"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:4d0d06d173eecf172703219a71dbd4ade0e13904e6bbce1ce660e2e0dc78b5c4",
+ "sha256:bfdf02789e3d197bd682a758cae0a4a18706566395fbe2803badcd1335e0173e"
+ ],
+ "index": "pypi",
+ "version": "==1.10.1"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/Rapid7_Nexpose/Rapid7_Nexpose.py b/Integrations/Rapid7_Nexpose/Rapid7_Nexpose.py
new file mode 100644
index 000000000000..2dbd4595b30e
--- /dev/null
+++ b/Integrations/Rapid7_Nexpose/Rapid7_Nexpose.py
@@ -0,0 +1,1354 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import time
+import re
+import requests
+import json
+
+RANGE_OPERATORS = ['in-range', 'is-between', 'not-in-range']
+YEAR_IN_MINUTES = 525600
+MONTH_IN_MINUTES = 43800
+WEEK_IN_MINUTES = 10080
+DAY_IN_MINUTES = 1440
+HOUR_IN_MINUTES = 60
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+USERNAME = demisto.params()['credentials']['identifier']
+PASSWORD = demisto.params()['credentials']['password']
+VERIFY_SSL = not demisto.params().get('unsecure', False)
+TOKEN = demisto.params().get('token')
+
+
+def get_server_url():
+ url = demisto.params()['server']
+    url = re.sub(r'/+$', '', url)  # strip any trailing slashes
+ return url
+
+
+BASE_URL = get_server_url()
+SERVER_URL = BASE_URL + '/api/3'
+
+
+def get_login_headers():
+ headers = {
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ 'Accept': 'application/json'
+ }
+
+ if TOKEN is not None:
+ headers['Token'] = TOKEN
+
+ return headers
+
+
+def login():
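+    # Log in to the Nexpose console; the returned sessionID is later sent as the
+    # nexposeCCSessionID cookie for the legacy /data endpoints (see get_site_headers)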
+ url = BASE_URL + '/data/user/login'
+ headers = get_login_headers()
+ body = {
+ 'nexposeccusername': USERNAME,
+ 'nexposeccpassword': PASSWORD
+ }
+ res = requests.post(url, headers=headers, data=body, verify=VERIFY_SSL)
+ if res.status_code < 200 or res.status_code >= 300:
+ return ''
+ body = res.json()
+ if 'sessionID' not in body:
+ return ''
+
+ return body['sessionID']
+
+
+SESSION = login()
+
+
+def get_headers():
+ headers = {
+ 'Content-Type': 'application/json'
+ }
+
+ if TOKEN is not None:
+ headers['Token'] = TOKEN
+ return headers
+
+
+def get_site_headers():
+ headers = get_headers()
+
+ headers['Cookie'] = 'nexposeCCSessionID=' + SESSION
+ headers['nexposeCCSessionID'] = SESSION
+
+ return headers
+
+
+def get_site(asset_id):
+ url = BASE_URL + '/data/assets/' + str(asset_id) + '/scans'
+ headers = get_site_headers()
+ res = requests.post(url, headers=headers, auth=(USERNAME, PASSWORD), verify=VERIFY_SSL)
+ if res.status_code < 200 or res.status_code >= 300:
+ return ''
+ response = res.json()
+    if not response or not response.get('records'):
+ return ''
+
+ return {
+ 'id': response['records'][0]['siteID'],
+ 'name': response['records'][0]['siteName'],
+ 'ip': response['records'][0]['ipAddress']
+ }
+
+
+def send_request(path, method='get', body=None, params=None, headers=None, is_file=False):
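+    # Generic wrapper around the Nexpose API v3; raises on any non-2xx response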
+ body = body if body is not None else {}
+ params = params if params is not None else {}
+
+ url = '{}/{}'.format(SERVER_URL, path)
+
+ headers = headers if headers is not None else get_headers()
+ res = requests.request(method, url, headers=headers, data=json.dumps(body), params=params,
+ auth=(USERNAME, PASSWORD), verify=VERIFY_SSL)
+ if res.status_code < 200 or res.status_code >= 300:
+ raise Exception('Got status code ' + str(
+ res.status_code) + ' with url ' + url + ' with body ' + res.content + ' with headers ' + str(res.headers))
+ return res.json() if is_file is False else res.content
+
+
+def iso8601_duration_as_minutes(d):
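+    # Convert an ISO 8601 duration string (e.g. 'P1DT2H30M') to minutes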
+ if d is None:
+ return 0
+ if d[0] != 'P':
+ raise ValueError('Not an ISO 8601 Duration string')
+ minutes = 0
+ # split by the 'T'
+ for i, item in enumerate(d.split('T')):
+        for number, period in re.findall(r'(?P<number>\d+)(?P<period>S|M|H|D|W|Y)', item):
+ number = float(number)
+ this = 0
+ if period == 'Y':
+                this = number * YEAR_IN_MINUTES  # 365 days
+ elif period == 'W':
+ this = number * WEEK_IN_MINUTES
+ elif period == 'D':
+ this = number * DAY_IN_MINUTES
+ elif period == 'H':
+ this = number * HOUR_IN_MINUTES
+ elif period == 'M':
+                # ambiguity between months and minutes: 'M' before the 'T' separator means months
+                if i == 0:
+                    this = number * MONTH_IN_MINUTES  # an average month (365/12 days)
+ else:
+ this = number
+ elif period == 'S':
+ this = number / 60
+ minutes = minutes + this
+ return minutes
+
+
+def dq(obj, path):
+    '''
+    Return a value at the given path in an object. In case of multiple objects along the path, search them all.
+    @param obj - dictionary tree to search in
+    @param path (list) - path to the desired value in the object, for example: ['root', 'key', 'subkey']
+    '''
+ if len(path) == 0:
+ return obj
+
+ if isinstance(obj, dict):
+ if path[0] in obj:
+ return dq(obj[path[0]], path[1:])
+ elif isinstance(obj, list):
+ # in case current obj has multiple objects, search them all.
+ line = [dq(o, path) for o in obj]
+ return [k for k in line if k is not None]
+
+ # in case of error in the path
+ return None
+
+
+def translate_single_object(obj, map_fields, filter_func=None):
+ d = {}
+ for f in map_fields:
+ if filter_func is None or filter_func(f):
+ d[f['to']] = dq(obj, f['from'].split('.'))
+
+ return d
+
+
+def translate_object(content, map_fields, filter_func=None):
+    '''
+    Converts object fields according to a mapping dictionary
+    @param content - original content to copy
+    @param map_fields - a list of objects associating source and destination object fields
+    @param filter_func - function to filter out fields
+    @returns the mapped object
+    '''
+ if isinstance(content, (list, tuple)):
+ return [translate_single_object(item, map_fields, filter_func) for item in content]
+ else:
+ return translate_single_object(content, map_fields, filter_func)
+
+
+def get_list_response(path, method='get', limit=None, body=None, params=None):
+    # Avoid mutable default arguments - the shared default dict would otherwise
+    # keep its 'page'/'size' keys across calls
+    body = body if body is not None else {}
+    params = params if params is not None else {}
+ final_result = [] # type: ignore
+ page_diff = 0
+ page_number = 0
+
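+    # Page through the paginated API; stop after the last page, or after the first
+    # page when an explicit size limit is given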
+ while True:
+ page = page_number
+ page_number += 1
+ params['page'] = page
+ if limit is not None:
+ params['size'] = limit
+ response = send_request(path, method=method, body=body, params=params)
+ if not response:
+ break
+ if response['resources'] is not None:
+ final_result = final_result + response['resources']
+ if response['page'] is not None:
+ page_diff = response['page']['totalPages'] - response['page']['number']
+ if page_diff < 1 or limit is not None:
+ break
+
+ return final_result
+
+
+def get_last_scan(asset):
+    if not asset.get('history'):
+        # Return the same shape the callers expect ('date' and 'id' keys)
+        return {
+            'date': '-',
+            'id': '-'
+        }
+ sorted_dates = sorted(asset['history'], key=get_datetime_from_asset_history_item,
+ reverse=True)
+
+ if sorted_dates[0] is not None:
+ return {
+ 'date': sorted_dates[0]['date'] if 'date' in sorted_dates[0] else '-',
+ 'id': sorted_dates[0]['scanId'] if 'scanId' in sorted_dates[0] else '-'
+ }
+ else:
+ return {
+ 'date': '-',
+ 'id': '-'
+ }
+
+
+def get_datetime_from_asset_history_item(item):
+ try:
+ return time.strptime(item['date'], "%Y-%m-%dT%H:%M:%S.%fZ")
+ except ValueError:
+ return time.strptime(item['date'], "%Y-%m-%dT%H:%M:%SZ")
+
+
+def get_asset_command():
+ asset = get_asset(demisto.args()['id'])
+
+ if asset is None:
+ return "Asset not found"
+ last_scan = get_last_scan(asset)
+ asset['LastScanDate'] = last_scan['date']
+ asset['LastScanId'] = last_scan['id']
+    site = get_site(asset['id'])
+    asset['Site'] = site['name'] if site != '' else ''
+
+ asset_headers = [
+ 'AssetId',
+ 'Addresses',
+ 'Hardware',
+ 'Aliases',
+ 'HostType',
+ 'Site',
+ 'OperatingSystem',
+ 'CPE',
+ 'LastScanDate',
+ 'LastScanId',
+ 'RiskScore'
+ ]
+
+ asset_output = translate_object(asset, [
+ {'from': 'id', 'to': 'AssetId'},
+ {'from': 'addresses.ip', 'to': 'Addresses'},
+ {'from': 'addresses.mac', 'to': 'Hardware'},
+ {'from': 'hostNames.name', 'to': 'Aliases'},
+ {'from': 'type', 'to': 'HostType'},
+ {'from': 'Site', 'to': 'Site'},
+ {'from': 'os', 'to': 'OperatingSystem'},
+ {'from': 'vulnerabilities.total', 'to': 'Vulnerabilities'},
+ {'from': 'cpe.v2.3', 'to': 'CPE'},
+ {'from': 'LastScanDate', 'to': 'LastScanDate'},
+ {'from': 'LastScanId', 'to': 'LastScanId'},
+ {'from': 'riskScore', 'to': 'RiskScore'}
+ ])
+
+ software_output = None
+ services_output = None
+ users_output = None
+
+ if 'software' in asset and len(asset['software']) > 0:
+ software_headers = [
+ 'Software',
+ 'Version'
+ ]
+
+ software_output = translate_object(asset['software'], [
+ {'from': 'description', 'to': 'Software'},
+ {'from': 'version', 'to': 'Version'}
+ ])
+
+ if 'services' in asset and len(asset['services']) > 0:
+ service_headers = [
+ 'Name',
+ 'Port',
+ 'Product',
+ 'Protocol'
+ ]
+
+ services_output = translate_object(asset['services'], [
+ {'from': 'name', 'to': 'Name'},
+ {'from': 'port', 'to': 'Port'},
+ {'from': 'product', 'to': 'Product'},
+ {'from': 'protocol', 'to': 'Protocol'}
+ ])
+
+ if 'users' in asset and len(asset['users']) > 0:
+ user_headers = [
+ 'FullName',
+ 'Name',
+ 'UserId'
+ ]
+
+ users_output = translate_object(asset['users'], [
+ {'from': 'name', 'to': 'Name'},
+ {'from': 'fullName', 'to': 'FullName'},
+ {'from': 'id', 'to': 'UserId'},
+ ])
+
+ vulnerability_headers = [
+ 'Id',
+ 'Title',
+ 'Malware',
+ 'Exploit',
+ 'CVSS',
+ 'Risk',
+ 'PublishedOn',
+ 'ModifiedOn',
+ 'Severity',
+ 'Instances',
+ ]
+
+ vulnerabilities = get_vulnerabilities(asset['id'])
+ asset['vulnerabilities'] = vulnerabilities
+ vulnerabilities_output = []
+ cves_output = [] # type: ignore
+ for i, v in enumerate(asset['vulnerabilities']):
+ detailed_vuln = get_vulnerability(v['id'])
+ # Add to raw output
+ asset['vulnerabilities'][i] = dict(asset['vulnerabilities'][i].items() + detailed_vuln.items())
+ cvss = dq(detailed_vuln['cvss'], ['v2', 'score'])
+
+ if ('cves' in detailed_vuln):
+ cves_output = cves_output + map(lambda cve: {
+ 'ID': cve
+ }, detailed_vuln['cves'])
+
+ output_vuln = {
+ 'Id': v['id'],
+ 'Title': detailed_vuln['title'],
+ 'Malware': detailed_vuln['malwareKits'],
+ 'Exploit': detailed_vuln['exploits'],
+ 'CVSS': cvss,
+ 'Risk': detailed_vuln['riskScore'],
+ 'PublishedOn': detailed_vuln['published'],
+ 'ModifiedOn': detailed_vuln['modified'],
+ 'Severity': detailed_vuln['severity'],
+ 'Instances': v['instances'],
+ }
+
+ vulnerabilities_output.append(output_vuln)
+
+ asset_md = tableToMarkdown('Nexpose asset ' + str(asset['id']), asset_output, asset_headers, removeNull=True)
+ vulnerabilities_md = tableToMarkdown('Vulnerabilities', vulnerabilities_output, vulnerability_headers,
+ removeNull=True) if len(vulnerabilities_output) > 0 else ''
+ software_md = tableToMarkdown('Software', software_output, software_headers,
+ removeNull=True) if software_output is not None else ''
+ services_md = tableToMarkdown('Services', services_output, service_headers,
+ removeNull=True) if services_output is not None else ''
+ users_md = tableToMarkdown('Users', users_output, user_headers, removeNull=True) if users_output is not None else ''
+
+ md = asset_md + vulnerabilities_md + software_md + services_md + users_md
+
+ asset_output['Vulnerability'] = vulnerabilities_output
+ asset_output['Software'] = software_output
+ asset_output['Service'] = services_output
+ asset_output['User'] = users_output
+
+ endpoint = {
+ 'IP': asset_output['Addresses'],
+ 'MAC': asset_output['Hardware'],
+ 'HostName': asset_output['Aliases'],
+ 'OS': asset_output['OperatingSystem']
+ }
+
+ context = {
+ 'Nexpose.Asset(val.AssetId==obj.AssetId)': asset_output,
+ 'Endpoint(val.IP==obj.IP)': endpoint
+ }
+
+ if len(cves_output) > 0:
+ context['CVE(val.ID==obj.ID)'] = cves_output
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': asset,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': context
+ }
+
+ return entry
+
+
+def get_asset(asset_id):
+ path = 'assets/' + str(asset_id)
+ return send_request(path)
+
+
+def get_asset_vulnerability_command():
+ v = get_asset_vulnerability(demisto.args()['id'], demisto.args()['vulnerabilityId'])
+
+ if v is None:
+ return 'Vulnerability not found'
+
+ vuln_headers = [
+ 'Id',
+ 'Title',
+ 'Severity',
+ 'RiskScore',
+ 'CVSS',
+ 'CVSSV3',
+ 'Published',
+ 'Added',
+ 'Modified',
+ 'CVSSScore',
+ 'CVSSV3Score',
+ 'Categories',
+ 'CVES'
+ ]
+
+ detailed_vuln = get_vulnerability(v['id'])
+ # Add to raw output
+ v = dict(v.items() + detailed_vuln.items())
+ vuln_outputs = translate_object(detailed_vuln, [
+ {'from': 'id', 'to': 'Id'},
+ {'from': 'title', 'to': 'Title'},
+ {'from': 'severity', 'to': 'Severity'},
+ {'from': 'riskScore', 'to': 'RiskScore'},
+ {'from': 'cvss.v2.vector', 'to': 'CVSS'},
+ {'from': 'cvss.v3.vector', 'to': 'CVSSV3'},
+ {'from': 'published', 'to': 'Published'},
+ {'from': 'added', 'to': 'Added'},
+ {'from': 'modified', 'to': 'Modified'},
+ {'from': 'cvss.v2.score', 'to': 'CVSSScore'},
+ {'from': 'cvss.v3.score', 'to': 'CVSSV3Score'},
+ {'from': 'categories', 'to': 'Categories'},
+ {'from': 'cves', 'to': 'CVES'}
+ ])
+
+ results_headers = [
+ "Port",
+ "Protocol",
+ "Since",
+ "Proof",
+ "Status"
+ ]
+
+ results_output = [] # type: ignore
+ if 'results' in v and len(v['results']) > 0:
+ results_output = translate_object(v['results'], [
+ {'from': 'port', 'to': 'Port'},
+ {'from': 'protocol', 'to': 'Protocol'},
+ {'from': 'since', 'to': 'Since'},
+ {'from': 'proof', 'to': 'Proof'},
+ {'from': 'status', 'to': 'Status'}
+ ])
+
+ # Remove HTML tags
+ for r in results_output:
+ r['Proof'] = re.sub('<.*?>', '', r['Proof'])
+
+ solutions_headers = [
+ 'Type',
+ 'Summary',
+ 'Steps',
+ 'Estimate',
+ 'AdditionalInformation'
+ ]
+
+ solutions_output = None
+ solutions = get_vulnerability_solutions(demisto.args()['id'], demisto.args()['vulnerabilityId'])
+ # Add to raw output
+ v['solutions'] = solutions
+ if solutions is not None and len(solutions) > 0:
+ solutions_output = translate_object(solutions['resources'], [
+ {'from': 'type', 'to': 'Type'},
+ {'from': 'summary.text', 'to': 'Summary'},
+ {'from': 'steps.text', 'to': 'Steps'},
+ {'from': 'estimate', 'to': 'Estimate'},
+ {'from': 'additionalInformation.text', 'to': 'AdditionalInformation'}
+ ])
+ for i, val in enumerate(solutions_output):
+ solutions_output[i]['Estimate'] = str(
+ iso8601_duration_as_minutes(solutions_output[i]['Estimate'])) + ' minutes'
+
+ vulnerabilities_md = tableToMarkdown('Vulnerability ' + demisto.args()['vulnerabilityId'], vuln_outputs,
+ vuln_headers, removeNull=True)
+ results_md = tableToMarkdown('Checks', results_output, results_headers, removeNull=True) if len(
+ results_output) > 0 else ''
+ solutions_md = tableToMarkdown('Solutions', solutions_output, solutions_headers,
+ removeNull=True) if solutions_output is not None else ''
+ md = vulnerabilities_md + results_md + solutions_md
+ cves = [] # type: ignore
+ if (vuln_outputs['CVES'] is not None and len(vuln_outputs['CVES']) > 0):
+ cves = map(lambda cve: {
+ 'ID': cve
+ }, vuln_outputs['CVES'])
+
+ vuln_outputs['Check'] = results_output
+ vuln_outputs['Solution'] = solutions_output
+ asset = {
+ 'AssetId': demisto.args()['id'],
+ 'Vulnerability': [vuln_outputs]
+ }
+
+ context = {
+ 'Nexpose.Asset(val.AssetId==obj.AssetId)': asset,
+ }
+
+ if len(cves) > 0:
+ context['CVE(val.ID==obj.ID)'] = cves # type: ignore
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': v,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': context
+ }
+
+ return entry
+
+
+def get_vulnerabilities(asset_id):
+ path = 'assets/' + str(asset_id) + '/vulnerabilities'
+ return get_list_response(path)
+
+
+def get_asset_vulnerability(asset_id, vulnerability_id):
+ path = 'assets/' + str(asset_id) + '/vulnerabilities/' + str(vulnerability_id)
+ return send_request(path)
+
+
+def get_vulnerability(vulnerability_id):
+ path = 'vulnerabilities/' + str(vulnerability_id)
+ return send_request(path)
+
+
+def get_vulnerability_solutions(asset_id, vulnerability_id):
+ path = 'assets/' + str(asset_id) + '/vulnerabilities/' + str(vulnerability_id) + '/solution'
+
+ return send_request(path)
+
+
+def search_by_filter(text_filters):
+ if (text_filters is None):
+ return []
+
+ filters = get_search_filters(text_filters)
+ assets = search_assets(filters, demisto.args()['match'], demisto.args().get('limit'), demisto.args().get('sort'))
+
+ return assets
+
+
+def search_assets_command():
+ queries = demisto.args().get('query')
+ ip_addresses = demisto.args().get('ipAddressIs')
+ host_names = demisto.args().get('hostNameIs')
+ risk_score = demisto.args().get('riskScoreHigherThan')
+ vulnerability_title = demisto.args().get('vulnerabilityTitleContains')
+    site_ids = demisto.args().get('siteIdIn')
+
+ assets = None
+ if queries is not None:
+ assets = search_by_filter(queries.split(';'))
+ elif risk_score is not None:
+ assets = search_by_filter(['risk-score is-greater-than ' + str(risk_score)])
+ elif vulnerability_title is not None:
+ assets = search_by_filter(['vulnerability-title contains ' + vulnerability_title])
+    elif site_ids is not None:
+        assets = search_by_filter(['site-id in ' + site_ids])
+ elif ip_addresses is not None:
+ ips = ip_addresses.split(',')
+ assets = []
+        for ip in ips:
+ assets = assets + search_by_filter(['ip-address is ' + str(ip)])
+ elif host_names is not None:
+ host_names = host_names.split(',')
+ assets = []
+        for host_name in host_names:
+ assets = assets + search_by_filter(['host-name is ' + str(host_name)])
+
+ if (assets is None or len(assets) == 0):
+ return 'No assets found'
+
+ for asset in assets:
+ last_scan = get_last_scan(asset)
+ asset['LastScanDate'] = last_scan['date']
+ asset['LastScanId'] = last_scan['id']
+ site = get_site(asset['id'])
+ asset['Site'] = site['name'] if site != '' else ''
+
+ headers = [
+ 'AssetId',
+ 'Address',
+ 'Name',
+ 'Site',
+ 'Exploits',
+ 'Malware',
+ 'OperatingSystem',
+ 'RiskScore',
+ 'Assessed',
+ 'LastScanDate',
+ 'LastScanId'
+ ]
+
+ outputs = translate_object(assets, [
+ {'from': 'id', 'to': 'AssetId'},
+ {'from': 'ip', 'to': 'Address'},
+ {'from': 'hostName', 'to': 'Name'},
+ {'from': 'Site', 'to': 'Site'},
+ {'from': 'vulnerabilities.exploits', 'to': 'Exploits'},
+ {'from': 'vulnerabilities.malwareKits', 'to': 'Malware'},
+ {'from': 'os', 'to': 'OperatingSystem'},
+ {'from': 'vulnerabilities.total', 'to': 'Vulnerabilities'},
+ {'from': 'riskScore', 'to': 'RiskScore'},
+ {'from': 'assessedForVulnerabilities', 'to': 'Assessed'},
+ {'from': 'LastScanDate', 'to': 'LastScanDate'},
+ {'from': 'LastScanId', 'to': 'LastScanId'}
+ ])
+
+ endpoint = map(lambda o: {
+ 'IP': o['Address'],
+ 'HostName': o['Name'],
+ 'OS': o['OperatingSystem']
+ }, outputs)
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': assets,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Nexpose assets', outputs, headers, removeNull=True),
+ 'EntryContext': {
+ 'Nexpose.Asset(val.AssetId==obj.AssetId)': outputs,
+ 'Endpoint(val.IP==obj.IP)': endpoint
+ }
+ }
+
+ return entry
+
+
+def get_search_filters(text_filters):
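+    # Parse 'field operator value[,value]' strings into Nexpose Search Criteria filter objects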
+ filters = []
+ for text in text_filters:
+ components = text.split(' ')
+ field = components[0]
+ operator = components[1]
+ value = components[2].split(',')
+ # Convert numbers to floats if values are numbers
+ for i, v in enumerate(value):
+ curr_val = None
+ try:
+ curr_val = float(v)
+ except Exception:
+ curr_val = v
+ value[i] = curr_val
+
+ flt = {
+ 'field': field,
+ 'operator': operator,
+ }
+ if len(value) > 1:
+ if operator in RANGE_OPERATORS:
+ flt['lower'] = value[0]
+ flt['upper'] = value[1]
+ else:
+ flt['values'] = value
+ else:
+ flt['value'] = value[0]
+ filters.append(flt)
+ return filters
+
+
+def search_assets(filters, match, limit=None, sort=None):
+ search_body = {
+ 'filters': filters,
+ 'match': match
+ }
+
+ path = 'assets/search'
+ params = {}
+ if sort is not None:
+ params['sort'] = sort.split(';')
+
+ return get_list_response(path, method='post', limit=limit, body=search_body, params=params)
+
+
+def get_assets_command():
+ limit = demisto.args().get('limit')
+ sort = demisto.args().get('sort')
+ assets = get_assets(limit=limit, sort=sort)
+
+ if (assets is None or len(assets) == 0):
+ return 'No assets found'
+
+ for asset in assets:
+ last_scan = get_last_scan(asset)
+ asset['LastScanDate'] = last_scan['date']
+ asset['LastScanId'] = last_scan['id']
+ site = get_site(asset['id'])
+ asset['Site'] = site['name'] if site != '' else ''
+
+ headers = [
+ 'AssetId',
+ 'Address',
+ 'Name',
+ 'Site',
+ 'Exploits',
+ 'Malware',
+ 'OperatingSystem',
+ 'Vulnerabilities',
+ 'RiskScore',
+ 'Assessed',
+ 'LastScanDate',
+ 'LastScanId'
+ ]
+
+ outputs = translate_object(assets, [
+ {'from': 'id', 'to': 'AssetId'},
+ {'from': 'ip', 'to': 'Address'},
+ {'from': 'hostName', 'to': 'Name'},
+ {'from': 'Site', 'to': 'Site'},
+ {'from': 'vulnerabilities.exploits', 'to': 'Exploits'},
+ {'from': 'vulnerabilities.malwareKits', 'to': 'Malware'},
+ {'from': 'os', 'to': 'OperatingSystem'},
+ {'from': 'vulnerabilities.total', 'to': 'Vulnerabilities'},
+ {'from': 'riskScore', 'to': 'RiskScore'},
+ {'from': 'assessedForVulnerabilities', 'to': 'Assessed'},
+ {'from': 'LastScanDate', 'to': 'LastScanDate'},
+ {'from': 'LastScanId', 'to': 'LastScanId'}
+ ])
+
+ endpoint = map(lambda o: {
+ 'IP': o['Address'],
+ 'HostName': o['Name'],
+ 'OS': o['OperatingSystem']
+ }, outputs)
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': assets,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Nexpose assets', outputs, headers, removeNull=True),
+ 'EntryContext': {
+ 'Nexpose.Asset(val.AssetId==obj.AssetId)': outputs,
+ 'Endpoint(val.IP==obj.IP)': endpoint
+ }
+ }
+
+ return entry
+
+
+def get_assets(limit=None, sort=None):
+ params = {}
+ if sort is not None:
+ params['sort'] = sort.split(';')
+ return get_list_response('assets', limit=limit, params=params)
+
+
+def get_scan_command():
+ ids = argToList(str(demisto.args()['id']))
+
+ scans = []
+    for scan_id in ids:
+        scan = get_scan(scan_id)
+        if scan is None:
+ return 'Scan not found'
+ scan_entry = get_scan_entry(scan)
+ scans.append(scan_entry)
+
+ return scans
+
+
+def map_scan(scan):
+ scan_output = translate_object(scan, [
+ {'from': 'id', 'to': 'Id'},
+ {'from': 'scanType', 'to': 'ScanType'},
+ {'from': 'scanName', 'to': 'ScanName'},
+ {'from': 'startedBy', 'to': 'StartedBy'},
+ {'from': 'assets', 'to': 'Assets'},
+ {'from': 'duration', 'to': 'TotalTime'},
+ {'from': 'endTime', 'to': 'Completed'},
+ {'from': 'status', 'to': 'Status'},
+ {'from': 'message', 'to': 'Message'}
+ ])
+
+ if isinstance(scan_output, list):
+        for scan_obj in scan_output:
+            scan_obj['TotalTime'] = str(iso8601_duration_as_minutes(scan_obj['TotalTime'])) + ' minutes'
+ else:
+ scan_output['TotalTime'] = str(iso8601_duration_as_minutes(scan_output['TotalTime'])) + ' minutes'
+
+ return scan_output
+
+
+def get_scan_human_readable(scan_output, title):
+ scan_headers = [
+ 'Id',
+ 'ScanType',
+ 'ScanName',
+ 'StartedBy',
+ 'Assets',
+ 'TotalTime',
+ 'Completed',
+ 'Status',
+ 'Message'
+ ]
+
+ return tableToMarkdown(title, scan_output, scan_headers, removeNull=True)
+
+
+def get_scan_entry(scan):
+ scan_output = map_scan(scan)
+
+ vuln_headers = [
+ 'Critical',
+ 'Severe',
+ 'Moderate',
+ 'Total'
+ ]
+
+ vuln_output = translate_object(scan['vulnerabilities'], [
+ {'from': 'critical', 'to': 'Critical'},
+ {'from': 'severe', 'to': 'Severe'},
+ {'from': 'moderate', 'to': 'Moderate'},
+ {'from': 'total', 'to': 'Total'}
+ ])
+
+ scan_hr = get_scan_human_readable(scan_output, 'Nexpose scan ' + str(scan['id']))
+ vuln_hr = tableToMarkdown('Vulnerabilities', vuln_output, vuln_headers, removeNull=True)
+ hr = scan_hr + vuln_hr
+
+ scan_output['Vulnerabilities'] = vuln_output
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': scan,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': {
+ 'Nexpose.Scan(val.Id==obj.Id)': scan_output,
+ }
+ }
+
+ return entry
+
+
+def get_scan(scan_id):
+ path = 'scans/' + str(scan_id)
+ return send_request(path)
+
+
+def create_site_command():
+ assets = argToList(demisto.args()['assets'])
+ site = create_site(demisto.args()['name'], assets,
+ demisto.args().get('description'), demisto.args().get('importance'),
+ demisto.args().get('scanTemplateId'))
+
+ if not site or 'id' not in site:
+ raise Exception('Site creation failed, could not get the new site')
+
+ output = {
+ 'Id': site['id']
+ }
+
+ md = tableToMarkdown('New site created', output)
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': site,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'Nexpose.Site(val.Id==obj.Id)': output,
+ }
+ }
+
+ return entry
+
+
+def create_site(name, assets, description=None, importance=None, template_id=None):
+ site_body = {
+ 'name': name
+ }
+
+ if assets:
+ site_body['scan'] = {
+ 'assets': {
+ 'includedTargets': {
+ 'addresses': assets
+ }
+ }
+ }
+ if description:
+ site_body['description'] = description
+ if importance:
+ site_body['importance'] = importance
+ if template_id:
+ site_body['scanTemplateId'] = template_id
+
+ path = 'sites'
+
+ return send_request(path, 'post', body=site_body)
+
+
+def delete_site_command():
+ site_id = demisto.args()['id']
+
+ res = delete_site(site_id)
+
+ hr = "Site " + str(site_id) + " deleted"
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr
+ }
+
+ return entry
+
+
+def delete_site(site_id):
+ path = 'sites/' + str(site_id)
+
+ return send_request(path, 'delete')
+
+
+def get_sites_command():
+ sites = get_sites(limit=demisto.args().get('limit'), sort=demisto.args().get('sort'))
+
+ if (sites is None or len(sites) == 0):
+ return 'No sites found'
+
+ headers = [
+ 'Id',
+ 'Name',
+ 'Assets',
+ 'Vulnerabilities',
+ 'Risk',
+ 'Type',
+ 'LastScan'
+ ]
+
+ outputs = translate_object(sites, [
+ {'from': 'id', 'to': 'Id'},
+ {'from': 'name', 'to': 'Name'},
+ {'from': 'assets', 'to': 'Assets'},
+ {'from': 'vulnerabilities.total', 'to': 'Vulnerabilities'},
+ {'from': 'riskScore', 'to': 'Risk'},
+ {'from': 'type', 'to': 'Type'},
+ {'from': 'lastScanTime', 'to': 'LastScan'}
+ ])
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': sites,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Nexpose sites', outputs, headers, removeNull=True),
+ 'EntryContext': {
+ 'Nexpose.Site(val.Id==obj.Id)': outputs,
+ }
+ }
+
+ return entry
+
+
+def get_sites(limit=None, sort=None):
+ path = 'sites'
+ params = {}
+ if sort is not None:
+ params['sort'] = sort.split(';')
+ return get_list_response(path, limit=limit, params=params)
+
+
+def get_report_templates_command():
+ templates = get_report_templates()
+
+ if (templates is None or len(templates) == 0 or 'resources' not in templates):
+ return 'No templates found'
+
+ headers = [
+ 'Id',
+ 'Name',
+ 'Description',
+ 'Type'
+ ]
+
+ outputs = translate_object(templates['resources'], [
+ {'from': 'id', 'to': 'Id'},
+ {'from': 'name', 'to': 'Name'},
+ {'from': 'description', 'to': 'Description'},
+ {'from': 'type', 'to': 'Type'},
+ ])
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': templates,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Nexpose templates', outputs, headers, removeNull=True),
+ 'EntryContext': {
+ 'Nexpose.Template(val.Id==obj.Id)': outputs,
+ }
+ }
+
+ return entry
+
+
+def get_report_templates():
+ path = 'report_templates'
+
+ return send_request(path)
+
+
+def create_assets_report_command():
+ assets = str(demisto.args()['assets']).split(',')
+ template = demisto.args().get('template')
+ name = demisto.args().get('name', 'report ' + str(datetime.now()))
+ report_format = demisto.args().get('format', 'pdf')
+
+ scope = {
+ 'assets': assets
+ }
+
+ report_id = create_report(scope, name, template, report_format)
+
+ if report_id is None:
+ return 'Could not retrieve report'
+
+ return download_report(report_id, name, report_format)
+
+
+def create_sites_report_command():
+ sites = str(demisto.args()['sites']).split(',')
+ template = demisto.args().get('template')
+ name = demisto.args().get('name', 'report ' + str(datetime.now()))
+ report_format = demisto.args().get('format', 'pdf')
+
+ scope = {
+ 'sites': sites
+ }
+
+ report_id = create_report(scope, name, template, report_format)
+
+ if report_id is None:
+ return 'Could not retrieve report'
+
+ return download_report(report_id, name, report_format)
+
+
+def create_scan_report_command():
+ scan = demisto.args()['scan']
+ template = demisto.args().get('template')
+ name = demisto.args().get('name', 'report ' + str(datetime.now()))
+ report_format = demisto.args().get('format', 'pdf')
+ scope = {
+ 'scan': scan
+ }
+
+ report_id = create_report(scope, name, template, report_format)
+
+ if report_id is None:
+ return 'Could not retrieve report'
+
+ return download_report(report_id, name, report_format)
+
+
+def create_report(scope, name, template, report_format):
+    if template is None:
+        # Fall back to the first available report template
+        templates = get_report_templates()
+        if not templates or 'resources' not in templates:
+            return None
+        template = templates['resources'][0]['id']
+    # The API expects integer identifiers, so coerce all scope values
+    for k, v in scope.items():
+        if not isinstance(v, list):
+            scope[k] = int(v)
+        else:
+            scope[k] = [int(item) for item in v]
+ path = 'reports'
+ body = {
+ 'scope': scope,
+ 'template': template,
+ 'name': name,
+ 'format': report_format
+ }
+
+ result = send_request(path, 'post', body=body)
+    return result.get('id')
+
+
+def download_report(report_id, name, report_format):
+ # Generate the report
+    path = 'reports/' + str(report_id) + '/generate'
+    instance = send_request(path, 'post')
+    if instance is None:
+ return 'Failed to generate report'
+
+ headers = {
+ 'Accept': 'application/json',
+ 'Accept-Encoding': 'gzip, deflate, br'
+ }
+
+ # Wait for the report to be completed
+ time.sleep(10)
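+    # A fixed 10-second wait may be too short for large reports. A minimal polling
+    # sketch (assumes the reports/{id}/history/{instance} endpoint exposes a
+    # 'status' field; verify against your Nexpose version before enabling):
+    #
+    #     for _ in range(30):
+    #         state = send_request('reports/' + str(report_id) + '/history/' + str(instance['id']))
+    #         if state and state.get('status') == 'complete':
+    #             break
+    #         time.sleep(10)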
+
+ # Download
+ path = 'reports/' + str(report_id) + '/history/' + str(instance['id']) + '/output'
+ report = send_request(path, headers=headers, is_file=True)
+
+ return fileResult(name + '.' + report_format, report, entryTypes['entryInfoFile'])
+
+
+def start_assets_scan_command():
+ ips = demisto.args().get('IPs')
+ host_names = demisto.args().get('hostNames')
+ name = demisto.args().get('name', 'scan ' + str(datetime.now()))
+
+ text_filters = None
+ if ips:
+ ips = ips.split(',')
+ text_filters = ['ip-address is ' + ips[0]]
+ elif host_names:
+ host_names = host_names.split(',')
+ text_filters = ['host-name is ' + host_names[0]]
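+    # Note: only the first IP or host name is used below to locate the owning
+    # site; all provided addresses are still passed to the scan itself.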
+
+ if text_filters is None:
+ return 'No IPs or hosts were provided'
+
+ filters = get_search_filters(text_filters)
+ asset = search_assets(filters, match='all')
+
+ if asset is None or len(asset) == 0:
+ return 'Could not find assets'
+
+ site = get_site(asset[0]['id'])
+ if site is None or 'id' not in site:
+ return 'Could not find site'
+
+ hosts = [] # type: ignore
+ if ips:
+ hosts += ips
+ if host_names:
+ hosts += host_names
+
+ scan_response = start_scan(site['id'], hosts, name)
+
+    if scan_response is None or 'id' not in scan_response:
+ return 'Could not start scan'
+
+ scan = get_scan(scan_response['id'])
+
+ return get_scan_entry(scan)
+
+
+def start_site_scan_command():
+ site = demisto.args()['site']
+ name = demisto.args().get('name', 'scan ' + str(datetime.now()))
+ hosts = demisto.args().get('hosts', '')
+
+ if not hosts:
+ assets = get_site_assets(site)
+ hosts = [asset['ip'] for asset in assets]
+ else:
+ hosts = argToList(hosts)
+
+ scan_response = start_scan(site, hosts, name)
+
+    if scan_response is None or 'id' not in scan_response:
+ return 'Could not start scan'
+
+ scan = get_scan(scan_response['id'])
+
+ return get_scan_entry(scan)
+
+
+def start_scan(site, hosts, name):
+ path = 'sites/' + str(site) + '/scans'
+ body = {
+ 'name': name,
+ 'hosts': hosts
+ }
+
+ return send_request(path, 'post', body=body)
+
+
+def get_site_assets(site_id):
+    path = 'sites/' + str(site_id) + '/assets'
+
+ return get_list_response(path)
+
+
+def get_scans_command():
+ scans = get_scans(demisto.args().get('sort'), demisto.args().get('limit'), demisto.args().get('active'))
+
+ if not scans or len(scans) == 0:
+ return 'No scans found'
+
+ scan_output = map_scan(scans)
+ scan_hr = get_scan_human_readable(scan_output, 'Nexpose scans')
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': scans,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': scan_hr,
+ 'EntryContext': {
+ 'Nexpose.Scan(val.Id==obj.Id)': scan_output,
+ }
+ }
+
+ return entry
+
+
+def get_scans(sort, limit, active):
+ path = 'scans'
+ params = {}
+ if sort is not None:
+ params['sort'] = sort.split(';')
+ if active is not None:
+ params['active'] = active
+
+ return get_list_response(path, method='get', limit=limit, params=params)
+
+
+def stop_scan_command():
+ scan_id = demisto.args()['id']
+ res = set_scan_status(scan_id, 'stop')
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['text'],
+        'HumanReadable': 'Successfully stopped the scan',
+ }
+
+ return entry
+
+
+def pause_scan_command():
+ scan_id = demisto.args()['id']
+ res = set_scan_status(scan_id, 'pause')
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['text'],
+        'HumanReadable': 'Successfully paused the scan',
+ }
+
+ return entry
+
+
+def resume_scan_command():
+ scan_id = demisto.args()['id']
+ res = set_scan_status(scan_id, 'resume')
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['text'],
+        'HumanReadable': 'Successfully resumed the scan',
+ }
+
+ return entry
+
+
+def set_scan_status(scan_id, scan_status):
+ path = 'scans/' + str(scan_id) + '/' + scan_status
+
+ return send_request(path, 'post')
+
+
+def main():
+ try:
+ handle_proxy()
+        command = demisto.command()
+        if command == 'test-module':
+            get_assets(limit=1)
+            demisto.results('ok')
+        elif command == 'nexpose-get-assets':
+            demisto.results(get_assets_command())
+        elif command == 'nexpose-get-asset':
+            demisto.results(get_asset_command())
+        elif command == 'nexpose-get-asset-vulnerability':
+            demisto.results(get_asset_vulnerability_command())
+        elif command == 'nexpose-search-assets':
+            demisto.results(search_assets_command())
+        elif command == 'nexpose-get-scan':
+            demisto.results(get_scan_command())
+        elif command == 'nexpose-get-sites':
+            demisto.results(get_sites_command())
+        elif command == 'nexpose-get-report-templates':
+            demisto.results(get_report_templates_command())
+        elif command == 'nexpose-create-assets-report':
+            demisto.results(create_assets_report_command())
+        elif command == 'nexpose-create-sites-report':
+            demisto.results(create_sites_report_command())
+        elif command == 'nexpose-create-scan-report':
+            demisto.results(create_scan_report_command())
+        elif command == 'nexpose-start-site-scan':
+            demisto.results(start_site_scan_command())
+        elif command == 'nexpose-start-assets-scan':
+            demisto.results(start_assets_scan_command())
+        elif command == 'nexpose-create-site':
+            demisto.results(create_site_command())
+        elif command == 'nexpose-delete-site':
+            demisto.results(delete_site_command())
+        elif command == 'nexpose-stop-scan':
+            demisto.results(stop_scan_command())
+        elif command == 'nexpose-pause-scan':
+            demisto.results(pause_scan_command())
+        elif command == 'nexpose-resume-scan':
+            demisto.results(resume_scan_command())
+        elif command == 'nexpose-get-scans':
+            demisto.results(get_scans_command())
+ except Exception as e:
+ LOG(e)
+ LOG.print_log(False)
+        return_error(str(e))
+
+
+# Python 2 uses __builtin__, Python 3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/Rapid7_Nexpose/Rapid7_Nexpose.yml b/Integrations/Rapid7_Nexpose/Rapid7_Nexpose.yml
new file mode 100644
index 000000000000..1feda28534ee
--- /dev/null
+++ b/Integrations/Rapid7_Nexpose/Rapid7_Nexpose.yml
@@ -0,0 +1,1012 @@
+category: Vulnerability Management
+commonfields:
+ id: Rapid7 Nexpose
+ version: -1
+configuration:
+- display: Server URL (e.g. https://192.168.0.1:8080)
+ name: server
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- defaultvalue: 'false'
+ display: Trust any certificate (unsecure)
+ name: unsecure
+ required: false
+ type: 8
+- defaultvalue: 'false'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: 2FA token
+ name: token
+ required: false
+ type: 0
+description: Rapid7's on-premise vulnerability management solution, Nexpose, helps
+  you reduce your threat exposure by enabling you to assess and respond to changes
+  in your environment in real time, and to prioritize risk across vulnerabilities,
+  configurations, and controls.
+display: Rapid7 Nexpose
+name: Rapid7 Nexpose
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: integer The identifier of the asset.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified asset.
+ execution: false
+ name: nexpose-get-asset
+ outputs:
+ - contextPath: Nexpose.Asset.Addresses
+ description: All addresses discovered on the asset.
+ type: unknown
+ - contextPath: Nexpose.Asset.AssetId
+ description: Id of the asset.
+ type: number
+ - contextPath: Nexpose.Asset.Hardware
+ description: The primary Media Access Control (MAC) address of the asset. The
+ format is six groups of two hexadecimal digits separated by colons.
+ type: string
+ - contextPath: Nexpose.Asset.Aliases
+ description: All host names or aliases discovered on the asset.
+ type: unknown
+ - contextPath: Nexpose.Asset.HostType
+      description: The type of asset. Valid values are unknown, guest, hypervisor,
+        physical, and mobile.
+ type: string
+ - contextPath: Nexpose.Asset.Site
+ description: Asset site name.
+ type: string
+ - contextPath: Nexpose.Asset.OperatingSystem
+ description: Operating system of the asset.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerabilities
+ description: The total number of vulnerabilities on the asset.
+ type: number
+ - contextPath: Nexpose.Asset.CPE
+ description: The Common Platform Enumeration (CPE) of the operating system.
+ type: string
+ - contextPath: Nexpose.Asset.LastScanDate
+ description: Last scan date of the asset.
+ type: date
+ - contextPath: Nexpose.Asset.LastScanId
+ description: Id of the asset's last scan.
+ type: number
+ - contextPath: Nexpose.Asset.RiskScore
+ description: The risk score (with criticality adjustments) of the asset.
+ type: number
+ - contextPath: Nexpose.Asset.Software.Software
+ description: The description of the software.
+ type: string
+ - contextPath: Nexpose.Asset.Software.Version
+ description: The version of the software.
+ type: string
+ - contextPath: Nexpose.Asset.Services.Name
+ description: The name of the service.
+ type: string
+ - contextPath: Nexpose.Asset.Services.Port
+ description: The port of the service.
+ type: number
+ - contextPath: Nexpose.Asset.Services.Product
+ description: The product running the service.
+ type: string
+ - contextPath: Nexpose.Asset.Services.protocol
+      description: The protocol of the service. Valid values are ip, icmp, igmp,
+        ggp, tcp, pup, udp, idp, esp, nd, and raw.
+ type: string
+ - contextPath: Nexpose.Asset.Users.FullName
+ description: The full name of the user account.
+ type: string
+ - contextPath: Nexpose.Asset.Users.Name
+ description: The name of the user account.
+ type: string
+ - contextPath: Nexpose.Asset.Users.UserId
+ description: The identifier of the user account.
+ type: number
+ - contextPath: Nexpose.Asset.Vulnerability.Id
+ description: The identifier of the vulnerability.
+ type: number
+ - contextPath: Nexpose.Asset.Vulnerability.Instances
+ description: The number of vulnerable occurrences of the vulnerability. This
+ does not include invulnerable instances.
+ type: number
+ - contextPath: Nexpose.Asset.Vulnerability.Title
+ description: The title (summary) of the vulnerability.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerability.Malware
+ description: The malware kits that are known to be used to exploit the vulnerability.
+ type: number
+ - contextPath: Nexpose.Asset.Vulnerability.Exploit
+ description: The exploits that can be used to exploit a vulnerability.
+ type: number
+ - contextPath: Nexpose.Asset.Vulnerability.CVSS
+ description: The CVSS exploit score.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerability.Risk
+      description: The risk score of the vulnerability, rounded to a maximum of two
+        digits of precision. If using the default Rapid7 Real Risk™ model, this value
+        ranges from 0-1000.
+ type: number
+ - contextPath: Nexpose.Asset.Vulnerability.PublishedOn
+ description: The date the vulnerability was first published or announced. The
+ format is an ISO 8601 date, YYYY-MM-DD.
+ type: date
+ - contextPath: Nexpose.Asset.Vulnerability.ModifiedOn
+ description: The last date the vulnerability was modified. The format is an
+ ISO 8601 date, YYYY-MM-DD.
+ type: date
+ - contextPath: Nexpose.Asset.Vulnerability.Severity
+ description: 'The severity of the vulnerability, one of: "Moderate", "Severe",
+ "Critical".'
+ type: string
+ - contextPath: Endpoint.IP
+ description: Endpoint IP address.
+ type: string
+ - contextPath: Endpoint.HostName
+ description: Endpoint host name.
+ type: string
+ - contextPath: Endpoint.OS
+ description: Endpoint operating system.
+ type: string
+ - contextPath: CVE.ID
+      description: Common Vulnerabilities and Exposures (CVE) IDs.
+ type: string
+ - arguments:
+ - default: false
+      description: 'The criteria to sort the records by, in the format: property[,ASC|DESC].
+        The default sort order is ascending. Multiple sort criteria can be specified,
+        separated by a '';''. For example: ''riskScore,DESC;hostName,ASC'''
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - default: false
+      description: integer The number of records to retrieve.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns all assets for which you have access.
+ execution: false
+ name: nexpose-get-assets
+ outputs:
+ - contextPath: Nexpose.Asset.AssetId
+ description: The identifier of the asset.
+ type: number
+ - contextPath: Nexpose.Asset.Address
+ description: The primary IPv4 or IPv6 address of the asset.
+ type: string
+ - contextPath: Nexpose.Asset.Name
+ description: The primary host name (local or FQDN) of the asset.
+ type: string
+ - contextPath: Nexpose.Asset.Site
+ description: Asset site name.
+ type: string
+ - contextPath: Nexpose.Asset.Exploits
+ description: The number of distinct exploits that can exploit any of the vulnerabilities
+ on the asset.
+ type: number
+ - contextPath: Nexpose.Asset.Malware
+ description: The number of distinct malware kits that vulnerabilities on the
+ asset are susceptible to.
+ type: number
+ - contextPath: Nexpose.Asset.OperatingSystem
+ description: Operating system of the asset.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerabilities
+ description: The total number of vulnerabilities.
+ type: number
+ - contextPath: Nexpose.Asset.RiskScore
+ description: The risk score (with criticality adjustments) of the asset.
+ type: number
+ - contextPath: Nexpose.Asset.Assessed
+ description: Whether the asset has been assessed for vulnerabilities at least
+ once.
+ type: boolean
+ - contextPath: Nexpose.Asset.LastScanDate
+ description: Last scan date of the asset.
+ type: date
+ - contextPath: Nexpose.Asset.LastScanId
+ description: Id of the asset's last scan.
+ type: number
+ - contextPath: Endpoint.IP
+ description: Endpoint IP address.
+ type: string
+ - contextPath: Endpoint.HostName
+ description: Endpoint host name.
+ type: string
+ - contextPath: Endpoint.OS
+ description: Endpoint operating system.
+ type: string
+ - arguments:
+ - default: false
+      description: 'Filters to match assets, according to the Search Criteria API
+        standard. Multiple filters can be provided using a '';'' separator. For example:
+        ''ip-address in range 1.2.3.4,1.2.3.8;host-name is myhost''. For more information
+        regarding Search Criteria, refer to https://help.rapid7.com/insightvm/en-us/api/index.html#section/Overview/Responses'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+      description: integer The number of records to retrieve.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+      description: 'The criteria to sort the records by, in the format: property[,ASC|DESC].
+        The default sort order is ascending. Multiple sort criteria can be specified,
+        separated by a '';''. For example: ''riskScore,DESC;hostName,ASC'''
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ - default: false
+ description: Search by a specific IP address
+ isArray: false
+ name: ipAddressIs
+ required: false
+ secret: false
+ - default: false
+ description: Search by a specific host name
+ isArray: false
+ name: hostNameIs
+ required: false
+ secret: false
+ - default: false
+ description: Get all assets whose risk score is higher
+ isArray: false
+ name: riskScoreHigherThan
+ required: false
+ secret: false
+ - default: false
+ description: Search by vulnerability title
+ isArray: false
+ name: vulnerabilityTitleContains
+ required: false
+ secret: false
+ - default: false
+      description: Site IDs to search by (comma-separated integers).
+ isArray: false
+ name: siteIdIn
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: all
+ description: Operator to determine how to match filters. all requires
+ that all filters match for an asset to be included. any requires only one
+ filter to match for an asset to be included.
+ isArray: false
+ name: match
+ predefined:
+ - all
+ - any
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns all assets for which you have access that match the given
+ search criteria.
+ execution: false
+ name: nexpose-search-assets
+ outputs:
+ - contextPath: Nexpose.Asset.AssetId
+ description: The identifier of the asset.
+ type: number
+ - contextPath: Nexpose.Asset.Address
+ description: The primary IPv4 or IPv6 address of the asset.
+ type: string
+ - contextPath: Nexpose.Asset.Name
+ description: The primary host name (local or FQDN) of the asset.
+ type: string
+ - contextPath: Nexpose.Asset.Site
+ description: Asset site name.
+ type: string
+ - contextPath: Nexpose.Asset.Exploits
+ description: The number of distinct exploits that can exploit any of the vulnerabilities
+ on the asset.
+ type: number
+ - contextPath: Nexpose.Asset.Malware
+ description: The number of distinct malware kits that vulnerabilities on the
+ asset are susceptible to.
+ type: number
+ - contextPath: Nexpose.Asset.OperatingSystem
+ description: Operating system of the asset.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerabilities
+ description: The total number of vulnerabilities.
+ type: number
+ - contextPath: Nexpose.Asset.RiskScore
+ description: The risk score (with criticality adjustments) of the asset.
+ type: number
+ - contextPath: Nexpose.Asset.Assessed
+ description: Whether the asset has been assessed for vulnerabilities at least
+ once.
+ type: boolean
+ - contextPath: Nexpose.Asset.LastScanDate
+ description: Last scan date of the asset.
+ type: date
+ - contextPath: Nexpose.Asset.LastScanId
+ description: Id of the asset's last scan.
+ type: number
+ - contextPath: Endpoint.IP
+ description: Endpoint IP address.
+ type: string
+ - contextPath: Endpoint.HostName
+ description: Endpoint host name.
+ type: string
+ - contextPath: Endpoint.OS
+ description: Endpoint operating system.
+ type: string
+ - arguments:
+ - default: false
+      description: Scan identifiers (comma-separated integers).
+ isArray: true
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the specified scan.
+ execution: false
+ name: nexpose-get-scan
+ outputs:
+ - contextPath: Nexpose.Scan.Id
+ description: The identifier of the scan.
+ type: number
+ - contextPath: Nexpose.Scan.ScanType
+ description: The scan type (automated, manual, scheduled).
+ type: string
+ - contextPath: Nexpose.Scan.StartedBy
+ description: The name of the user that started the scan.
+ type: string
+ - contextPath: Nexpose.Scan.Assets
+ description: The number of assets found in the scan
+ type: number
+ - contextPath: Nexpose.Scan.TotalTime
+ description: The duration of the scan in minutes.
+ type: string
+ - contextPath: Nexpose.Scan.Status
+ description: The scan status. Valid values are aborted, unknown, running, finished,
+ stopped, error, paused, dispatched, integrating
+ type: string
+ - contextPath: Nexpose.Scan.Completed
+ description: The end time of the scan in ISO8601 format.
+ type: date
+ - contextPath: Nexpose.Scan.Vulnerabilities.Critical
+ description: The number of critical vulnerabilities.
+ type: number
+ - contextPath: Nexpose.Scan.Vulnerabilities.Moderate
+ description: The number of moderate vulnerabilities.
+ type: number
+ - contextPath: Nexpose.Scan.Vulnerabilities.Severe
+ description: The number of severe vulnerabilities.
+ type: number
+ - contextPath: Nexpose.Scan.Vulnerabilities.Total
+ description: The total number of vulnerabilities.
+ type: number
+ - arguments:
+ - default: false
+ description: integer The identifier of the asset.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ description: The identifier of the vulnerability.
+ isArray: false
+ name: vulnerabilityId
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the details and possible remediations for an asset's given
+ vulnerability.
+ execution: false
+ name: nexpose-get-asset-vulnerability
+ outputs:
+ - contextPath: Nexpose.Asset.AssetId
+ description: Identifier of the asset.
+ type: number
+ - contextPath: Nexpose.Asset.Vulnerability.Id
+ description: The identifier of the vulnerability.
+ type: number
+ - contextPath: Nexpose.Asset.Vulnerability.Title
+ description: The title (summary) of the vulnerability.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerability.Severity
+ description: 'The severity of the vulnerability, one of: "Moderate", "Severe",
+ "Critical".'
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerability.RiskScore
+      description: The risk score of the vulnerability, rounded to a maximum of two
+        digits of precision. If using the default Rapid7 Real Risk™ model, this value
+        ranges from 0-1000.
+ type: number
+ - contextPath: Nexpose.Asset.Vulnerability.CVSS
+ description: The CVSS vector(s) for the vulnerability.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerability.CVSSV3
+ description: The CVSS v3 vector.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerability.Published
+ description: The date the vulnerability was first published or announced. The
+ format is an ISO 8601 date, YYYY-MM-DD.
+ type: date
+ - contextPath: Nexpose.Asset.Vulnerability.Added
+ description: The date the vulnerability coverage was added. The format is an
+ ISO 8601 date, YYYY-MM-DD.
+ type: date
+ - contextPath: Nexpose.Asset.Vulnerability.Modified
+ description: The last date the vulnerability was modified. The format is an
+ ISO 8601 date, YYYY-MM-DD.
+ type: date
+ - contextPath: Nexpose.Asset.Vulnerability.CVSSScore
+ description: The CVSS score, which ranges from 0-10.
+ type: number
+ - contextPath: Nexpose.Asset.Vulnerability.CVSSV3Score
+ description: The CVSS3 score, which ranges from 0-10.
+ type: number
+ - contextPath: Nexpose.Asset.Vulnerability.Categories
+ description: All vulnerability categories assigned to this vulnerability.
+ type: unknown
+ - contextPath: Nexpose.Asset.Vulnerability.CVES
+ description: All CVEs assigned to this vulnerability.
+ type: unknown
+ - contextPath: Nexpose.Asset.Vulnerability.Check.Port
+ description: The port of the service the result was discovered on.
+ type: number
+ - contextPath: Nexpose.Asset.Vulnerability.Check.Protocol
+      description: The protocol of the service the result was discovered on. Valid
+        values are ip, icmp, igmp, ggp, tcp, pup, udp, idp, esp, nd, and raw.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerability.Check.Since
+ description: The date and time the result was first recorded, in the ISO8601
+ format. If the result changes status this value is the date and time of the
+ status change.
+ type: date
+ - contextPath: Nexpose.Asset.Vulnerability.Check.Proof
+ description: The proof explaining why the result was found vulnerable.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerability.Check.Status
+      description: The status of the vulnerability check result. Valid values are
+        unknown, not-vulnerable, vulnerable, vulnerable-version, vulnerable-potential,
+        vulnerable-with-exception-applied, vulnerable-version-with-exception-applied,
+        and vulnerable-potential-with-exception-applied.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerability.Solution.Type
+ description: 'The type of the solution. One of: "Configuration", "Rollup patch",
+ "Patch".'
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerability.Solution.Summary
+ description: The summary of the solution.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerability.Solution.Steps
+ description: The steps required to remediate the vulnerability.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerability.Solution.Estimate
+ description: The estimated duration to apply the solution, in minutes.
+ type: string
+ - contextPath: Nexpose.Asset.Vulnerability.Solution.AdditionalInformation
+ description: Additional information or resources that can assist in applying
+ the remediation
+ type: string
+ - contextPath: CVE.ID
+      description: Common Vulnerabilities and Exposures (CVE) IDs.
+ type: string
+ - arguments:
+ - default: false
+ description: The site name. Name must be unique.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The site's description.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+      description: Asset addresses to include in site scans (comma-separated).
+ isArray: true
+ name: assets
+ required: true
+ secret: false
+ - default: false
+      description: The identifier of a scan template. Use nexpose-get-report-templates
+        to get all templates. The default scan template is used when not specified.
+ isArray: false
+ name: scanTemplateId
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The site importance. Defaults to "normal" if not specified.
+ isArray: false
+ name: importance
+ predefined:
+ - very_low
+ - low
+ - normal
+ - high
+ - very_high
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new site with the specified configuration.
+ execution: false
+ name: nexpose-create-site
+ outputs:
+ - contextPath: Nexpose.Site.Id
+      description: The ID of the created site.
+ type: number
+ - arguments:
+ - default: false
+ description: Id of the site to delete
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes a site.
+ execution: false
+ name: nexpose-delete-site
+ - arguments:
+ - default: false
+      description: integer The number of records to retrieve.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+      description: 'The criteria to sort the records by, in the format: property[,ASC|DESC].
+        The default sort order is ascending. Multiple sort criteria can be specified,
+        separated by a '';''. For example: ''riskScore,DESC;hostName,ASC'''
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves accessible sites.
+ execution: false
+ name: nexpose-get-sites
+ outputs:
+ - contextPath: Nexpose.Site.Id
+ description: The identifier of the site.
+ type: number
+ - contextPath: Nexpose.Site.Name
+ description: The site name.
+ type: string
+ - contextPath: Nexpose.Site.Assets
+ description: The number of assets that belong to the site.
+ type: number
+ - contextPath: Nexpose.Site.Type
+ description: The type of the site. Valid values are agent, dynamic, static
+ type: string
+ - contextPath: Nexpose.Site.Vulnerabilities
+ description: The total number of vulnerabilities.
+ type: number
+ - contextPath: Nexpose.Site.Risk
+ description: The risk score (with criticality adjustments) of the site.
+ type: number
+ - contextPath: Nexpose.Site.LastScan
+ description: The date and time of the site's last scan.
+ type: date
+ - deprecated: false
+ description: Returns all available report templates.
+ execution: false
+ name: nexpose-get-report-templates
+ outputs:
+ - contextPath: Nexpose.Template.Id
+ description: The identifier of the report template.
+ type: number
+ - contextPath: Nexpose.Template.Name
+ description: The name of the report template.
+ type: string
+ - contextPath: Nexpose.Template.Description
+ description: The description of the report template.
+ type: string
+ - contextPath: Nexpose.Template.Type
+ description: The type of the report template. document is a templatized, typically
+ printable, report that has various sections of content. export is data-oriented
+ output, typically CSV. file is a printable report template using a report
+ template file.
+ type: string
+ - arguments:
+ - default: false
+      description: Asset IDs to create the report on (comma-separated integers).
+ isArray: false
+ name: assets
+ required: true
+ secret: false
+ - default: false
+ description: Report template id to create the report with. If none
+ is provided, the first template available will be used.
+ isArray: false
+ name: template
+ required: false
+ secret: false
+ - default: false
+ description: The report name
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: The report format. Default is PDF.
+ isArray: false
+ name: format
+ predefined:
+ - pdf
+ - rtf
+ - xml
+ - html
+ - text
+ required: false
+ secret: false
+ deprecated: false
+ description: Generates a new report on given assets according to a template and
+ arguments.
+ execution: false
+ name: nexpose-create-assets-report
+ outputs:
+ - contextPath: InfoFile.EntryId
+ description: Entry Id of the report file
+ type: string
+ - contextPath: InfoFile.Name
+ description: Name of the report file
+ type: string
+ - contextPath: InfoFile.Extension
+ description: File extension of the report file
+ type: string
+ - contextPath: InfoFile.Info
+ description: Info about the report file
+ type: string
+ - contextPath: InfoFile.Size
+ description: Size of the report file
+ type: number
+ - contextPath: InfoFile.Type
+ description: Type of the report file
+ type: string
+ - arguments:
+ - default: false
+      description: Site IDs to create the report on (comma-separated integers).
+ isArray: false
+ name: sites
+ required: true
+ secret: false
+ - default: false
+ description: Report template id to create the report with. If none
+ is provided, the first template available will be used.
+ isArray: false
+ name: template
+ required: false
+ secret: false
+ - default: false
+ description: The report name
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: The report format. Default is PDF.
+ isArray: false
+ name: format
+ predefined:
+ - pdf
+ - rtf
+ - xml
+ - html
+ - text
+ required: false
+ secret: false
+ deprecated: false
+ description: Generates a new report on given sites according to a template and
+ arguments.
+ execution: false
+ name: nexpose-create-sites-report
+ outputs:
+ - contextPath: InfoFile.EntryId
+ description: Entry Id of the report file
+ type: string
+ - contextPath: InfoFile.Name
+ description: Name of the report file
+ type: string
+ - contextPath: InfoFile.Extension
+ description: File extension of the report file
+ type: string
+ - contextPath: InfoFile.Info
+ description: Info about the report file
+ type: string
+ - contextPath: InfoFile.Size
+ description: Size of the report file
+ type: number
+ - contextPath: InfoFile.Type
+ description: Type of the report file
+ type: string
+ - arguments:
+ - default: false
+ description: integer The identifier of the scan.
+ isArray: false
+ name: scan
+ required: true
+ secret: false
+ - default: false
+ description: Report template id to create the report with. If none
+ is provided, the first template available will be used.
+ isArray: false
+ name: template
+ required: false
+ secret: false
+ - default: false
+ description: The report name
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: The report format. Default is PDF.
+ isArray: false
+ name: format
+ predefined:
+ - pdf
+ - rtf
+ - xml
+ - html
+ - text
+ required: false
+ secret: false
+ deprecated: false
+ description: Generates a new report for a specified scan.
+ execution: false
+ name: nexpose-create-scan-report
+ outputs:
+ - contextPath: InfoFile.EntryId
+ description: Entry Id of the report file
+ type: string
+ - contextPath: InfoFile.Name
+ description: Name of the report file
+ type: string
+ - contextPath: InfoFile.Extension
+ description: File extension of the report file
+ type: string
+ - contextPath: InfoFile.Info
+ description: Info about the report file
+ type: string
+ - contextPath: InfoFile.Size
+ description: Size of the report file
+ type: number
+ - contextPath: InfoFile.Type
+ description: Type of the report file
+ type: string
+ - arguments:
+ - default: false
+ description: integer The identifier of the site.
+ isArray: false
+ name: site
+ required: true
+ secret: false
+ - default: false
+      description: The hosts to include in the scan, as a comma-separated list of
+        IP addresses and host names.
+ isArray: true
+ name: hosts
+ required: false
+ secret: false
+ - default: false
+ description: The user-driven scan name for the scan.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ deprecated: false
+ description: Starts a scan for the specified site.
+ execution: false
+ name: nexpose-start-site-scan
+ outputs:
+ - contextPath: Nexpose.Scan.Id
+ description: The identifier of the scan.
+ type: number
+ - contextPath: Nexpose.Scan.ScanType
+ description: The scan type (automated, manual, scheduled).
+ type: string
+ - contextPath: Nexpose.Scan.StartedBy
+ description: The name of the user that started the scan.
+ type: date
+ - contextPath: Nexpose.Scan.Assets
+ description: The number of assets found in the scan
+ type: number
+ - contextPath: Nexpose.Scan.TotalTime
+ description: The duration of the scan in minutes.
+ type: string
+ - contextPath: Nexpose.Scan.Completed
+ description: The end time of the scan in ISO8601 format.
+ type: date
+ - contextPath: Nexpose.Scan.Status
+ description: The scan status. Valid values are aborted, unknown, running, finished,
+ stopped, error, paused, dispatched, integrating
+ type: string
+ - contextPath: Nexpose.Scan.Vulnerabilities.Critical
+ description: The number of critical vulnerabilities.
+ type: number
+ - contextPath: Nexpose.Scan.Vulnerabilities.Moderate
+ description: The number of moderate vulnerabilities.
+ type: number
+ - contextPath: Nexpose.Scan.Vulnerabilities.Severe
+ description: The number of severe vulnerabilities.
+ type: number
+ - contextPath: Nexpose.Scan.Vulnerabilities.Total
+ description: The total number of vulnerabilities.
+ type: number
+ - arguments:
+ - default: false
+      description: IP addresses of assets (comma-separated).
+ isArray: false
+ name: IPs
+ required: false
+ secret: false
+ - default: false
+      description: Host names of assets (comma-separated).
+ isArray: false
+ name: hostNames
+ required: false
+ secret: false
+ - default: false
+ description: The user-driven scan name for the scan.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ deprecated: false
+ description: Starts a scan for specified asset IP addresses and host names.
+ execution: false
+ name: nexpose-start-assets-scan
+ outputs:
+ - contextPath: Nexpose.Scan.Id
+ description: The identifier of the scan.
+ type: number
+ - contextPath: Nexpose.Scan.ScanType
+ description: The scan type (automated, manual, scheduled).
+ type: string
+ - contextPath: Nexpose.Scan.StartedBy
+ description: The name of the user that started the scan.
+ type: date
+ - contextPath: Nexpose.Scan.Assets
+ description: The number of assets found in the scan
+ type: number
+ - contextPath: Nexpose.Scan.TotalTime
+ description: The duration of the scan in minutes.
+ type: string
+ - contextPath: Nexpose.Scan.Completed
+ description: The end time of the scan in ISO8601 format.
+ type: date
+ - contextPath: Nexpose.Scan.Status
+ description: The scan status. Valid values are aborted, unknown, running, finished,
+ stopped, error, paused, dispatched, integrating
+ type: string
+ - contextPath: Nexpose.Scan.Vulnerabilities.Critical
+ description: The number of critical vulnerabilities.
+ type: number
+ - contextPath: Nexpose.Scan.Vulnerabilities.Moderate
+ description: The number of moderate vulnerabilities.
+ type: number
+ - contextPath: Nexpose.Scan.Vulnerabilities.Severe
+ description: The number of severe vulnerabilities.
+ type: number
+ - contextPath: Nexpose.Scan.Vulnerabilities.Total
+ description: The total number of vulnerabilities.
+ type: number
+ - arguments:
+ - default: false
+ description: integer ID of the scan to stop.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+    description: Stops the specified scan.
+ execution: false
+ name: nexpose-stop-scan
+ - arguments:
+ - default: false
+ description: integer ID of the scan to pause.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+    description: Pauses the specified scan.
+ execution: false
+ name: nexpose-pause-scan
+ - arguments:
+ - default: false
+ description: integer ID of the scan to resume.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+    description: Resumes the specified scan.
+ execution: false
+ name: nexpose-resume-scan
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+      description: Whether to return active scans (true) or past scans (false).
+ isArray: false
+ name: active
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+      description: integer The number of records to retrieve.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+      description: 'The criteria to sort the records by, in the format: property[,ASC|DESC].
+        The default sort order is ascending. Multiple sort criteria can be specified,
+        separated by a '';''. For example: ''riskScore,DESC;hostName,ASC'''
+ isArray: false
+ name: sort
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of scans.
+ execution: false
+ name: nexpose-get-scans
+ outputs:
+ - contextPath: Nexpose.Scan.Id
+ description: The identifier of the scan.
+ type: number
+ - contextPath: Nexpose.Scan.ScanType
+ description: The scan type (automated, manual, scheduled).
+ type: string
+ - contextPath: Nexpose.Scan.StartedBy
+ description: The name of the user that started the scan.
+ type: date
+ - contextPath: Nexpose.Scan.Assets
+ description: The number of assets found in the scan
+ type: number
+ - contextPath: Nexpose.Scan.TotalTime
+ description: The duration of the scan in minutes.
+ type: string
+ - contextPath: Nexpose.Scan.Completed
+ description: The end time of the scan in ISO8601 format.
+ type: date
+ - contextPath: Nexpose.Scan.Status
+ description: The scan status. Valid values are aborted, unknown, running, finished,
+ stopped, error, paused, dispatched, integrating
+ type: string
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+fromversion: 3.6.0
+tests:
+ - nexpose_test
diff --git a/Integrations/Rapid7_Nexpose/Rapid7_Nexpose_description.md b/Integrations/Rapid7_Nexpose/Rapid7_Nexpose_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/Rapid7_Nexpose/Rapid7_Nexpose_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/Rapid7_Nexpose/Rapid7_Nexpose_image.png b/Integrations/Rapid7_Nexpose/Rapid7_Nexpose_image.png
new file mode 100644
index 000000000000..75f901080dc5
Binary files /dev/null and b/Integrations/Rapid7_Nexpose/Rapid7_Nexpose_image.png differ
diff --git a/Integrations/Rapid7_Nexpose/Rapid7_Nexpose_test.py b/Integrations/Rapid7_Nexpose/Rapid7_Nexpose_test.py
new file mode 100644
index 000000000000..e8b60876abd2
--- /dev/null
+++ b/Integrations/Rapid7_Nexpose/Rapid7_Nexpose_test.py
@@ -0,0 +1,88 @@
+import demistomock as demisto
+import requests
+
+ITEM_WITHOUT_MS = {
+ 'date': '2019-05-03T03:01:54Z'
+}
+
+ITEM_WITH_MS = {
+ 'date': '2019-05-03T03:02:54.123Z'
+}
+
+ITEM_WITH_SCANID = {
+ 'date': '2019-05-03T03:03:54.123Z',
+ 'scanId': '1'
+}
+
+
+class ResponseMock:
+ def __init__(self):
+ self.status_code = 200
+
+ def json(self):
+ return ''
+
+
+def init_integration(mocker):
+ mocker.patch.object(demisto, 'params', return_value={
+ 'credentials': {
+ 'identifier': 'a',
+ 'password': 'a'
+ },
+ 'server': 'nexpose.com',
+ 'proxy': True
+ })
+ mocker.patch.object(requests, 'post', return_value=ResponseMock())
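+    # Patching requests.post lets the tests import Rapid7_Nexpose without reaching
+    # a real server (the integration presumably performs a login POST at import).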
+
+
+def test_get_datetime_from_asset_history_item(mocker):
+ init_integration(mocker)
+ from Rapid7_Nexpose import get_datetime_from_asset_history_item
+
+ assert(get_datetime_from_asset_history_item(ITEM_WITH_MS))
+ assert(get_datetime_from_asset_history_item(ITEM_WITHOUT_MS))
+
+
+def test_sort_with_and_without_ms(mocker):
+ init_integration(mocker)
+ from Rapid7_Nexpose import get_datetime_from_asset_history_item
+
+ dt_arr = [ITEM_WITH_MS, ITEM_WITHOUT_MS]
+ sorted_dt_arr = sorted(dt_arr, key=get_datetime_from_asset_history_item)
+ assert(sorted_dt_arr[0] == ITEM_WITHOUT_MS)
+ assert(sorted_dt_arr[1] == ITEM_WITH_MS)
+
+
+def test_get_last_scan(mocker):
+ init_integration(mocker)
+ from Rapid7_Nexpose import get_last_scan
+
+ # test empty history
+ expected = '-'
+ assert(get_last_scan({'history': None}) == expected)
+
+ # test history with assorted items
+ asset = {
+ 'history': [
+ ITEM_WITH_MS,
+ ITEM_WITHOUT_MS
+ ]
+ }
+ expected = {
+ 'date': '2019-05-03T03:02:54.123Z',
+ 'id': '-'
+    }
+    assert(get_last_scan(asset) == expected)
+
+ # test history with assorted items + scanId
+ asset = {
+ 'history': [
+ ITEM_WITH_MS,
+ ITEM_WITHOUT_MS,
+ ITEM_WITH_SCANID
+ ]
+ }
+ expected = {
+ 'date': '2019-05-03T03:03:54.123Z',
+ 'id': '1'
+ }
+ assert(get_last_scan(asset) == expected)
diff --git a/Integrations/Recorded_Future/CHANGELOG.md b/Integrations/Recorded_Future/CHANGELOG.md
new file mode 100644
index 000000000000..d724ade2d364
--- /dev/null
+++ b/Integrations/Recorded_Future/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+Added the *Suspicious Threshold* parameter.
diff --git a/Integrations/Recorded_Future/Recorded_Future.py b/Integrations/Recorded_Future/Recorded_Future.py
new file mode 100644
index 000000000000..61056aee5a44
--- /dev/null
+++ b/Integrations/Recorded_Future/Recorded_Future.py
@@ -0,0 +1,1763 @@
+import demistomock as demisto
+from CommonServerPython import *
+
+''' IMPORTS '''
+import requests
+import os
+import json
+import urllib
+from datetime import datetime
+
+if not demisto.params()['proxy']:
+    # pop() avoids a KeyError when a proxy variable is not set in the environment
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+TOKEN = demisto.params()['token']
+SERVER = demisto.params()['server'][:-1] if demisto.params()['server'].endswith('/') else demisto.params()['server']
+BASE_URL = SERVER + '/v2/'
+USE_SSL = not demisto.params().get('unsecure', False)
+HEADERS = {
+ 'X-RFToken': TOKEN,
+ 'X-RF-User-Agent': 'DemistoIntegrations+v1.0'
+}
+FILE_THRESHOLD = int(demisto.params()['file_threshold'])
+IP_THRESHOLD = int(demisto.params()['ip_threshold'])
+DOMAIN_THRESHOLD = int(demisto.params()['domain_threshold'])
+URL_THRESHOLD = int(demisto.params()['url_threshold'])
+CVE_THRESHOLD = int(demisto.params()['cve_threshold'])
+SUSPICIOUS_THRESHOLD = int(demisto.params()['suspicious_threshold'])
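+# Scores at or above a per-type threshold map to DBot score 3 (malicious); scores
+# at or above SUSPICIOUS_THRESHOLD map to 2 (suspicious). See translate_score below.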
+
+FETCH_TIME = demisto.params().get('triggered', '').strip()
+RULE_NAMES = demisto.params().get('rule_names', '').strip()
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, params=None):
+ LOG('running request with url=%s' % (BASE_URL + url_suffix))
+
+ params = params if params is not None else {}
+
+ try:
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ params=params,
+ headers=HEADERS,
+ verify=USE_SSL
+ )
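+        # Both 200 and 404 pass through here; the lookup commands parse the body
+        # and report 'No records found' when it contains an 'error' key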
+ if res.status_code not in {200, 404}:
+ if res.status_code == 401:
+ error_str = 'Request failed with status 401 - Authentication error'
+ else:
+ error_str = 'Request failed, status: ' + str(res.status_code) + ', details: ' + res.text
+ return_error(error_str)
+    except Exception as e:
+        LOG(str(e))
+        return_error(str(e))
+ return res.text
+
+
+def translate_score(score, threshold):
+ '''
+ Translates Recorded Future score to DBot score
+ '''
+ if score >= threshold: # Bad
+ return 3
+ elif score >= SUSPICIOUS_THRESHOLD: # Suspicious
+ return 2
+ else:
+ return 0 # Unknown
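+# For example, with a type threshold of 65 and SUSPICIOUS_THRESHOLD of 25 (both
+# hypothetical parameter values): translate_score(80, 65) == 3,
+# translate_score(40, 65) == 2, and translate_score(10, 65) == 0.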
+
+
+def determine_hash(hash):
+ '''
+ Determines hash type by length
+ '''
+ if len(hash) == 128:
+ return 'SHA512'
+ elif len(hash) == 64:
+ return 'SHA256'
+ elif len(hash) == 40:
+ return 'SHA1'
+ elif len(hash) == 32:
+ return 'MD5'
+ elif len(hash) == 8:
+ return 'CRC32'
+ else:
+ return 'CTPH'
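+# Detection is purely length-based: e.g. determine_hash('d41d8cd98f00b204e9800998ecf8427e')
+# returns 'MD5' (32 hex characters), and any unrecognized length is assumed to be
+# a CTPH (ssdeep) hash.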
+
+
+''' FUNCTIONS '''
+
+
+def domain_command():
+ domain = demisto.args().get('domain')
+    detailed = demisto.args().get('detailed') != 'false'
+ response = json.loads(domain_lookup(domain))
+ if response and ('error' not in response):
+ data = response['data']
+ timestamps = data['timestamps']
+ risk = data['risk']
+ rf_score = risk['score']
+ sightings = data['sightings']
+ hr = '### Recorded Future domain reputation for ' + domain + '\n'
+ hr += 'Risk score: ' + str(rf_score) + ' out of 99\n'
+ hr += 'Criticality label: ' + risk.get('criticalityLabel') + '\n'
+ hr += 'Summary: ' + risk.get('riskSummary') + '\n'
+ hr += 'Total references to this entity: ' + str(len(sightings)) + '\n'
+ hr += 'First reference collected on: ' + timestamps.get('firstSeen') + '\n'
+ hr += 'Latest reference collected on: ' + timestamps.get('lastSeen') + '\n'
+ hr += '[Intelligence Card](https://app.recordedfuture.com/live/sc/entity/idn:' + domain + ')' + '\n'
+ hr_table = []
+ if detailed:
+ evidence_details = risk['evidenceDetails']
+ for detail in evidence_details:
+ hr_table.append({
+ 'Rule Criticality': detail.get('criticalityLabel'),
+ 'Evidence Summary': detail.get('evidenceString'),
+ 'Rule Triggered': detail.get('rule'),
+ 'Rule Triggered Time': detail.get('timestamp')
+ })
+ hr += tableToMarkdown('Triggered Risk Rules', hr_table,
+ ['Evidence Summary', 'Rule Criticality', 'Rule Triggered', 'Rule Triggered Time'])
+ sightings_table = []
+ for raw_sighting in sightings:
+ sighting = {
+ 'Published': raw_sighting.get('published'),
+ 'Type': raw_sighting.get('type'),
+ 'Fragment': raw_sighting.get('fragment'),
+ 'Source': raw_sighting.get('source'),
+ 'Title': raw_sighting.get('title')
+ }
+            if raw_sighting.get('url'):
+ sighting['URL'] = '[{}]({})'.format(raw_sighting['url'], raw_sighting['url'])
+ sightings_table.append(sighting)
+ if sightings_table:
+ hr += tableToMarkdown('References collected for this domain', sightings_table,
+ ['Title', 'Source', 'Type', 'URL', 'Fragment', 'Published'])
+ ec = {}
+ ec[outputPaths['domain']] = {
+ 'Name': domain,
+ 'RecordedFuture': {
+ 'Criticality': risk.get('criticalityLabel'),
+ 'FirstSeen': timestamps.get('firstSeen'),
+ 'LastSeen': timestamps.get('lastSeen')
+ }
+ }
+ dbot_score = translate_score(rf_score, DOMAIN_THRESHOLD)
+ ec['DBotScore'] = {
+ 'Indicator': domain,
+ 'Type': 'domain',
+ 'Vendor': 'Recorded Future',
+ 'Score': dbot_score
+ }
+        if dbot_score == 3:
+            ec[outputPaths['domain']]['Malicious'] = {
+                'Vendor': 'Recorded Future',
+                'Description': 'Score above ' + str(DOMAIN_THRESHOLD)
+            }
+ else:
+ hr = 'No records found'
+ ec = {
+ 'DBotScore': {
+ 'Indicator': domain,
+ 'Type': 'domain',
+ 'Vendor': 'Recorded Future',
+ 'Score': 0
+ }
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
+
+def domain_lookup(domain):
+ cmd_url = 'domain/' + domain
+ params = {
+ 'fields': 'sightings,timestamps,risk'
+ }
+
+ response = http_request('get', cmd_url, params=params)
+ return response
+
+
+def url_command():
+ url = demisto.args().get('url')
+    detailed = demisto.args().get('detailed') != 'false'
+ response = json.loads(url_lookup(url))
+ if response and ('error' not in response):
+ data = response['data']
+ timestamps = data['timestamps']
+ risk = data['risk']
+ rf_score = risk['score']
+ sightings = data['sightings']
+ encoded_url = urllib.quote_plus(url)
+ hr = '### Recorded Future url reputation for ' + url + '\n'
+ hr += 'Risk score: ' + str(rf_score) + ' out of 99\n'
+ hr += 'Criticality label: ' + risk.get('criticalityLabel') + '\n'
+ hr += 'Summary: ' + risk.get('riskSummary') + '\n'
+ hr += 'Total references to this entity: ' + str(len(sightings)) + '\n'
+ hr += 'First reference collected on: ' + timestamps.get('firstSeen') + '\n'
+ hr += 'Latest reference collected on: ' + timestamps.get('lastSeen') + '\n'
+ hr += '[Intelligence Card](https://app.recordedfuture.com/live/sc/entity/url:' + encoded_url + ')' + '\n'
+ hr_table = []
+ if detailed:
+ evidence_details = risk['evidenceDetails']
+ for detail in evidence_details:
+ hr_table.append({
+ 'Rule Criticality': detail.get('criticalityLabel'),
+ 'Evidence Summary': detail.get('evidenceString'),
+ 'Rule Triggered': detail.get('rule'),
+ 'Rule Triggered Time': detail.get('timestamp')
+ })
+ hr += tableToMarkdown('Triggered Risk Rules', hr_table,
+ ['Evidence Summary', 'Rule Criticality', 'Rule Triggered', 'Rule Triggered Time'])
+ sightings_table = []
+ for raw_sighting in sightings:
+ sighting = {
+ 'Published': raw_sighting.get('published'),
+ 'Type': raw_sighting.get('type'),
+ 'Fragment': raw_sighting.get('fragment'),
+ 'Source': raw_sighting.get('source'),
+ 'Title': raw_sighting.get('title')
+ }
+            if raw_sighting.get('url'):
+ sighting['URL'] = '[{}]({})'.format(raw_sighting['url'], raw_sighting['url'])
+ sightings_table.append(sighting)
+ if sightings_table:
+ hr += tableToMarkdown('References collected for this URL', sightings_table,
+ ['Title', 'Source', 'Type', 'URL', 'Fragment', 'Published'])
+ ec = {}
+ ec[outputPaths['url']] = {
+ 'Data': url,
+ 'RecordedFuture': {
+ 'Criticality': risk.get('criticalityLabel'),
+ 'FirstSeen': timestamps.get('firstSeen'),
+ 'LastSeen': timestamps.get('lastSeen')
+ }
+ }
+ dbot_score = translate_score(rf_score, URL_THRESHOLD)
+ ec['DBotScore'] = {
+ 'Indicator': url,
+ 'Type': 'url',
+ 'Vendor': 'Recorded Future',
+ 'Score': dbot_score
+ }
+        if dbot_score == 3:
+            ec[outputPaths['url']]['Malicious'] = {
+                'Vendor': 'Recorded Future',
+                'Description': 'Score above ' + str(URL_THRESHOLD)
+            }
+ else:
+ hr = 'No records found'
+ ec = {
+ 'DBotScore': {
+ 'Indicator': url,
+ 'Type': 'url',
+ 'Vendor': 'Recorded Future',
+ 'Score': 0
+ }
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
+
+def url_lookup(url):
+ encoded_url = urllib.quote_plus(url)
+ cmd_url = 'url/' + encoded_url
+ params = {
+ 'fields': 'sightings,timestamps,risk'
+ }
+
+ response = http_request('get', cmd_url, params=params)
+ return response
+
+
+def ip_command():
+ ip = demisto.args().get('ip')
+    detailed = demisto.args().get('detailed') != 'false'
+ response = json.loads(ip_lookup(ip))
+ if response and ('error' not in response):
+ data = response['data']
+ timestamps = data['timestamps']
+ risk = data['risk']
+ rf_score = risk['score']
+ sightings = data['sightings']
+ hr = '### Recorded Future IP address reputation for ' + ip + '\n'
+ hr += 'Risk score: ' + str(rf_score) + ' out of 99\n'
+ hr += 'Criticality label: ' + risk.get('criticalityLabel') + '\n'
+ hr += 'Summary: ' + risk.get('riskSummary') + '\n'
+ hr += 'Total references to this entity: ' + str(len(sightings)) + '\n'
+ hr += 'First reference collected on: ' + timestamps.get('firstSeen') + '\n'
+ hr += 'Latest reference collected on: ' + timestamps.get('lastSeen') + '\n'
+ hr += '[Intelligence Card](https://app.recordedfuture.com/live/sc/entity/ip:' + ip + ')' + '\n'
+ evidence_table = []
+ if detailed:
+ evidence_details = risk['evidenceDetails']
+ for detail in evidence_details:
+ evidence_table.append({
+ 'Rule Criticality': detail.get('criticalityLabel'),
+ 'Evidence Summary': detail.get('evidenceString'),
+ 'Rule Triggered': detail.get('rule'),
+ 'Rule Triggered Time': detail.get('timestamp')
+ })
+ hr += tableToMarkdown('Triggered Risk Rules', evidence_table,
+ ['Evidence Summary', 'Rule Criticality', 'Rule Triggered', 'Rule Triggered Time'])
+ sightings_table = []
+ for raw_sighting in sightings:
+ sighting = {
+ 'Published': raw_sighting.get('published'),
+ 'Type': raw_sighting.get('type'),
+ 'Fragment': raw_sighting.get('fragment'),
+ 'Source': raw_sighting.get('source'),
+ 'Title': raw_sighting.get('title')
+ }
+            if raw_sighting.get('url'):
+ sighting['URL'] = '[{}]({})'.format(raw_sighting['url'], raw_sighting['url'])
+ sightings_table.append(sighting)
+ if sightings_table:
+ hr += tableToMarkdown('References collected for this IP', sightings_table,
+ ['Title', 'Source', 'Type', 'URL', 'Fragment', 'Published'])
+ ec = {}
+ ec[outputPaths['ip']] = {
+ 'Address': ip,
+ 'RecordedFuture': {
+ 'Criticality': risk.get('criticalityLabel'),
+ 'FirstSeen': timestamps.get('firstSeen'),
+ 'LastSeen': timestamps.get('lastSeen')
+ }
+ }
+ dbot_score = translate_score(rf_score, IP_THRESHOLD)
+ ec['DBotScore'] = {
+ 'Indicator': ip,
+ 'Type': 'ip',
+ 'Vendor': 'Recorded Future',
+ 'Score': dbot_score
+ }
+        if dbot_score == 3:
+            ec[outputPaths['ip']]['Malicious'] = {
+                'Vendor': 'Recorded Future',
+                'Description': 'Score above ' + str(IP_THRESHOLD)
+            }
+
+ else:
+ hr = 'No records found'
+ ec = {
+ 'DBotScore': {
+ 'Indicator': ip,
+ 'Type': 'ip',
+ 'Vendor': 'Recorded Future',
+ 'Score': 0
+ }
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
+
+def ip_lookup(ip):
+ cmd_url = 'ip/' + ip
+
+ params = {
+ 'fields': 'sightings,timestamps,risk'
+ }
+
+ response = http_request('get', cmd_url, params=params)
+ return response
+
+
+def file_command():
+ file = demisto.args().get('file')
+    detailed = demisto.args().get('detailed') != 'false'
+ response = json.loads(file_lookup(file))
+ if response and ('error' not in response):
+ data = response['data']
+ timestamps = data['timestamps']
+ risk = data['risk']
+ rf_score = risk['score']
+ sightings = data['sightings']
+ hr = '### Recorded Future file reputation for ' + file + '\n'
+ hr += 'Risk score: ' + str(rf_score) + ' out of 99\n'
+ hr += 'Criticality label: ' + risk.get('criticalityLabel') + '\n'
+ hr += 'Summary: ' + risk.get('riskSummary') + '\n'
+ hr += 'Total references to this entity: ' + str(len(sightings)) + '\n'
+ hr += 'First reference collected on: ' + timestamps.get('firstSeen') + '\n'
+ hr += 'Latest reference collected on: ' + timestamps.get('lastSeen') + '\n'
+ hr += '[Intelligence Card](https://app.recordedfuture.com/live/sc/entity/hash:' + file + ')' + '\n'
+ hr_table = []
+ if detailed:
+ evidence_details = risk['evidenceDetails']
+ for detail in evidence_details:
+ hr_table.append({
+ 'Rule Criticality': detail.get('criticalityLabel'),
+ 'Evidence Summary': detail.get('evidenceString'),
+ 'Rule Triggered': detail.get('rule'),
+ 'Rule Triggered Time': detail.get('timestamp')
+ })
+ hr += tableToMarkdown('Triggered Risk Rules', hr_table,
+ ['Rule Triggered', 'Rule Criticality', 'Evidence Summary', 'Rule Triggered Time'])
+ sightings_table = []
+ for raw_sighting in sightings:
+ sighting = {
+ 'Published': raw_sighting.get('published'),
+ 'Type': raw_sighting.get('type'),
+ 'Fragment': raw_sighting.get('fragment'),
+ 'Source': raw_sighting.get('source'),
+ 'Title': raw_sighting.get('title')
+ }
+ if raw_sighting['url']:
+ sighting['URL'] = '[{}]({})'.format(raw_sighting['url'], raw_sighting['url'])
+ sightings_table.append(sighting)
+ if sightings_table:
+ hr += tableToMarkdown('References collected for this hash', sightings_table,
+ ['Title', 'Source', 'Type', 'URL', 'Fragment', 'Published'])
+ hash_type = determine_hash(file)
+ ec = {}
+ ec[outputPaths['file']] = {
+ hash_type: file,
+ 'RecordedFuture': {
+ 'Criticality': risk['criticalityLabel'],
+ 'FirstSeen': timestamps['firstSeen'],
+ 'LastSeen': timestamps['lastSeen']
+ }
+ }
+ dbot_score = translate_score(rf_score, FILE_THRESHOLD)
+ ec['DBotScore'] = {
+ 'Indicator': file,
+ 'Type': 'file',
+ 'Vendor': 'Recorded Future',
+ 'Score': dbot_score
+ }
+        if dbot_score == 3:
+            ec[outputPaths['file']]['Malicious'] = {
+                'Vendor': 'Recorded Future',
+                'Description': 'Score above ' + str(FILE_THRESHOLD)
+            }
+
+ else:
+ hr = 'No records found'
+ ec = {
+ 'DBotScore': {
+ 'Indicator': file,
+ 'Type': 'file',
+ 'Vendor': 'Recorded Future',
+ 'Score': 0
+ }
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
+
+def file_lookup(file):
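+    # Fetch a single hash record (MD5, SHA-1, SHA-256, SHA-512, CRC-32 or CTPH)
+    # with sightings, timestamps and risk fields.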
+ cmd_url = 'hash/' + file
+
+ params = {
+ 'fields': 'sightings,timestamps,risk'
+ }
+ response = http_request('get', cmd_url, params=params)
+ return response
+
+
+def get_related_entities_command():
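+    # Fetch entities related to an indicator and group them by type (IP, hash,
+    # domain, attacker, malware, URL) for both the markdown output and context.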
+ entity_value = demisto.args().get('entityValue')
+ entity_result_type = demisto.args().get('resultEntityType')
+ entity_type = demisto.args().get('entityType').lower()
+ if entity_type == 'file':
+ entity_type = 'hash'
+ if entity_type == 'url':
+ entity_value = urllib.quote_plus(entity_value)
+ response = json.loads(get_related_entities(entity_value, entity_type))
+
+ ec = {}
+ if response and ('error' not in response):
+ entity_result_type = entity_result_type.split(',')
+ entity_types = [] # type: list
+ if 'All' in entity_result_type:
+ entity_types.extend(['RelatedIpAddress', 'RelatedInternetDomainName', 'RelatedHash', 'RelatedMalware',
+ 'RelatedAttackVector', 'RelatedURL'])
+ else:
+ if 'IP' in entity_result_type:
+ entity_types.append('RelatedIpAddress')
+ if 'Hash' in entity_result_type:
+ entity_types.append('RelatedHash')
+ if 'Domain' in entity_result_type:
+ entity_types.append('RelatedInternetDomainName')
+ if 'Attacker' in entity_result_type:
+ entity_types.append('RelatedAttackVector')
+ if 'Malware' in entity_result_type:
+ entity_types.append('RelatedMalware')
+ if 'URL' in entity_result_type:
+ entity_types.append('RelatedURL')
+ ip_outputs = [] # type: list
+ hash_outputs = [] # type: list
+ domain_outputs = [] # type: list
+ attacker_outputs = [] # type: list
+ malware_outputs = [] # type: list
+ url_outputs = [] # type: list
+
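+        # Map each relatedEntities type returned by the API to the list that collects it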
+ output_map = {
+ 'RelatedIpAddress': ip_outputs,
+ 'RelatedHash': hash_outputs,
+ 'RelatedInternetDomainName': domain_outputs,
+ 'RelatedAttackVector': attacker_outputs,
+ 'RelatedMalware': malware_outputs,
+ 'RelatedURL': url_outputs
+ }
+ related_entities = response['data']['relatedEntities']
+ for related_entity in related_entities:
+ if related_entity['type'] in entity_types:
+ entities = related_entity['entities']
+ for entity in entities:
+ hr_entity = {
+ 'Count': entity['count'],
+ 'ID': entity['entity']['id']
+ }
+
+ if related_entity['type'] == 'RelatedURL':
+ hr_entity['Data'] = entity['entity']['name']
+ else:
+ hr_entity['Name'] = entity['entity']['name']
+
+ output_map[related_entity['type']].append(hr_entity)
+ hr_md = ''
+
+ related_entities_ec = {}
+ if ip_outputs:
+ hr_md += tableToMarkdown('IP Address', ip_outputs)
+ related_entities_ec['IPAddress'] = ip_outputs
+
+ if hash_outputs:
+ hr_md += tableToMarkdown('Hash', hash_outputs)
+ related_entities_ec['Hash'] = hash_outputs
+
+ if domain_outputs:
+ hr_md += tableToMarkdown('Domain', domain_outputs)
+ related_entities_ec['Domain'] = domain_outputs
+
+ if attacker_outputs:
+ hr_md += tableToMarkdown('Attacker', attacker_outputs)
+ related_entities_ec['Attacker'] = attacker_outputs
+
+ if malware_outputs:
+ hr_md += tableToMarkdown('Malware', malware_outputs)
+ related_entities_ec['Malware'] = malware_outputs
+
+ if url_outputs:
+ hr_md += tableToMarkdown('URL', url_outputs)
+ related_entities_ec['URL'] = url_outputs
+
+ if hr_md:
+            hr_md = '### Recorded Future related entities to ' + entity_value + '\n' + hr_md
+ if entity_type == 'ip':
+ ec[outputPaths['ip']] = {
+ 'Address': entity_value,
+ 'RecordedFuture': {
+ 'RelatedEntities': related_entities_ec
+ }
+ }
+ elif entity_type == 'domain':
+ ec[outputPaths['domain']] = {
+ 'Name': entity_value,
+ 'RecordedFuture': {
+ 'RelatedEntities': related_entities_ec
+ }
+ }
+ elif entity_type == 'hash':
+ ec[outputPaths['file']] = {
+ determine_hash(entity_value): entity_value,
+ 'RecordedFuture': {
+ 'RelatedEntities': related_entities_ec
+ }
+ }
+ elif entity_type == 'url':
+ ec[outputPaths['url']] = {
+ 'Data': entity_value,
+ 'RecordedFuture': {
+ 'RelatedEntities': related_entities_ec
+ }
+ }
+ else:
+ hr_md = 'No results found'
+ else:
+ hr_md = 'No results found'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr_md,
+ 'EntryContext': ec
+ })
+
+
+def get_related_entities(entity_value, entity_type):
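+    # entity_type doubles as the API path segment (ip, domain, hash or url)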
+ cmd_url = entity_type + '/' + entity_value
+
+ params = {
+ 'fields': 'relatedEntities'
+ }
+ response = http_request('get', cmd_url, params=params)
+ return response
+
+
+def hashlist_command():
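+    # Search for hash threats by risk score range and emit one entry per result;
+    # the ip/domain/url/vulnerability list commands below follow the same pattern.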
+    detailed = demisto.args().get('detailed') != 'false'
+ limit = demisto.args().get('limit')
+ risk_lower = demisto.args().get('risk_lower')
+ risk_higher = demisto.args().get('risk_higher')
+ orderby = demisto.args().get('orderby')
+ direction = demisto.args().get('direction')
+
+ response = json.loads(hashlist_lookup(limit, risk_lower, risk_higher, orderby, direction))
+ if not response or 'data' not in response:
+ demisto.results('No results found')
+ return
+
+ resultlist = response['data'].get('results', [])
+ if len(resultlist) == 0:
+ demisto.results('No results found')
+ return
+
+ for result in resultlist:
+ intelcard = result['intelCard']
+ timestamps = result['timestamps']
+ file = result['entity']['name']
+ risk = result['risk']
+ rf_score = risk['score']
+ sightings = result['sightings']
+ hr = '### Recorded Future file reputation for ' + file + '\n'
+ hr += 'Risk score: ' + str(rf_score) + ' out of 99\n'
+ hr += 'Criticality label: ' + risk.get('criticalityLabel') + '\n'
+ hr += 'Summary: ' + risk.get('riskSummary') + '\n'
+ hr += 'Total references to this entity: ' + str(len(sightings)) + '\n'
+ hr += '[Intelligence Card](' + intelcard + ')' + '\n'
+ hr_table = []
+ hash_type = determine_hash(file)
+ if detailed:
+ evidence_details = risk['evidenceDetails']
+ for detail in evidence_details:
+ hr_table.append({
+ 'Rule Criticality': detail.get('criticalityLabel'),
+ 'Evidence Summary': detail.get('evidenceString'),
+ 'Rule Triggered': detail.get('rule'),
+ 'Rule Triggered Time': detail.get('timestamp')
+ })
+ hr += tableToMarkdown('Triggered Risk Rules', hr_table,
+ ['Rule Triggered', 'Rule Criticality', 'Evidence Summary', 'Rule Triggered Time'])
+ sightings_table = []
+ for raw_sighting in sightings:
+ sighting = {
+ 'Published': raw_sighting.get('published'),
+ 'Type': raw_sighting.get('type'),
+ 'Fragment': raw_sighting.get('fragment'),
+ 'Source': raw_sighting.get('source'),
+ 'Title': raw_sighting.get('title')
+ }
+ if raw_sighting['url']:
+ sighting['URL'] = '[{}]({})'.format(raw_sighting['url'], raw_sighting['url'])
+ sightings_table.append(sighting)
+ if sightings_table:
+ hr += tableToMarkdown('References collected for this hash', sightings_table,
+ ['Title', 'Source', 'Type', 'URL', 'Fragment', 'Published'])
+ ec = {}
+ ec[outputPaths['file']] = {
+ hash_type: file,
+ 'RecordedFuture': {
+ 'Criticality': risk['criticalityLabel'],
+ 'FirstSeen': timestamps['firstSeen'],
+ 'LastSeen': timestamps['lastSeen']
+ }
+ }
+ dbot_score = translate_score(rf_score, FILE_THRESHOLD)
+ ec['DBotScore'] = {
+ 'Indicator': file,
+ 'Type': 'file',
+ 'Vendor': 'Recorded Future',
+ 'Score': dbot_score
+ }
+        if dbot_score == 3:
+            ec[outputPaths['file']]['Malicious'] = {
+                'Vendor': 'Recorded Future',
+                'Description': 'Score above ' + str(FILE_THRESHOLD)
+            }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
+
+def hashlist_lookup(limit, risk_lower, risk_higher, orderby, direction):
+ cmd_url = 'hash/search'
+
+ params = {
+ 'fields': 'entity,intelCard,risk,sightings,timestamps'
+ }
+
+ if limit:
+ params['limit'] = limit
+ if orderby:
+ params['orderby'] = orderby
+ if direction:
+ params['direction'] = direction
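+    # riskScore is passed as a closed interval in bracket notation, e.g. [65,99]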
+ if risk_lower and risk_higher:
+ params['riskScore'] = '[{},{}]'.format(risk_lower, risk_higher)
+
+ response = http_request('get', cmd_url, params=params)
+ return response
+
+
+def iplist_command():
+    detailed = demisto.args().get('detailed') != 'false'
+ limit = demisto.args().get('limit')
+ risk_lower = demisto.args().get('risk_lower')
+ risk_higher = demisto.args().get('risk_higher')
+ orderby = demisto.args().get('orderby')
+ direction = demisto.args().get('direction')
+
+ response = json.loads(iplist_lookup(limit, risk_lower, risk_higher, orderby, direction))
+ if not response or 'data' not in response:
+ demisto.results('No results found')
+ return
+
+ resultlist = response['data'].get('results', [])
+ if len(resultlist) == 0:
+ demisto.results('No results found')
+ return
+
+ for result in resultlist:
+ intelcard = result['intelCard']
+ timestamps = result['timestamps']
+ ip = result['entity']['name']
+ risk = result['risk']
+ rf_score = risk['score']
+ sightings = result['sightings']
+ hr = '### Recorded Future IP reputation for ' + ip + '\n'
+ hr += 'Risk score: ' + str(rf_score) + ' out of 99\n'
+ hr += 'Criticality label: ' + risk.get('criticalityLabel') + '\n'
+ hr += 'Summary: ' + risk.get('riskSummary') + '\n'
+ hr += 'Total references to this entity: ' + str(len(sightings)) + '\n'
+ hr += '[Intelligence Card](' + intelcard + ')' + '\n'
+ hr_table = []
+ if detailed:
+ evidence_details = risk['evidenceDetails']
+ for detail in evidence_details:
+ hr_table.append({
+ 'Rule Criticality': detail.get('criticalityLabel'),
+ 'Evidence Summary': detail.get('evidenceString'),
+ 'Rule Triggered': detail.get('rule'),
+ 'Rule Triggered Time': detail.get('timestamp')
+ })
+ hr += tableToMarkdown('Triggered Risk Rules', hr_table,
+ ['Rule Triggered', 'Rule Criticality', 'Evidence Summary', 'Rule Triggered Time'])
+ sightings_table = []
+ for raw_sighting in sightings:
+ sighting = {
+ 'Published': raw_sighting.get('published'),
+ 'Type': raw_sighting.get('type'),
+ 'Fragment': raw_sighting.get('fragment'),
+ 'Source': raw_sighting.get('source'),
+ 'Title': raw_sighting.get('title')
+ }
+            if raw_sighting['url']:
+                sighting['URL'] = '[{}]({})'.format(raw_sighting['url'], raw_sighting['url'])
+ sightings_table.append(sighting)
+ if sightings_table:
+ hr += tableToMarkdown('References collected for this IP', sightings_table,
+ ['Title', 'Source', 'Type', 'URL', 'Fragment', 'Published'])
+ ec = {}
+ ec[outputPaths['ip']] = {
+ 'Address': ip,
+ 'RecordedFuture': {
+ 'Criticality': risk.get('criticalityLabel'),
+ 'FirstSeen': timestamps.get('firstSeen'),
+ 'LastSeen': timestamps.get('lastSeen')
+ }
+ }
+ dbot_score = translate_score(rf_score, IP_THRESHOLD)
+ ec['DBotScore'] = {
+ 'Indicator': ip,
+ 'Type': 'ip',
+ 'Vendor': 'Recorded Future',
+ 'Score': dbot_score
+ }
+        if dbot_score == 3:
+            ec[outputPaths['ip']]['Malicious'] = {
+                'Vendor': 'Recorded Future',
+                'Description': 'Score above ' + str(IP_THRESHOLD)
+            }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
+
+def iplist_lookup(limit, risk_lower, risk_higher, orderby, direction):
+ cmd_url = 'ip/search'
+
+ params = {
+ 'fields': 'entity,intelCard,risk,sightings,timestamps'
+ }
+
+ if limit:
+ params['limit'] = limit
+ if orderby:
+ params['orderby'] = orderby
+ if direction:
+ params['direction'] = direction
+ if risk_lower and risk_higher:
+ params['riskScore'] = '[{},{}]'.format(risk_lower, risk_higher)
+
+ response = http_request('get', cmd_url, params=params)
+ return response
+
+
+def domainlist_command():
+    detailed = demisto.args().get('detailed') != 'false'
+ limit = demisto.args().get('limit')
+ risk_lower = demisto.args().get('risk_lower')
+ risk_higher = demisto.args().get('risk_higher')
+ orderby = demisto.args().get('orderby')
+ direction = demisto.args().get('direction')
+
+ response = json.loads(domainlist_lookup(limit, risk_lower, risk_higher, orderby, direction))
+ if not response or 'data' not in response:
+ demisto.results('No results found')
+ return
+
+ resultlist = response['data'].get('results', [])
+ if len(resultlist) == 0:
+ demisto.results('No results found')
+ return
+
+ for result in resultlist:
+ timestamps = result['timestamps']
+ domain = result['entity']['name']
+ risk = result['risk']
+ rf_score = risk['score']
+ sightings = result['sightings']
+ hr = '### Recorded Future Domain reputation for ' + domain + '\n'
+ hr += 'Risk score: ' + str(rf_score) + ' out of 99\n'
+ hr += 'Criticality label: ' + risk.get('criticalityLabel') + '\n'
+ hr += 'Summary: ' + risk.get('riskSummary') + '\n'
+ hr += 'Total references to this entity: ' + str(len(sightings)) + '\n'
+ hr += '[Intelligence Card](https://app.recordedfuture.com/live/sc/entity/idn:' + domain + ')' + '\n'
+ hr_table = []
+ if detailed:
+ evidence_details = risk['evidenceDetails']
+ for detail in evidence_details:
+ hr_table.append({
+ 'Rule Criticality': detail.get('criticalityLabel'),
+ 'Evidence Summary': detail.get('evidenceString'),
+ 'Rule Triggered': detail.get('rule'),
+ 'Rule Triggered Time': detail.get('timestamp')
+ })
+ hr += tableToMarkdown('Triggered Risk Rules', hr_table,
+ ['Rule Triggered', 'Rule Criticality', 'Evidence Summary', 'Rule Triggered Time'])
+ sightings_table = []
+ for raw_sighting in sightings:
+ sighting = {
+ 'Published': raw_sighting.get('published'),
+ 'Type': raw_sighting.get('type'),
+ 'Fragment': raw_sighting.get('fragment'),
+ 'Source': raw_sighting.get('source'),
+ 'Title': raw_sighting.get('title')
+ }
+ if raw_sighting['url']:
+ sighting['URL'] = '[{}]({})'.format(raw_sighting['url'], raw_sighting['url'])
+ sightings_table.append(sighting)
+ if sightings_table:
+ hr += tableToMarkdown('References collected for this domain', sightings_table,
+ ['Title', 'Source', 'Type', 'URL', 'Fragment', 'Published'])
+ ec = {}
+ ec[outputPaths['domain']] = {
+ 'Name': domain,
+ 'RecordedFuture': {
+ 'Criticality': risk.get('criticalityLabel'),
+ 'FirstSeen': timestamps.get('firstSeen'),
+ 'LastSeen': timestamps.get('lastSeen')
+ }
+ }
+ dbot_score = translate_score(rf_score, DOMAIN_THRESHOLD)
+ ec['DBotScore'] = {
+ 'Indicator': domain,
+ 'Type': 'domain',
+ 'Vendor': 'Recorded Future',
+ 'Score': dbot_score
+ }
+        if dbot_score == 3:
+            ec[outputPaths['domain']]['Malicious'] = {
+                'Vendor': 'Recorded Future',
+                'Description': 'Score above ' + str(DOMAIN_THRESHOLD)
+            }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
+
+def domainlist_lookup(limit, risk_lower, risk_higher, orderby, direction):
+ cmd_url = 'domain/search'
+
+ params = {
+ 'fields': 'entity,intelCard,risk,sightings,timestamps'
+ }
+
+ if limit:
+ params['limit'] = limit
+ if orderby:
+ params['orderby'] = orderby
+ if direction:
+ params['direction'] = direction
+ if risk_lower and risk_higher:
+ params['riskScore'] = '[{},{}]'.format(risk_lower, risk_higher)
+
+ response = http_request('get', cmd_url, params=params)
+ return response
+
+
+def urllist_command():
+    detailed = demisto.args().get('detailed') != 'false'
+ limit = demisto.args().get('limit')
+ risk_lower = demisto.args().get('risk_lower')
+ risk_higher = demisto.args().get('risk_higher')
+ orderby = demisto.args().get('orderby')
+ direction = demisto.args().get('direction')
+
+ response = json.loads(urllist_lookup(limit, risk_lower, risk_higher, orderby, direction))
+ if not response or 'data' not in response:
+ demisto.results('No results found')
+ return
+
+ resultlist = response['data'].get('results', [])
+ if len(resultlist) == 0:
+ demisto.results('No results found')
+ return
+
+ for result in resultlist:
+ timestamps = result['timestamps']
+ url = result['entity']['name']
+ intelcard = urllib.quote_plus(url)
+ risk = result['risk']
+ rf_score = risk['score']
+ sightings = result['sightings']
+ hr = '### Recorded Future URL reputation for ' + url + '\n'
+ hr += 'Risk score: ' + str(rf_score) + ' out of 99\n'
+ hr += 'Criticality label: ' + risk.get('criticalityLabel') + '\n'
+ hr += 'Summary: ' + risk.get('riskSummary') + '\n'
+ hr += 'Total references to this entity: ' + str(len(sightings)) + '\n'
+ hr += '[Intelligence Card](https://app.recordedfuture.com/live/sc/entity/url:' + intelcard + ')' + '\n'
+ hr_table = []
+ if detailed:
+ evidence_details = risk['evidenceDetails']
+ for detail in evidence_details:
+ hr_table.append({
+ 'Rule Criticality': detail.get('criticalityLabel'),
+ 'Evidence Summary': detail.get('evidenceString'),
+ 'Rule Triggered': detail.get('rule'),
+ 'Rule Triggered Time': detail.get('timestamp')
+ })
+ hr += tableToMarkdown('Triggered Risk Rules', hr_table,
+ ['Rule Triggered', 'Rule Criticality', 'Evidence Summary', 'Rule Triggered Time'])
+ sightings_table = []
+ for raw_sighting in sightings:
+ sighting = {
+ 'Published': raw_sighting.get('published'),
+ 'Type': raw_sighting.get('type'),
+ 'Fragment': raw_sighting.get('fragment'),
+ 'Source': raw_sighting.get('source'),
+ 'Title': raw_sighting.get('title')
+ }
+ if raw_sighting['url']:
+ sighting['URL'] = '[{}]({})'.format(raw_sighting['url'], raw_sighting['url'])
+ sightings_table.append(sighting)
+ if sightings_table:
+ hr += tableToMarkdown('References collected for this URL', sightings_table,
+ ['Title', 'Source', 'Type', 'URL', 'Fragment', 'Published'])
+ ec = {}
+ ec[outputPaths['url']] = {
+ 'Data': url,
+ 'RecordedFuture': {
+ 'Criticality': risk.get('criticalityLabel'),
+ 'FirstSeen': timestamps.get('firstSeen'),
+ 'LastSeen': timestamps.get('lastSeen')
+ }
+ }
+ dbot_score = translate_score(rf_score, URL_THRESHOLD)
+ ec['DBotScore'] = {
+ 'Indicator': url,
+ 'Type': 'url',
+ 'Vendor': 'Recorded Future',
+ 'Score': dbot_score
+ }
+        if dbot_score == 3:
+            ec[outputPaths['url']]['Malicious'] = {
+                'Vendor': 'Recorded Future',
+                'Description': 'Score above ' + str(URL_THRESHOLD)
+            }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
+
+def urllist_lookup(limit, risk_lower, risk_higher, orderby, direction):
+ cmd_url = 'url/search'
+
+ params = {
+ 'fields': 'entity,intelCard,risk,sightings,timestamps'
+ }
+
+ if limit:
+ params['limit'] = limit
+ if orderby:
+ params['orderby'] = orderby
+ if direction:
+ params['direction'] = direction
+ if risk_lower and risk_higher:
+ params['riskScore'] = '[{},{}]'.format(risk_lower, risk_higher)
+
+ response = http_request('get', cmd_url, params=params)
+ return response
+
+
+def vulnlist_command():
+    detailed = demisto.args().get('detailed') != 'false'
+ limit = demisto.args().get('limit')
+ risk_lower = demisto.args().get('risk_lower')
+ risk_higher = demisto.args().get('risk_higher')
+ orderby = demisto.args().get('orderby')
+ direction = demisto.args().get('direction')
+
+ response = json.loads(vulnlist_lookup(limit, risk_lower, risk_higher, orderby, direction))
+ if not response or 'data' not in response:
+ demisto.results('No results found')
+ return
+
+ resultlist = response['data'].get('results', [])
+ if len(resultlist) == 0:
+ demisto.results('No results found')
+ return
+
+ for result in resultlist:
+ timestamps = result['timestamps']
+ vuln = result['entity']['name']
+ entity_id = result['entity']['id']
+ risk = result['risk']
+ rf_score = risk['score']
+ sightings = result['sightings']
+ hr = '### Recorded Future Vulnerability info for ' + vuln + '\n'
+ hr += 'Risk score: ' + str(rf_score) + ' out of 99\n'
+ hr += 'Criticality label: ' + risk.get('criticalityLabel') + '\n'
+ hr += 'Summary: ' + risk.get('riskSummary') + '\n'
+ hr += 'Total references to this entity: ' + str(len(sightings)) + '\n'
+ hr += '[Intelligence Card](https://app.recordedfuture.com/live/sc/entity/' + entity_id + ')' + '\n'
+ hr_table = []
+ if detailed:
+ evidence_details = risk['evidenceDetails']
+ for detail in evidence_details:
+ hr_table.append({
+ 'Rule Criticality': detail.get('criticalityLabel'),
+ 'Evidence Summary': detail.get('evidenceString'),
+ 'Rule Triggered': detail.get('rule'),
+ 'Rule Triggered Time': detail.get('timestamp')
+ })
+ hr += tableToMarkdown('Triggered Risk Rules', hr_table,
+ ['Rule Triggered', 'Rule Criticality', 'Evidence Summary', 'Rule Triggered Time'])
+ sightings_table = []
+ for raw_sighting in sightings:
+ sighting = {
+ 'Published': raw_sighting.get('published'),
+ 'Type': raw_sighting.get('type'),
+ 'Fragment': raw_sighting.get('fragment'),
+ 'Source': raw_sighting.get('source'),
+ 'Title': raw_sighting.get('title')
+ }
+ if raw_sighting['url']:
+ sighting['URL'] = '[{}]({})'.format(raw_sighting['url'], raw_sighting['url'])
+ sightings_table.append(sighting)
+ if sightings_table:
+ hr += tableToMarkdown('References collected for this vulnerability', sightings_table,
+ ['Title', 'Source', 'Type', 'URL', 'Fragment', 'Published'])
+ ec = {}
+ ec[outputPaths['cve']] = {
+ 'ID': vuln,
+ 'RecordedFuture': {
+ 'Criticality': risk.get('criticalityLabel'),
+ 'FirstSeen': timestamps.get('firstSeen'),
+ 'LastSeen': timestamps.get('lastSeen')
+ }
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': ec
+ })
+
+
+def vulnlist_lookup(limit, risk_lower, risk_higher, orderby, direction):
+ cmd_url = 'vulnerability/search'
+
+ params = {
+ 'fields': 'entity,intelCard,risk,sightings,timestamps'
+ }
+
+ if limit:
+ params['limit'] = limit
+ if orderby:
+ params['orderby'] = orderby
+ if direction:
+ params['direction'] = direction
+ if risk_lower and risk_higher:
+ params['riskScore'] = '[{},{}]'.format(risk_lower, risk_higher)
+
+ response = http_request('get', cmd_url, params=params)
+ return response
+
+
+def get_url_risklist_command():
+ specific_list = demisto.args().get('list')
+
+ res = get_url_risklist(specific_list)
+
+ if not res:
+ return_error('Received empty response')
+
+ demisto.results(
+ fileResult(filename='url_risk_list.csv', data=res.encode('utf-8'), file_type=entryTypes['entryInfoFile']))
+
+
+def get_url_risklist(specific_list):
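+    # The risk list endpoints return CSV when the 'csv/splunk' format is requested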
+ cmd_url = 'url/risklist'
+
+ params = {
+ 'format': 'csv/splunk'
+ }
+
+ if specific_list:
+ params['list'] = specific_list
+
+ return http_request('get', cmd_url, params=params)
+
+
+def get_domain_risklist_command():
+ specific_list = demisto.args().get('list')
+
+ res = get_domain_risklist(specific_list)
+
+ if not res:
+ return_error('Received empty response')
+
+ demisto.results(
+ fileResult(filename='domain_risk_list.csv', data=res.encode('utf-8'), file_type=entryTypes['entryInfoFile']))
+
+
+def get_domain_risklist(specific_list):
+ cmd_url = 'domain/risklist'
+
+ params = {
+ 'format': 'csv/splunk'
+ }
+
+ if specific_list:
+ params['list'] = specific_list
+
+ return http_request('get', cmd_url, params=params)
+
+
+def get_ip_risklist_command():
+ specific_list = demisto.args().get('list')
+
+ res = get_ip_risklist(specific_list)
+
+ if not res:
+ return_error('Received empty response')
+
+ demisto.results(
+ fileResult(filename='ip_risk_list.csv', data=res.encode('utf-8'), file_type=entryTypes['entryInfoFile']))
+
+
+def get_ip_risklist(specific_list):
+ cmd_url = 'ip/risklist'
+
+ params = {
+ 'format': 'csv/splunk'
+ }
+
+ if specific_list:
+ params['list'] = specific_list
+
+ return http_request('get', cmd_url, params=params)
+
+
+def get_hash_risklist_command():
+ specific_list = demisto.args().get('list')
+
+ res = get_hash_risklist(specific_list)
+
+ if not res:
+ return_error('Received empty response')
+
+ demisto.results(
+ fileResult(filename='hash_list.csv', data=res.encode('utf-8'), file_type=entryTypes['entryInfoFile']))
+
+
+def get_hash_risklist(specific_list):
+ cmd_url = 'hash/risklist'
+
+ params = {
+ 'format': 'csv/splunk'
+ }
+
+ if specific_list:
+ params['list'] = specific_list
+
+ return http_request('get', cmd_url, params=params)
+
+
+def get_vulnerability_risklist_command():
+ specific_list = demisto.args().get('list')
+
+ res = get_vulnerability_risklist(specific_list)
+
+ if not res:
+ return_error('Received empty response')
+
+ demisto.results(
+ fileResult(filename='cve_risk_list.csv', data=res.encode('utf-8'), file_type=entryTypes['entryInfoFile']))
+
+
+def get_vulnerability_risklist(specific_list):
+ cmd_url = 'vulnerability/risklist'
+
+ params = {
+ 'format': 'csv/splunk'
+ }
+
+ if specific_list:
+ params['list'] = specific_list
+
+ return http_request('get', cmd_url, params=params)
+
+
+def get_domain_riskrules_command():
+    response = json.loads(get_domain_riskrules())
+
+    if not response or 'data' not in response:
+        demisto.results('No results found')
+        return
+
+ headers = ['Name', 'Description', 'Count', 'Criticality']
+
+ mapped_rules = [{
+ 'Name': r.get('name'),
+ 'Description': r.get('description'),
+ 'Count': r.get('count'),
+ 'Criticality': r.get('criticalityLabel')
+ } for r in response['data'].get('results', [])]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Recorded Future Domain risk rules', mapped_rules, headers=headers,
+ removeNull=True),
+ 'EntryContext': {
+ 'RecordedFuture.RiskRule.Domain(val.Name === obj.Name)': createContext(mapped_rules)
+ }
+ })
+
+
+def get_domain_riskrules():
+ cmd_url = 'domain/riskrules'
+
+ res = http_request('get', cmd_url)
+
+ return res
+
+
+def get_hash_riskrules_command():
+ response = json.loads(get_hash_riskrules())
+
+ if not response or 'data' not in response:
+ demisto.results('No results found')
+ return
+
+ headers = ['Name', 'Description', 'Count', 'Criticality']
+
+ mapped_rules = [{
+ 'Name': r.get('name'),
+ 'Description': r.get('description'),
+ 'Count': r.get('count'),
+ 'Criticality': r.get('criticalityLabel')
+ } for r in response['data'].get('results', [])]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Recorded Future Hash risk rules', mapped_rules, headers=headers,
+ removeNull=True),
+ 'EntryContext': {
+ 'RecordedFuture.RiskRule.Hash(val.Name === obj.Name)': createContext(mapped_rules)
+ }
+ })
+
+
+def get_hash_riskrules():
+ cmd_url = 'hash/riskrules'
+
+ res = http_request('get', cmd_url)
+
+ return res
+
+
+def get_ip_riskrules_command():
+ response = json.loads(get_ip_riskrules())
+
+ if not response or 'data' not in response:
+ demisto.results('No results found')
+ return
+
+ headers = ['Name', 'Description', 'Count', 'Criticality']
+
+ mapped_rules = [{
+ 'Name': r.get('name'),
+ 'Description': r.get('description'),
+ 'Count': r.get('count'),
+ 'Criticality': r.get('criticalityLabel')
+ } for r in response['data'].get('results', [])]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Recorded Future IP risk rules', mapped_rules, headers=headers,
+ removeNull=True),
+ 'EntryContext': {
+ 'RecordedFuture.RiskRule.IP(val.Name === obj.Name)': createContext(mapped_rules)
+ }
+ })
+
+
+def get_ip_riskrules():
+ cmd_url = 'ip/riskrules'
+
+ res = http_request('get', cmd_url)
+
+ return res
+
+
+def get_url_riskrules_command():
+ response = json.loads(get_url_riskrules())
+
+ if not response or 'data' not in response:
+ demisto.results('No results found')
+ return
+
+ headers = ['Name', 'Description', 'Count', 'Criticality']
+
+ mapped_rules = [{
+ 'Name': r.get('name'),
+ 'Description': r.get('description'),
+ 'Count': r.get('count'),
+ 'Criticality': r.get('criticalityLabel')
+ } for r in response['data'].get('results', [])]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Recorded Future URL risk rules', mapped_rules, headers=headers,
+ removeNull=True),
+ 'EntryContext': {
+ 'RecordedFuture.RiskRule.URL(val.Name === obj.Name)': createContext(mapped_rules)
+ }
+ })
+
+
+def get_url_riskrules():
+ cmd_url = 'url/riskrules'
+
+ res = http_request('get', cmd_url)
+
+ return res
+
+
+def get_vulnerability_riskrules_command():
+ response = json.loads(get_vulnerability_riskrules())
+
+ if not response or 'data' not in response:
+ demisto.results('No results found')
+ return
+
+ headers = ['Name', 'Description', 'Count', 'Criticality']
+
+ mapped_rules = [{
+ 'Name': r.get('name'),
+ 'Description': r.get('description'),
+ 'Count': r.get('count'),
+ 'Criticality': r.get('criticalityLabel')
+ } for r in response['data'].get('results', [])]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Recorded Future Vulnerability risk rules', mapped_rules, headers=headers,
+ removeNull=True),
+ 'EntryContext': {
+ 'RecordedFuture.RiskRule.Vulnerability(val.Name === obj.Name)': createContext(mapped_rules)
+ }
+ })
+
+
+def get_vulnerability_riskrules():
+ cmd_url = 'vulnerability/riskrules'
+
+ res = http_request('get', cmd_url)
+
+ return res
+
+
+def get_alert_rules_command():
+ rule_name = demisto.args().get('rule_name')
+ limit = demisto.args().get('limit')
+
+ response = json.loads(get_alert_rules(rule_name, limit))
+
+ if not response or 'data' not in response:
+ demisto.results('No results found')
+ return
+
+ mapped_rules = [{
+ 'Name': r['title'],
+ 'ID': r['id']
+ } for r in response['data'].get('results', [])]
+
+ if len(mapped_rules) == 0:
+ demisto.results('No results found')
+ return
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Recorded Future Alert rules', mapped_rules, removeNull=True),
+ 'EntryContext': {
+ 'RecordedFuture.AlertRule(val.ID === obj.ID)': createContext(mapped_rules)
+ }
+ })
+
+
+def get_alert_rules(rule_name=None, limit=None):
+ cmd_url = 'alert/rule'
+
+ params = {}
+
+ if rule_name:
+ params['freetext'] = rule_name
+ if limit:
+ params['limit'] = limit
+
+ return http_request('get', cmd_url, params=params)
+
+
+def get_alerts_command():
+ rule_id = demisto.args().get('rule_id')
+ limit = demisto.args().get('limit')
+ triggered = demisto.args().get('triggered_time')
+ assignee = demisto.args().get('assignee')
+ status = demisto.args().get('status')
+ freetext = demisto.args().get('freetext')
+ offset = demisto.args().get('offset')
+ orderby = demisto.args().get('orderby')
+ direction = demisto.args().get('direction')
+
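+    # '[<date>,)' is interval notation: alerts triggered at or after <date>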
+ triggered_time = None
+ if triggered:
+ date, _ = parse_date_range(triggered, date_format='%Y-%m-%d %H:%M:%S')
+ triggered_time = '[{},)'.format(date)
+
+ response = json.loads(
+ get_alerts(rule_id, triggered_time, limit, assignee, status, freetext, offset, orderby, direction))
+
+ if not response or 'data' not in response:
+ demisto.results('No results found')
+ return
+
+ headers = ['ID', 'Name', 'Type', 'Triggered', 'Status', 'Assignee', 'Rule']
+
+ mapped_alerts = [{
+ 'ID': a['id'],
+ 'Name': a['title'],
+ 'Type': a['type'],
+ 'Triggered': a['triggered'],
+ 'Status': a.get('review', {}).get('status'),
+ 'Assignee': a.get('review', {}).get('assignee'),
+ 'Rule': a.get('rule', {}).get('name')
+ } for a in response['data'].get('results', [])]
+
+ if len(mapped_alerts) == 0:
+ demisto.results('No results found')
+ return
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Recorded Future Alerts', mapped_alerts, headers=headers, removeNull=True),
+ 'EntryContext': {
+ 'RecordedFuture.Alert(val.ID === obj.ID)': createContext(mapped_alerts)
+ }
+ })
+
+
+def get_alerts(rule_id=None, triggered=None, limit=None, assignee=None, status=None, freetext=None, offset=None,
+ orderby=None, direction=None):
+ cmd_url = 'alert/search'
+
+ params = {}
+
+ if rule_id:
+ params['alertRule'] = rule_id
+ if limit:
+ params['limit'] = limit
+ if triggered:
+ params['triggered'] = triggered
+ if assignee:
+ params['assignee'] = assignee
+ if status:
+ params['status'] = status
+ if freetext:
+ params['freetext'] = freetext
+ if offset:
+ params['from'] = offset
+ if orderby:
+ params['orderby'] = orderby
+ if direction:
+ params['direction'] = direction
+
+ return http_request('get', cmd_url, params=params)
+
+
+def get_alert(alert_id):
+ cmd_url = 'alert/' + alert_id
+
+ return http_request('get', cmd_url)
+
+
+def fetch_incidents():
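+    # Create incidents from alerts triggered since the last run (or since
+    # FETCH_TIME on the first run), optionally filtered by configured rule names.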
+ if RULE_NAMES:
+ rule_names = RULE_NAMES.split(';')
+ else:
+ rule_names = []
+
+ if FETCH_TIME:
+ fetch_time = FETCH_TIME
+ else:
+ fetch_time = '24 hours'
+
+ last_run = demisto.getLastRun()
+ if not last_run:
+ last_run = {}
+ if 'time' not in last_run:
+ time, _ = parse_date_range(fetch_time, date_format='%Y-%m-%dT%H:%M:%S.%fZ')
+ else:
+ time = last_run['time']
+
+ current_time = datetime.strptime(time, '%Y-%m-%dT%H:%M:%S.%fZ')
+ triggered_time = '[{},)'.format(datetime.strftime(current_time, '%Y-%m-%d %H:%M:%S'))
+ max_time = current_time
+
+ rule_ids = [] # type: list
+
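+    # Resolve configured rule names to rule IDs via freetext search;
+    # a single name can match more than one rule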
+ for rule in rule_names:
+ rules = json.loads(get_alert_rules(rule))
+ if rules and 'data' in rules:
+ rule_ids += map(lambda r: r['id'], rules['data'].get('results', []))
+
+ all_alerts = [] # type: list
+ if rule_ids:
+ for rule_id in rule_ids:
+ alerts = json.loads(get_alerts(rule_id, triggered_time))
+ if alerts and 'data' in alerts:
+ all_alerts += alerts['data'].get('results', [])
+ else:
+ alerts = json.loads(get_alerts(triggered=triggered_time))
+ if alerts and 'data' in alerts:
+ all_alerts += alerts['data'].get('results', [])
+
+ incidents = []
+ for alert in all_alerts:
+ alert_time = datetime.strptime(alert['triggered'], '%Y-%m-%dT%H:%M:%S.%fZ')
+        # The API also returns alerts triggered at exactly the same time, so compare strictly to avoid duplicates
+ if alert_time > current_time:
+ alert_data = json.loads(get_alert(alert['id']))
+ if alert_data and 'data' in alert_data:
+ alert = alert_data['data']
+ incidents.append({
+ 'name': 'Recorded Future Alert - ' + alert['title'],
+ 'occurred': datetime.strftime(alert_time, '%Y-%m-%dT%H:%M:%SZ'),
+ 'rawJSON': json.dumps(alert)
+ })
+
+ if alert_time > max_time:
+ max_time = alert_time
+
+ demisto.incidents(incidents)
+ demisto.setLastRun({
+ 'time': datetime.strftime(max_time, '%Y-%m-%dT%H:%M:%S.%fZ')
+ })
+
+
+''' EXECUTION CODE '''
+LOG('command is %s' % (demisto.command(),))
+
+try:
+ if demisto.command() == 'test-module':
+ try:
+            json.loads(ip_lookup('8.8.8.8'))
+        except Exception as ex:
+            return_error('Failed to get response. The URL might be incorrect. ' + str(ex))
+ demisto.results('ok')
+
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+
+ elif demisto.command() == 'domain':
+ domain_command()
+
+ elif demisto.command() == 'url':
+ url_command()
+
+ elif demisto.command() == 'ip':
+ ip_command()
+
+ elif demisto.command() == 'file':
+ file_command()
+
+ elif demisto.command() == 'recorded-future-get-related-entities':
+ get_related_entities_command()
+
+ elif demisto.command() == 'recorded-future-get-threats-hash':
+ hashlist_command()
+
+ elif demisto.command() == 'recorded-future-get-threats-ip':
+ iplist_command()
+
+ elif demisto.command() == 'recorded-future-get-threats-url':
+ urllist_command()
+
+ elif demisto.command() == 'recorded-future-get-threats-domain':
+ domainlist_command()
+
+ elif demisto.command() == 'recorded-future-get-threats-vulnerabilities':
+ vulnlist_command()
+
+ elif demisto.command() == 'recorded-future-get-url-risklist':
+ get_url_risklist_command()
+
+ elif demisto.command() == 'recorded-future-get-domain-risklist':
+ get_domain_risklist_command()
+
+ elif demisto.command() == 'recorded-future-get-ip-risklist':
+ get_ip_risklist_command()
+
+ elif demisto.command() == 'recorded-future-get-vulnerability-risklist':
+ get_vulnerability_risklist_command()
+
+ elif demisto.command() == 'recorded-future-get-hash-risklist':
+ get_hash_risklist_command()
+
+ elif demisto.command() == 'recorded-future-get-domain-riskrules':
+ get_domain_riskrules_command()
+
+ elif demisto.command() == 'recorded-future-get-hash-riskrules':
+ get_hash_riskrules_command()
+
+ elif demisto.command() == 'recorded-future-get-ip-riskrules':
+ get_ip_riskrules_command()
+
+ elif demisto.command() == 'recorded-future-get-url-riskrules':
+ get_url_riskrules_command()
+
+ elif demisto.command() == 'recorded-future-get-vulnerability-riskrules':
+ get_vulnerability_riskrules_command()
+
+ elif demisto.command() == 'recorded-future-get-alert-rules':
+ get_alert_rules_command()
+
+ elif demisto.command() == 'recorded-future-get-alerts':
+ get_alerts_command()
+
+except Exception as e:
+    LOG(str(e))
+    LOG.print_log()
+    return_error(str(e))
diff --git a/Integrations/Recorded_Future/Recorded_Future.yml b/Integrations/Recorded_Future/Recorded_Future.yml
new file mode 100644
index 000000000000..0633cf506aa6
--- /dev/null
+++ b/Integrations/Recorded_Future/Recorded_Future.yml
@@ -0,0 +1,1179 @@
+commonfields:
+ id: Recorded Future
+ version: -1
+name: Recorded Future
+display: Recorded Future
+category: Data Enrichment & Threat Intelligence
+description: Unique threat intel technology that automatically serves up relevant
+ insights in real time.
+configuration:
+- display: Server URL (e.g., https://api.recordedfuture.com)
+ name: server
+ defaultvalue: https://api.recordedfuture.com
+ type: 0
+ required: true
+- display: API Token
+ name: token
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Suspicious Threshold. Minimum risk score to consider an indicator suspicious.
+ name: suspicious_threshold
+ defaultvalue: "5"
+ type: 0
+ required: false
+- display: File Threshold. Minimum risk score from Recorded Future to consider the
+ file malicious.
+ name: file_threshold
+ defaultvalue: "65"
+ type: 0
+ required: false
+- display: IP Threshold. Minimum risk score from Recorded Future to consider the
+    IP malicious.
+ name: ip_threshold
+ defaultvalue: "65"
+ type: 0
+ required: false
+- display: Domain Threshold. Minimum risk score from Recorded Future to consider the
+ domain malicious.
+ name: domain_threshold
+ defaultvalue: "65"
+ type: 0
+ required: false
+- display: URL Threshold. Minimum risk score from Recorded Future to consider the
+ URL malicious.
+ name: url_threshold
+ defaultvalue: "65"
+ type: 0
+ required: false
+- display: Vulnerability Threshold. Minimum risk score from Recorded Future to consider
+ the vulnerability critical.
+ name: cve_threshold
+ defaultvalue: "65"
+ type: 0
+ required: false
+- display: Trust any certificate (not secure)
+ name: unsecure
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+ required: false
+- display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Rule names to fetch alerts by, separated by semicolon. If empty, all alerts
+ will be fetched
+ name: rule_names
+ defaultvalue: Global Trends, Trending Vulnerabilities;Global Trends, Trending Attackers
+ type: 12
+ required: false
+- display: First fetch time (<number> <time unit>, e.g., 12 hours, 7 days, 3 months,
+    1 year)
+ name: triggered
+ defaultvalue: 24 hours
+ type: 0
+ required: false
+- display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+script:
+ script: '-'
+ type: python
+ subtype: python2
+ commands:
+ - name: domain
+ arguments:
+ - name: domain
+ required: true
+ default: true
+ description: Domain to get the reputation of
+ - name: detailed
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: If true, fetches evidence details. Evidence is a record that is
+ generated if any of the risk rules in Recorded Future is triggered.
+ defaultValue: "false"
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: Domain.Name
+ description: Domain name
+ type: string
+ - contextPath: Domain.RecordedFuture.Criticality
+ description: Domain criticality label
+ type: string
+ - contextPath: Domain.RecordedFuture.FirstSeen
+ description: Risk first seen timestamp
+ type: date
+ - contextPath: Domain.RecordedFuture.LastSeen
+ description: Risk last seen timestamp
+ type: date
+  description: Returns threat intelligence information for a domain or DNS name
+    in Recorded Future.
+ - name: ip
+ arguments:
+ - name: ip
+ required: true
+ default: true
+ description: IP address to get the reputation of
+ - name: detailed
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: If true, fetches evidence details. Evidence is a record that is
+ generated if any of the risk rules in Recorded Future is triggered.
+ defaultValue: "false"
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the reason that the vendor made the
+ decision
+ type: string
+ - contextPath: IP.Address
+ description: IP address
+ type: string
+ - contextPath: IP.RecordedFuture.Criticality
+ description: Risk criticality label
+ type: string
+ - contextPath: IP.RecordedFuture.FirstSeen
+ description: Risk first seen timestamp
+ type: date
+ - contextPath: IP.RecordedFuture.LastSeen
+ description: Risk last seen timestamp
+ type: date
+ description: Returns threat intelligence information for an IP address in Recorded
+ Future.
+ - name: file
+ arguments:
+ - name: file
+ required: true
+ default: true
+ description: File hash to check the reputation of (MD5, SHA-1, SHA-256, SHA-512,
+ CRC-32, CTPH)
+ - name: detailed
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: If true, fetches evidence details. Evidence is a record that is
+ generated if any of the risk rules in Recorded Future is triggered.
+ defaultValue: "false"
+ outputs:
+ - contextPath: File.SHA256
+ description: File SHA-256
+ type: string
+ - contextPath: File.SHA512
+ description: File SHA-512
+ type: string
+ - contextPath: File.SHA1
+ description: File SHA-1
+ type: string
+ - contextPath: File.MD5
+    description: File MD5
+ type: string
+ - contextPath: File.CRC32
+ description: File CRC-32
+ type: string
+ - contextPath: File.CTPH
+ description: File CTPH
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the reason that the vendor made the decision
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: File.Criticality
+ description: Risk criticality label
+ type: string
+ - contextPath: File.RecordedFuture.FirstSeen
+ description: Risk first seen timestamp
+ type: date
+ - contextPath: File.RecordedFuture.LastSeen
+ description: Risk last seen timestamp
+ type: date
+ description: Returns threat intelligence information for a file in Recorded Future.
+ - name: recorded-future-get-related-entities
+ arguments:
+ - name: entityType
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - domain
+ - ip
+ - file
+ - url
+ description: The type of entity to fetch context for. (Should be provided with
+ its value in entityValue argument)
+ - name: entityValue
+ required: true
+ description: 'The value of the entity to fetch context for. (Should be provided
+ with its type in entityType argument, Hash types supported: MD5, SHA-1, SHA-256,
+ SHA-512, CRC-32, CTPH)'
+ - name: resultEntityType
+ auto: PREDEFINED
+ predefined:
+ - All
+ - Hash
+ - IP
+ - Domain
+ - Attacker
+ - Malware
+ - URL
+ description: CSV list of related entity types to return in the result (e.g.,
+ Hash,IP,Domain)
+ defaultValue: All
+ outputs:
+ - contextPath: File.SHA256
+ description: File SHA-256
+ type: string
+ - contextPath: File.SHA512
+ description: File SHA-512
+ type: string
+ - contextPath: File.SHA1
+ description: File SHA-1
+ type: string
+ - contextPath: File.MD5
+    description: File MD5
+ type: string
+ - contextPath: File.CRC32
+ description: File CRC-32
+ type: string
+ - contextPath: File.CTPH
+ description: File CTPH
+ type: string
+ - contextPath: File.RecordedFuture.RelatedEntities.IPAddress.Count
+ description: File related entity count (IP)
+ type: number
+ - contextPath: File.RecordedFuture.RelatedEntities.IPAddress.ID
+ description: File related entity ID (IP)
+ type: string
+ - contextPath: File.RecordedFuture.RelatedEntities.IPAddress.Name
+ description: File related entity name (IP)
+ type: string
+ - contextPath: File.RecordedFuture.RelatedEntities.Hash.Count
+ description: File related entity count (Hash)
+ type: number
+ - contextPath: File.RecordedFuture.RelatedEntities.Hash.ID
+ description: File related entity ID (Hash)
+ type: string
+ - contextPath: File.RecordedFuture.RelatedEntities.Hash.Name
+ description: File related entity name (Hash)
+ type: string
+ - contextPath: File.RecordedFuture.RelatedEntities.Domain.Count
+ description: File related entity count (Domain)
+ type: number
+ - contextPath: File.RecordedFuture.RelatedEntities.Domain.ID
+ description: File related entity ID (Domain)
+ type: string
+ - contextPath: File.RecordedFuture.RelatedEntities.Domain.Name
+ description: File related entity name (Domain)
+ type: string
+ - contextPath: File.RecordedFuture.RelatedEntities.Attacker.Count
+ description: File related entity count (Attacker)
+ type: number
+ - contextPath: File.RecordedFuture.RelatedEntities.Attacker.ID
+ description: File related entity ID (Attacker)
+ type: string
+ - contextPath: File.RecordedFuture.RelatedEntities.Attacker.Name
+ description: File related entity name (Attacker)
+ type: string
+ - contextPath: File.RecordedFuture.RelatedEntities.Malware.Count
+ description: File related entity count (Malware)
+ type: number
+ - contextPath: File.RecordedFuture.RelatedEntities.Malware.ID
+ description: File related entity ID (Malware)
+ type: string
+ - contextPath: File.RecordedFuture.RelatedEntities.Malware.Name
+ description: File related entity name (Malware)
+ type: string
+ - contextPath: File.RecordedFuture.RelatedEntities.URL.Count
+ description: File related entity count (URL)
+ type: number
+ - contextPath: File.RecordedFuture.RelatedEntities.URL.ID
+ description: File related entity ID (URL)
+ type: string
+ - contextPath: File.RecordedFuture.RelatedEntities.URL.Data
+ description: File related entity name (URL)
+ type: string
+ - contextPath: IP.Address
+ description: IP address
+ type: string
+ - contextPath: IP.RecordedFuture.RelatedEntities.IPAddress.Count
+ description: IP related entity count (IP)
+ type: number
+ - contextPath: IP.RecordedFuture.RelatedEntities.IPAddress.ID
+ description: IP related entity ID (IP)
+ type: string
+ - contextPath: IP.RecordedFuture.RelatedEntities.IPAddress.Name
+ description: IP related entity name (IP)
+ type: string
+ - contextPath: IP.RecordedFuture.RelatedEntities.Hash.Count
+ description: IP related entity count (Hash)
+ type: number
+ - contextPath: IP.RecordedFuture.RelatedEntities.Hash.ID
+ description: IP related entity ID (Hash)
+ type: string
+ - contextPath: IP.RecordedFuture.RelatedEntities.Hash.Name
+ description: IP related entity name (Hash)
+ type: string
+ - contextPath: IP.RecordedFuture.RelatedEntities.Domain.Count
+ description: IP related entity count (Domain)
+ type: number
+ - contextPath: IP.RecordedFuture.RelatedEntities.Domain.ID
+ description: IP related entity ID (Domain)
+ type: string
+ - contextPath: IP.RecordedFuture.RelatedEntities.Domain.Name
+ description: IP related entity name (Domain)
+ type: string
+ - contextPath: IP.RecordedFuture.RelatedEntities.Attacker.Count
+ description: IP related entity count (Attacker)
+ type: number
+ - contextPath: IP.RecordedFuture.RelatedEntities.Attacker.ID
+ description: IP related entity ID (Attacker)
+ type: string
+ - contextPath: IP.RecordedFuture.RelatedEntities.Attacker.Name
+ description: IP related entity name (Attacker)
+ type: string
+ - contextPath: IP.RecordedFuture.RelatedEntities.Malware.Count
+ description: IP related entity count (Malware)
+ type: number
+ - contextPath: IP.RecordedFuture.RelatedEntities.Malware.ID
+ description: IP related entity ID (Malware)
+ type: string
+ - contextPath: IP.RecordedFuture.RelatedEntities.Malware.Name
+ description: IP related entity name (Malware)
+ type: string
+ - contextPath: IP.RecordedFuture.RelatedEntities.URL.Count
+ description: IP related entity count (URL)
+ type: number
+ - contextPath: IP.RecordedFuture.RelatedEntities.URL.ID
+ description: IP related entity ID (URL)
+ type: string
+ - contextPath: IP.RecordedFuture.RelatedEntities.URL.Data
+ description: IP related entity name (URL)
+ type: string
+ - contextPath: Domain.Name
+ description: Domain name
+ type: string
+ - contextPath: Domain.RecordedFuture.RelatedEntities.IPAddress.Count
+ description: Domain related entity count (IP)
+ type: number
+ - contextPath: Domain.RecordedFuture.RelatedEntities.IPAddress.ID
+ description: Domain related entity ID (IP)
+ type: string
+ - contextPath: Domain.RecordedFuture.RelatedEntities.IPAddress.Name
+ description: Domain related entity name (IP)
+ type: string
+ - contextPath: Domain.RecordedFuture.RelatedEntities.Hash.Count
+ description: Domain related entity count (Hash)
+ type: number
+ - contextPath: Domain.RecordedFuture.RelatedEntities.Hash.ID
+ description: Domain related entity ID (Hash)
+ type: string
+ - contextPath: Domain.RecordedFuture.RelatedEntities.Hash.Name
+ description: Domain related entity name (Hash)
+ type: string
+ - contextPath: Domain.RecordedFuture.RelatedEntities.Domain.Count
+ description: Domain related entity count (Domain)
+ type: number
+ - contextPath: Domain.RecordedFuture.RelatedEntities.Domain.ID
+ description: Domain related entity ID (Domain)
+ type: string
+ - contextPath: Domain.RecordedFuture.RelatedEntities.Domain.Name
+ description: Domain related entity name (Domain)
+ type: string
+ - contextPath: Domain.RecordedFuture.RelatedEntities.Attacker.Count
+ description: Domain related entity count (Attacker)
+ type: number
+ - contextPath: Domain.RecordedFuture.RelatedEntities.Attacker.ID
+ description: Domain related entity ID (Attacker)
+ type: string
+ - contextPath: Domain.RecordedFuture.RelatedEntities.Attacker.Name
+ description: Domain related entity name (Attacker)
+ type: string
+ - contextPath: Domain.RecordedFuture.RelatedEntities.Malware.Count
+ description: Domain related entity count (Malware)
+ type: number
+ - contextPath: Domain.RecordedFuture.RelatedEntities.Malware.ID
+ description: Domain related entity ID (Malware)
+ type: string
+ - contextPath: Domain.RecordedFuture.RelatedEntities.Malware.Name
+ description: Domain related entity name (Malware)
+ type: string
+ - contextPath: Domain.RecordedFuture.RelatedEntities.URL.Count
+ description: Domain related entity count (URL)
+ type: number
+ - contextPath: Domain.RecordedFuture.RelatedEntities.URL.ID
+ description: Domain related entity ID (URL)
+ type: string
+ - contextPath: Domain.RecordedFuture.RelatedEntities.URL.Data
+ description: Domain related entity name (URL)
+ type: string
+ - contextPath: URL.Data
+ description: URL name
+ type: string
+ - contextPath: URL.RecordedFuture.RelatedEntities.IPAddress.Count
+ description: URL related entity count (IP)
+ type: number
+ - contextPath: URL.RecordedFuture.RelatedEntities.IPAddress.ID
+ description: URL related entity ID (IP)
+ type: string
+ - contextPath: URL.RecordedFuture.RelatedEntities.IPAddress.Name
+ description: URL related entity name (IP)
+ type: string
+ - contextPath: URL.RecordedFuture.RelatedEntities.Hash.Count
+ description: URL related entity count (Hash)
+ type: number
+ - contextPath: URL.RecordedFuture.RelatedEntities.Hash.ID
+ description: URL related entity ID (Hash)
+ type: string
+ - contextPath: URL.RecordedFuture.RelatedEntities.Hash.Name
+ description: URL related entity name (Hash)
+ type: string
+ - contextPath: URL.RecordedFuture.RelatedEntities.Domain.Count
+ description: URL related entity count (Domain)
+ type: number
+ - contextPath: URL.RecordedFuture.RelatedEntities.Domain.ID
+ description: URL related entity ID (Domain)
+ type: string
+ - contextPath: URL.RecordedFuture.RelatedEntities.Domain.Name
+ description: URL related entity name (Domain)
+ type: string
+ - contextPath: URL.RecordedFuture.RelatedEntities.Attacker.Count
+ description: URL related entity count (Attacker)
+ type: number
+ - contextPath: URL.RecordedFuture.RelatedEntities.Attacker.ID
+ description: URL related entity ID (Attacker)
+ type: string
+ - contextPath: URL.RecordedFuture.RelatedEntities.Attacker.Name
+ description: URL related entity name (Attacker)
+ type: string
+ - contextPath: URL.RecordedFuture.RelatedEntities.Malware.Count
+ description: URL related entity count (Malware)
+ type: number
+ - contextPath: URL.RecordedFuture.RelatedEntities.Malware.ID
+ description: URL related entity ID (Malware)
+ type: string
+ - contextPath: URL.RecordedFuture.RelatedEntities.Malware.Name
+ description: URL related entity name (Malware)
+ type: string
+ - contextPath: URL.RecordedFuture.RelatedEntities.URL.Count
+ description: URL related entity count (URL)
+ type: number
+ - contextPath: URL.RecordedFuture.RelatedEntities.URL.ID
+ description: URL related entity ID (URL)
+ type: string
+ - contextPath: URL.RecordedFuture.RelatedEntities.URL.Data
+ description: URL related entity name (URL)
+ type: string
+ description: Returns threat intelligence context for an indicator in Recorded
+ Future.
+ - name: url
+ arguments:
+ - name: url
+ required: true
+ default: true
+ description: URL to get the reputation of
+ - name: detailed
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: If true, fetches evidence details. Evidence is a record that is
+ generated if any of the risk rules in Recorded Future is triggered.
+ defaultValue: "false"
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: URL.Data
+ description: URL name
+ type: string
+ - contextPath: URL.RecordedFuture.Criticality
+ description: URL criticality label
+ type: string
+ - contextPath: URL.RecordedFuture.FirstSeen
+ description: Risk first seen timestamp
+ type: date
+ - contextPath: URL.RecordedFuture.LastSeen
+ description: Risk last seen timestamp
+ type: date
+ description: Returns threat intelligence information for a URL in Recorded Future.
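+  # Illustrative invocation (argument values are placeholders, not real data):
+  # !url url="example.com" detailed="true"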
+ - name: recorded-future-get-threats-hash
+ arguments:
+ - name: detailed
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: If true, fetches evidence details. Evidence is a record that is
+ generated if any of the risk rules in Recorded Future is triggered.
+ defaultValue: "false"
+ - name: limit
+ description: Number of results to return
+ defaultValue: "1"
+ - name: risk_lower
+ description: Minimum threshold score to return results for
+ defaultValue: "65"
+ - name: risk_higher
+ description: Maximum threshold score to return results for
+ defaultValue: "99"
+ - name: orderby
+ auto: PREDEFINED
+ predefined:
+ - created
+ - criticality
+ - firstseen
+ - lastseen
+ - modified
+ - riskscore
+ - rules
+ - sevendayshits
+ - sixtydayshits
+ - totalhits
+    description: Category to sort by
+ defaultValue: sevendayshits
+  - name: direction
+    auto: PREDEFINED
+    predefined:
+    - asc
+    - desc
+    description: Sort direction
+ outputs:
+ - contextPath: File.SHA256
+ description: File SHA-256
+ type: string
+ - contextPath: File.SHA512
+ description: File SHA-512
+ type: string
+ - contextPath: File.SHA1
+ description: File SHA-1
+ type: string
+ - contextPath: File.MD5
+    description: File MD5
+ type: string
+ - contextPath: File.CRC32
+ description: File CRC-32
+ type: string
+ - contextPath: File.CTPH
+ description: File CTPH
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the reason that the vendor made the decision
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: File.RecordedFuture.Criticality
+ description: Risk criticality label
+ type: string
+ - contextPath: File.RecordedFuture.FirstSeen
+ description: Risk first seen timestamp
+ type: date
+ - contextPath: File.RecordedFuture.LastSeen
+ description: Risk last seen timestamp
+ type: date
+    description: Returns hash threats from Recorded Future.
+ - name: recorded-future-get-threats-ip
+ arguments:
+ - name: detailed
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: If true, fetches evidence details. Evidence is a record that is
+ generated if any of the risk rules in Recorded Future is triggered.
+ defaultValue: "false"
+ - name: limit
+ description: Number of results to return
+ defaultValue: "1"
+ - name: risk_lower
+ description: Minimum threshold score to return results for
+ defaultValue: "65"
+ - name: risk_higher
+ description: Maximum threshold score to return results for
+ defaultValue: "99"
+ - name: orderby
+ auto: PREDEFINED
+ predefined:
+ - created
+ - criticality
+ - firstseen
+ - lastseen
+ - modified
+ - riskscore
+ - rules
+ - sevendayshits
+ - sixtydayshits
+ - totalhits
+ description: Category to sort by
+ defaultValue: sevendayshits
+ - name: direction
+ auto: PREDEFINED
+ predefined:
+ - asc
+ - desc
+ description: Sort direction
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the reason that the vendor made the
+ decision
+ type: string
+ - contextPath: IP.Address
+ description: IP address
+ type: string
+ - contextPath: IP.RecordedFuture.Criticality
+ description: Risk criticality label
+ type: string
+ - contextPath: IP.RecordedFuture.FirstSeen
+ description: Risk first seen timestamp
+    type: date
+ - contextPath: IP.RecordedFuture.LastSeen
+ description: Risk last seen timestamp
+ type: date
+    description: Returns IP threats from Recorded Future.
+ - name: recorded-future-get-threats-url
+ arguments:
+ - name: detailed
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: If true, fetches evidence details. Evidence is a record that is
+ generated if any of the risk rules in Recorded Future is triggered.
+ defaultValue: "false"
+ - name: limit
+ description: Number of results to return
+ defaultValue: "1"
+ - name: risk_lower
+ description: Minimum threshold score to return results for
+ defaultValue: "65"
+ - name: risk_higher
+ description: Maximum threshold score to return results for
+ defaultValue: "99"
+ - name: orderby
+ auto: PREDEFINED
+ predefined:
+ - created
+ - criticality
+ - firstseen
+ - lastseen
+ - modified
+ - riskscore
+ - rules
+ - sevendayshits
+ - sixtydayshits
+ - totalhits
+ description: Category to sort by
+ defaultValue: sevendayshits
+ - name: direction
+ auto: PREDEFINED
+ predefined:
+ - asc
+ - desc
+ description: Sort direction
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: URL.Data
+ description: URL name
+ type: string
+ - contextPath: URL.RecordedFuture.Criticality
+ description: URL criticality label
+ type: string
+ - contextPath: URL.RecordedFuture.FirstSeen
+ description: Risk first seen timestamp
+ type: date
+ - contextPath: URL.RecordedFuture.LastSeen
+ description: Risk last seen timestamp
+ type: date
+    description: Returns URL threats from Recorded Future.
+ - name: recorded-future-get-threats-domain
+ arguments:
+ - name: detailed
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: If true, fetches evidence details. Evidence is a record that is
+ generated if any of the risk rules in Recorded Future is triggered.
+ defaultValue: "false"
+ - name: limit
+    description: Number of results to return
+ defaultValue: "1"
+ - name: risk_lower
+ description: Minimum threshold score to return results for
+ defaultValue: "65"
+ - name: risk_higher
+ description: Maximum threshold score to return results for
+ defaultValue: "99"
+ - name: orderby
+ auto: PREDEFINED
+ predefined:
+ - created
+ - criticality
+ - firstseen
+ - lastseen
+ - modified
+ - riskscore
+ - rules
+ - sevendayshits
+ - sixtydayshits
+ - totalhits
+ description: Category to sort by
+ defaultValue: sevendayshits
+ - name: direction
+ auto: PREDEFINED
+ predefined:
+ - asc
+ - desc
+ description: Sort direction
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: Domain.Name
+ description: Domain name
+ type: string
+ - contextPath: Domain.RecordedFuture.Criticality
+ description: Domain criticality label
+ type: string
+ - contextPath: Domain.RecordedFuture.FirstSeen
+ description: Risk first seen timestamp
+ type: date
+ - contextPath: Domain.RecordedFuture.LastSeen
+ description: Risk last seen timestamp
+ type: date
+    description: Returns domain threats from Recorded Future.
+ - name: recorded-future-get-threats-vulnerabilities
+ arguments:
+ - name: limit
+ description: Number of results to return
+ defaultValue: "1"
+ - name: risk_lower
+ description: Minimum threshold score to return results for
+ defaultValue: "65"
+ - name: risk_higher
+ description: Maximum threshold score to return results for
+ defaultValue: "99"
+ - name: detailed
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: If true, fetches evidence details. Evidence is a record that is
+ generated if any of the risk rules in Recorded Future is triggered.
+ defaultValue: "false"
+ - name: orderby
+ auto: PREDEFINED
+ predefined:
+ - created
+ - criticality
+ - firstseen
+ - lastseen
+ - modified
+ - riskscore
+ - rules
+ - sevendayshits
+ - sixtydayshits
+ - totalhits
+ description: Category to sort by
+ defaultValue: sevendayshits
+ - name: direction
+ auto: PREDEFINED
+ predefined:
+ - asc
+ - desc
+ description: Sort direction
+ outputs:
+ - contextPath: CVE.ID
+ description: Vulnerability CVE ID
+ type: string
+ - contextPath: CVE.RecordedFuture.Criticality
+ description: CVE criticality label
+ type: string
+ - contextPath: CVE.RecordedFuture.FirstSeen
+ description: Risk first seen timestamp
+ type: date
+ - contextPath: CVE.RecordedFuture.LastSeen
+ description: Risk last seen timestamp
+ type: date
+ description: Returns vulnerability threats from Recorded Future.
+ - name: recorded-future-get-domain-risklist
+ arguments:
+ - name: list
+ description: Specify a domain list by a risk rule name, which can be retrieved
+      from the get-domain-riskrules command.
+ outputs:
+ - contextPath: InfoFile.Name
+ description: File name
+ type: string
+ - contextPath: InfoFile.EntryID
+ description: The EntryID of the file
+ type: string
+ - contextPath: InfoFile.Size
+ description: File size
+ type: number
+ - contextPath: InfoFile.Type
+ description: File type, e.g., "PE"
+ type: string
+ - contextPath: InfoFile.Info
+ description: Basic information of the file
+ type: string
+ - contextPath: InfoFile.Extension
+ description: File extension
+ type: string
+ description: Gets the domain risk list as a CSV file from Recorded Future.
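+  # Illustrative invocation (the rule name is a placeholder, not a confirmed
+  # Recorded Future risk rule):
+  # !recorded-future-get-domain-risklist list="examplePlaceholderRule"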
+ - name: recorded-future-get-url-risklist
+ arguments:
+ - name: list
+ description: Specify a URL list by a risk rule name, which can be retrieved
+ from the get-url-riskrules command.
+ outputs:
+ - contextPath: InfoFile.Name
+ description: File name
+ type: string
+ - contextPath: InfoFile.EntryID
+ description: The EntryID of the file
+ type: string
+ - contextPath: InfoFile.Size
+ description: File size
+ type: number
+ - contextPath: InfoFile.Type
+ description: File type, e.g., "PE"
+ type: string
+ - contextPath: InfoFile.Info
+ description: Basic information of the file
+ type: string
+ - contextPath: InfoFile.Extension
+ description: File extension
+ type: string
+ description: Gets the URL risk list as a CSV file from Recorded Future.
+ - name: recorded-future-get-ip-risklist
+ arguments:
+ - name: list
+ description: Specify an IP list by a risk rule name, which can be retrieved
+ from the get-ip-riskrules command.
+ outputs:
+ - contextPath: InfoFile.Name
+ description: File name
+ type: string
+ - contextPath: InfoFile.EntryID
+ description: The EntryID of the file
+ type: string
+ - contextPath: InfoFile.Size
+ description: File size
+ type: number
+ - contextPath: InfoFile.Type
+ description: File type, e.g., "PE"
+ type: string
+ - contextPath: InfoFile.Info
+ description: Basic information of the file
+ type: string
+ - contextPath: InfoFile.Extension
+ description: File extension
+ type: string
+ description: Gets the IP risk list as a CSV file from Recorded Future.
+ - name: recorded-future-get-vulnerability-risklist
+ arguments:
+ - name: list
+ description: Specify a vulnerability list by a risk rule name, which can be
+ retrieved from the get-vulnerability-riskrules command.
+ outputs:
+ - contextPath: InfoFile.Name
+ description: File name
+ type: string
+ - contextPath: InfoFile.EntryID
+ description: File entry ID
+ type: string
+ - contextPath: InfoFile.Size
+ description: File size
+ type: number
+ - contextPath: InfoFile.Type
+ description: File type, e.g., "PE"
+ type: string
+ - contextPath: InfoFile.Info
+ description: Basic information of the file
+ type: string
+ - contextPath: InfoFile.Extension
+ description: File extension
+ type: string
+    description: Gets the vulnerability (CVE) risk list as a CSV file from Recorded
+      Future.
+ - name: recorded-future-get-hash-risklist
+ arguments:
+ - name: list
+    description: Specify a hash list by a risk rule name, which can be retrieved
+ from the get-hash-riskrules command.
+ outputs:
+ - contextPath: InfoFile.Name
+ description: File name
+ type: string
+ - contextPath: InfoFile.EntryID
+ description: File entry ID
+ type: string
+ - contextPath: InfoFile.Size
+ description: File size
+ type: number
+ - contextPath: InfoFile.Type
+ description: File type, e.g., "PE"
+ type: string
+ - contextPath: InfoFile.Info
+ description: Basic information of the file
+ type: string
+ - contextPath: InfoFile.Extension
+ description: File extension
+ type: string
+    description: Gets the hash risk list as a CSV file from Recorded Future.
+ - name: recorded-future-get-domain-riskrules
+ arguments: []
+ outputs:
+ - contextPath: RecordedFuture.RiskRule.Domain.Name
+ description: Risk rule name
+ type: string
+ - contextPath: RecordedFuture.RiskRule.Domain.Description
+ description: Risk rule description
+ type: string
+ - contextPath: RecordedFuture.RiskRule.Domain.Count
+ description: Risk rule indicator count
+ type: number
+ - contextPath: RecordedFuture.RiskRule.Domain.Criticality
+ description: Risk rule criticality
+ type: string
+ description: Gets the risk rules for domain data.
+ - name: recorded-future-get-hash-riskrules
+ arguments: []
+ outputs:
+ - contextPath: RecordedFuture.RiskRule.Hash.Name
+ description: Risk rule name
+ type: string
+ - contextPath: RecordedFuture.RiskRule.Hash.Description
+ description: Risk rule description
+ type: string
+ - contextPath: RecordedFuture.RiskRule.Hash.Count
+ description: Risk rule indicator count
+ type: number
+ - contextPath: RecordedFuture.RiskRule.Hash.Criticality
+ description: Risk rule criticality
+ type: string
+ description: Gets the risk rules for hash data.
+ - name: recorded-future-get-ip-riskrules
+ arguments: []
+ outputs:
+ - contextPath: RecordedFuture.RiskRule.IP.Name
+ description: Risk rule name
+ type: string
+ - contextPath: RecordedFuture.RiskRule.IP.Description
+ description: Risk rule description
+ type: string
+ - contextPath: RecordedFuture.RiskRule.IP.Count
+ description: Risk rule indicator count
+ type: number
+ - contextPath: RecordedFuture.RiskRule.IP.Criticality
+ description: Risk rule criticality
+ type: string
+ description: Gets the risk rules for IP data.
+ - name: recorded-future-get-url-riskrules
+ arguments: []
+ outputs:
+ - contextPath: RecordedFuture.RiskRule.URL.Name
+ description: Risk rule name
+ type: string
+ - contextPath: RecordedFuture.RiskRule.URL.Description
+ description: Risk rule description
+ type: string
+ - contextPath: RecordedFuture.RiskRule.URL.Count
+ description: Risk rule indicator count
+ type: number
+ - contextPath: RecordedFuture.RiskRule.URL.Criticality
+ description: Risk rule criticality
+ type: string
+ description: Gets the risk rules for URL data.
+ - name: recorded-future-get-vulnerability-riskrules
+ arguments: []
+ outputs:
+ - contextPath: RecordedFuture.RiskRule.Vulnerability.Name
+ description: Risk rule name
+ type: string
+ - contextPath: RecordedFuture.RiskRule.Vulnerability.Description
+ description: Risk rule description
+ type: string
+ - contextPath: RecordedFuture.RiskRule.Vulnerability.Count
+ description: Risk rule indicator count
+ type: number
+ - contextPath: RecordedFuture.RiskRule.Vulnerability.Criticality
+ description: Risk rule criticality
+ type: string
+ description: Gets the risk rules for vulnerability data.
+ - name: recorded-future-get-alert-rules
+ arguments:
+ - name: rule_name
+    description: Rule name to search for; can be a partial name.
+ - name: limit
+ description: Number of rules to return
+ defaultValue: "10"
+ outputs:
+ - contextPath: RecordedFuture.AlertRule.ID
+ description: Alert rule ID
+ type: string
+ - contextPath: RecordedFuture.AlertRule.Name
+ description: Alert rule name
+ type: string
+ description: Gets Recorded Future alert rules.
+ - name: recorded-future-get-alerts
+ arguments:
+ - name: rule_id
+ description: Alert rule ID
+ - name: limit
+ description: Number of alerts to return
+ defaultValue: "10"
+ - name: triggered_time
+ description: Alert triggered time, e.g., "1 hour" or "2 days"
+ - name: assignee
+ description: Alert assignee's email address
+ - name: status
+ auto: PREDEFINED
+ predefined:
+ - unassigned
+ - assigned
+ - actionable
+ - no-action
+ - tuning
+ description: Alert review status
+ - name: freetext
+ description: Free text search
+ - name: offset
+    description: Offset from which to start returning alerts
+ - name: orderby
+ auto: PREDEFINED
+ predefined:
+ - triggered
+ description: Alerts sort order
+ - name: direction
+ auto: PREDEFINED
+ predefined:
+ - asc
+ - desc
+ description: Alerts sort direction
+ outputs:
+ - contextPath: RecordedFuture.Alert.ID
+ description: Alert ID
+ type: string
+ - contextPath: RecordedFuture.Alert.Name
+ description: Alert name
+ type: string
+ - contextPath: RecordedFuture.Alert.Type
+ description: Alert type
+ type: string
+ - contextPath: RecordedFuture.Alert.Triggered
+ description: Alert triggered time
+ type: date
+ - contextPath: RecordedFuture.Alert.Status
+ description: Alert status
+ type: string
+ - contextPath: RecordedFuture.Alert.Assignee
+ description: Alert assignee
+ type: string
+ - contextPath: RecordedFuture.Alert.Rule
+ description: Alert rule name
+ type: string
+ description: Gets alerts from Recorded Future.
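+  # Illustrative invocation (values are placeholders):
+  # !recorded-future-get-alerts status="no-action" triggered_time="2 days" limit="5"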
+ isfetch: true
+ runonce: false
+tests:
+ - Recorded Future Test
diff --git a/Integrations/Recorded_Future/Recorded_Future_image.png b/Integrations/Recorded_Future/Recorded_Future_image.png
new file mode 100644
index 000000000000..1fcdbc426c6f
Binary files /dev/null and b/Integrations/Recorded_Future/Recorded_Future_image.png differ
diff --git a/Integrations/RedCanary/CHANGELOG.md b/Integrations/RedCanary/CHANGELOG.md
new file mode 100644
index 000000000000..bf7cc8ecc43e
--- /dev/null
+++ b/Integrations/RedCanary/CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+-
+
+
+## [19.8.2] - 2019-08-22
+Package RedCanary
\ No newline at end of file
diff --git a/Integrations/RedCanary/RedCanary.py b/Integrations/RedCanary/RedCanary.py
new file mode 100644
index 000000000000..a9df686a1075
--- /dev/null
+++ b/Integrations/RedCanary/RedCanary.py
@@ -0,0 +1,562 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+import requests
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+BASE_URL = '{}/openapi/v3'.format(demisto.params()['domain'])
+API_KEY = demisto.params()['api_key']
+USE_SSL = not demisto.params().get('insecure', False)
+
+''' HELPER FUNCTIONS '''
+
+
+def get_time_obj(t, time_format=None):
+ '''
+ convert a time string to datetime object
+
+ :type t: ``string`` or ``int``
+ :param t: time object as string or int for timestamp (required)
+
+ :type time_format: ``string``
+ :param time_format: time format string (optional)
+
+ :return: datetime object
+ :rtype: ``datetime``
+ '''
+ if time_format is not None:
+ return datetime.strptime(t, time_format)
+ if isinstance(t, int):
+ return datetime.fromtimestamp(t)
+ elif isinstance(t, tuple(STRING_TYPES)):
+ if '.' in t:
+ # in case of "2018-09-14T13:27:18.123456Z"
+ return datetime.strptime(t, '%Y-%m-%dT%H:%M:%S.%fZ')
+ else:
+            # in case of "2018-09-14T13:27:18Z"
+ return datetime.strptime(t, '%Y-%m-%dT%H:%M:%SZ')
+
+
+def get_time_str(time_obj, time_format=None):
+ '''
+ convert a datetime object to time format string
+
+    :type time_obj: ``datetime``
+    :param time_obj: time object (required)
+
+ :type time_format: ``string``
+ :param time_format: time format string (optional)
+
+ :return: time format string
+ :rtype: ``string``
+ '''
+ if time_format is None:
+ return time_obj.isoformat().split('.')[0] + 'Z'
+ else:
+        return datetime.strftime(time_obj, time_format)
+
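+# Round-trip sketch for the two helpers above (the timestamp is illustrative):
+# >>> get_time_str(get_time_obj('2018-09-14T13:27:18Z'))
+# '2018-09-14T13:27:18Z'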
+
+def http_request(requests_func, url_suffix, **kwargs):
+ params = kwargs.get('params')
+ headers = kwargs.get('headers', {})
+ data = kwargs.get('data', {})
+
+ res = requests_func(BASE_URL + url_suffix,
+ verify=USE_SSL,
+ params=params,
+ headers=headers,
+ data=data
+ )
+
+ if res.status_code == 403:
+ raise Exception('API Key is incorrect')
+
+ if res.status_code not in [200, 201, ]:
+ LOG('result is: %s' % (res.json(),))
+ error = res.json()
+ raise Exception('Your request failed with the following error: {}.\n'.format(error, ))
+
+ return res.json()
+
+
+@logger
+def http_get(url_suffix, params=None, data=None):
+ headers = {'X-Api-Key': API_KEY}
+ return http_request(requests.get, url_suffix, headers=headers, params=params, data=data)
+
+
+@logger
+def http_patch(url_suffix, params=None, data=None):
+ headers = {'X-Api-Key': API_KEY}
+ return http_request(requests.patch, url_suffix, headers=headers, params=params, data=data)
+
+
+@logger
+def http_post(url_suffix, params=None, data=None):
+ headers = {'X-Api-Key': API_KEY}
+ return http_request(requests.post, url_suffix, headers=headers, params=params, data=data)
+
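+# The http_get/http_patch/http_post wrappers above all inject the X-Api-Key
+# header, so callers supply only the endpoint suffix, e.g. (illustrative):
+#   http_get('/detections', params={'page': 1, 'per_page': 50})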
+
+def playbook_name_to_id(name):
+ playbooks = http_get('/exec/playbooks')['data']
+ ids = [p['id'] for p in playbooks if p['name'] == name]
+ if len(ids) != 1:
+ raise ValueError('Could not find specific id for name "{}"'.format(name))
+
+ return ids[0]
+
+
+def get_endpoint_context(res=None, endpoint_id=None):
+ if res is None:
+ res = http_get('/endpoints/{}'.format(endpoint_id))['data']
+
+ # Endpoint(val.Hostname == obj.Hostname)
+ return [{
+ 'Hostname': endpoint['attributes']['hostname'],
+ 'ID': endpoint['id'],
+ 'IPAddress': [addr['attributes']['ip_address']['attributes']['ip_address']
+ for addr in endpoint['attributes']['endpoint_network_addresses']],
+ 'MACAddress': [addr['attributes']['mac_address']['attributes']['address']
+ for addr in endpoint['attributes']['endpoint_network_addresses']],
+ 'OS': endpoint['attributes']['platform'],
+ 'OSVersion': endpoint['attributes']['operating_system'],
+ 'IsIsolated': endpoint['attributes']['is_isolated'],
+ 'IsDecommissioned': endpoint['attributes']['is_decommissioned'],
+ } for endpoint in res]
+
+
+def get_endpoint_user_context(res=None, endpoint_user_id=None):
+ if res is None:
+ res = http_get('/endpoint_users/{}'.format(endpoint_user_id))['data']
+
+ return [{
+ 'Username': endpoint_user['attributes']['username'].split('\\')[1],
+ 'Hostname': endpoint_user['attributes']['username'].split('\\')[0],
+ } for endpoint_user in res]
+
+
+def get_full_timeline(detection_id, per_page=100):
+    ''' iterate over all timeline activities of a detection, page by page '''
+ page = 1
+ done = False
+ activities = [] # type:ignore
+ while not done:
+ res = http_get('/detections/{}/timeline'.format(detection_id),
+ params={
+ 'page': page,
+ 'per_page': per_page,
+ })
+
+        # stop once the API returns an empty page
+        if len(res['data']) == 0:
+ done = True
+
+ activities.extend(res['data'])
+ page += 1
+
+ return activities
+
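+# Illustrative usage (the detection ID is a placeholder): pull the complete
+# timeline for one detection in pages of 100 activities.
+#   timeline = get_full_timeline(1234, per_page=100)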
+
+def process_timeline(detection_id):
+ res = get_full_timeline(detection_id)
+
+ activities = []
+ domains = []
+ files = []
+ ips = []
+ processes = []
+ for activity in res:
+ if activity['type'] != 'activity_timelines.ActivityOccurred':
+ continue
+
+ activity_time = get_time_str(get_time_obj(activity['attributes']['occurred_at']))
+ notes = activity['attributes']['analyst_notes']
+ additional_data = {} # type:ignore
+
+ if activity['attributes']['type'] == 'process_activity_occurred':
+ process = activity['attributes']['process_execution']['attributes']['operating_system_process'][
+ 'attributes']
+ image = process['image']['attributes']
+ additional_data = {
+ 'MD5': image['md5'],
+ 'SHA256': image['sha256'],
+ 'Path': image['path'],
+ 'Type': image['file_type'],
+ 'CommandLine': process['command_line']['attributes']['command_line'],
+ }
+ files.append({
+ 'Name': os.path.basename(image['path']),
+ 'MD5': image['md5'],
+ 'SHA256': image['sha256'],
+ 'Path': image['path'],
+ 'Extension': os.path.splitext(image['path'])[-1],
+ })
+ processes.append({
+ 'Name': os.path.basename(image['path']),
+ 'Path': image['path'],
+ 'MD5': image['md5'],
+ 'SHA256': image['sha256'],
+ 'StartTime': get_time_str(get_time_obj(process['started_at'])),
+ 'CommandLine': process['command_line']['attributes']['command_line'],
+ })
+
+ elif activity['attributes']['type'] == 'network_connection_activity_occurred':
+ network = activity['attributes']['network_connection']['attributes']
+ additional_data = {
+ 'IP': network['ip_address']['attributes']['ip_address'],
+ 'Port': network['port'],
+ 'Domain': network['domain']['attributes']['name'],
+ }
+ domains.append({'Name': network['domain']['attributes']['name'],
+ # 'DNS' :
+ })
+ ips.append({
+ 'Address': network['ip_address']['attributes']['ip_address'],
+ 'Port': network['port'],
+ })
+
+ activities.append({
+ 'Time': activity_time,
+ 'Type': activity['attributes']['type'].replace('_', ' '),
+ 'Notes': notes,
+ 'Activity Details': createContext(additional_data, removeNull=True),
+ })
+
+ return activities, domains, files, ips, processes
+
+
+def detection_to_context(raw_detection):
+ return {
+ 'Type': 'RedCanaryDetection',
+ 'ID': raw_detection['id'],
+ 'Headline': raw_detection['attributes']['headline'],
+ 'Severity': raw_detection['attributes']['severity'],
+ 'Summary': raw_detection['attributes']['summary'],
+ 'Classification': raw_detection['attributes']['classification']['superclassification'],
+ 'Subclassification': raw_detection['attributes']['classification']['subclassification'],
+ 'Time': get_time_str(get_time_obj(raw_detection['attributes']['time_of_occurrence'])),
+        'Acknowledged': raw_detection['attributes']['last_acknowledged_at'] is not None or raw_detection['attributes'][
+            'last_acknowledged_by'] is not None,
+ 'RemediationStatus': raw_detection['attributes'].get('last_remediated_status', {}).get('remediation_status',
+ ''),
+ }
+
+
+def detections_to_entry(detections, show_timeline=False):
+ fixed_detections = [detection_to_context(d) for d in detections]
+ endpoints = [get_endpoint_context(endpoint_id=d['relationships']['affected_endpoint']['data']['id'])
+ for d in detections]
+ endpoints = sum(endpoints, []) # type: list
+ endpoint_users = [
+ get_endpoint_user_context(endpoint_user_id=d['relationships']['related_endpoint_user']['data']['id'])
+ for d in detections]
+ endpoint_users = sum(endpoint_users, []) # type: list
+
+ domains, files, ips, processes = [], [], [], [] # type:ignore
+ activities = ''
+ title = 'Detections'
+ if show_timeline and len(detections) == 1:
+ title = 'Detection {}'.format(fixed_detections[0]['Headline'])
+ activities, domains, files, ips, processes = process_timeline(fixed_detections[0]['ID'])
+ activities = tableToMarkdown('Detection Timeline', activities,
+ headers=['Time', 'Type', 'Activity Details', 'Notes'])
+
+ headers = ['ID', 'Headline', 'Severity', 'Time', 'Classification', 'Summary', ]
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': fixed_detections,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': '\n\n'.join([
+ tableToMarkdown(title, fixed_detections, headers=headers, removeNull=True),
+ activities,
+ ]),
+ 'EntryContext': {
+ 'RedCanary.Detection(val.ID && val.ID == obj.ID)': createContext(fixed_detections, removeNull=True),
+ 'Account(val.Username == obj.Username)': createContext(endpoint_users, removeNull=True),
+            'Domain(val.Name == obj.Name)': createContext(domains, removeNull=True),
+ 'Endpoint(val.Hostname == obj.Hostname)': createContext(endpoints, removeNull=True),
+ 'File(val.Name == obj.Name)': createContext(files, removeNull=True),
+ 'IP(val.Address == obj.Address)': createContext(ips, removeNull=True),
+            'Process(val.Name == obj.Name)': createContext(processes, removeNull=True),
+ }
+ }
+
+
+def get_unacknowledge_detections(t, per_page=50):
+    ''' iterate over all unacknowledged detections later than time t '''
+ page = 1
+ passed = False
+ while not passed:
+ res = list_detections(page=page, per_page=per_page)
+
+ if len(res) == 0:
+ passed = True
+
+ for detection in res:
+ if get_time_obj(detection['attributes']['time_of_occurrence']) < t:
+ passed = True
+ break
+ if detection['attributes']['last_acknowledged_at'] is not None or detection['attributes'][
+ 'last_acknowledged_by'] is not None:
+ continue
+
+ yield detection
+
+ page += 1
+
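+# Illustrative usage: yield every detection from the last day that has not
+# been acknowledged yet.
+#   since = datetime.now() - timedelta(days=1)
+#   fresh = list(get_unacknowledge_detections(since))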
+
+@logger
+def detection_to_incident(raw_detection):
+ detection = detection_to_context(raw_detection)
+ detection['Timeline'] = get_full_timeline(detection['ID'])
+
+ return {
+ 'type': 'RedCanaryDetection',
+ 'name': detection['Headline'],
+ 'details': detection['Summary'],
+ 'occurred': detection['Time'],
+ 'rawJSON': json.dumps(detection),
+ }
+
+
+''' FUNCTIONS '''
+
+
+def list_detections_command():
+ args = demisto.args()
+ page = int(args.get('page', '1'))
+ per_page = int(args.get('per-page', '50'))
+
+ data = list_detections(page, per_page)
+ return detections_to_entry(data)
+
+
+@logger
+def list_detections(page, per_page):
+ res = http_get('/detections',
+ data={
+ 'page': page,
+ 'per_page': per_page
+ },
+ )
+ return res['data']
+
+
+def get_detection_command():
+ args = demisto.args()
+ _id = args['id']
+
+ data = get_detection(_id)
+ return detections_to_entry(data, show_timeline=True)
+
+
+@logger
+def get_detection(_id):
+ res = http_get('/detections/{}'.format(_id))
+ return res['data']
+
+
+def acknowledge_detection_command():
+ args = demisto.args()
+ _id = args['id']
+
+ acknowledge_detection(_id)
+ return 'detection acknowledged successfully.'
+
+
+@logger
+def acknowledge_detection(_id):
+ res = http_patch('/detections/{}/mark_acknowledged'.format(_id))
+ return res['data']
+
+
+def remediate_detection_command():
+ args = demisto.args()
+ _id = args['id']
+ remediation_state = args['remediation-state']
+ comment = args.get('comment')
+
+ remediate_detection(_id, remediation_state, comment)
+ return 'Detection was updated to "{}" successfully.'.format(remediation_state.replace('_', ' '))
+
+
+@logger
+def remediate_detection(_id, remediation_state, comment):
+ res = http_patch('/detections/{}/update_remediation_state'.format(_id),
+ data={
+ 'remediation_state': remediation_state,
+ 'comment': comment,
+ }
+ )
+ return res
+
+
+def list_endpoints_command():
+ args = demisto.args()
+ page = int(args.get('page', '1'))
+ per_page = int(args.get('per-page', '50'))
+
+ data = list_endpoints(page, per_page)
+ endpoints = get_endpoint_context(res=data)
+ headers = ['ID', 'IPAddress', 'Hostname', 'MACAddress', 'IsIsolated', 'IsDecommissioned', 'OSVersion', ]
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': endpoints,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('EndPoints', endpoints, headers=headers, removeNull=True),
+ 'EntryContext': {
+ 'EndPoint(val.Hostname == obj.Hostname)': createContext(endpoints, removeNull=True),
+ }
+ }
+
+
+@logger
+def list_endpoints(page, per_page):
+ res = http_get('/endpoints',
+ data={
+ 'page': page,
+ 'per_page': per_page
+ },
+ )
+
+ return res['data']
+
+
+def get_endpoint_command():
+ args = demisto.args()
+ _id = args['id']
+
+ data = get_endpoint(_id)
+ endpoints = get_endpoint_context(res=data)
+ headers = ['ID', 'IPAddress', 'Hostname', 'MACAddress', 'IsIsolated', 'IsDecommissioned', 'OSVersion', ]
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': endpoints,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('EndPoint {}'.format(endpoints[0]['Hostname']), endpoints, headers=headers,
+ removeNull=True),
+ 'EntryContext': {
+ 'EndPoint(val.Hostname == obj.Hostname)': createContext(endpoints, removeNull=True),
+ }
+ }
+
+
+@logger
+def get_endpoint(_id):
+ res = http_get('/endpoints/{}'.format(_id))
+
+ return res['data']
+
+
+def get_endpoint_detections_command():
+ args = demisto.args()
+ _id = args['id']
+
+ detections = get_endpoint_detections(_id)
+ return detections_to_entry(detections)
+
+
+@logger
+def get_endpoint_detections(_id):
+ endpoint = get_endpoint(_id)
+
+ detection_ids = [d['href'].split('detections/')[1] for d in endpoint[0]['links']['detections']]
+ detections = [] # type:ignore
+ for detection_id in detection_ids:
+ detections.extend(get_detection(detection_id))
+
+ return detections
+
+
+def execute_playbook_command():
+ args = demisto.args()
+ detection_id = args['detection-id']
+ playbook_id = args.get('playbook-id')
+ playbook_name = args.get('playbook-name')
+ if playbook_id is None:
+ if playbook_name is None:
+ raise ValueError('You must specify either playbook-id or playbook-name.')
+ playbook_id = playbook_name_to_id(args.get('playbook-name'))
+
+ execute_playbook(playbook_id, detection_id)
+
+ return 'playbook #{} execution started successfully.'.format(playbook_id)
+
+
+def execute_playbook(playbook_id, detection_id):
+ res = http_post('/exec/playbooks/{}/execute'.format(playbook_id),
+ params={
+ 'resource_type': 'Detection',
+ 'resource_id': detection_id,
+ }
+ )
+
+ return res
+
+
+def fetch_incidents():
+ last_run = demisto.getLastRun()
+ last_fetch = last_run.get('time')
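+    # demisto.getLastRun() persists a small JSON blob between fetch runs; the
+    # 'time' key holds the newest detection time already ingested (written
+    # below via demisto.setLastRun), so each run only yields newer detections.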
+
+ # handle first time fetch
+ if last_fetch is None:
+ last_fetch = datetime.now() - timedelta(days=5)
+ else:
+ last_fetch = datetime.strptime(last_fetch, '%Y-%m-%dT%H:%M:%SZ')
+
+ LOG('iterating on detections, looking for more recent than {}'.format(last_fetch))
+ incidents = []
+ for raw_detection in get_unacknowledge_detections(last_fetch, per_page=2):
+ LOG('found detection #{}'.format(raw_detection['id']))
+ incident = detection_to_incident(raw_detection)
+
+ incidents.append(incident)
+
+ if len(incidents) != 0:
+ last_fetch = max([get_time_obj(incident['occurred']) for incident in incidents]) # noqa:F812
+ demisto.setLastRun({'time': get_time_str(last_fetch + timedelta(seconds=1))})
+    # the dispatcher at the bottom of this file submits the returned list via
+    # demisto.incidents()
+    return incidents
+
+
+@logger
+def test_integration():
+ list_detections(1, 1)
+ return 'ok'
+
+
+''' EXECUTION CODE '''
+COMMANDS = {
+ 'test-module': test_integration,
+ 'fetch-incidents': fetch_incidents,
+ 'redcanary-list-detections': list_detections_command,
+ 'redcanary-list-endpoints': list_endpoints_command,
+ 'redcanary-get-endpoint': get_endpoint_command,
+ 'redcanary-get-endpoint-detections': get_endpoint_detections_command,
+ 'redcanary-get-detection': get_detection_command,
+ 'redcanary-acknowledge-detection': acknowledge_detection_command,
+ 'redcanary-update-remediation-state': remediate_detection_command,
+ 'redcanary-execute-playbook': execute_playbook_command,
+}
+
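+# Dispatch: demisto.command() selects a handler from COMMANDS; fetch-incidents
+# returns a list that is pushed via demisto.incidents(), while every other
+# command returns a war-room entry passed to demisto.results().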
+try:
+ handle_proxy()
+ LOG('command is %s' % (demisto.command(),))
+ command_func = COMMANDS.get(demisto.command())
+ if command_func is not None:
+ if demisto.command() == 'fetch-incidents':
+ demisto.incidents(command_func())
+ else:
+ demisto.results(command_func())
+
+except Exception as e:
+    LOG(str(e))
+ if demisto.command() != 'test-module':
+ LOG.print_log()
+    return_error('error has occurred: {}'.format(str(e)))
diff --git a/Integrations/RedCanary/RedCanary.yml b/Integrations/RedCanary/RedCanary.yml
new file mode 100644
index 000000000000..288d00f32bfa
--- /dev/null
+++ b/Integrations/RedCanary/RedCanary.yml
@@ -0,0 +1,396 @@
+commonfields:
+ id: RedCanary
+ version: -1
+name: RedCanary
+display: Red Canary
+category: Endpoint
+description: Red Canary collects endpoint data using Carbon Black Response and CrowdStrike
+ Falcon. The collected data is standardized into a common schema which allows teams
+ to detect, analyze and respond to security incidents.
+configuration:
+- display: Domain (for example, https://demisto.my.redcanary.co)
+ name: domain
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: API Key
+ name: api_key
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: "False"
+ type: 8
+ required: false
+- display: Trust any certificate (unsecure)
+ name: insecure
+ defaultvalue: ""
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ subtype: python2
+ commands:
+ - name: redcanary-acknowledge-detection
+ arguments:
+ - name: id
+ required: true
+ default: true
+ description: Detection ID. Can be obtained from the context.
+    description: Mark a detection as acknowledged, to indicate that it is being handled.
+ - name: redcanary-update-remediation-state
+ arguments:
+ - name: id
+ required: true
+ default: true
+ description: Detection ID. Can be obtained from the context.
+ - name: remediation-state
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - remediated
+ - not_remediated_false_positive
+ - not_remediated_sanctioned_activity
+ - not_remediated_unwarranted
+ description: The way in which the detection was remediated.
+ - name: comment
+ description: Describe the reason why the detection was remediated in this manner.
+        This is ignored for the 'remediated' state.
+ defaultValue: remediated by Demisto.
+ description: Update the remediation state of a detection.
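+  # Illustrative invocation (the ID is a placeholder):
+  # !redcanary-update-remediation-state id="42" remediation-state="not_remediated_false_positive" comment="benign admin activity"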
+ - name: redcanary-list-detections
+ arguments:
+ - name: page
+ description: What page of results to fetch. Defaults to first page.
+ - name: per-page
+ description: How many results to return per page.
+ outputs:
+ - contextPath: RedCanary.Detection.ID
+ description: Detection ID.
+ type: number
+ - contextPath: RedCanary.Detection.Headline
+ description: Human readable text about the detection.
+ type: string
+ - contextPath: RedCanary.Detection.Severity
+ description: The severity of the detection. High, medium, or low
+ type: string
+ - contextPath: RedCanary.Detection.Summary
+ description: A summary of information about the detection.
+ type: string
+ - contextPath: RedCanary.Detection.Classification
+ description: Detection Classification
+ type: string
+ - contextPath: RedCanary.Detection.Subclassification
+ description: Detection Subclassification
+ type: string
+ - contextPath: RedCanary.Detection.Time
+ description: The time the detection was confirmed as a threat by Red Canary
+ type: date
+ - contextPath: RedCanary.Detection.Acknowledged
+ description: Whether or not the detection is acknowledged.
+ type: boolean
+ - contextPath: RedCanary.Detection.RemediationStatus
+ description: The state the detection is currently in.
+ type: string
+ - contextPath: Endpoint.Hostname
+ description: Fully-qualified endpoint hostname
+ type: string
+ - contextPath: Endpoint.ID
+ description: Endpoint ID
+ type: string
+ - contextPath: Endpoint.IPAddress
+ description: IP Address of the endpoint
+ - contextPath: Endpoint.MACAddress
+ description: MAC Address of the endpoint
+ - contextPath: Endpoint.OS
+ description: The operating system platform used by the endpoint
+ type: string
+ - contextPath: Endpoint.OSVersion
+ description: The complete operating system version identifier used by the endpoint
+ type: string
+ - contextPath: Endpoint.Memory
+ description: Physical memory of the endpoint in bytes
+ type: number
+ - contextPath: Endpoint.IsIsolated
+ description: Whether or not the endpoint is currently network isolated
+ type: boolean
+ - contextPath: Endpoint.IsDecommissioned
+ description: Whether or not the endpoint is decommissioned
+ type: boolean
+ - contextPath: Account.Username
+ description: Endpoint user name
+ type: string
+ - contextPath: Account.Hostname
+ description: Endpoint host name
+ type: string
+ description: Get a list of confirmed detections.
+ - name: redcanary-list-endpoints
+ arguments:
+ - name: page
+ description: Page number in the query response. Default is 1.
+ defaultValue: "1"
+ - name: per-page
+      description: Number of returned results per page. Default is 50.
+ defaultValue: "50"
+ outputs:
+ - contextPath: Endpoint.Hostname
+ description: Fully-qualified endpoint hostname
+ type: string
+ - contextPath: Endpoint.ID
+ description: Endpoint ID
+ type: string
+ - contextPath: Endpoint.IPAddress
+ description: IP Address of the endpoint
+ - contextPath: Endpoint.MACAddress
+ description: MAC Address of the endpoint
+ - contextPath: Endpoint.OS
+ description: The operating system platform used by the endpoint
+ type: string
+ - contextPath: Endpoint.OSVersion
+ description: The complete operating system version identifier used by the endpoint
+ type: string
+ - contextPath: Endpoint.Memory
+ description: Physical memory of the endpoint in bytes
+ type: number
+ - contextPath: Endpoint.IsIsolated
+ description: Whether or not the endpoint is currently network isolated
+ type: boolean
+ - contextPath: Endpoint.IsDecommissioned
+ description: Whether or not the endpoint is decommissioned
+ type: boolean
+ description: Get a list of endpoints.
+ - name: redcanary-execute-playbook
+ arguments:
+ - name: playbook-id
+ description: Playbook ID for execution.
+ - name: detection-id
+ required: true
+      description: Detection to execute the playbook on. Can be retrieved from the
+        context.
+ - name: playbook-name
+      description: Playbook name for execution. If a playbook ID is specified, this
+        is ignored.
+ description: Execute a predefined playbook on a detection.
+ - name: redcanary-get-endpoint
+ arguments:
+ - name: id
+ required: true
+ default: true
+ description: Endpoint ID
+ outputs:
+ - contextPath: Endpoint.Hostname
+ description: Fully-qualified endpoint hostname
+ type: string
+ - contextPath: Endpoint.ID
+ description: Endpoint ID
+ type: string
+ - contextPath: Endpoint.IPAddress
+ description: IP Address of the endpoint
+ - contextPath: Endpoint.MACAddress
+ description: MAC Address of the endpoint
+ - contextPath: Endpoint.OS
+ description: The operating system platform used by the endpoint
+ type: string
+ - contextPath: Endpoint.OSVersion
+ description: The complete operating system version identifier used by the endpoint
+ type: string
+ - contextPath: Endpoint.Memory
+ description: Physical memory of the endpoint in bytes
+ type: number
+ - contextPath: Endpoint.IsIsolated
+ description: Whether or not the endpoint is currently network isolated
+ type: boolean
+ - contextPath: Endpoint.IsDecommissioned
+ description: Whether or not the endpoint is decommissioned
+ type: boolean
+ description: Get an endpoint by unique identifier.
+ - name: redcanary-get-endpoint-detections
+ arguments:
+ - name: id
+ required: true
+ default: true
+ description: Endpoint ID
+ outputs:
+ - contextPath: RedCanary.Detection.ID
+ description: Detection ID.
+ type: number
+ - contextPath: RedCanary.Detection.Headline
+ description: Human readable text about the detection.
+ type: string
+ - contextPath: RedCanary.Detection.Severity
+ description: The severity of the detection. High, medium, or low
+ type: string
+ - contextPath: RedCanary.Detection.Summary
+ description: A summary of information about the detection.
+ type: string
+ - contextPath: RedCanary.Detection.Classification
+ description: Detection Classification
+ type: string
+ - contextPath: RedCanary.Detection.Subclassification
+ description: Detection Subclassification
+ type: string
+ - contextPath: RedCanary.Detection.Time
+ description: The time the detection was confirmed as a threat by Red Canary
+ type: date
+ - contextPath: RedCanary.Detection.Acknowledged
+ description: Whether or not the detection is acknowledged.
+ type: boolean
+ - contextPath: RedCanary.Detection.RemediationStatus
+ description: The state the detection is currently in.
+ type: string
+ - contextPath: Endpoint.Hostname
+ description: Fully-qualified endpoint hostname
+ type: string
+ - contextPath: Endpoint.ID
+ description: Endpoint ID
+ type: string
+ - contextPath: Endpoint.IPAddress
+ description: IP Address of the endpoint
+ - contextPath: Endpoint.MACAddress
+ description: MAC Address of the endpoint
+ - contextPath: Endpoint.OS
+ description: The operating system platform used by the endpoint
+ type: string
+ - contextPath: Endpoint.OSVersion
+ description: The complete operating system version identifier used by the endpoint
+ type: string
+ - contextPath: Endpoint.Memory
+ description: Physical memory of the endpoint in bytes
+ type: number
+ - contextPath: Endpoint.IsIsolated
+ description: Whether or not the endpoint is currently network isolated
+ type: boolean
+ - contextPath: Endpoint.IsDecommissioned
+ description: Whether or not the endpoint is decommissioned
+ type: boolean
+ - contextPath: Account.Username
+ description: Endpoint user name
+ type: string
+ - contextPath: Account.Hostname
+ description: Endpoint host name
+ type: string
+ description: Get a list of detections associated with the endpoint.
+ - name: redcanary-get-detection
+ arguments:
+ - name: id
+ required: true
+ default: true
+ description: Detection ID
+ outputs:
+ - contextPath: RedCanary.Detection.ID
+ description: Detection ID.
+ type: number
+ - contextPath: RedCanary.Detection.Headline
+ description: Human readable text about the detection.
+ type: string
+ - contextPath: RedCanary.Detection.Severity
+ description: The severity of the detection. High, medium, or low
+ type: string
+ - contextPath: RedCanary.Detection.Summary
+ description: A summary of information about the detection.
+ type: string
+ - contextPath: RedCanary.Detection.Classification
+ description: Detection Classification
+ type: string
+ - contextPath: RedCanary.Detection.Subclassification
+ description: Detection Subclassification
+ type: string
+ - contextPath: RedCanary.Detection.Time
+ description: The time the detection was confirmed as a threat by Red Canary
+ type: date
+ - contextPath: RedCanary.Detection.Acknowledged
+ description: Whether or not the detection is acknowledged.
+ type: boolean
+ - contextPath: RedCanary.Detection.RemediationStatus
+ description: The state the detection is currently in.
+ type: string
+ - contextPath: Endpoint.Hostname
+ description: Fully-qualified endpoint hostname
+ type: string
+ - contextPath: Endpoint.ID
+ description: Endpoint ID
+ type: string
+ - contextPath: Endpoint.IPAddress
+ description: IP Address of the endpoint
+ - contextPath: Endpoint.MACAddress
+ description: MAC Address of the endpoint
+ - contextPath: Endpoint.OS
+ description: The operating system platform used by the endpoint
+ type: string
+ - contextPath: Endpoint.OSVersion
+ description: The complete operating system version identifier used by the endpoint
+ type: string
+ - contextPath: Endpoint.Memory
+ description: Physical memory of the endpoint in bytes
+ type: number
+ - contextPath: Endpoint.IsIsolated
+ description: Whether or not the endpoint is currently network isolated
+ type: boolean
+ - contextPath: Endpoint.IsDecommissioned
+ description: Whether or not the endpoint is decommissioned
+ type: boolean
+ - contextPath: Account.Username
+ description: Endpoint user name
+ type: string
+ - contextPath: Account.Hostname
+ description: Endpoint host name
+ type: string
+ - contextPath: Domain.Name
+ description: Domain name
+ type: string
+ - contextPath: File.Name
+ description: File name
+ type: string
+ - contextPath: File.MD5
+ description: File MD5
+ type: string
+ - contextPath: File.SHA256
+ description: File SHA256
+ type: string
+ - contextPath: File.Path
+ description: File path in the endpoint
+ type: string
+ - contextPath: File.Extension
+ description: File extension
+ type: string
+ - contextPath: IP.Address
+ description: IP Address
+ type: string
+ - contextPath: IP.Port
+ description: Port
+ type: string
+ - contextPath: Process.Name
+ description: Process name
+ type: string
+ - contextPath: Process.Path
+      description: Process binary path
+ type: string
+ - contextPath: Process.MD5
+ description: Binary MD5
+ type: string
+ - contextPath: Process.SHA256
+ description: Binary SHA256
+ type: string
+ - contextPath: Process.StartTime
+ description: Process execution time
+ type: date
+ - contextPath: Process.CommandLine
+ description: Process command line
+ type: string
+ description: Get a detection by unique identifier.
+ isfetch: true
+ runonce: false
+tests:
+ - RedCanaryTest
diff --git a/Integrations/RedCanary/RedCanary_desc.md b/Integrations/RedCanary/RedCanary_desc.md
new file mode 100644
index 000000000000..f40e9dda6529
--- /dev/null
+++ b/Integrations/RedCanary/RedCanary_desc.md
@@ -0,0 +1,5 @@
+ To generate an API key, follow these steps:
+ 1) Log in to your Red Canary instance.
+ 2) Go to `profile` -> `Account`.
+ 3) Scroll down to `Security Settings`.
+ 4) Copy the API token under `API Authentication Token`.
\ No newline at end of file
diff --git a/Integrations/RedCanary/RedCanary_image.png b/Integrations/RedCanary/RedCanary_image.png
new file mode 100644
index 000000000000..b6423a21f351
Binary files /dev/null and b/Integrations/RedCanary/RedCanary_image.png differ
diff --git a/Integrations/RedLock/CHANGELOG.md b/Integrations/RedLock/CHANGELOG.md
new file mode 100644
index 000000000000..cd0293e63328
--- /dev/null
+++ b/Integrations/RedLock/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+ - Updated the display name to: *Prisma Cloud (RedLock)*.
+ - Added the *Trust any certificate* configuration parameter.
diff --git a/Integrations/RedLock/Pipfile b/Integrations/RedLock/Pipfile
new file mode 100644
index 000000000000..9ed479d31633
--- /dev/null
+++ b/Integrations/RedLock/Pipfile
@@ -0,0 +1,23 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+flake8 = "*"
+
+[packages]
+certifi = "==2017.11.5"
+chardet = "==3.0.4"
+idna = "==2.6"
+olefile = "==0.44"
+requests = "==2.18.4"
+urllib3 = "==1.22"
+PyYAML = "==3.12"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/RedLock/Pipfile.lock b/Integrations/RedLock/Pipfile.lock
new file mode 100644
index 000000000000..d98a7bee0ec6
--- /dev/null
+++ b/Integrations/RedLock/Pipfile.lock
@@ -0,0 +1,422 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "6e86eca8a4a0771ea3f6302e96f7afb9a7fe1f342e02d00c2443c36b86f1af28"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694",
+ "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
+ ],
+ "index": "pypi",
+ "version": "==2017.11.5"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "olefile": {
+ "hashes": [
+ "sha256:61f2ca0cd0aa77279eb943c07f607438edf374096b66332fae1ee64a6f0f73ad"
+ ],
+ "index": "pypi",
+ "version": "==0.44"
+ },
+ "pyyaml": {
+ "hashes": [
+ "sha256:16b20e970597e051997d90dc2cddc713a2876c47e3d92d59ee198700c5427736",
+ "sha256:3262c96a1ca437e7e4763e2843746588a965426550f3797a79fca9c6199c431f",
+ "sha256:592766c6303207a20efc445587778322d7f73b161bd994f227adaa341ba212ab",
+ "sha256:5ac82e411044fb129bae5cfbeb3ba626acb2af31a8d17d175004b70862a741a7",
+ "sha256:827dc04b8fa7d07c44de11fabbc888e627fa8293b695e0f99cb544fdfa1bf0d1",
+ "sha256:bc6bced57f826ca7cb5125a10b23fd0f2fff3b7c4701d64c439a300ce665fff8",
+ "sha256:c01b880ec30b5a6e6aa67b09a2fe3fb30473008c85cd6a67359a1b15ed6d83a4",
+ "sha256:e863072cdf4c72eebf179342c94e6989c67185842d9997960b3e69290b2fa269"
+ ],
+ "index": "pypi",
+ "version": "==3.12"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:ec20e7a4825331c1b5ebf261d111e16fa9612c1f7a5e1f884f12bd53a664dfd2",
+ "sha256:f913492e1663d3c36f502e5e9ba6cd13cf19d7fab50aa13239e420fef95e1396"
+ ],
+ "version": "==19.2.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694",
+ "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
+ ],
+ "index": "pypi",
+ "version": "==2017.11.5"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c",
+ "sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==4.0.2"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:7197aa736777caac513dbd800944c209a49765bf1979b12b037dce0277077ed3",
+ "sha256:9d2c67f18c1f9b6db1b46317f7f784aa82789d2ee5dea5d9c0f0f2a764eb862e"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.6.0"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548",
+ "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696"
+ ],
+ "index": "pypi",
+ "version": "==3.7.8"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "functools32": {
+ "hashes": [
+ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0",
+ "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.3.post2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16",
+ "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.3.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26",
+ "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af"
+ ],
+ "markers": "python_version < '3.8'",
+ "version": "==0.23"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:02b260c8deb80db09325b99edf62ae344ce9bc64d68b7a634410b8e9a568edbf",
+ "sha256:18f9c401083a4ba6e162355873f906315332ea7035803d0fd8166051e3d402e3",
+ "sha256:1f2c6209a8917c525c1e2b55a716135ca4658a3042b5122d4e3413a4030c26ce",
+ "sha256:2f06d97f0ca0f414f6b707c974aaf8829c2292c1c497642f63824119d770226f",
+ "sha256:616c94f8176808f4018b39f9638080ed86f96b55370b5a9463b2ee5c926f6c5f",
+ "sha256:63b91e30ef47ef68a30f0c3c278fbfe9822319c15f34b7538a829515b84ca2a0",
+ "sha256:77b454f03860b844f758c5d5c6e5f18d27de899a3db367f4af06bec2e6013a8e",
+ "sha256:83fe27ba321e4cfac466178606147d3c0aa18e8087507caec78ed5a966a64905",
+ "sha256:84742532d39f72df959d237912344d8a1764c2d03fe58beba96a87bfa11a76d8",
+ "sha256:874ebf3caaf55a020aeb08acead813baf5a305927a71ce88c9377970fe7ad3c2",
+ "sha256:9f5caf2c7436d44f3cec97c2fa7791f8a675170badbfa86e1992ca1b84c37009",
+ "sha256:a0c8758d01fcdfe7ae8e4b4017b13552efa7f1197dd7358dc9da0576f9d0328a",
+ "sha256:a4def978d9d28cda2d960c279318d46b327632686d82b4917516c36d4c274512",
+ "sha256:ad4f4be843dace866af5fc142509e9b9817ca0c59342fdb176ab6ad552c927f5",
+ "sha256:ae33dd198f772f714420c5ab698ff05ff900150486c648d29951e9c70694338e",
+ "sha256:b4a2b782b8a8c5522ad35c93e04d60e2ba7f7dcb9271ec8e8c3e08239be6c7b4",
+ "sha256:c462eb33f6abca3b34cdedbe84d761f31a60b814e173b98ede3c81bb48967c4f",
+ "sha256:fd135b8d35dfdcdb984828c84d695937e58cc5f49e1c854eb311c4d6aa03f4f1"
+ ],
+ "version": "==1.4.2"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47",
+ "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108"
+ ],
+ "version": "==19.2"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db",
+ "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.5"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6",
+ "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34"
+ ],
+ "version": "==0.13.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42",
+ "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300"
+ ],
+ "index": "pypi",
+ "version": "==1.9.5"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:8fc39199bdda3d9d025d3b1f4eb99a192c20828030ea7c9a0d2840721de7d347",
+ "sha256:d100a02770f665f5dcf7e3f08202db29857fee6d15f34c942be0a511f39814f0"
+ ],
+ "index": "pypi",
+ "version": "==4.6.5"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:34520283d459cdf1d0dbb58a132df804697f1b966ecedf808bbf3d255af8f659",
+ "sha256:f1ab8aefe795204efe7a015900296d1719e7bf0f4a0558d71e8599da1d1309d0"
+ ],
+ "index": "pypi",
+ "version": "==1.11.1"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:510df890afe08d36eca5bb16b4aa6308a6f85e3159ad3013bac8b9de7bd5a010",
+ "sha256:88d3402dd8b3c69a9e4f9d3a73ad11b15920c6efd36bc27bf1f701cf4a8e4646"
+ ],
+ "index": "pypi",
+ "version": "==1.7.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typing": {
+ "hashes": [
+ "sha256:91dfe6f3f706ee8cc32d38edbbf304e9b7583fb37108fef38229617f8b3eba23",
+ "sha256:c8cabb5ab8945cd2f54917be357d134db9cc1eb039e59d1606dc1e60cb1d9d36",
+ "sha256:f38d83c5a7a7086543a0f649564d661859c5146a85775ab90c0d2f93ffaa9714"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==3.7.4.1"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
+ "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
+ ],
+ "version": "==0.6.0"
+ }
+ }
+}
diff --git a/Integrations/RedLock/RedLock.py b/Integrations/RedLock/RedLock.py
new file mode 100644
index 000000000000..a71e8c38fdfe
--- /dev/null
+++ b/Integrations/RedLock/RedLock.py
@@ -0,0 +1,408 @@
+import demistomock as demisto
+from CommonServerPython import *
+import os
+import json
+import requests
+from datetime import datetime
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+URL = demisto.getParam('url')
+if URL[-1] != '/':
+ URL += '/'
+
+if not demisto.getParam('proxy'):
+    # pop() avoids a KeyError when a proxy variable is not set
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+VERIFY = not demisto.params().get('unsecure', False)
+
+# Standard headers
+HEADERS = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+TOKEN = None
+
+
+def get_token():
+ """
+ Retrieve the token using the credentials
+ """
+    global TOKEN
+    r = requests.post(URL + 'login', headers=HEADERS, verify=VERIFY, json={
+ 'customerName': demisto.getParam('customer') or '',
+ 'username': demisto.getParam('credentials')['identifier'],
+ 'password': demisto.getParam('credentials')['password']
+ })
+ if r.status_code != requests.codes.ok:
+ return_error('Error authenticating to RedLock service [%d] - %s' % (r.status_code, r.text))
+ TOKEN = r.json()['token']
+ HEADERS['x-redlock-auth'] = TOKEN
+
+
+def req(method, path, data, param_data):
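+    """
+    Generic request wrapper: authenticates lazily on the first call, then
+    surfaces any x-redlock-status error details if the request fails.
+    """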
+ if not TOKEN:
+ get_token()
+ r = requests.request(method, URL + path, json=data, params=param_data, headers=HEADERS, verify=VERIFY)
+ if r.status_code != requests.codes.ok:
+ text = r.text
+ if r.headers.get('x-redlock-status'):
+ try:
+ status = json.loads(r.headers.get('x-redlock-status')) # type: ignore
+ for s in status:
+ text += '\n%s [%s]' % (s.get('i18nKey', ''), s.get('subject', ''))
+ except Exception:
+ pass
+ return_error('Error in API call to RedLock service [%d] - %s' % (r.status_code, text))
+ if not r.text:
+ return {}
+ return r.json()
+
+
+def list_filters():
+ """
+ List the acceptable filters on alerts
+ """
+ r = req('GET', 'filter/alert/suggest', None, None)
+ filters = [{
+ 'Name': k,
+ 'Options': ','.join(r.get(k).get('options')),
+ 'Static': r.get(k).get('staticFilter')
+ } for k in r]
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': r,
+ 'HumanReadable': tableToMarkdown('Filter options', filters, ['Name', 'Options', 'Static'])
+ })
+
+
+def convertDateToUnix(dstr):
+ """
+ Convert a given string with MM/DD/YYYY format to millis since epoch
+ """
+ d = datetime.strptime(dstr, '%m/%d/%Y')
+ return int((d - datetime.utcfromtimestamp(0)).total_seconds() * 1000)
+
+
+def convertUnixToDate(d):
+ """
+    Convert millis since epoch to a date string formatted MM/DD/YYYY HH:MM:SS
+ """
+ if d:
+ dt = datetime.utcfromtimestamp(d / 1000)
+ return dt.strftime('%m/%d/%Y %H:%M:%S')
+ return 'N/A'
+
+
+def convertUnixToDemisto(d):
+ """
+    Convert millis since epoch to an ISO-8601 string formatted YYYY-MM-DDTHH:MM:SSZ
+ """
+ if d:
+ dt = datetime.utcfromtimestamp(d / 1000)
+ return dt.strftime('%Y-%m-%dT%H:%M:%SZ')
+ return ''
+
+
+def handle_time_filter(payload, baseCase):
+ """
+ Add the time filter to the payload
+ """
+ unit = demisto.getArg('time-range-unit')
+ value = demisto.getArg('time-range-value')
+ timeFrom = demisto.getArg('time-range-date-from')
+ timeTo = demisto.getArg('time-range-date-to')
+ relative = ('hour', 'day', 'week', 'month', 'year')
+ toNow = relative[1:] + ('epoch', 'login')
+ if unit:
+ if timeFrom or timeTo:
+ return_error('You cannot specify absolute times [time-range-date-from, time-range-date-to] '
+ + 'with relative times [time-range-unit, time-range-value]')
+ if value:
+ if unit not in relative:
+ return_error('Time unit for relative time must be one of the following: ' + ','.join(relative))
+ payload['timeRange'] = {'type': 'relative', 'value': {'amount': int(value), 'unit': unit}}
+ else:
+ if unit not in toNow:
+ return_error('Time unit for to_now time must be one of the following: ' + ','.join(toNow))
+ payload['timeRange'] = {'type': 'to_now', 'value': unit}
+ else:
+ if not timeFrom or not timeTo:
+ payload['timeRange'] = baseCase
+ else:
+ payload['timeRange'] = {'type': 'absolute', 'value': {
+ 'startTime': convertDateToUnix(timeFrom), 'endTime': convertDateToUnix(timeTo)}}
+
+
+def handle_filters(payload):
+ """
+ Add filters to the filter object based on received arguments
+ """
+ argsConversion = {
+ 'alert-status': 'alert.status',
+ 'policy-name': 'policy.name',
+ 'policy-label': 'policy.label',
+ 'policy-compliance-standard': 'policy.complianceStandard',
+ 'cloud-account': 'cloud.account',
+ 'cloud-region': 'cloud.region',
+ 'alert-rule-name': 'alertRule.name',
+ 'resource-id': 'resource.id',
+ 'resource-name': 'resource.name',
+ 'resource-type': 'resource.type',
+ 'alert-id': 'alert.id',
+ 'cloud-type': 'cloud.type',
+ 'risk-grade': 'risk.grade',
+ 'policy-type': 'policy.type',
+ 'policy-severity': 'policy.severity'
+ }
+ payload['filters'] = []
+ for k in demisto.args():
+ if k in ('policy-name', 'policy-label', 'policy-compliance-standard', 'cloud-account', 'cloud-region',
+ 'alert-rule-name', 'resource-id', 'resource-name', 'resource-type', 'alert-status', 'alert-id',
+ 'cloud-type', 'risk-grade', 'policy-type', 'policy-severity') and demisto.getArg(k):
+ payload['filters'].append({'name': argsConversion[k], 'operator': '=', 'value': demisto.getArg(k)})
+
+
+def alert_to_readable(a):
+ """
+ Transform an alert to a nice readable object
+ """
+ return {
+ 'ID': a.get('id'),
+ 'Status': a.get('status'),
+ 'FirstSeen': convertUnixToDate(a.get('firstSeen')),
+ 'LastSeen': convertUnixToDate(a.get('lastSeen')),
+ 'AlertTime': convertUnixToDate(a.get('alertTime')),
+ 'PolicyName': demisto.get(a, 'policy.name'),
+ 'PolicyType': demisto.get(a, 'policy.policyType'),
+ 'PolicyDescription': demisto.get(a, 'policy.description'),
+ 'PolicySeverity': demisto.get(a, 'policy.severity'),
+ 'PolicyRecommendation': demisto.get(a, 'policy.recommendation'),
+ 'PolicyDeleted': demisto.get(a, 'policy.deleted'),
+ 'PolicyRemediable': demisto.get(a, 'policy.remediable'),
+ 'RiskRating': demisto.get(a, 'riskDetail.rating'),
+ 'ResourceName': demisto.get(a, 'resource.name'),
+ 'ResourceAccount': demisto.get(a, 'resource.account'),
+ 'ResourceType': demisto.get(a, 'resource.resourceType'),
+ 'ResourceCloudType': demisto.get(a, 'resource.cloudType')
+ }
+
+
+def alert_to_context(a):
+ """
+ Transform a single alert to context struct
+ """
+ return {
+ 'ID': a.get('id'),
+ 'Status': a.get('status'),
+ 'AlertTime': convertUnixToDate(a.get('alertTime')),
+ 'Policy': {
+ 'ID': demisto.get(a, 'policy.policyId'),
+ 'Name': demisto.get(a, 'policy.name'),
+ 'Type': demisto.get(a, 'policy.policyType'),
+ 'Severity': demisto.get(a, 'policy.severity'),
+ 'Remediable': demisto.get(a, 'policy.remediable')
+ },
+ 'RiskDetail': {
+ 'Rating': demisto.get(a, 'riskDetail.rating'),
+ 'Score': demisto.get(a, 'riskDetail.riskScore.score')
+ },
+ 'Resource': {
+ 'ID': demisto.get(a, 'resource.id'),
+ 'Name': demisto.get(a, 'resource.name'),
+ 'Account': demisto.get(a, 'resource.account'),
+ 'AccountID': demisto.get(a, 'resource.accountId')
+ }
+ }
+
+
+def search_alerts():
+ """
+ Retrieves alerts by filter
+ """
+ payload = {} # type: dict
+ handle_time_filter(payload, {'type': 'relative', 'value': {'amount': 7, 'unit': 'day'}})
+ handle_filters(payload)
+ r = req('POST', 'alert', payload, {'detailed': 'true'})
+ alerts = []
+ context_path = 'Redlock.Alert(val.ID === obj.ID)'
+ ec = {context_path: []} # type: dict
+ for k in r:
+ alerts.append(alert_to_readable(k))
+ ec[context_path].append(alert_to_context(k))
+ ec['Redlock.Metadata.CountOfAlerts'] = len(r)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': r,
+ 'EntryContext': ec,
+ 'HumanReadable': tableToMarkdown('Alerts', alerts, [
+ 'ID', 'Status', 'FirstSeen', 'LastSeen', 'AlertTime', 'PolicyName', 'PolicyType', 'PolicyDescription',
+ 'PolicySeverity', 'PolicyRecommendation', 'PolicyDeleted', 'PolicyRemediable', 'RiskRating', 'ResourceName',
+ 'ResourceAccount', 'ResourceType', 'ResourceCloudType'
+ ])
+ })
+
+
+def get_alert_details():
+ """
+ Retrieve alert details by given ID
+ """
+    r = req('GET', 'alert/' + demisto.getArg('alert-id'), None, {'detailed': demisto.getArg('detailed')})
+ alert = alert_to_readable(r)
+ alert.update({
+        'PolicyID': demisto.get(r, 'policy.policyId'),
+ 'PolicySystemDefault': demisto.get(r, 'policy.systemDefault'),
+ 'PolicyLabels': demisto.get(r, 'policy.labels'),
+ 'PolicyLastModifiedOn': demisto.get(r, 'policy.lastModifiedOn'),
+ 'PolicyLastModifiedBy': demisto.get(r, 'policy.lastModifiedBy'),
+ 'RiskScore': demisto.get(r, 'riskDetail.riskScore.score'),
+ 'ResourceRRN': demisto.get(r, 'resource.rrn'),
+ 'ResourceID': demisto.get(r, 'resource.id'),
+ 'ResourceAccountID': demisto.get(r, 'resource.accountId'),
+ 'ResourceRegionID': demisto.get(r, 'resource.regionId'),
+ 'ResourceApiName': demisto.get(r, 'resource.resourceApiName'),
+ 'ResourceUrl': demisto.get(r, 'resource.url'),
+ 'ResourceData': demisto.get(r, 'resource.data'),
+ 'ResourceAccessKeyAge': demisto.get(r, 'resource.additionalInfo.accessKeyAge'),
+ 'ResourceInactiveSinceTs': demisto.get(r, 'resource.additionalInfo.inactiveSinceTs')
+ })
+
+ ec = {'Redlock.Alert(val.ID === obj.ID)': alert_to_context(r)}
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': r,
+ 'EntryContext': ec,
+ 'HumanReadable': tableToMarkdown('Alert', [alert], ['ID', 'Status', 'FirstSeen', 'LastSeen', 'AlertTime', 'PolicyID',
+ 'PolicyName', 'PolicyType', 'PolicySystemDefault', 'PolicyLabels',
+ 'PolicyDescription', 'PolicySeverity', 'PolicyRecommendation',
+ 'PolicyDeleted', 'PolicyRemediable', 'PolicyLastModifiedOn',
+ 'PolicyLastModifiedBy', 'RiskScore', 'RiskRating',
+ 'ResourceName', 'ResourceRRN', 'ResourceID', 'ResourceAccount',
+ 'ResourceAccountID', 'ResourceType',
+ 'ResourceRegionID', 'ResourceApiName', 'ResourceUrl', 'ResourceData',
+ 'ResourceAccessKeyAge', 'ResourceInactiveSinceTs', 'ResourceCloudType'
+ ])
+ })
+
+
+def dismiss_alerts():
+ """
+ Dismiss the given list of alerts based on given filter
+ """
+ ids = argToList(demisto.getArg('alert-id'))
+ policies = argToList(demisto.getArg('policy-id'))
+ payload = {'alerts': ids, 'policies': policies, 'dismissalNote': demisto.getArg('dismissal-note'), 'filter': {}}
+ demisto.args().pop('alert-id', None)
+ handle_filters(payload['filter'])
+ handle_time_filter(payload['filter'], {'type': 'to_now', 'value': 'epoch'})
+ if not ids and not policies:
+ return_error('You must specify either alert-id or policy-id for dismissing alerts')
+ r = req('POST', 'alert/dismiss', payload, None)
+ ec = {}
+ if ids:
+ ec['Redlock.DismissedAlert.ID'] = ids
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': r,
+ 'EntryContext': ec,
+ 'HumanReadable': '### Alerts dismissed successfully. Dismissal Note: %s.' % demisto.getArg('dismissal-note')
+ })
+
+
+def reopen_alerts():
+ """
+ Reopen the given list of alerts based on given filter
+ """
+ ids = argToList(demisto.getArg('alert-id'))
+ policies = argToList(demisto.getArg('policy-id'))
+ payload = {'alerts': ids, 'policies': policies, 'filter': {}}
+ demisto.args().pop('alert-id', None)
+ handle_filters(payload['filter'])
+ handle_time_filter(payload['filter'], {'type': 'to_now', 'value': 'epoch'})
+ if not ids and not policies:
+ return_error('You must specify either alert-id or policy-id for re-opening alerts')
+ r = req('POST', 'alert/reopen', payload, None)
+ ec = {}
+ if ids:
+ ec['Redlock.ReopenedAlert.ID'] = ids
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': r,
+ 'EntryContext': ec,
+ 'HumanReadable': '### Alerts re-opened successfully.'
+ })
+
+
+def translate_severity(alert):
+ """
+ Translate alert severity to demisto
+ Might take risk grade into account in the future
+ """
+ sev = demisto.get(alert, 'policy.severity')
+ if sev == 'high':
+ return 3
+ elif sev == 'medium':
+ return 2
+ elif sev == 'low':
+ return 1
+ return 0
+
+
+def fetch_incidents():
+ """
+ Retrieve new incidents periodically based on pre-defined instance parameters
+ """
+ now = int((datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds() * 1000)
+ lastRunObject = demisto.getLastRun()
+    lastRun = lastRunObject.get('time') if lastRunObject else None
+ if not lastRun:
+ lastRun = now - 24 * 60 * 60 * 1000
+ payload = {
+ 'timeRange': {
+ 'type': 'absolute',
+ 'value': {
+ 'startTime': lastRun,
+ 'endTime': now
+ }
+ }
+ }
+ payload['filters'] = [{'name': 'alert.status', 'operator': '=', 'value': 'open'}] # type: ignore
+ if demisto.getParam('ruleName'):
+ payload['filters'].append({'name': 'alertRule.name', 'operator': '=', # type: ignore
+ 'value': demisto.getParam('ruleName')})
+ if demisto.getParam('policySeverity'):
+ payload['filters'].append({'name': 'policy.severity', 'operator': '=', # type: ignore
+ 'value': demisto.getParam('policySeverity')})
+ r = req('POST', 'alert', payload, {'detailed': 'true'})
+ incidents = []
+ for a in r:
+ incidents.append({
+            'name': (demisto.get(a, 'policy.name') or 'No policy') + ' - ' + a.get('id'),
+ 'occurred': convertUnixToDemisto(a.get('alertTime')),
+ 'severity': translate_severity(a),
+ 'rawJSON': json.dumps(a)
+ })
+ demisto.incidents(incidents)
+ demisto.setLastRun({'time': now})
+
+
+if demisto.command() == 'test-module':
+ get_token()
+ demisto.results('ok')
+elif demisto.command() == 'redlock-search-alerts':
+ search_alerts()
+elif demisto.command() == 'redlock-list-alert-filters':
+ list_filters()
+elif demisto.command() == 'redlock-get-alert-details':
+ get_alert_details()
+elif demisto.command() == 'redlock-dismiss-alerts':
+ dismiss_alerts()
+elif demisto.command() == 'redlock-reopen-alerts':
+ reopen_alerts()
+elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+else:
+ return_error('Unrecognized command: ' + demisto.command())
diff --git a/Integrations/RedLock/RedLock.yml b/Integrations/RedLock/RedLock.yml
new file mode 100644
index 000000000000..f09292b204da
--- /dev/null
+++ b/Integrations/RedLock/RedLock.yml
@@ -0,0 +1,488 @@
+commonfields:
+ id: RedLock
+ version: -1
+name: RedLock
+display: Prisma Cloud (RedLock)
+category: Network Security
+description: Cloud threat defense
+configuration:
+- display: Server URL
+ name: url
+ defaultvalue: https://api.redlock.io/
+ type: 0
+ required: true
+- display: Username
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: true
+- display: Customer name
+ name: customer
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Trust any certificate (not secure)
+ name: unsecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Fetch only incidents matching this rule name
+ name: ruleName
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Fetch only incidents with this severity
+ name: policySeverity
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+script:
+ script: ''
+ type: python
+ subtype: python2
+ commands:
+ - name: redlock-search-alerts
+ arguments:
+ - name: time-range-date-from
+ description: Start time for search in the following string format - MM/DD/YYYY
+ - name: time-range-date-to
+ description: End time for search in the following format - MM/DD/YYYY
+ - name: time-range-value
+      description: The number of units to go back in time
+ - name: time-range-unit
+ auto: PREDEFINED
+ predefined:
+ - hour
+ - day
+ - week
+ - month
+ - year
+ - login
+ - epoch
+      description: The search unit. login and epoch are only available if time-range-value
+        is not provided.
+ - name: policy-name
+ auto: PREDEFINED
+ predefined:
+ - IAM password policy does not have a symbol
+ - IAM password policy does not expire in 90 days
+ - IAM password policy does not have a lowercase character
+ - IAM password policy does not have a minimum of 14 characters
+ - IAM password policy allows password reuse
+ - Default Security Group does not restrict all traffic
+ - IAM password policy does not have password expiration period
+ - IAM password policy does not exist
+ - Access keys are not rotated for 90 days
+ - Security Groups allow internet traffic from internet to RDP port (3389)
+ - Internet connectivity via tcp over insecure port
+ - IAM policy allow full administrative privileges
+ - Primitive IAM roles should not be used
+ - Internet exposed instances
+ - IAM user has both Console access and Access Keys
+ - S3 buckets are accessible to public
+ - Access logging not enabled on all CloudTrail buckets
+ - CloudTrail trail is not integrated with CloudWatch Log
+ - Security Groups allow internet traffic to SSH port (22)
+ - CloudTrail logs are not encrypted using Customer Master Keys (CMKs)
+ - Excessive login failures
+ - VPC endpoints were not used for consuming S3 storage from within the VPC
+ - Access logging not enabled on S3 buckets
+ - S3 buckets do not have server side encryption
+ - Account hijacking attempts
+ - Security groups allow internet traffic
+ - VPC subnets should not allow automatic public IP assignment
+ - VPC Flow Logs not enabled
+ - MFA not enabled for IAM users
+ - Inactive users for more than 30 days
+ description: The policy name
+ - name: policy-label
+ description: The policy label
+ - name: policy-compliance-standard
+ description: The policy compliance standard
+ - name: cloud-account
+ description: The cloud account
+ - name: cloud-region
+ description: The cloud region
+ - name: alert-rule-name
+ description: The alert rule name
+ - name: resource-id
+ description: The resource ID
+ - name: resource-name
+ description: The resource name
+ - name: resource-type
+ description: The resource type
+ - name: alert-status
+ auto: PREDEFINED
+ predefined:
+ - open
+ - resolved
+ - dismissed
+ description: The alert status
+ defaultValue: open
+ - name: alert-id
+ description: The alert ID
+ - name: cloud-type
+ auto: PREDEFINED
+ predefined:
+ - aws
+ - azure
+ - gcp
+ description: The cloud type
+ - name: risk-grade
+ auto: PREDEFINED
+ predefined:
+ - A
+ - B
+ - C
+ - F
+ description: The risk grade
+ - name: policy-type
+ auto: PREDEFINED
+ predefined:
+ - anomaly
+ - audit_event
+ - config
+ - network
+ description: The policy type
+ - name: policy-severity
+ auto: PREDEFINED
+ predefined:
+ - high
+ - medium
+ - low
+ description: The policy severity
+ outputs:
+ - contextPath: Redlock.Alert.ID
+ description: ID of returned alert
+ type: string
+ - contextPath: Redlock.Alert.Status
+ description: Status of returned alert
+ type: string
+ - contextPath: Redlock.Alert.AlertTime
+ description: Time of alert
+ type: string
+ - contextPath: Redlock.Alert.Policy.ID
+ description: The policy ID
+ type: string
+ - contextPath: Redlock.Alert.Policy.Name
+ description: The policy name
+ type: string
+ - contextPath: Redlock.Alert.Policy.Type
+ description: The policy type
+ type: string
+ - contextPath: Redlock.Alert.Policy.Severity
+ description: The policy severity
+ type: string
+ - contextPath: Redlock.Alert.Policy.Remediable
+ description: Whether or not the policy is remediable
+ type: boolean
+ - contextPath: Redlock.Alert.RiskDetail.Rating
+ description: The risk rating
+ type: string
+ - contextPath: Redlock.Alert.RiskDetail.Score
+ description: The risk score
+ type: string
+ - contextPath: Redlock.Metadata.CountOfAlerts
+ description: The number of alerts found
+ type: number
+ description: Search alerts on the RedLock platform
+ - name: redlock-get-alert-details
+ arguments:
+ - name: alert-id
+ required: true
+ default: true
+ description: The alert ID
+ - name: detailed
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+      description: Whether to retrieve the full alert model ("true") or a trimmed version ("false")
+ defaultValue: "true"
+ outputs:
+ - contextPath: Redlock.Alert.ID
+ description: The alert ID
+ type: string
+ - contextPath: Redlock.Alert.Status
+ description: The alert status
+ type: string
+ - contextPath: Redlock.Alert.AlertTime
+ description: The time of the alert
+ type: date
+ - contextPath: Redlock.Alert.Policy.ID
+ description: The policy ID
+ type: string
+ - contextPath: Redlock.Alert.Policy.Name
+ description: The policy name
+ type: string
+ - contextPath: Redlock.Alert.Policy.Type
+ description: The type of policy
+ type: string
+ - contextPath: Redlock.Alert.Policy.Severity
+ description: The policy severity
+ type: string
+ - contextPath: Redlock.Alert.Policy.Remediable
+ description: Whether or not the policy is remediable
+ type: boolean
+ - contextPath: Redlock.Alert.RiskDetail.Rating
+ description: The risk rating
+ type: string
+ - contextPath: Redlock.Alert.RiskDetail.Score
+ description: The risk score
+ type: string
+ description: Gets the details of an alert based on alert ID
+ - name: redlock-dismiss-alerts
+ arguments:
+ - name: alert-id
+      description: A comma-separated list of alert IDs to dismiss
+ - name: dismissal-note
+ required: true
+ description: Reason for dismissal
+ - name: time-range-date-from
+ description: Start time for search in the following string format - MM/DD/YYYY
+ - name: time-range-date-to
+ description: End time for search in the following format - MM/DD/YYYY
+ - name: time-range-value
+      description: The number of units to go back in time
+ - name: time-range-unit
+ auto: PREDEFINED
+ predefined:
+ - hour
+ - day
+ - week
+ - month
+ - year
+ - login
+ - epoch
+ description: The search unit
+ - name: policy-name
+ auto: PREDEFINED
+ predefined:
+ - IAM password policy does not have a symbol
+ - IAM password policy does not expire in 90 days
+ - IAM password policy does not have a lowercase character
+ - IAM password policy does not have a minimum of 14 characters
+ - IAM password policy allows password reuse
+ - Default Security Group does not restrict all traffic
+ - IAM password policy does not have password expiration period
+ - IAM password policy does not exist
+ - Access keys are not rotated for 90 days
+ - Security Groups allow internet traffic from internet to RDP port (3389)
+ - Internet connectivity via tcp over insecure port
+ - IAM policy allow full administrative privileges
+ - Primitive IAM roles should not be used
+ - Internet exposed instances
+ - IAM user has both Console access and Access Keys
+ - S3 buckets are accessible to public
+ - Access logging not enabled on all CloudTrail buckets
+ - CloudTrail trail is not integrated with CloudWatch Log
+ - Security Groups allow internet traffic to SSH port (22)
+ - CloudTrail logs are not encrypted using Customer Master Keys (CMKs)
+ - Excessive login failures
+ - VPC endpoints were not used for consuming S3 storage from within the VPC
+ - Access logging not enabled on S3 buckets
+ - S3 buckets do not have server side encryption
+ - Account hijacking attempts
+ - Security groups allow internet traffic
+ - VPC subnets should not allow automatic public IP assignment
+ - VPC Flow Logs not enabled
+ - MFA not enabled for IAM users
+ - Inactive users for more than 30 days
+ description: The policy name
+ - name: policy-label
+ description: The policy label
+ - name: policy-compliance-standard
+ description: The policy compliance standard
+ - name: cloud-account
+ description: The cloud account
+ - name: cloud-region
+ description: The cloud region
+ - name: alert-rule-name
+ description: The alert rule name
+ - name: resource-id
+ description: The resource ID
+ - name: resource-name
+ description: The resource name
+ - name: resource-type
+ description: The resource type
+ - name: alert-status
+ auto: PREDEFINED
+ predefined:
+ - open
+ - resolved
+ - dismissed
+ description: The alert status
+ - name: cloud-type
+ auto: PREDEFINED
+ predefined:
+ - aws
+ - azure
+ - gcp
+ description: The cloud type
+ - name: risk-grade
+ auto: PREDEFINED
+ predefined:
+ - A
+ - B
+ - C
+ - F
+ description: The risk grade
+ - name: policy-type
+ auto: PREDEFINED
+ predefined:
+ - anomaly
+ - audit_event
+ - config
+ - network
+ description: The policy type
+ - name: policy-severity
+ auto: PREDEFINED
+ predefined:
+ - high
+ - medium
+ - low
+ description: The policy severity
+ - name: policy-id
+      description: A comma-separated list of policy IDs
+ outputs:
+ - contextPath: Redlock.DismissedAlert.ID
+ description: The IDs of the dismissed alerts
+ type: string
+ description: Dismiss the alerts matching the given filter. Must provide either
+ policy IDs or alert IDs.
+ execution: true
+ - name: redlock-reopen-alerts
+ arguments:
+ - name: alert-id
+ description: The IDs of alerts to reopen
+ - name: time-range-date-from
+ description: Start time for search in the following string format - MM/DD/YYYY
+ - name: time-range-date-to
+ description: End time for search in the following format - MM/DD/YYYY
+ - name: time-range-value
+      description: The number of units to go back in time
+ - name: time-range-unit
+ auto: PREDEFINED
+ predefined:
+ - hour
+ - day
+ - week
+ - month
+ - year
+ - login
+ - epoch
+ description: The search unit
+ - name: policy-name
+ auto: PREDEFINED
+ predefined:
+ - IAM password policy does not have a symbol
+ - IAM password policy does not expire in 90 days
+ - IAM password policy does not have a lowercase character
+ - IAM password policy does not have a minimum of 14 characters
+ - IAM password policy allows password reuse
+ - Default Security Group does not restrict all traffic
+ - IAM password policy does not have password expiration period
+ - IAM password policy does not exist
+ - Access keys are not rotated for 90 days
+ - Security Groups allow internet traffic from internet to RDP port (3389)
+ - Internet connectivity via tcp over insecure port
+ - IAM policy allow full administrative privileges
+ - Primitive IAM roles should not be used
+ - Internet exposed instances
+ - IAM user has both Console access and Access Keys
+ - S3 buckets are accessible to public
+ - Access logging not enabled on all CloudTrail buckets
+ - CloudTrail trail is not integrated with CloudWatch Log
+ - Security Groups allow internet traffic to SSH port (22)
+ - CloudTrail logs are not encrypted using Customer Master Keys (CMKs)
+ - Excessive login failures
+ - VPC endpoints were not used for consuming S3 storage from within the VPC
+ - Access logging not enabled on S3 buckets
+ - S3 buckets do not have server side encryption
+ - Account hijacking attempts
+ - Security groups allow internet traffic
+ - VPC subnets should not allow automatic public IP assignment
+ - VPC Flow Logs not enabled
+ - MFA not enabled for IAM users
+ - Inactive users for more than 30 days
+ description: The policy name
+ - name: policy-label
+ description: The policy label
+ - name: policy-compliance-standard
+ description: The policy compliance standard
+ - name: cloud-account
+ description: The cloud account
+ - name: cloud-region
+ description: The cloud region
+ - name: alert-rule-name
+ description: The alert rule name
+ - name: resource-id
+ description: The resource ID
+ - name: resource-name
+ description: The resource name
+ - name: resource-type
+ description: The resource type
+ - name: alert-status
+ auto: PREDEFINED
+ predefined:
+ - open
+ - resolved
+ - dismissed
+ description: The alert status
+ - name: cloud-type
+ auto: PREDEFINED
+ predefined:
+ - aws
+ - azure
+ - gcp
+ description: The cloud type
+ - name: risk-grade
+ auto: PREDEFINED
+ predefined:
+ - A
+ - B
+ - C
+ - F
+ description: The risk grade
+ - name: policy-type
+ auto: PREDEFINED
+ predefined:
+ - anomaly
+ - audit_event
+ - config
+ - network
+ description: The policy type
+ - name: policy-severity
+ auto: PREDEFINED
+ predefined:
+ - high
+ - medium
+ - low
+ description: The policy severity
+ outputs:
+ - contextPath: Redlock.ReopenedAlert.ID
+ description: IDs of the re-opened alerts
+ type: string
+ description: Re-open the alerts matching the given filter. Must provide either
+ policy IDs or alert IDs.
+ execution: true
+ - name: redlock-list-alert-filters
+ arguments: []
+ description: List the acceptable filters and values for alerts
+ isfetch: true
+ runonce: false
diff --git a/Integrations/RedLock/RedLock_description.md b/Integrations/RedLock/RedLock_description.md
new file mode 100644
index 000000000000..40c9a046f651
--- /dev/null
+++ b/Integrations/RedLock/RedLock_description.md
@@ -0,0 +1,2 @@
+## Prisma Cloud
+Use the Prisma Cloud integration to manage alerts from Microsoft Azure, Google Cloud Platform, and AWS.
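+
+For example, assuming a configured instance, recent high-severity alerts can be searched from the CLI with the arguments this integration defines:
+
+    !redlock-search-alerts time-range-unit=day time-range-value=7 policy-severity=high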
diff --git a/Integrations/RedLock/RedLock_image.png b/Integrations/RedLock/RedLock_image.png
new file mode 100644
index 000000000000..10d6be781f1a
Binary files /dev/null and b/Integrations/RedLock/RedLock_image.png differ
diff --git a/Integrations/SMB/SMB.py b/Integrations/SMB/SMB.py
new file mode 100644
index 000000000000..73072bee49ce
--- /dev/null
+++ b/Integrations/SMB/SMB.py
@@ -0,0 +1,121 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+
+''' IMPORTS '''
+
+
+import tempfile
+from smb.SMBConnection import SMBConnection
+
+''' GLOBAL VARS '''
+
+
+USER = demisto.params()['credentials']['identifier']
+PASSWORD = demisto.params()['credentials']['password']
+HOSTNAME = demisto.params()['hostname']
+PORT = int(demisto.params()['port'])
+NBNAME = demisto.params()['nbname']
+DOMAIN = demisto.params().get('domain', None)
+
+
+''' HELPER FUNCTIONS '''
+
+
+def split_path(path):
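+    # Split off the share name, e.g. 'Share/Folder/File.txt' -> ['Share', 'Folder/File.txt']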
+ delim = '/' if '/' in path else '\\'
+ path = path.strip(delim)
+ return path.split(delim, 1)
+
+
+def connect(hostname, domain, user, password, nb_name, port):
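+    # is_direct_tcp=True runs SMB over direct TCP (the instance's default
+    # port, 445) rather than the NetBIOS session service.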
+ if not domain:
+ connection = SMBConnection(user, password, 'Demisto', nb_name, is_direct_tcp=True)
+ else:
+ connection = SMBConnection(user, password, 'Demisto', nb_name, domain=domain, is_direct_tcp=True)
+ if not connection.connect(hostname, port):
+ return_error('Authentication failed, verify instance configuration parameters and try again.')
+ return connection
+
+
+''' FUNCTIONS '''
+
+
+def test_module():
+ if HOSTNAME and NBNAME:
+ connection = connect(hostname=HOSTNAME, domain=DOMAIN, user=USER, password=PASSWORD, nb_name=NBNAME, port=PORT)
+ demisto.results('ok')
+ connection.close()
+ else:
+ demisto.results('No hostname or NetBIOS name was configured, cannot perform a connection test.')
+
+
+def smb_download():
+ share, path = split_path(demisto.getArg('file_path'))
+    hostname = demisto.args().get('hostname') or HOSTNAME
+    nbname = demisto.args().get('nbname') or NBNAME
+    domain = demisto.args().get('domain') or DOMAIN
+
+ if not hostname:
+ return_error('No hostname was configured for the integration, cannot establish connection.')
+ elif not nbname:
+ return_error('No NetBIOS name was configured for the integration, cannot establish connection.')
+ connection = connect(hostname=hostname, domain=domain, user=USER, password=PASSWORD, nb_name=nbname, port=PORT)
+ try:
+ with tempfile.NamedTemporaryFile() as file_obj:
+ file_attributes, filesize = connection.retrieveFile(share, path, file_obj)
+ file_obj.seek(0)
+ filename = path.split('/')[-1] if '/' in path else path.split('\\')[-1]
+ if demisto.getArg('download_and_attach') == "yes":
+ demisto.results(fileResult(filename, file_obj.read()))
+ else:
+ demisto.results(file_obj.read())
+ finally:
+ connection.close()
+
+
+def smb_upload():
+ share, path = split_path(demisto.getArg('file_path'))
+ entryID = demisto.getArg('entryID')
+ content = demisto.getArg('content')
+    hostname = demisto.args().get('hostname') or HOSTNAME
+    nbname = demisto.args().get('nbname') or NBNAME
+    domain = demisto.args().get('domain') or DOMAIN
+
+ if not hostname:
+ return_error('No hostname was configured for the integration, cannot establish connection.')
+ elif not nbname:
+ return_error('No NetBIOS name was configured for the integration, cannot establish connection.')
+ connection = connect(hostname=hostname, domain=domain, user=USER, password=PASSWORD, nb_name=nbname, port=PORT)
+ try:
+ if not entryID and not content:
+ raise Exception("smb-upload requires one of the following arguments: content, entryID.")
+ if entryID:
+ file = demisto.getFilePath(entryID)
+ filePath = file['path']
+ with open(filePath, mode='rb') as f:
+ content = f.read()
+
+ with tempfile.NamedTemporaryFile() as file_obj:
+ file_obj.write(content)
+ file_obj.seek(0)
+            file_bytes_transferred = connection.storeFile(share, path, file_obj)
+        demisto.results("Transferred {} bytes of data.".format(file_bytes_transferred))
+ finally:
+ connection.close()
+
+
+''' EXECUTION CODE '''
+
+LOG('command is %s' % (demisto.command(),))
+
+try:
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() == 'smb-download':
+ smb_download()
+ elif demisto.command() == 'smb-upload':
+ smb_upload()
+except Exception as e:
+ return_error(str(e))
diff --git a/Integrations/SMB/SMB.yml b/Integrations/SMB/SMB.yml
new file mode 100644
index 000000000000..1d628487c733
--- /dev/null
+++ b/Integrations/SMB/SMB.yml
@@ -0,0 +1,122 @@
+category: Utilities
+commonfields:
+ id: Server Message Block (SMB)
+ version: -1
+configuration:
+- display: Server IP / Hostname (e.g. 1.2.3.4)
+ name: hostname
+ required: false
+ type: 0
+- defaultvalue: '445'
+ display: Port
+ name: port
+ required: true
+ type: 0
+- display: Server NetBIOS (AD) Name
+ name: nbname
+ required: false
+ type: 0
+- display: Domain
+ name: domain
+ required: false
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- defaultvalue: 'false'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: File exchange with an SMB server.
+display: Server Message Block (SMB)
+name: Server Message Block (SMB)
+script:
+ commands:
+ - arguments:
+ - name: hostname
+ default: false
+ description: 'Server IP address / hostname, for example: 1.2.3.4.'
+ - name: nbname
+ default: false
+ description: Server NetBIOS (AD) name.
+ - name: domain
+ default: false
+ description: The host domain
+ - name: file_path
+ required: true
+ default: true
+ description: 'The path to the file, starting from the share, for example: Share/Folder/File.'
+ - name: download_and_attach
+ default: false
+ auto: PREDEFINED
+ predefined:
+ - "yes"
+ - "no"
+ description: 'If "yes", the file is downloaded and attached. If "no", only the output is attached. Default is yes".'
+ defaultValue: "yes"
+ outputs:
+ - contextPath: File.Size
+ description: File size.
+ type: number
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file.
+ type: string
+ - contextPath: File.Name
+ description: File name.
+ type: string
+ - contextPath: File.SSDeep
+ description: SSDeep hash of the file.
+ type: string
+ - contextPath: File.EntryID
+ description: File entry ID.
+ type: string
+ - contextPath: File.Info
+ description: Information about the file.
+ type: string
+ - contextPath: File.Type
+ description: File type.
+ type: string
+ - contextPath: File.MD5
+ description: MD5 hash of the file.
+ type: string
+ deprecated: false
+ description: Downloads a file from the SMB server.
+ execution: false
+ name: smb-download
+ - arguments:
+ - name: hostname
+ default: false
+ description: 'Server IP address / hostname, for example: 1.2.3.4.'
+ - name: nbname
+ default: false
+ description: Server NetBIOS (AD) name.
+ - name: domain
+ default: false
+ description: The host domain
+ - name: file_path
+ default: false
+ required: true
+ description: 'The path to the file, starting from the share, for example: Share/Folder/File.'
+ - name: entryID
+ default: false
+ description: EntryID of the file to send to the share.
+ - name: content
+ default: false
+ description: File content to send to the share. Ignored if EntryID argument
+ is specified.
+ deprecated: false
+ description: Uploads a file to the SMB server.
+ execution: false
+ name: smb-upload
+ dockerimage: demisto/pysmb
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- SMB test
diff --git a/Integrations/SMB/SMB_description.md b/Integrations/SMB/SMB_description.md
new file mode 100644
index 000000000000..e87e3d2fd544
--- /dev/null
+++ b/Integrations/SMB/SMB_description.md
@@ -0,0 +1,2 @@
+This integration enables uploading files to, and downloading files from, an SMB server.
+You can configure the server details either as integration parameters or as arguments to the individual commands.
\ No newline at end of file
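+
+For example, assuming a configured instance, the commands defined by this integration can be run from the CLI:
+
+    !smb-download file_path="Share/Folder/File.txt" download_and_attach=yes
+    !smb-upload file_path="Share/Folder/File.txt" entryID=<file entry ID>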
diff --git a/Integrations/SMB/SMB_image.png b/Integrations/SMB/SMB_image.png
new file mode 100644
index 000000000000..f42ecab9edf2
Binary files /dev/null and b/Integrations/SMB/SMB_image.png differ
diff --git a/Integrations/SMIME_Messaging/CHANGELOG.md b/Integrations/SMIME_Messaging/CHANGELOG.md
new file mode 100644
index 000000000000..e21faa4385aa
--- /dev/null
+++ b/Integrations/SMIME_Messaging/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+#### New Integration
+Use the S/MIME (Secure Multipurpose Internet Mail Extensions) integration to send and receive secure MIME data.
\ No newline at end of file
diff --git a/Integrations/SMIME_Messaging/SMIME_Messaging.py b/Integrations/SMIME_Messaging/SMIME_Messaging.py
new file mode 100644
index 000000000000..3237939546ab
--- /dev/null
+++ b/Integrations/SMIME_Messaging/SMIME_Messaging.py
@@ -0,0 +1,206 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+from M2Crypto import BIO, SMIME, X509
+from typing import Dict
+from tempfile import NamedTemporaryFile
+
+
+''' HELPER FUNCTIONS '''
+
+
+def makebuf(text):
+ return BIO.MemoryBuffer(text)
+
+
+class Client:
+ def __init__(self, private_key, public_key):
+ self.smime = SMIME.SMIME()
+
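+        # M2Crypto loads key material from file paths, so the PEM strings are
+        # written to temporary files; main() removes them when done.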
+ public_key_file = NamedTemporaryFile(delete=False)
+ public_key_file.write(bytes(public_key, 'utf-8'))
+ self.public_key_file = public_key_file.name
+ public_key_file.close()
+
+ private_key_file = NamedTemporaryFile(delete=False)
+ private_key_file.write(bytes(private_key, 'utf-8'))
+ self.private_key_file = private_key_file.name
+ private_key_file.close()
+
+
+''' COMMANDS '''
+
+
+def sign_email(client: Client, args: Dict):
+ """
+    Sign the given message body with S/MIME.
+ """
+ message_body = args.get('message_body', '')
+
+ buf = makebuf(message_body.encode())
+
+ client.smime.load_key(client.private_key_file, client.public_key_file)
+ p7 = client.smime.sign(buf, SMIME.PKCS7_DETACHED)
+
+ buf = makebuf(message_body.encode())
+
+ out = BIO.MemoryBuffer()
+ client.smime.write(out, p7, buf)
+
+ signed = out.read().decode('utf-8')
+ signed_message = signed.split('\n\n')
+ headers = signed_message[0].replace(': ', '=').replace('\n', ',')
+ body = signed_message[2:]
+ context = {
+ 'SMIME.Signed': {
+ 'Message': body,
+ 'Headers': headers
+ }
+ }
+
+ return signed, context
+
+
+def encrypt_email_body(client: Client, args: Dict):
+ """ generate an S/MIME-encrypted message
+
+ Args:
+ client: Client
+ args: Dict
+
+ """
+ message_body = args.get('message', '').encode('utf-8')
+
+ buf = makebuf(message_body)
+
+ x509 = X509.load_cert(client.public_key_file)
+ sk = X509.X509_Stack()
+ sk.push(x509)
+ client.smime.set_x509_stack(sk)
+
+ client.smime.set_cipher(SMIME.Cipher('des_ede3_cbc'))
+
+ p7 = client.smime.encrypt(buf)
+
+ out = BIO.MemoryBuffer()
+
+ client.smime.write(out, p7)
+ encrypted_message = out.read().decode('utf-8')
+ message = encrypted_message.split('\n\n')
+ headers = message[0]
+ new_headers = headers.replace(': ', '=').replace('\n', ',')
+
+ entry_context = {
+ 'SMIME.Encrypted': {
+ 'Message': encrypted_message,
+ 'Headers': new_headers
+ }
+ }
+ return encrypted_message, entry_context
+
+
+def verify(client: Client, args: Dict):
+ """ Verify the signature
+
+ Args:
+ client: Client
+ args: Dict
+
+ """
+ signed_message = demisto.getFilePath(args.get('signed_message'))
+
+ x509 = X509.load_cert(client.public_key_file)
+ sk = X509.X509_Stack()
+ sk.push(x509)
+ client.smime.set_x509_stack(sk)
+
+ st = X509.X509_Store()
+ st.load_info(client.public_key_file)
+ client.smime.set_x509_store(st)
+
+ p7, data = SMIME.smime_load_pkcs7(signed_message['path'])
+ v = client.smime.verify(p7, data, flags=SMIME.PKCS7_NOVERIFY)
+
+    human_readable = f'The signature is verified.\n\n{v}'
+ return human_readable, {}
+
+
+def decrypt_email_body(client: Client, args: Dict, file_path=None):
+ """ Decrypt the message
+
+ Args:
+ client: Client
+ args: Dict
+ file_path: relevant for the test module
+ """
+ if file_path:
+ encrypt_message = file_path
+ else:
+ encrypt_message = demisto.getFilePath(args.get('encrypt_message'))
+
+ client.smime.load_key(client.private_key_file, client.public_key_file)
+
+ p7, data = SMIME.smime_load_pkcs7(encrypt_message['path'])
+
+ out = client.smime.decrypt(p7).decode('utf-8')
+ entry_context = {
+ 'SMIME.Decrypted': {
+ 'Message': out
+ }
+ }
+ human_readable = f'The decrypted message is: \n{out}'
+
+ return human_readable, entry_context
+
+
+def test_module(client, *_):
+    message_body = 'testing'
+    test_file = None
+    try:
+        encrypt_message = encrypt_email_body(client, {'message': message_body})
+        if encrypt_message:
+            test_file = NamedTemporaryFile(delete=False)
+            test_file.write(bytes(encrypt_message[0], 'utf-8'))
+            test_file.close()
+            decrypt_message = decrypt_email_body(client, {}, file_path={'path': test_file.name})
+            if decrypt_message:
+                demisto.results('ok')
+    except Exception:
+        return_error('Verify that you provided valid keys.')
+    finally:
+        # test_file is only created once encryption succeeds
+        if test_file:
+            os.unlink(test_file.name)
+
+
+def main():
+
+ public_key: str = demisto.params().get('public_key', '')
+ private_key: str = demisto.params().get('private_key', '')
+
+ client = Client(private_key, public_key)
+ LOG(f'Command being called is {demisto.command()}')
+ commands = {
+ 'test-module': test_module,
+ 'smime-sign-email': sign_email,
+ 'smime-encrypt-email-body': encrypt_email_body,
+ 'smime-verify-sign': verify,
+ 'smime-decrypt-email-body': decrypt_email_body
+ }
+ try:
+ command = demisto.command()
+ if command in commands:
+ return_outputs(*commands[command](client, demisto.args())) # type: ignore
+
+ except Exception as e:
+ return_error(str(e))
+
+ finally:
+ if client.private_key_file:
+ os.unlink(client.private_key_file)
+ if client.public_key_file:
+ os.unlink(client.public_key_file)
+
+
+if __name__ in ['__main__', 'builtin', 'builtins']:
+ main()
diff --git a/Integrations/SMIME_Messaging/SMIME_Messaging.yml b/Integrations/SMIME_Messaging/SMIME_Messaging.yml
new file mode 100644
index 000000000000..b1972c063d74
--- /dev/null
+++ b/Integrations/SMIME_Messaging/SMIME_Messaging.yml
@@ -0,0 +1,92 @@
+category: Utilities
+commonfields:
+ id: SMIME Messaging
+ version: -1
+configuration:
+- display: Public Key
+ name: public_key
+ required: true
+ type: 12
+- display: Private Key
+ name: private_key
+ required: true
+ type: 14
+description: Use the S/MIME (Secure Multipurpose Internet Mail Extensions) integration
+ to send and receive secure MIME data.
+display: SMIME Messaging
+name: SMIME Messaging
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: The message body to send.
+ isArray: false
+ name: message_body
+ required: true
+ secret: false
+ deprecated: false
+    description: Signs an email message body with the S/MIME protocol, using the configured key pair.
+ execution: false
+ name: smime-sign-email
+ outputs:
+ - contextPath: SMIME.Signed.Message
+ description: The signed message body.
+ type: String
+ - contextPath: SMIME.Signed.Headers
+ description: The S/MIME signing headers.
+ type: String
+ - arguments:
+ - default: false
+ description: The message body to encrypt.
+ isArray: false
+ name: message
+ required: true
+ secret: false
+ deprecated: false
+ description: Encrypts an email message with S/MIME protocol by using a public
+ RSA certificate.
+ execution: false
+ name: smime-encrypt-email-body
+ outputs:
+ - contextPath: SMIME.Encrypted.Message
+ description: The encrypted message.
+ type: String
+ - contextPath: SMIME.Encrypted.Headers
+ description: The encryption headers.
+ type: String
+ - arguments:
+ - default: false
+ description: The signed email with .p7 extension.
+ isArray: false
+ name: signed_message
+ required: true
+ secret: false
+ deprecated: false
+ description: Verifies the signature.
+ execution: false
+ name: smime-verify-sign
+ - arguments:
+ - default: false
+ description: The encrypted message with .p7 extension.
+ isArray: false
+ name: encrypt_message
+ required: true
+ secret: false
+ deprecated: false
+ description: Decrypts the message body.
+ execution: false
+ name: smime-decrypt-email-body
+ outputs:
+ - contextPath: SMIME.Decrypted.Message
+ description: The decrypted message.
+ type: String
+ dockerimage: demisto/m2crypto:1.0.0.1850
+ isfetch: false
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- No tests - added unit tests
diff --git a/Integrations/SMIME_Messaging/SMIME_Messaging_description.md b/Integrations/SMIME_Messaging/SMIME_Messaging_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/SMIME_Messaging/SMIME_Messaging_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/SMIME_Messaging/SMIME_Messaging_image.png b/Integrations/SMIME_Messaging/SMIME_Messaging_image.png
new file mode 100644
index 000000000000..99a47140d513
Binary files /dev/null and b/Integrations/SMIME_Messaging/SMIME_Messaging_image.png differ
diff --git a/Integrations/SMIME_Messaging/SMIME_Messaging_test.py b/Integrations/SMIME_Messaging/SMIME_Messaging_test.py
new file mode 100644
index 000000000000..48e42cae52ef
--- /dev/null
+++ b/Integrations/SMIME_Messaging/SMIME_Messaging_test.py
@@ -0,0 +1,48 @@
+from SMIME_Messaging import Client, sign_email, encrypt_email_body, verify, decrypt_email_body
+import demistomock as demisto
+
+
+with open('./test_data/signer_key.pem') as f:
+ private_key = f.read()
+with open('./test_data/signer.pem') as file_:
+ public_key = file_.read()
+
+client = Client(private_key, public_key)
+
+
+def test_sign():
+ message_body = 'text to check'
+
+ sign, _ = sign_email(client, {'message_body': message_body})
+ assert 'MIME-Version: 1.0\nContent-Type: multipart/signed; protocol="application/x-pkcs7-signature"; ' \
+ 'micalg="sha1";' in sign
+
+
+def test_verify(mocker):
+
+ mocker.patch.object(demisto, 'getFilePath', return_value={'path': './test_data/signed.p7'})
+
+ v, _ = verify(client, {})
+ assert 'a sign of our times' in v
+
+
+def test_encrypt(mocker):
+
+ mocker.patch.object(demisto, 'args', return_value={'message': 'testing message'})
+ encrypt, _ = encrypt_email_body(client, {})
+ assert 'MIME-Version: 1.0\nContent-Disposition: attachment; filename="smime.p7m"\n' \
+ 'Content-Type: application/x-pkcs7-mime; smime-type=enveloped-data; name="smime.p7m"\n' \
+ 'Content-Transfer-Encoding: base64' in encrypt
+
+
+def test_decrypt(mocker):
+ mocker.patch.object(demisto, 'getFilePath', return_value={'path': './test_data/encrypt.p7'})
+
+ decrypted, _ = decrypt_email_body(client, {})
+ assert 'Hello world' in decrypted
+
+
+def test_test_module(mocker):
+ from SMIME_Messaging import test_module
+ mocker.patch.object(demisto, 'results')
+ test_module(client)
diff --git a/Integrations/SMIME_Messaging/test_data/encrypt.p7 b/Integrations/SMIME_Messaging/test_data/encrypt.p7
new file mode 100644
index 000000000000..86d91944103a
--- /dev/null
+++ b/Integrations/SMIME_Messaging/test_data/encrypt.p7
@@ -0,0 +1,12 @@
+MIME-Version: 1.0
+Content-Disposition: attachment; filename="smime.p7m"
+Content-Type: application/x-pkcs7-mime; smime-type=enveloped-data; name="smime.p7m"
+Content-Transfer-Encoding: base64
+
+MIIBMAYJKoZIhvcNAQcDoIIBITCCAR0CAQAxgeIwgd8CAQAwSDA7MQswCQYDVQQG
+EwJJTDEPMA0GA1UECAwGSXNyYWVsMRswGQYDVQQKDBJQYWxvIEFsdG8gTmV0d29y
+a3MCCQCds7D9QJpcpTANBgkqhkiG9w0BAQEFAASBgGRdxYfX5SBXHvcyKVlLSZKl
+FigWl4dhvj7JTHFEmuAASOimCo30vY53pSTCL7j6tAdx2mQ4ZSbkbN8WIYT1rTBD
+71uRwUBUIxT5zKSrgMD85lUnAM74nB/RLizLRCtZvXBtYw/7mfZa1MBEEDl16fo7
+ZoitjOp09UCfEKwfcfsPMDMGCSqGSIb3DQEHATAUBggqhkiG9w0DBwQIn1NXRUH2
+aAWAELbY+rIVcbAOU2Gj6W7okLs=
\ No newline at end of file
diff --git a/Integrations/SMIME_Messaging/test_data/recipient.pem b/Integrations/SMIME_Messaging/test_data/recipient.pem
new file mode 100644
index 000000000000..c12388cbd14f
--- /dev/null
+++ b/Integrations/SMIME_Messaging/test_data/recipient.pem
@@ -0,0 +1,13 @@
+-----BEGIN CERTIFICATE-----
+MIIB7TCCAVYCCQCX+tuiFyU7rTANBgkqhkiG9w0BAQsFADA7MQswCQYDVQQGEwJJ
+TDEPMA0GA1UECAwGSXNyYWVsMRswGQYDVQQKDBJQYWxvIEFsdG8gTmV0d29ya3Mw
+HhcNMTkxMDA4MDkzNzIyWhcNMjAxMDA3MDkzNzIyWjA7MQswCQYDVQQGEwJJTDEP
+MA0GA1UECAwGSXNyYWVsMRswGQYDVQQKDBJQYWxvIEFsdG8gTmV0d29ya3MwgZ8w
+DQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBALC1k1PFEcW5PFcP6whb3UayoWURdigg
+YAORpZ0bwwHYSkLhGbRZNMM20W3sHeNSpcgWAyDHtsoHxEagho2MCLGXlQA5YPVg
+7PaSP7+qsVOwy7FEcsGfo8+Q4x9D0nanv1VZmWCzAcMUwpR1pGqG03PbGrOUTpVK
+4cZH8Qq1vJJxAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAnNi4aLREAnEvWyoLGOy4
+jLdlrC64Bs4XT0GJpjJpFrn+Au3yl/9XlEBM1SFs8szn3MScNgDRksi4zehLlC3z
+ke5btGdJuKiZe++F7hhO4cunyZtQm+xfmasWbyh5HrjxoCB8oqvqvh+hnDU4wHK9
+yX/lGcnhS1ZtT22DysMznKs=
+-----END CERTIFICATE-----
diff --git a/Integrations/SMIME_Messaging/test_data/recipient_key.pem b/Integrations/SMIME_Messaging/test_data/recipient_key.pem
new file mode 100644
index 000000000000..803df756a61c
--- /dev/null
+++ b/Integrations/SMIME_Messaging/test_data/recipient_key.pem
@@ -0,0 +1,16 @@
+-----BEGIN PRIVATE KEY-----
+MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBALC1k1PFEcW5PFcP
+6whb3UayoWURdiggYAORpZ0bwwHYSkLhGbRZNMM20W3sHeNSpcgWAyDHtsoHxEag
+ho2MCLGXlQA5YPVg7PaSP7+qsVOwy7FEcsGfo8+Q4x9D0nanv1VZmWCzAcMUwpR1
+pGqG03PbGrOUTpVK4cZH8Qq1vJJxAgMBAAECgYEArPVseGfBV3hRGR4ZfBr+YD5j
+Plty/R+DGrsqELaljl63gPJ2iFFwFUrIfgDH2EKCxW7WLPWNcf1cHYztZgoUTq0R
+J5dyfenSr2gxKk+kTOMfDPOzuxKKZU1+XfC+LeVY/G69iAF+v+J26WNw67RC5sSm
+ygkxAW25jQCEf7ju1oECQQDoB9J49wBnyLRLn/GEILWIc8IlOhWHNVRVIkdvWwgB
+k6LcYjeEEUA8IviD8igdn11T/u+2KDGe7q9sbCQTDZxpAkEAwvbAUaBU2WdVPLNZ
+xdzyEIKWphZEg7+2c2VLUGY6mgr2+2JAaAGcT608EsurOyl99zDHRqzjVmeg5v2w
+O94kyQJAOZ0fnNuwJRUH97M6VBWPHdeogHfgupb6Xr1JQZQ387RF/zUAFfH2ojgI
+vzKU1W4pzV6DBRcL4yvfMSvU9Uou6QJBAJoucWEcHn3fjjT20SP82yHbZC/spDpc
+vnj7vlfSwL1ZtoCO7shRncAVC9EBaGTdJ1T5Kl7mEtMeGM0UCu8NeqECQHcpYgQy
+JUNXYO24QeoqtBwW+suQcBNuDbZZDXYHOwxBcwtMIdZtRi88NNv/3mgo8XFGN9+7
+4Zm2B1hT5jf4vRY=
+-----END PRIVATE KEY-----
\ No newline at end of file
diff --git a/Integrations/SMIME_Messaging/test_data/signed.p7 b/Integrations/SMIME_Messaging/test_data/signed.p7
new file mode 100644
index 000000000000..ad47a5cf18b0
--- /dev/null
+++ b/Integrations/SMIME_Messaging/test_data/signed.p7
@@ -0,0 +1,46 @@
+MIME-Version: 1.0
+Content-Type: multipart/signed; protocol="application/x-pkcs7-signature"; micalg="sha1"; boundary="----B58C5A5B909A51DD107E2194490EC974"
+
+This is an S/MIME signed message
+
+------B58C5A5B909A51DD107E2194490EC974
+a sign of our times
+------B58C5A5B909A51DD107E2194490EC974
+Content-Type: application/x-pkcs7-signature; name="smime.p7s"
+Content-Transfer-Encoding: base64
+Content-Disposition: attachment; filename="smime.p7s"
+
+MIIF8QYJKoZIhvcNAQcCoIIF4jCCBd4CAQExCzAJBgUrDgMCGgUAMAsGCSqGSIb3
+DQEHAaCCA2MwggNfMIICR6ADAgECAgEDMA0GCSqGSIb3DQEBCwUAME8xCzAJBgNV
+BAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMREwDwYDVQQKDAhNMkNyeXB0bzEY
+MBYGA1UEAwwPSGVpa2tpIFRvaXZvbmVuMB4XDTE4MTAwNzE1MTIwMloXDTI4MTAw
+NDE1MTIwMlowaTELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExETAP
+BgNVBAoMCE0yQ3J5cHRvMQ8wDQYDVQQDDAZTaWduZXIxITAfBgkqhkiG9w0BCQEW
+EnNpZ25lckBleGFtcGxlLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAMiFhj1yqpHYexECBZK3G7+fNb6IVtuW8nA+mDzDYkL/VHR6Y+epTK5SSKJP
+q8VrHd45uPCJNhX3cD2opsNJPhDYrrrPGubdC9jN2QDqV/J8Lm2JgJVosf0GvOq6
+RMH6IIdX06zltF26TxAgglsbJ3WzDM5PeT9JjuO6FDDPioIRZVl4e0EWQ+jqf6Rg
+UkaTU07ehz9QilbF0wRcPKLTsGTYzh51rc7NkbFYzC2m3Nq1ssIOL2h0p6VuYUwB
+8tB9g2Z4Zp+5hzPMWdwDgm+7mAzpEGMS7ukFwVuYfpRlmN8xv1mQ8BxsrCwb3ZBx
+qxGVi5z3Ki/ADy2xyFpl1qk0RIECAwEAAaMsMCowCQYDVR0TBAIwADAdBgNVHQ4E
+FgQUaAMzJ9mHmup9Qdgt6/nuq8lqxfowDQYJKoZIhvcNAQELBQADggEBAEcnYm1D
+8/aHCo8f8UsrZwURhbETacreHm+PeK61t3XKhPNYJp3FCw5M6c4cXWkDbtcuiCh1
+eMVUN+ZwT4+S+1/OwrOjdHfmw5SrxtJpu93YFp02KtHB4Q2B2paf8a6kuXMWsZi1
++WpMiwgww9YxOnni73/bT28T+JjWsgRywwOy/2Otzk6NlTem3FJeXCY1nxR+naAE
+RgD/UpTxK3UcUvB560GoOaJaY8zecM2Ccncdd+E9/ziGLRJPq0xyNWlPrVUd2q6d
+CiNKzCpA2E71vTNM67dHQYIqyfxkDTN2AGG6tkDKJJZehNVP0k0uejGlUFO66zF7
+eJ/0Q/HlVE9Z3LoxggJWMIICUgIBATBUME8xCzAJBgNVBAYTAlVTMRMwEQYDVQQI
+DApDYWxpZm9ybmlhMREwDwYDVQQKDAhNMkNyeXB0bzEYMBYGA1UEAwwPSGVpa2tp
+IFRvaXZvbmVuAgEDMAkGBSsOAwIaBQCggdgwGAYJKoZIhvcNAQkDMQsGCSqGSIb3
+DQEHATAcBgkqhkiG9w0BCQUxDxcNMTkxMDAyMDgwNjI0WjAjBgkqhkiG9w0BCQQx
+FgQU6h5FR3wTEhhd9CrwFMW4patI/eIweQYJKoZIhvcNAQkPMWwwajALBglghkgB
+ZQMEASowCwYJYIZIAWUDBAEWMAsGCWCGSAFlAwQBAjAKBggqhkiG9w0DBzAOBggq
+hkiG9w0DAgICAIAwDQYIKoZIhvcNAwICAUAwBwYFKw4DAgcwDQYIKoZIhvcNAwIC
+ASgwDQYJKoZIhvcNAQEBBQAEggEAk/b1dtBPQHRvJJsN2KKrbOrCWpUJNQfdqRBh
+8rlPHM4DvsqCjjZm4hpgO0Qf7C5VySGP8PQneCOLoDr48O90paTmBihgPQPt149a
+eP9D2g4iNaAeTU5WNy4QbjhiCQbKmbbJkNQ6mXfDNDZfP9qhP7XVER48OCDUkG8c
+05mF+cUI7rTl50M30j45INL/A4tEMrOrSBS/bKXEUuMGPqGcoPbfuNwKBKZuNhpa
+H5j+UlmwawWJVPTlNvHGzWGhdiNtcYqXijaSl/xFQNdF95gdMpftjdc/MYAFY0Te
+Ym9AoGZMALq2unxujpALB3hsn77e6IKG+QMqlp8AOY8/81IWZQ==
+
+------B58C5A5B909A51DD107E2194490EC974--
\ No newline at end of file
diff --git a/Integrations/SMIME_Messaging/test_data/signer.pem b/Integrations/SMIME_Messaging/test_data/signer.pem
new file mode 100644
index 000000000000..fef251d00d1b
--- /dev/null
+++ b/Integrations/SMIME_Messaging/test_data/signer.pem
@@ -0,0 +1,13 @@
+-----BEGIN CERTIFICATE-----
+MIIB7TCCAVYCCQCds7D9QJpcpTANBgkqhkiG9w0BAQsFADA7MQswCQYDVQQGEwJJ
+TDEPMA0GA1UECAwGSXNyYWVsMRswGQYDVQQKDBJQYWxvIEFsdG8gTmV0d29ya3Mw
+HhcNMTkxMDA4MDkwODU3WhcNMjAxMDA3MDkwODU3WjA7MQswCQYDVQQGEwJJTDEP
+MA0GA1UECAwGSXNyYWVsMRswGQYDVQQKDBJQYWxvIEFsdG8gTmV0d29ya3MwgZ8w
+DQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANRVL8fxRFNUp/LOnl9gStTHz9yyjx54
+9zlmulQfptfG3lmiEWDYLoxsFnLrmA9WSpGG7/IKVezLqQRNIheRfKaFo/jzTVZ2
+PfMYYKb4YisyyjVvJUOPAxEX5Ia2Nd8FPUowItNOo3f05ntNJ4K0VZRZKuGvnpCd
+lUaIfeKCTK5RAgMBAAEwDQYJKoZIhvcNAQELBQADgYEAS43ZA/LVzs1udEdDWU3/
+/RqiEIo3CfcPdUqUWz/wRJRMmUHXSG6lI+031ix+CBWHKlyGhlXuZ+9UfUH3QnOe
+s3UTmR3yIn5MNI2SZQSH+ixF3P7wzWABpbY1bKtIhEMvdUyeOxJxiAuZdt7HK+uA
+whFMjyGZwzgX5Df6gfJhpec=
+-----END CERTIFICATE-----
diff --git a/Integrations/SMIME_Messaging/test_data/signer_key.pem b/Integrations/SMIME_Messaging/test_data/signer_key.pem
new file mode 100644
index 000000000000..df5ab7bc67d6
--- /dev/null
+++ b/Integrations/SMIME_Messaging/test_data/signer_key.pem
@@ -0,0 +1,16 @@
+-----BEGIN PRIVATE KEY-----
+MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBANRVL8fxRFNUp/LO
+nl9gStTHz9yyjx549zlmulQfptfG3lmiEWDYLoxsFnLrmA9WSpGG7/IKVezLqQRN
+IheRfKaFo/jzTVZ2PfMYYKb4YisyyjVvJUOPAxEX5Ia2Nd8FPUowItNOo3f05ntN
+J4K0VZRZKuGvnpCdlUaIfeKCTK5RAgMBAAECgYEAvKcDUO43+qHYpo6Vf8AaPHtb
+X4b2SejIHyP6oQuOPWDf5VttfVIV6YWPXIOeEuJkVnBBeBdeyIkanku54p7M07fO
++RhS2rtfOJP0pA1lXsXzj7pMt604YwiIB+XVuRhIFeEhv1jxqYg6bmRFgHe7QExK
+vLxjTS0+hTUHSsaZ9lECQQD6I5LbbWpYunCp3nPu3ZIOsBgIltASX/3IxtdcZCkd
+SCXOBGcDbPOYW/uabrshmhFUrxv8uVCuCLUGO0WRByBvAkEA2U7WXZqDrfl1lHQz
+akUlo44s4r9C41tXQem3YSU7PBEBqb+Y09EfZEduhbO2KFaRWiY6r1xM5O2vcTeW
+ZGL9PwJBAMxE5rXYeWMyBbVC22vyd0cVWNsSxaJrz2T5tH9XsBeghnbiM3mhrJ/8
+ZdBsotbC6Wo9gd51O0SUCbioCJxxtHMCQGTgcyS6sLwXXJzQoHmJpeJaAr4z2UIf
+yc7gRu+C0A+fOW1qcvCJWJA9lLHebgYlwY3ex0lWiJ+RMutUyazKQxMCQEqFkXWg
+nQbdshlbTu14JMYZLCQhkzGs4pYoaP4T+w4aoFIi26uyQgIYxAU9uobKnNQA0SBT
+WbmhHD2WtvorXQQ=
+-----END PRIVATE KEY-----
\ No newline at end of file
diff --git a/Integrations/SNDBOX/CHANGELOG.md b/Integrations/SNDBOX/CHANGELOG.md
new file mode 100644
index 000000000000..acde33254fae
--- /dev/null
+++ b/Integrations/SNDBOX/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.8.0] - 2019-08-06
+ - Fixed an issue with command mapping in which some commands were not called correctly.
+ - Deprecated the ***sndbox-detonate-file*** command.
\ No newline at end of file
diff --git a/Integrations/SNDBOX/SNDBOX.py b/Integrations/SNDBOX/SNDBOX.py
new file mode 100644
index 000000000000..db432a39fe8f
--- /dev/null
+++ b/Integrations/SNDBOX/SNDBOX.py
@@ -0,0 +1,290 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+import time
+import shutil
+import requests
+from distutils.util import strtobool
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+
+''' GLOBAL VARS '''
+BASE_URL = 'https://api.sndbox.com/'
+SAMPLE_URL = 'https://app.sndbox.com/sample/'
+USE_SSL = not demisto.params().get('insecure', False)
+
+''' HELPER FUNCTIONS '''
+
+
+def http_cmd(url_suffix, data=None, files=None, parse_json=True):
+ data = {} if data is None else data
+
+ url_params = {} # type:dict
+
+ use_public_api = demisto.params().get('public_api_key', False)
+ api_key = demisto.params().get('api_key', False)
+ if not api_key and use_public_api:
+ url_params.setdefault('apikey', demisto.params()['secret_public_api_key'])
+ elif api_key:
+ url_params.setdefault('apikey', api_key)
+
+ LOG('running request with url=%s\n\tdata=%s\n\tfiles=%s' % (BASE_URL + url_suffix,
+ data, files,))
+
+ res = {} # type:dict
+ if files:
+ res = requests.post(BASE_URL + url_suffix, # type:ignore
+ verify=USE_SSL,
+ params=url_params,
+ data=data,
+ files=files
+ )
+ else:
+ res = requests.get(BASE_URL + url_suffix, # type:ignore
+ verify=USE_SSL,
+ params=url_params
+ )
+
+ if res.status_code == 401: # type:ignore
+ raise Exception('API Key is incorrect')
+
+ if res.status_code >= 400: # type:ignore
+ LOG('result is: %s' % (res.json(),)) # type:ignore
+ error_msg = res.json()['errors'][0]['msg'] # type:ignore
+ raise Exception('Your request failed with the following error: %s.\n%s' % (res.reason, error_msg,)) # type:ignore
+
+ if parse_json:
+ return res.json() # type:ignore
+ else:
+ return res.content # type:ignore
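+
+
+# Illustrative helper usage (a sketch, not part of the integration): http_cmd('')
+# issues a GET to BASE_URL with the configured API key as a query parameter and
+# returns the parsed JSON body, e.g. {'status': 'online'} (see is_online below).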
+
+
+def extract_status(sndbox_status):
+ s = sndbox_status['dynamic']['code'] + sndbox_status['static']['code']
+ if s <= 1:
+ return 'pending'
+ elif s == 2:
+ return 'finished'
+ else:
+ return 'error'
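+
+
+# An inference from the checks above (not documented here): the 'static' and
+# 'dynamic' stages each report code 0 while pending and 1 when done, so the
+# sum reaches 2 exactly when both stages have finished.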
+
+
+def extract_errors(sndbox_status):
+ errors = []
+ if 'message' in sndbox_status['static']:
+ errors.append(sndbox_status['static']['message'])
+ if 'message' in sndbox_status['dynamic']:
+ errors.append(sndbox_status['dynamic']['message'])
+ return errors
+
+
+def analysis_to_entry(title, info):
+ if not isinstance(info, list):
+ info = [info]
+
+ context = []
+ table = []
+ dbot_scores = []
+ for analysis in info:
+ malicious = analysis['score'] and analysis['score'] * 100 > 56
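+        # 'score' appears to be a 0..1 fraction; the hard-coded cutoff flags
+        # anything above 0.56 (i.e. 56%) as malicious.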
+ status = extract_status(analysis['status'])
+ result = ''
+ if status == 'finished':
+ result = 'malicious' if malicious else 'clean'
+
+ analysis_info = {
+ 'ID': analysis['id'], # for detonate generic polling
+ 'SampleName': analysis['name'],
+ 'Status': status,
+ 'Time': analysis['created_at'],
+ 'Link': SAMPLE_URL + analysis['id'],
+ 'MD5': analysis['md5'],
+ 'SHA1': analysis['sha1'],
+ 'SHA256': analysis['sha256'],
+ 'Score': analysis['score'],
+ 'Result': result,
+ 'Errors': extract_errors(analysis['status']),
+ }
+
+ analysis_context = dict(analysis_info)
+ analysis_table = dict(analysis_info)
+ if not any(analysis_table['Errors']):
+ analysis_table['Errors'] = None
+
+ dbot_score = 0
+
+ if malicious:
+ dbot_score = 3
+ malicious = {
+ 'Vendor': 'SNDBOX',
+ # 'Detections' : ['TODO'],
+ 'SHA1': analysis_info['SHA1'],
+ }
+ else:
+ dbot_score = 1
+ malicious = None
+
+ dbot_scores.append({
+ 'Vendor': 'SNDBOX',
+ 'Indicator': analysis_info['SampleName'],
+ 'Type': 'file',
+ 'Score': dbot_score,
+ 'Malicious': malicious,
+ })
+ context.append(analysis_context)
+ table.append(analysis_table)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, table, removeNull=True),
+ 'EntryContext': {'SNDBOX.Analysis(val.ID && val.ID == obj.ID)': createContext(context, removeNull=True),
+ 'DBotScore':
+ createContext(dbot_scores, removeNull=True),
+ }
+ }
+
+ return entry
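+
+
+# Note: the 'SNDBOX.Analysis(val.ID && val.ID == obj.ID)' context key makes the
+# server merge entries that share an ID instead of appending duplicate objects.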
+
+
+def poll_analysis_id(analysis_id):
+ result = info_request(analysis_id)
+ max_polls = MAX_POLLS / 10 if MAX_POLLS > 0 else MAX_POLLS # type:ignore # pylint: disable=E0602
+
+ while (max_polls >= 0) and extract_status(result['status']) != 'finished':
+ if extract_status(result['status']) != 'pending':
+ LOG('error while polling: result is %s' % (result,))
+ result = info_request(analysis_id)
+ time.sleep(10)
+ max_polls -= 1
+
+ LOG('reached max_polls #%d' % (max_polls,))
+ if max_polls < 0:
+ return analysis_to_entry('Polling timeout on Analysis #' + analysis_id, result)
+ else:
+ return analysis_to_entry('Analysis #' + analysis_id, result)
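+
+
+# Polling budget: with time.sleep(10) per iteration and max_polls set to
+# MAX_POLLS / 10, the loop waits roughly MAX_POLLS seconds in total (the
+# default maxpolls of 300 yields ~30 polls over ~5 minutes).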
+
+
+''' FUNCTIONS '''
+
+
+def is_online():
+ cmd_url = ''
+ res = http_cmd(cmd_url)
+ return res['status'] == 'online'
+
+
+def analysis_info():
+ ids = demisto.args().get('analysis_id')
+ if type(ids) in STRING_TYPES:
+ ids = ids.split(',')
+ LOG('info: analysis_id = %s' % (ids,))
+ res = [info_request(analysis_id) for analysis_id in ids]
+ return analysis_to_entry('Analyses:', res)
+
+
+def info_request(analysis_id):
+ cmd_url = '/developers/files/' + analysis_id
+ return http_cmd(cmd_url)
+
+
+def analyse_sample():
+ args = demisto.args()
+ file_entry = args.get('file_id', '')
+ if type(file_entry) in STRING_TYPES:
+ file_entry = [f for f in file_entry.split(',') if f != '']
+
+ should_wait = bool(strtobool(demisto.get(args, 'should_wait')))
+
+    if not file_entry:
+ raise ValueError('You must specify: file_id.')
+
+ LOG('analysing sample')
+ return [analyse_sample_file_request(f, should_wait)
+ for f in file_entry]
+
+
+def analyse_sample_file_request(file_entry, should_wait):
+ data = {} # type:dict
+
+ shutil.copy(demisto.getFilePath(file_entry)['path'],
+ demisto.getFilePath(file_entry)['name'])
+
+ with open(demisto.getFilePath(file_entry)['name'], 'rb') as f:
+ res = http_cmd('/developers/files',
+ data=data,
+ files={'file': f})
+
+ if 'errors' in res:
+ LOG('Error! in command sample file: file_entry=%s' % (file_entry,))
+ LOG('got the following errors:\n' + '\n'.join(e['msg'] for e in res['errors']))
+ raise Exception('command failed to run.')
+
+ shutil.rmtree(demisto.getFilePath(file_entry)['name'], ignore_errors=True)
+
+ if should_wait:
+ return poll_analysis_id(res['id'])
+
+ analysis_id = res['id']
+ result = info_request(analysis_id)
+ return analysis_to_entry('Analysis #%s' % (analysis_id,), result)
+
+
+def download_report():
+ args = demisto.args()
+ analysis_id = args.get('analysis_id')
+ rsc_type = args.get('type')
+ return download_request(analysis_id, rsc_type)
+
+
+def download_sample():
+ args = demisto.args()
+ analysis_id = args.get('analysis_id')
+ rsc_type = 'sample'
+ return download_request(analysis_id, rsc_type)
+
+
+def download_request(analysis_id, rsc_type):
+ cmd_url = '/developers/files/' + analysis_id + '/' + rsc_type.lower()
+ res = http_cmd(cmd_url, parse_json=False)
+
+ info = info_request(analysis_id)
+ if rsc_type == 'sample':
+ return fileResult('%s.dontrun' % (info.get('filename', analysis_id),), res)
+ else:
+ rsc_type = rsc_type if rsc_type != 'json' else 'json.gz'
+ return fileResult('%s_report.%s' % (info.get('filename', analysis_id), rsc_type,), res,
+ entryTypes['entryInfoFile'])
+
+
+''' EXECUTION CODE '''
+LOG('command is %s' % (demisto.command(),))
+try:
+ handle_proxy()
+ if demisto.command() in ['test-module', 'sndbox-is-online']:
+ # This is the call made when pressing the integration test button.
+ if is_online():
+ demisto.results('ok')
+ else:
+ demisto.results('not online')
+ elif demisto.command() == 'sndbox-analysis-info':
+ demisto.results(analysis_info())
+ elif demisto.command() == 'sndbox-analysis-submit-sample':
+ demisto.results(analyse_sample())
+ elif demisto.command() == 'sndbox-download-report':
+ demisto.results(download_report())
+ elif demisto.command() == 'sndbox-download-sample':
+ demisto.results(download_sample())
+
+except Exception as e:
+ if demisto.params().get('verbose'):
+ LOG(str(e))
+ if demisto.command() != 'test-module':
+ LOG.print_log()
+ return_error('An error has occurred in the SNDBOX integration: {err}'.format(err=str(e)))
diff --git a/Integrations/SNDBOX/SNDBOX.yml b/Integrations/SNDBOX/SNDBOX.yml
new file mode 100644
index 000000000000..35e815cc0da1
--- /dev/null
+++ b/Integrations/SNDBOX/SNDBOX.yml
@@ -0,0 +1,303 @@
+commonfields:
+ id: SNDBOX
+ version: -1
+name: SNDBOX
+display: SNDBOX
+category: Forensics & Malware Analysis
+description: SNDBOX as a service
+configuration:
+ - display: 'Use Public API Key.
+ (By approving SNDBOX public API you are accepting SNDBOX TOS @ https://app.sndbox.com/tos).
+ Public submissions are shared with the community'
+ name: public_api_key
+ defaultvalue: ''
+ type: 8
+ required: false
+ - display: 'Private API Key'
+ name: api_key
+ defaultvalue: ''
+ type: 4
+ required: true
+ - display: Use system proxy settings
+ name: proxy
+ defaultvalue: "False"
+ type: 8
+ required: false
+ - display: Trust any certificate (unsecure)
+ name: insecure
+ defaultvalue: ''
+ type: 8
+ required: false
+ - display: 'Max. Polling Time (in seconds):'
+ name: maxpolls
+ defaultvalue: '300'
+ type: 0
+ required: false
+ - display: 'Verbose (show log in case of error) '
+ name: verbose
+ defaultvalue: ''
+ type: 8
+ required: false
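+# ('type' codes above follow the platform convention: 0 = short text,
+# 4 = encrypted, 8 = boolean.)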
+script:
+ script: ''
+ type: python
+ commands:
+ - name: sndbox-is-online
+ arguments: []
+ description: Check if SNDBOX is online or in maintenance mode.
+ - name: sndbox-analysis-info
+ arguments:
+ - name: analysis_id
+ required: true
+ default: true
+      description: Analysis IDs. Supports comma-separated arrays.
+ isArray: true
+ outputs:
+ - contextPath: SNDBOX.Analysis.ID
+ description: Analysis ID
+ type: string
+ - contextPath: SNDBOX.Analysis.SampleName
+ description: Sample Data
+ type: string
+ - contextPath: SNDBOX.Analysis.Status
+ description: Analysis Status
+ type: string
+ - contextPath: SNDBOX.Analysis.Time
+ description: Submitted Time
+ type: date
+ - contextPath: SNDBOX.Analysis.Score
+ description: Analysis Score
+ type: float
+ - contextPath: SNDBOX.Analysis.Result
+ description: Analysis Results
+ type: string
+ - contextPath: SNDBOX.Analysis.Errors
+ description: Raised errors during sampling
+ - contextPath: SNDBOX.Analysis.Link
+ description: Analysis Link
+ type: string
+ - contextPath: SNDBOX.Analysis.MD5
+ description: MD5 of analysis sample
+ type: string
+ - contextPath: SNDBOX.Analysis.SHA1
+ description: SHA1 of analysis sample
+ type: string
+ - contextPath: SNDBOX.Analysis.SHA256
+ description: SHA256 of analysis sample
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: 'The name of the vendor: SNDBOX'
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The name of the sample file
+ - contextPath: DBotScore.Type
+ description: file
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: DBotScore.Malicious.Vendor
+ description: 'The name of the vendor: SNDBOX'
+ type: string
+ - contextPath: DBotScore.Malicious.Detections
+ description: The sub analysis detection statuses
+ type: string
+ - contextPath: DBotScore.Malicious.SHA1
+ description: The SHA1 of the file
+ type: string
+ description: Show information about an analysis.
+ - name: sndbox-analysis-submit-sample
+ arguments:
+ - name: file_id
+ default: true
+ description: War Room entry of a file (for example, 3245@4)
+ - name: should_wait
+ auto: PREDEFINED
+ predefined:
+ - 'True'
+ - 'False'
+ description: Should the command poll for the result of the analysis
+ defaultValue: 'False'
+ outputs:
+ - contextPath: SNDBOX.Analysis.ID
+ description: Analysis ID
+ type: string
+ - contextPath: SNDBOX.Analysis.SampleName
+      description: Sample data. Can be a file name or URL.
+ type: string
+ - contextPath: SNDBOX.Analysis.Status
+ description: Analysis Status
+ type: string
+ - contextPath: SNDBOX.Analysis.Time
+ description: Submitted Time
+ type: date
+ - contextPath: SNDBOX.Analysis.Result
+ description: Analysis Results
+ type: string
+ - contextPath: SNDBOX.Analysis.Errors
+ description: Raised errors during sampling
+ - contextPath: SNDBOX.Analysis.Link
+ description: Analysis Link
+ type: string
+ - contextPath: SNDBOX.Analysis.MD5
+ description: MD5 of analysis sample
+ type: string
+ - contextPath: SNDBOX.Analysis.SHA1
+ description: SHA1 of analysis sample
+ type: string
+ - contextPath: SNDBOX.Analysis.SHA256
+ description: SHA256 of analysis sample
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: 'The name of the vendor: SNDBOX'
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The name of the sample file or URL
+ - contextPath: DBotScore.Type
+ description: "'url' for url samples, otherwise 'file'"
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: DBotScore.Malicious.Vendor
+ description: 'The name of the vendor: SNDBOX'
+ type: string
+ - contextPath: DBotScore.Malicious.SHA1
+ description: The SHA1 of the file
+ type: string
+ description: Submit a sample for analysis.
+ - name: sndbox-download-report
+ arguments:
+ - name: analysis_id
+ required: true
+ default: true
+ description: Analysis ID
+ - name: type
+ auto: PREDEFINED
+ predefined:
+ - json
+ - pcap
+ description: The resource type to download. Defaults to json.
+ defaultValue: json
+ outputs:
+ - contextPath: InfoFile.Name
+ description: FileName
+ type: string
+ - contextPath: InfoFile.EntryID
+ description: The EntryID of the report
+ type: string
+ - contextPath: InfoFile.Size
+ description: File Size
+ type: number
+ - contextPath: InfoFile.Type
+ description: File type e.g. "PE"
+ type: string
+ - contextPath: InfoFile.Info
+ description: Basic information of the file
+ type: string
+ - contextPath: InfoFile.Extension
+ description: File Extension
+ type: string
+ description:
+ Download a resource belonging to a report. This can be the full report,
+ dropped binaries, etc.
+ - name: sndbox-detonate-file
+ arguments:
+ - name: file_id
+ default: true
+ description: War Room entry of a file (for example, 3245@4)
+ outputs:
+ - contextPath: SNDBOX.Analysis.ID
+ description: Analysis ID
+ type: string
+ - contextPath: SNDBOX.Analysis.SampleName
+      description: Sample data. Can be a file name or URL.
+ type: string
+ - contextPath: SNDBOX.Analysis.Status
+ description: Analysis Status
+ type: string
+ - contextPath: SNDBOX.Analysis.Time
+ description: Submitted Time
+ type: date
+ - contextPath: SNDBOX.Analysis.Result
+ description: Analysis Results
+ type: string
+ - contextPath: SNDBOX.Analysis.Errors
+ description: Raised errors during sampling
+ - contextPath: SNDBOX.Analysis.Link
+ description: Analysis Link
+ type: string
+ - contextPath: SNDBOX.Analysis.MD5
+ description: MD5 of analysis sample
+ type: string
+ - contextPath: SNDBOX.Analysis.SHA1
+ description: SHA1 of analysis sample
+ type: string
+ - contextPath: SNDBOX.Analysis.SHA256
+ description: SHA256 of analysis sample
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: 'The name of the vendor: SNDBOX'
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The name of the sample file or URL
+ - contextPath: DBotScore.Type
+ description: file
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: DBotScore.Malicious.Vendor
+ description: 'The name of the vendor: SNDBOX'
+ type: string
+ - contextPath: DBotScore.Malicious.Detections
+ description: The sub analysis detection statuses
+ type: string
+ - contextPath: DBotScore.Malicious.SHA1
+ description: The SHA1 of the file
+ type: string
+ deprecated: true
+ description: Submit a sample for analysis.
+ - name: sndbox-download-sample
+ arguments:
+ - name: analysis_id
+ required: true
+ default: true
+ description: Analysis ID
+ outputs:
+ - contextPath: File.Size
+ description: File Size
+ type: number
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file
+ type: string
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file
+ type: string
+ - contextPath: File.Name
+ description: The sample name
+ type: string
+ - contextPath: File.SSDeep
+ description: SSDeep hash of the file
+ type: string
+ - contextPath: File.EntryID
+      description: War Room Entry ID of the file
+ type: string
+ - contextPath: File.Info
+ description: Basic information of the file
+ type: string
+ - contextPath: File.Type
+ description: File type e.g. "PE"
+ type: string
+    - contextPath: File.MD5
+ description: MD5 hash of the file
+ type: string
+ - contextPath: File.Extension
+ description: File Extension
+ type: string
+ description:
+      Download the sample file of an analysis. For security reasons, the
+      extension will be "dontrun".
+ runonce: false
+tests:
+ - SNDBOX_Test
diff --git a/Integrations/SNDBOX/SNDBOX_desc.md b/Integrations/SNDBOX/SNDBOX_desc.md
new file mode 100644
index 000000000000..f55c8bc5d080
--- /dev/null
+++ b/Integrations/SNDBOX/SNDBOX_desc.md
@@ -0,0 +1 @@
+SNDBOX as a service
\ No newline at end of file
diff --git a/Integrations/SNDBOX/SNDBOX_image.png b/Integrations/SNDBOX/SNDBOX_image.png
new file mode 100644
index 000000000000..55adc9f8138a
Binary files /dev/null and b/Integrations/SNDBOX/SNDBOX_image.png differ
diff --git a/Integrations/SentinelOne-V2/CHANGELOG.md b/Integrations/SentinelOne-V2/CHANGELOG.md
new file mode 100644
index 000000000000..d89f5ce4ce50
--- /dev/null
+++ b/Integrations/SentinelOne-V2/CHANGELOG.md
@@ -0,0 +1,20 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+ - Fixed an issue with the ***sentinelone-disconnect-agent*** command.
+ - Fixed human-readable output in the ***sentinelone-get-threat*** command in cases where the content_hash does not exist.
+
+## [19.9.1] - 2019-09-18
+ - Fixed an issue in the ***Fetch incidents*** function.
+ - Fixed date parameters in the ***sentinelone-get-threats*** command.
+ - Added the ***fetch_limit*** parameter, which specifies the maximum number of incidents to fetch.
+
+## [19.8.2] - 2019-08-22
+- Added 5 commands:
+ - ***sentinelone-get-events***
+ - ***sentinelone-create-query***
+ - ***sentinelone-get-processes***
+ - ***sentinelone-shutdown-agent***
+ - ***sentinelone-uninstall-agent***
+- Fixed the *agentIds* filter in the ***sentinelone-get-activities*** command.
diff --git a/Integrations/SentinelOne-V2/SentinelOne-V2.py b/Integrations/SentinelOne-V2/SentinelOne-V2.py
new file mode 100644
index 000000000000..ce3208f695c2
--- /dev/null
+++ b/Integrations/SentinelOne-V2/SentinelOne-V2.py
@@ -0,0 +1,1849 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import json
+import requests
+from distutils.util import strtobool
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+TOKEN = demisto.params().get('token')
+SERVER = demisto.params()['url'][:-1] if (demisto.params()['url'] and demisto.params()['url'].endswith('/')) \
+ else demisto.params()['url']
+USE_SSL = not demisto.params().get('insecure', False)
+FETCH_TIME = demisto.params().get('fetch_time', '3 days')
+FETCH_THREAT_RANK = int(demisto.params().get('fetch_threshold', 5))
+FETCH_LIMIT = int(demisto.params().get('fetch_limit', 10))
+BASE_URL = SERVER + '/web/api/v2.0/'
+HEADERS = {
+ 'Authorization': 'ApiToken ' + TOKEN,
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
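+
+# Every request below reuses this header set; the management API authenticates
+# with the 'Authorization: ApiToken <token>' scheme.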
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, url_suffix, params={}, data=None):
+ LOG(f'Attempting {method} request to {BASE_URL + url_suffix}\nWith params:{params}\nWith body:\n{data}')
+ res = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ params=params,
+ data=data,
+ headers=HEADERS
+ )
+ if res.status_code not in {200}:
+ try:
+ errors = ''
+ for error in res.json().get('errors'):
+                errors += '\n' + error.get('detail')
+ raise ValueError(
+ f'Error in API call to Sentinel One [{res.status_code}] - [{res.reason}] \n'
+ f'Error details: [{errors}]'
+ )
+ except Exception:
+ raise ValueError(f'Error in API call to Sentinel One [{res.status_code}] - [{res.reason}]')
+ try:
+ return res.json()
+ except ValueError:
+ return None
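+
+
+# Illustrative call (a sketch, not invoked anywhere in this file):
+#     threats = http_request('GET', 'threats', params={'limit': 10})
+# Any non-200 response raises ValueError carrying the API's error details.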
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+ Performs basic get request to get activities types.
+ """
+ http_request('GET', 'activities/types')
+ return True
+
+
+def get_activities_request(created_after=None, user_emails=None, group_ids=None, created_until=None,
+ activities_ids=None, include_hidden=None, created_before=None, threats_ids=None,
+ activity_types=None, user_ids=None, created_from=None, created_between=None, agent_ids=None,
+ limit=None):
+ endpoint_url = 'activities'
+
+ params = {
+ 'created_at__gt': created_after,
+ 'userEmails': user_emails,
+ 'groupIds': group_ids,
+ 'created_at__lte': created_until,
+ 'ids': activities_ids,
+ 'includeHidden': include_hidden,
+ 'created_at__lt': created_before,
+ 'threatIds': threats_ids,
+ 'activityTypes': activity_types,
+ 'userIds': user_ids,
+ 'created_at__gte': created_from,
+ 'createdAt_between': created_between,
+ 'agentIds': agent_ids,
+ 'limit': limit
+ }
+
+ response = http_request('GET', endpoint_url, params)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def get_activities_command():
+ """
+ Get a list of activities.
+ """
+ context = {}
+ context_entries = []
+ contents = []
+ headers = ['ID', 'Primary description', 'Data', 'User ID', 'Created at', 'Updated at', 'Threat ID']
+
+ created_after = demisto.args().get('created_after')
+ user_emails = demisto.args().get('user_emails')
+ group_ids = argToList(demisto.args().get('group_ids', []))
+ created_until = demisto.args().get('created_until')
+ activities_ids = argToList(demisto.args().get('activities_ids', []))
+ include_hidden = demisto.args().get('include_hidden')
+ created_before = demisto.args().get('created_before')
+ threats_ids = argToList(demisto.args().get('threats_ids', []))
+ activity_types = argToList(demisto.args().get('activity_types', []))
+ user_ids = argToList(demisto.args().get('user_ids', []))
+ created_from = demisto.args().get('created_from')
+ created_between = demisto.args().get('created_between')
+ agent_ids = argToList(demisto.args().get('agent_ids', []))
+ limit = int(demisto.args().get('limit', 50))
+
+ activities = get_activities_request(created_after, user_emails, group_ids, created_until, activities_ids,
+ include_hidden, created_before, threats_ids,
+ activity_types, user_ids, created_from, created_between, agent_ids, limit)
+ if activities:
+ for activity in activities:
+ contents.append({
+ 'ID': activity.get('id'),
+ 'Created at': activity.get('createdAt'),
+ 'Primary description': activity.get('primaryDescription'),
+ 'User ID': activity.get('userId'),
+ 'Data': activity.get('data'),
+ 'Threat ID': activity.get('threatId'),
+ 'Updated at': activity.get('updatedAt')
+ })
+
+ context_entries.append({
+ 'Hash': activity.get('hash'),
+ 'ActivityType': activity.get('activityType'),
+ 'OsFamily': activity.get('osFamily'),
+ 'PrimaryDescription': activity.get('primaryDescription'),
+ 'Comments': activity.get('comments'),
+ 'AgentUpdatedVersion': activity.get('agentUpdatedVersion'),
+ 'UserID': activity.get('userId'),
+ 'ID': activity.get('id'),
+ 'Data': activity.get('data'),
+ 'CreatedAt': activity.get('createdAt'),
+ 'SecondaryDescription': activity.get('secondaryDescription'),
+ 'ThreatID': activity.get('threatId'),
+ 'GroupID': activity.get('groupId'),
+ 'UpdatedAt': activity.get('updatedAt'),
+ 'Description': activity.get('description'),
+ 'AgentID': activity.get('agentId'),
+ 'SiteID': activity.get('siteId')
+ })
+
+ context['SentinelOne.Activity(val.ID && val.ID === obj.ID)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Sentinel One Activities', contents, headers, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def get_groups_request(group_type=None, group_ids=None, group_id=None, is_default=None, name=None, query=None,
+ rank=None, limit=None):
+ endpoint_url = 'groups'
+
+ params = {
+ 'type': group_type,
+ 'groupIds': group_ids,
+ 'id': group_id,
+ 'isDefault': is_default,
+ 'name': name,
+ 'query': query,
+ 'rank': rank,
+ 'limit': limit
+ }
+
+ response = http_request('GET', endpoint_url, params)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def get_groups_command():
+ """
+ Gets the group data.
+ """
+
+ context = {}
+ contents = []
+ headers = ['ID', 'Name', 'Type', 'Creator', 'Creator ID', 'Created at', 'Rank']
+
+ group_type = demisto.args().get('type')
+ group_id = demisto.args().get('id')
+ group_ids = argToList(demisto.args().get('group_ids', []))
+ is_default = demisto.args().get('is_default')
+ name = demisto.args().get('name')
+ query = demisto.args().get('query')
+ rank = demisto.args().get('rank')
+ limit = int(demisto.args().get('limit', 50))
+
+    groups = get_groups_request(group_type, group_ids, group_id, is_default, name, query, rank, limit)
+ if groups:
+ for group in groups:
+ contents.append({
+ 'ID': group.get('id'),
+ 'Type': group.get('type'),
+ 'Name': group.get('name'),
+ 'Creator ID': group.get('creatorId'),
+ 'Creator': group.get('creator'),
+ 'Created at': group.get('createdAt'),
+ 'Rank': group.get('rank')
+ })
+
+ context['SentinelOne.Group(val.ID && val.ID === obj.ID)'] = groups
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Sentinel One Groups', contents, headers, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def delete_group_request(group_id=None):
+ endpoint_url = f'groups/{group_id}'
+
+ response = http_request('DELETE', endpoint_url)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def delete_group():
+ """
+ Deletes a group by ID.
+ """
+ group_id = demisto.args().get('group_id')
+
+ delete_group_request(group_id)
+ demisto.results('The group was deleted successfully')
+
+
+def move_agent_request(group_id, agents_id):
+ endpoint_url = f'groups/{group_id}/move-agents'
+
+ payload = {
+ "filter": {
+ "agentIds": agents_id
+ }
+ }
+
+ response = http_request('PUT', endpoint_url, data=json.dumps(payload))
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def move_agent_to_group_command():
+ """
+ Move agents to a new group.
+ """
+ group_id = demisto.args().get('group_id')
+ agents_id = argToList(demisto.args().get('agents_ids', []))
+ context = {}
+
+ agents_groups = move_agent_request(group_id, agents_id)
+
+ # Parse response into context & content entries
+ if agents_groups.get('agentsMoved') and int(agents_groups.get('agentsMoved')) > 0:
+ agents_moved = True
+ else:
+ agents_moved = False
+ date_time_utc = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
+ context_entries = contents = {
+ 'Date': date_time_utc,
+ 'AgentsMoved': agents_groups.get('agentsMoved'),
+ 'AffectedAgents': agents_moved
+ }
+
+ context['SentinelOne.Agent(val.Date && val.Date === obj.Date)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+        'HumanReadable': tableToMarkdown('Sentinel One - Moved Agents \n' + 'Total of: ' + str(
+            agents_groups.get('agentsMoved')) + ' agents were moved successfully', contents, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def get_agent_processes_request(agents_ids=None):
+ endpoint_url = 'agents/processes'
+
+ params = {
+ 'ids': agents_ids
+ }
+
+ response = http_request('GET', endpoint_url, params)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def get_agent_processes():
+ """
+ Retrieve running processes for a specific agent.
+    Note: This feature is obsolete and an empty array will always be returned.
+ """
+ headers = ['ProcessName', 'StartTime', 'Pid', 'MemoryUsage', 'CpuUsage', 'ExecutablePath']
+ contents = []
+ context = {}
+ agents_ids = demisto.args().get('agents_ids')
+
+ processes = get_agent_processes_request(agents_ids)
+
+ if processes:
+ for process in processes:
+ contents.append({
+ 'ProcessName': process.get('processName'),
+ 'CpuUsage': process.get('cpuUsage'),
+ 'MemoryUsage': process.get('memoryUsage'),
+ 'StartTime': process.get('startTime'),
+ 'ExecutablePath': process.get('executablePath'),
+ 'Pid': process.get('pid')
+ })
+ context['SentinelOne.Agent(val.Pid && val.Pid === obj.Pid)'] = processes
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Sentinel One Agent Processes', contents, headers, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def get_threats_command():
+ """
+ Gets a list of threats.
+ """
+ # Init main vars
+ contents = []
+ context = {}
+ context_entries = []
+
+ # Get arguments
+ content_hash = demisto.args().get('content_hash')
+ mitigation_status = argToList(demisto.args().get('mitigation_status'))
+ created_before = demisto.args().get('created_before')
+ created_after = demisto.args().get('created_after')
+ created_until = demisto.args().get('created_until')
+ created_from = demisto.args().get('created_from')
+ resolved = bool(strtobool(demisto.args().get('resolved', 'false')))
+ display_name = demisto.args().get('display_name_like')
+ query = demisto.args().get('query', '')
+ threat_ids = argToList(demisto.args().get('threat_ids', []))
+ limit = int(demisto.args().get('limit', 20))
+ classifications = argToList(demisto.args().get('classifications', []))
+ rank = int(demisto.args().get('rank', 0))
+
+ # Make request and get raw response
+ threats = get_threats_request(content_hash, mitigation_status, created_before, created_after, created_until,
+ created_from, resolved, display_name, query, threat_ids, limit, classifications)
+
+ # Parse response into context & content entries
+ if threats:
+ for threat in threats:
+ if not rank or (rank and threat.get('rank') >= rank):
+ contents.append({
+ 'ID': threat.get('id'),
+ 'Agent Computer Name': threat.get('agentComputerName'),
+ 'Created Date': threat.get('createdDate'),
+ 'Site ID': threat.get('siteId'),
+ 'Classification': threat.get('classification'),
+ 'Mitigation Status': threat.get('mitigationStatus'),
+ 'Agent ID': threat.get('agentId'),
+ 'Site Name': threat.get('siteName'),
+ 'Rank': threat.get('rank'),
+ 'Marked As Benign': threat.get('markedAsBenign'),
+ 'File Content Hash': threat.get('fileContentHash')
+ })
+ context_entries.append({
+ 'ID': threat.get('id'),
+ 'AgentComputerName': threat.get('agentComputerName'),
+ 'CreatedDate': threat.get('createdDate'),
+ 'SiteID': threat.get('siteId'),
+ 'Classification': threat.get('classification'),
+ 'MitigationStatus': threat.get('mitigationStatus'),
+ 'AgentID': threat.get('agentId'),
+ 'Rank': threat.get('rank'),
+ 'MarkedAsBenign': threat.get('markedAsBenign'),
+ 'FileContentHash': threat.get('fileContentHash'),
+ 'InQuarantine': threat.get('inQuarantine'),
+ 'FileMaliciousContent': threat.get('fileMaliciousContent'),
+ 'ThreatName': threat.get('threatName'),
+ 'FileSha256': threat.get('fileSha256'),
+ 'AgentOsType': threat.get('agentOsType'),
+ 'Description': threat.get('description'),
+ 'FileDisplayName': threat.get('fileDisplayName'),
+ 'FilePath': threat.get('filePath'),
+ 'Username': threat.get('username')
+
+ })
+
+ context['SentinelOne.Threat(val.ID && val.ID === obj.ID)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Sentinel One - Getting Threat List \n' + 'Provides summary information and '
+ 'details for all the threats that '
+ 'matched your search criteria.',
+ contents, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def get_threats_request(content_hash=None, mitigation_status=None, created_before=None, created_after=None,
+ created_until=None, created_from=None, resolved=None, display_name=None, query=None,
+ threat_ids=None, limit=None, classifications=None):
+ endpoint_url = 'threats'
+
+ params = {
+ 'contentHash': content_hash,
+ 'mitigationStatus': mitigation_status,
+ 'createdAt__lt': created_before,
+ 'createdAt__gt': created_after,
+ 'createdAt__lte': created_until,
+ 'createdAt__gte': created_from,
+ 'resolved': resolved,
+ 'displayName__like': display_name,
+ 'query': query,
+ 'ids': threat_ids,
+ 'limit': limit,
+ 'classifications': classifications,
+ }
+
+ response = http_request('GET', endpoint_url, params)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def get_hash_command():
+ """
+ Get hash reputation and classification.
+ """
+ # Init main vars
+ headers = ['Hash', 'Rank', 'ClassificationSource', 'Classification']
+ # Get arguments
+ hash_ = demisto.args().get('hash')
+ type_ = get_hash_type(hash_)
+ if type_ == 'Unknown':
+ return_error('Enter a valid hash format.')
+ # Make request and get raw response
+ hash_reputation = get_hash_reputation_request(hash_)
+ reputation = hash_reputation.get('data', {})
+ contents = {
+ 'Rank': reputation.get('rank'),
+ 'Hash': hash_
+ }
+ # try get classification - might return 404 (classification is not mandatory)
+ try:
+ hash_classification = get_hash_classification_request(hash_)
+ classification = hash_classification.get('data', {})
+ contents['ClassificationSource'] = classification.get('classificationSource')
+ contents['Classification'] = classification.get('classification')
+ except ValueError as e:
+ if '404' in str(e): # handling case classification not found for the specific hash
+ contents['Classification'] = 'No classification was found.'
+ else:
+ raise e
+
+ # Parse response into context & content entries
+ title = 'Sentinel One - Hash Reputation and Classification \n' + \
+ 'Provides hash reputation (rank from 0 to 10):'
+
+ context = {
+ 'SentinelOne.Hash(val.Hash && val.Hash === obj.Hash)': contents
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, contents, headers, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def get_hash_reputation_request(hash_):
+ endpoint_url = f'hashes/{hash_}/reputation'
+
+ response = http_request('GET', endpoint_url)
+ return response
+
+
+def get_hash_classification_request(hash_):
+ endpoint_url = f'hashes/{hash_}/classification'
+
+ response = http_request('GET', endpoint_url)
+ return response
+
+
+def mark_as_threat_command():
+ """
+ Mark suspicious threats as threats
+ """
+ # Init main vars
+ headers = ['ID', 'Marked As Threat']
+ contents = []
+ context = {}
+ context_entries = []
+
+ # Get arguments
+ threat_ids = argToList(demisto.args().get('threat_ids'))
+ target_scope = demisto.args().get('target_scope')
+
+ # Make request and get raw response
+ affected_threats = mark_as_threat_request(threat_ids, target_scope)
+
+ # Parse response into context & content entries
+ if affected_threats.get('affected') and int(affected_threats.get('affected')) > 0:
+ title = 'Total of ' + str(affected_threats.get('affected')) + ' provided threats were marked successfully'
+ affected = True
+ else:
+ affected = False
+ title = 'No threats were marked'
+ for threat_id in threat_ids:
+ contents.append({
+ 'Marked As Threat': affected,
+ 'ID': threat_id,
+ })
+ context_entries.append({
+ 'MarkedAsThreat': affected,
+ 'ID': threat_id,
+ })
+
+ context['SentinelOne.Threat(val.ID && val.ID === obj.ID)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Sentinel One - Marking suspicious threats as threats \n' + title, contents,
+ headers, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def mark_as_threat_request(threat_ids, target_scope):
+ endpoint_url = 'threats/mark-as-threat'
+
+ payload = {
+ "filter": {
+ "ids": threat_ids
+ },
+ "data": {
+ "targetScope": target_scope
+ }
+ }
+
+ response = http_request('POST', endpoint_url, data=json.dumps(payload))
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def mitigate_threat_command():
+ """
+ Apply a mitigation action to a group of threats
+ """
+ # Init main vars
+ headers = ['ID', 'Mitigation Action', 'Mitigated']
+ contents = []
+ context = {}
+ context_entries = []
+
+ # Get arguments
+ threat_ids = argToList(demisto.args().get('threat_ids'))
+ action = demisto.args().get('action')
+
+ # Make request and get raw response
+ mitigated_threats = mitigate_threat_request(threat_ids, action)
+
+ # Parse response into context & content entries
+ if mitigated_threats.get('affected') and int(mitigated_threats.get('affected')) > 0:
+ mitigated = True
+ title = 'Total of ' + str(mitigated_threats.get('affected')) + ' provided threats were mitigated successfully'
+ else:
+ mitigated = False
+ title = 'No threats were mitigated'
+ for threat_id in threat_ids:
+ contents.append({
+ 'Mitigated': mitigated,
+ 'ID': threat_id,
+ 'Mitigation Action': action
+ })
+ context_entries.append({
+ 'Mitigated': mitigated,
+ 'ID': threat_id,
+ 'Mitigation': {
+ 'Action': action
+ }
+ })
+
+ context['SentinelOne.Threat(val.ID && val.ID === obj.ID)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Sentinel One - Mitigating threats \n' + title, contents, headers,
+ removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def mitigate_threat_request(threat_ids, action):
+ endpoint_url = f'threats/mitigate/{action}'
+
+ payload = {
+ "filter": {
+ "ids": threat_ids
+ }
+ }
+
+ response = http_request('POST', endpoint_url, data=json.dumps(payload))
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def resolve_threat_command():
+ """
+ Mark threats as resolved
+ """
+ # Init main vars
+ contents = []
+ context = {}
+ context_entries = []
+
+ # Get arguments
+ threat_ids = argToList(demisto.args().get('threat_ids'))
+
+ # Make request and get raw response
+ resolved_threats = resolve_threat_request(threat_ids)
+
+ # Parse response into context & content entries
+ if resolved_threats.get('affected') and int(resolved_threats.get('affected')) > 0:
+ resolved = True
+ title = 'Total of ' + str(resolved_threats.get('affected')) + ' provided threats were resolved successfully'
+ else:
+ resolved = False
+ title = 'No threats were resolved'
+
+ for threat_id in threat_ids:
+ contents.append({
+ 'Resolved': resolved,
+ 'ID': threat_id
+ })
+ context_entries.append({
+ 'Resolved': resolved,
+ 'ID': threat_id
+ })
+
+ context['SentinelOne.Threat(val.ID && val.ID === obj.ID)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Sentinel One - Resolving threats \n' + title, contents, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def resolve_threat_request(threat_ids):
+ endpoint_url = 'threats/mark-as-resolved'
+
+ payload = {
+ "filter": {
+ "ids": threat_ids
+ }
+ }
+
+ response = http_request('POST', endpoint_url, data=json.dumps(payload))
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def get_white_list_command():
+ """
+ List all white items matching the input filter
+ """
+ # Init main vars
+ contents = []
+ context = {}
+ context_entries = []
+
+ # Get arguments
+ item_ids = argToList(demisto.args().get('item_ids', []))
+ os_types = argToList(demisto.args().get('os_types', []))
+ exclusion_type = demisto.args().get('exclusion_type')
+ limit = int(demisto.args().get('limit', 10))
+
+ # Make request and get raw response
+ exclusion_items = get_white_list_request(item_ids, os_types, exclusion_type, limit)
+
+ # Parse response into context & content entries
+ if exclusion_items:
+ for exclusion_item in exclusion_items:
+ contents.append({
+ 'ID': exclusion_item.get('id'),
+ 'Type': exclusion_item.get('type'),
+ 'CreatedAt': exclusion_item.get('createdAt'),
+ 'Value': exclusion_item.get('value'),
+ 'Source': exclusion_item.get('source'),
+ 'UserID': exclusion_item.get('userId'),
+ 'UpdatedAt': exclusion_item.get('updatedAt'),
+ 'OsType': exclusion_item.get('osType'),
+ 'UserName': exclusion_item.get('userName'),
+ 'Mode': exclusion_item.get('mode')
+ })
+ context_entries.append({
+ 'ID': exclusion_item.get('id'),
+ 'Type': exclusion_item.get('type'),
+ 'CreatedAt': exclusion_item.get('createdAt'),
+ 'Value': exclusion_item.get('value'),
+ 'Source': exclusion_item.get('source'),
+ 'UserID': exclusion_item.get('userId'),
+ 'UpdatedAt': exclusion_item.get('updatedAt'),
+ 'OsType': exclusion_item.get('osType'),
+ 'UserName': exclusion_item.get('userName'),
+ 'Mode': exclusion_item.get('mode')
+ })
+
+ context['SentinelOne.Exclusions(val.ID && val.ID === obj.ID)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+        'HumanReadable': tableToMarkdown('Sentinel One - Listing exclusion items \n'
+                                         + 'Provides summary information and details for all the exclusion items '
+                                           'that matched your search criteria.', contents, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def get_white_list_request(item_ids, os_types, exclusion_type, limit):
+ endpoint_url = 'exclusions'
+
+ params = {
+ "ids": item_ids,
+ "osTypes": os_types,
+ "type": exclusion_type,
+ "limit": limit
+ }
+
+ response = http_request('GET', endpoint_url, params)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def create_white_item_command():
+ """
+ Create white item.
+ """
+ # Init main vars
+ contents = []
+ context = {}
+ context_entries = []
+ title = ''
+
+ # Get arguments
+ group_ids = argToList(demisto.args().get('group_ids', []))
+ site_ids = argToList(demisto.args().get('site_ids', []))
+ exclusion_type = demisto.args().get('exclusion_type')
+ exclusion_value = demisto.args().get('exclusion_value')
+ os_type = demisto.args().get('os_type')
+ description = demisto.args().get('description')
+ exclusion_mode = demisto.args().get('exclusion_mode')
+ path_exclusion_type = demisto.args().get('path_exclusion_type')
+
+ # Make request and get raw response
+ new_item = create_exclusion_item_request(exclusion_type, exclusion_value, os_type, description, exclusion_mode,
+ path_exclusion_type, group_ids, site_ids)
+
+ # Parse response into context & content entries
+ if new_item:
+ title = 'Sentinel One - Adding an exclusion item \n' + \
+ 'The provided item was successfully added to the exclusion list'
+ contents.append({
+ 'ID': new_item.get('id'),
+ 'Type': new_item.get('type'),
+ 'Created At': new_item.get('createdAt')
+ })
+ context_entries.append({
+ 'ID': new_item.get('id'),
+ 'Type': new_item.get('type'),
+ 'CreatedAt': new_item.get('createdAt')
+ })
+
+ context['SentinelOne.Exclusion(val.ID && val.ID === obj.ID)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, contents, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def create_exclusion_item_request(exclusion_type, exclusion_value, os_type, description, exclusion_mode,
+ path_exclusion_type, group_ids, site_ids):
+ endpoint_url = 'exclusions'
+
+ payload = {
+ "filter": {
+ "groupIds": group_ids,
+ "siteIds": site_ids
+ },
+ "data": {
+ "type": exclusion_type,
+ "value": exclusion_value,
+ "osType": os_type,
+ "description": description,
+ "mode": exclusion_mode
+ }
+ }
+
+ if path_exclusion_type:
+ payload['data']['pathExclusionType'] = path_exclusion_type
+
+ response = http_request('POST', endpoint_url, data=json.dumps(payload))
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')[0]
+ return {}
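+
+
+# The exclusions endpoint appears to return a list of created items; only the
+# first element is surfaced to the caller above.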
+
+
+def get_sites_command():
+ """
+ List all sites with filtering options
+ """
+ # Init main vars
+ contents = []
+ context = {}
+ context_entries = []
+
+ # Get arguments
+ updated_at = demisto.args().get('updated_at')
+ query = demisto.args().get('query')
+ site_type = demisto.args().get('site_type')
+ features = demisto.args().get('features')
+ state = demisto.args().get('state')
+ suite = demisto.args().get('suite')
+ admin_only = bool(strtobool(demisto.args().get('admin_only', 'false')))
+ account_id = demisto.args().get('account_id')
+ site_name = demisto.args().get('site_name')
+ created_at = demisto.args().get('created_at')
+ limit = int(demisto.args().get('limit', 50))
+ site_ids = argToList(demisto.args().get('site_ids', []))
+
+ # Make request and get raw response
+ sites, all_sites = get_sites_request(updated_at, query, site_type, features, state, suite, admin_only, account_id,
+ site_name, created_at, limit, site_ids)
+
+ # Parse response into context & content entries
+ if sites:
+ for site in sites:
+ contents.append({
+ 'ID': site.get('id'),
+ 'Creator': site.get('creator'),
+ 'Name': site.get('name'),
+ 'Type': site.get('siteType'),
+ 'Account Name': site.get('accountName'),
+ 'State': site.get('state'),
+ 'Health Status': site.get('healthStatus'),
+ 'Suite': site.get('suite'),
+ 'Created At': site.get('createdAt'),
+ 'Expiration': site.get('expiration'),
+ 'Unlimited Licenses': site.get('unlimitedLicenses'),
+ 'Total Licenses': all_sites.get('totalLicenses'),
+ 'Active Licenses': all_sites.get('activeLicenses')
+ })
+ context_entries.append({
+ 'ID': site.get('id'),
+ 'Creator': site.get('creator'),
+ 'Name': site.get('name'),
+ 'Type': site.get('siteType'),
+ 'AccountName': site.get('accountName'),
+ 'State': site.get('state'),
+ 'HealthStatus': site.get('healthStatus'),
+ 'Suite': site.get('suite'),
+ 'CreatedAt': site.get('createdAt'),
+ 'Expiration': site.get('expiration'),
+ 'UnlimitedLicenses': site.get('unlimitedLicenses'),
+ 'TotalLicenses': all_sites.get('totalLicenses'),
+ 'ActiveLicenses': all_sites.get('activeLicenses')
+ })
+
+ context['SentinelOne.Site(val.ID && val.ID === obj.ID)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+        'HumanReadable': tableToMarkdown('Sentinel One - Getting List of Sites \n' + 'Provides summary information and '
+ 'details for all sites that matched'
+ ' your search criteria.', contents,
+ removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def get_sites_request(updated_at, query, site_type, features, state, suite, admin_only, account_id, site_name,
+ created_at, limit, site_ids):
+ endpoint_url = 'sites'
+
+ params = {
+ "updatedAt": updated_at,
+ "query": query,
+ "siteType": site_type,
+ "features": features,
+ "state": state,
+ "suite": suite,
+ "adminOnly": admin_only,
+ "accountId": account_id,
+ "name": site_name,
+ "createdAt": created_at,
+ "limit": limit,
+ "siteIds": site_ids
+ }
+
+ response = http_request('GET', endpoint_url, params)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data').get('sites'), response.get('data').get('allSites')
+    return {}, {}
+
+
+def get_site_command():
+ """
+ Get a specific site by ID
+ """
+ # Init main vars
+ contents = []
+ context = {}
+ context_entries = []
+ title = ''
+
+ # Get arguments
+ site_id = demisto.args().get('site_id')
+
+ # Make request and get raw response
+ site = get_site_request(site_id)
+
+ # Parse response into context & content entries
+ if site:
+        title = 'Sentinel One - Summary About Site: ' + site_id + '\n' + \
+                'Provides summary information and details for the specified site ID'
+ contents.append({
+ 'ID': site.get('id'),
+ 'Creator': site.get('creator'),
+ 'Name': site.get('name'),
+ 'Type': site.get('siteType'),
+ 'Account Name': site.get('accountName'),
+ 'State': site.get('state'),
+ 'Health Status': site.get('healthStatus'),
+ 'Suite': site.get('suite'),
+ 'Created At': site.get('createdAt'),
+ 'Expiration': site.get('expiration'),
+ 'Unlimited Licenses': site.get('unlimitedLicenses'),
+ 'Total Licenses': site.get('totalLicenses'),
+ 'Active Licenses': site.get('activeLicenses'),
+ 'AccountID': site.get('accountId'),
+ 'IsDefault': site.get('isDefault')
+ })
+ context_entries.append({
+ 'ID': site.get('id'),
+ 'Creator': site.get('creator'),
+ 'Name': site.get('name'),
+ 'Type': site.get('siteType'),
+ 'AccountName': site.get('accountName'),
+ 'State': site.get('state'),
+ 'HealthStatus': site.get('healthStatus'),
+ 'Suite': site.get('suite'),
+ 'CreatedAt': site.get('createdAt'),
+ 'Expiration': site.get('expiration'),
+ 'UnlimitedLicenses': site.get('unlimitedLicenses'),
+ 'TotalLicenses': site.get('totalLicenses'),
+ 'ActiveLicenses': site.get('activeLicenses'),
+ 'AccountID': site.get('accountId'),
+ 'IsDefault': site.get('isDefault')
+ })
+
+ context['SentinelOne.Site(val.ID && val.ID === obj.ID)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, contents, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def get_site_request(site_id):
+ endpoint_url = f'sites/{site_id}'
+
+ response = http_request('GET', endpoint_url)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def reactivate_site_command():
+ """
+ Reactivate specific site by ID
+ """
+ # Init main vars
+ context = {}
+ title = ''
+
+ # Get arguments
+ site_id = demisto.args().get('site_id')
+
+ # Make request and get raw response
+ site = reactivate_site_request(site_id)
+
+ # Parse response into context & content entries
+ if site:
+ title = 'Sentinel One - Reactivated Site: ' + site_id + '\n' + 'Site has been reactivated successfully'
+ contents = {
+ 'ID': site.get('id'),
+ 'Reactivated': site.get('success')
+ }
+ context_entries = {
+ 'ID': site.get('id'),
+ 'Reactivated': site.get('success')
+ }
+
+ context['SentinelOne.Site(val.ID && val.ID === obj.ID)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, contents, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def reactivate_site_request(site_id):
+ endpoint_url = f'sites/{site_id}/reactivate'
+
+ response = http_request('PUT', endpoint_url)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if response.get('data'):
+ return response.get('data')
+ return {}
+
+
+def get_threat_summary_command():
+ """
+ Get dashboard threat summary
+ """
+ # Init main vars
+ context = {}
+ title = ''
+
+ # Get arguments
+ site_ids = argToList(demisto.args().get('site_ids', []))
+ group_ids = argToList(demisto.args().get('group_ids', []))
+
+ # Make request and get raw response
+ threat_summary = get_threat_summary_request(site_ids, group_ids)
+
+ # Parse response into context & content entries
+ if threat_summary:
+ title = 'Sentinel One - Dashboard Threat Summary'
+ contents = {
+ 'Active': threat_summary.get('active'),
+ 'Total': threat_summary.get('total'),
+ 'Mitigated': threat_summary.get('mitigated'),
+ 'Suspicious': threat_summary.get('suspicious'),
+ 'Blocked': threat_summary.get('blocked')
+ }
+
+ context_entries = {
+ 'Active': threat_summary.get('active'),
+ 'Total': threat_summary.get('total'),
+ 'Mitigated': threat_summary.get('mitigated'),
+ 'Suspicious': threat_summary.get('suspicious'),
+ 'Blocked': threat_summary.get('blocked')
+ }
+
+ context['SentinelOne.Threat(val && val === obj)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, contents, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def get_threat_summary_request(site_ids, group_ids):
+ endpoint_url = 'private/threats/summary'
+
+ params = {
+ "siteIds": site_ids,
+ "groupIds": group_ids
+ }
+
+ response = http_request('GET', endpoint_url, params)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+# Agents Commands
+
+
+def list_agents_command():
+ """
+ List all agents matching the input filter
+ """
+ # Init main vars
+ contents = []
+ context = {}
+ context_entries = []
+
+ # Get arguments
+ active_threats = demisto.args().get('min_active_threats')
+ computer_name = demisto.args().get('computer_name')
+ scan_status = demisto.args().get('scan_status')
+ os_type = demisto.args().get('os_type')
+ created_at = demisto.args().get('created_at')
+
+ # Make request and get raw response
+ agents = list_agents_request(active_threats, computer_name, scan_status, os_type, created_at)
+
+ # Parse response into context & content entries
+ if agents:
+ for agent in agents:
+ contents.append({
+ 'ID': agent.get('id'),
+ 'Network Status': agent.get('networkStatus'),
+ 'Agent Version': agent.get('agentVersion'),
+                'Is Decommissioned': agent.get('isDecommissioned'),
+                'Is Active': agent.get('isActive'),
+                'Last Active Date': agent.get('lastActiveDate'),
+ 'Registered At': agent.get('registeredAt'),
+ 'External IP': agent.get('externalIp'),
+ 'Threat Count': agent.get('activeThreats'),
+ 'Encrypted Applications': agent.get('encryptedApplications'),
+ 'OS Name': agent.get('osName'),
+ 'Computer Name': agent.get('computerName'),
+ 'Domain': agent.get('domain'),
+ 'Created At': agent.get('createdAt'),
+ 'Site Name': agent.get('siteName')
+ })
+ context_entries.append({
+ 'ID': agent.get('id'),
+ 'NetworkStatus': agent.get('networkStatus'),
+ 'AgentVersion': agent.get('agentVersion'),
+ 'IsDecomissioned': agent.get('isDecommissioned'),
+ 'IsActive': agent.get('isActive'),
+ 'LastActiveDate': agent.get('lastActiveDate'),
+ 'RegisteredAt': agent.get('registeredAt'),
+ 'ExternalIP': agent.get('externalIp'),
+ 'ThreatCount': agent.get('activeThreats'),
+ 'EncryptedApplications': agent.get('encryptedApplications'),
+ 'OSName': agent.get('osName'),
+ 'ComputerName': agent.get('computerName'),
+ 'Domain': agent.get('domain'),
+ 'CreatedAt': agent.get('createdAt'),
+ 'SiteName': agent.get('siteName')
+ })
+
+ context['SentinelOne.Agents(val.ID && val.ID === obj.ID)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Sentinel One - List of Agents \n Provides summary information and details for'
+ ' all the agents that matched your search criteria',
+ contents, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def list_agents_request(active_threats, computer_name, scan_status, os_type, created_at):
+ endpoint_url = 'agents'
+
+ params = {
+ "activeThreats__gt": active_threats,
+ "computerName": computer_name,
+ "scanStatus": scan_status,
+ "osType": os_type,
+ "createdAt__gte": created_at
+ }
+
+ response = http_request('GET', endpoint_url, params)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def get_agent_command():
+ """
+ Get single agent via ID
+ """
+ # Init main vars
+ contents = []
+ context = {}
+ context_entries = []
+ title = ''
+
+ # Get arguments
+ agent_id = demisto.args().get('agent_id')
+
+ # Make request and get raw response
+ agent = get_agent_request(agent_id)
+
+ # Parse response into context & content entries
+ if agent:
+        title = 'Sentinel One - Get Agent Details\nProvides details for the following agent ID: ' + agent_id
+ contents.append({
+ 'ID': agent.get('id'),
+ 'Network Status': agent.get('networkStatus'),
+ 'Agent Version': agent.get('agentVersion'),
+            'Is Decommissioned': agent.get('isDecommissioned'),
+            'Is Active': agent.get('isActive'),
+            'Last Active Date': agent.get('lastActiveDate'),
+ 'Registered At': agent.get('registeredAt'),
+ 'External IP': agent.get('externalIp'),
+ 'Threat Count': agent.get('activeThreats'),
+ 'Encrypted Applications': agent.get('encryptedApplications'),
+ 'OS Name': agent.get('osName'),
+ 'Computer Name': agent.get('computerName'),
+ 'Domain': agent.get('domain'),
+ 'Created At': agent.get('createdAt'),
+ 'Site Name': agent.get('siteName')
+ })
+ context_entries.append({
+ 'ID': agent.get('id'),
+ 'NetworkStatus': agent.get('networkStatus'),
+ 'AgentVersion': agent.get('agentVersion'),
+ 'IsDecomissioned': agent.get('isDecommissioned'),
+ 'IsActive': agent.get('isActive'),
+ 'LastActiveDate': agent.get('lastActiveDate'),
+ 'RegisteredAt': agent.get('registeredAt'),
+ 'ExternalIP': agent.get('externalIp'),
+ 'ThreatCount': agent.get('activeThreats'),
+ 'EncryptedApplications': agent.get('encryptedApplications'),
+ 'OSName': agent.get('osName'),
+ 'ComputerName': agent.get('computerName'),
+ 'Domain': agent.get('domain'),
+ 'CreatedAt': agent.get('createdAt'),
+ 'SiteName': agent.get('siteName')
+ })
+
+ context['SentinelOne.Agent(val.ID && val.ID === obj.ID)'] = context_entries
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, contents, removeNull=True),
+ 'EntryContext': context
+ })
+
+
+def get_agent_request(agent_id):
+ endpoint_url = 'agents'
+
+ params = {
+ "ids": [agent_id]
+ }
+
+ response = http_request('GET', endpoint_url, params)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
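+        # The agents endpoint returns a list under "data" even when filtered
+        # to a single ID, so unwrap the first (and only) element.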
+ return response.get('data')[0]
+ return {}
+
+
+def connect_to_network_request(agents_id):
+ endpoint_url = 'agents/actions/connect'
+
+ payload = {
+ 'filter': {
+ 'ids': agents_id
+ }
+ }
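+    # Agent actions take a JSON body whose "filter" object selects the target
+    # agents; here the selection is the agent IDs supplied by the caller.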
+
+ response = http_request('POST', endpoint_url, data=json.dumps(payload))
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response
+ return {}
+
+
+def connect_agent_to_network():
+ """
+ Sends a "connect to network" command to all agents matching the input filter.
+ """
+ # Get arguments
+ agents_id = demisto.args().get('agent_id')
+
+ # Make request and get raw response
+ agents = connect_to_network_request(agents_id)
+ agents_affected = agents.get('data', {}).get('affected', 0)
+
+ # Parse response into context & content entries
+ if agents_affected > 0:
+ network_status = get_agent_request(agents_id)
+ contents = {
+ 'NetworkStatus': network_status.get('networkStatus'),
+ 'ID': agents_id
+ }
+ else:
+ return_error('No agents were connected to the network.')
+
+ context = {
+ 'SentinelOne.Agent(val.ID && val.ID === obj.ID)': contents
+ }
+
+ return_outputs(
+ f'{agents_affected} agent(s) successfully connected to the network.',
+ context,
+ agents
+ )
+
+
+def disconnect_from_network_request(agents_id):
+ endpoint_url = 'agents/actions/disconnect'
+
+ payload = {
+ 'filter': {
+ 'ids': agents_id
+ }
+ }
+
+ response = http_request('POST', endpoint_url, data=json.dumps(payload))
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ else:
+ return response
+
+
+def disconnect_agent_from_network():
+ """
+ Sends a "disconnect from network" command to all agents matching the input filter.
+ """
+ # Get arguments
+ agents_id = demisto.args().get('agent_id')
+
+ # Make request and get raw response
+ agents = disconnect_from_network_request(agents_id)
+ agents_affected = agents.get('data', {}).get('affected', 0)
+
+ # Parse response into context & content entries
+ if agents_affected > 0:
+ network_status = get_agent_request(agents_id)
+ contents = {
+ 'NetworkStatus': network_status.get('networkStatus'),
+ 'ID': agents_id
+ }
+ else:
+ return_error('No agents were disconnected from the network.')
+
+ context = {
+ 'SentinelOne.Agent(val.ID && val.ID === obj.ID)': contents
+ }
+
+ return_outputs(
+ f'{agents_affected} agent(s) successfully disconnected from the network.',
+ context,
+ agents
+ )
+
+
+def broadcast_message_request(message, is_active=None, group_id=None, agent_id=None, domain=None):
+ filters = {}
+ endpoint_url = 'agents/actions/broadcast'
+
+ if is_active:
+ filters['isActive'] = is_active
+ if group_id:
+ filters['groupIds'] = group_id
+ if agent_id:
+ filters['ids'] = agent_id
+ if domain:
+ filters['domains'] = domain
+
+ payload = {
+ 'data': {
+ 'message': message
+ },
+ 'filter': filters
+ }
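+    # The broadcast body carries the message under "data" and the agent
+    # selection under "filter"; only the filters the caller supplied are set.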
+ response = http_request('POST', endpoint_url, data=json.dumps(payload))
+
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ else:
+ return response
+
+
+def broadcast_message():
+ """
+ Broadcasts a message to all agents matching the input filter.
+ """
+ message = demisto.args().get('message')
+    is_active = bool(strtobool(demisto.args().get('active_agent', 'false')))
+ group_id = demisto.args().get('group_id')
+ agent_id = demisto.args().get('agent_id')
+ domain = demisto.args().get('domain')
+
+ broadcast_message = broadcast_message_request(message, is_active=is_active, group_id=group_id, agent_id=agent_id,
+ domain=domain)
+
+ agents_affected = broadcast_message.get('data', {}).get('affected', 0)
+ if agents_affected > 0:
+ demisto.results('The message was successfully delivered to the agent(s)')
+ else:
+ return_error('No messages were sent. Verify that the inputs are correct.')
+
+
+def shutdown_agents_request(query, agent_id, group_id):
+ endpoint_url = 'agents/actions/shutdown'
+ filters = {}
+
+ if query:
+ filters['query'] = query
+ if agent_id:
+ filters['ids'] = agent_id
+ if group_id:
+ filters['groupIds'] = group_id
+
+ payload = {
+ 'filter': filters
+ }
+
+ response = http_request('POST', endpoint_url, data=json.dumps(payload))
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ else:
+ return response
+
+
+def shutdown_agents():
+ """
+ Sends a shutdown command to all agents matching the input filter
+ """
+ query = demisto.args().get('query', '')
+
+ agent_id = argToList(demisto.args().get('agent_id'))
+ group_id = argToList(demisto.args().get('group_id'))
+ if not (agent_id or group_id):
+ return_error('Expecting at least one of the following arguments to filter by: agent_id, group_id.')
+
+ affected_agents = shutdown_agents_request(query, agent_id, group_id)
+ agents = affected_agents.get('data', {}).get('affected', 0)
+ if agents > 0:
+ demisto.results(f'Shutting down {agents} agent(s).')
+ else:
+        return_error('No agents were shut down.')
+
+
+def uninstall_agent_request(query=None, agent_id=None, group_id=None):
+ endpoint_url = 'agents/actions/uninstall'
+ filters = {}
+
+ if query:
+ filters['query'] = query
+ if agent_id:
+ filters['ids'] = agent_id
+ if group_id:
+ filters['groupIds'] = group_id
+
+ payload = {
+ 'filter': filters
+ }
+
+ response = http_request('POST', endpoint_url, data=json.dumps(payload))
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ else:
+ return response
+
+
+def uninstall_agent():
+ """
+ Sends an uninstall command to all agents matching the input filter.
+ """
+ query = demisto.args().get('query', '')
+
+ agent_id = argToList(demisto.args().get('agent_id'))
+ group_id = argToList(demisto.args().get('group_id'))
+ if not (agent_id or group_id):
+ return_error('Expecting at least one of the following arguments to filter by: agent_id, group_id.')
+
+    affected_agents = uninstall_agent_request(query, agent_id, group_id)
+    agents = affected_agents.get('data', {}).get('affected', 0)
+    if agents > 0:
+        demisto.results(f'Uninstall was sent to {agents} agent(s).')
+ else:
+ return_error('No agents were affected.')
+
+
+# Event Commands
+
+def create_query_request(query, from_date, to_date):
+ endpoint_url = 'dv/init-query'
+ payload = {
+ 'query': query,
+ 'fromDate': from_date,
+ 'toDate': to_date
+ }
+
+ response = http_request('POST', endpoint_url, data=json.dumps(payload))
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ else:
+ return response.get('data', {}).get('queryId')
+
+
+def create_query():
+ query = demisto.args().get('query')
+ from_date = demisto.args().get('from_date')
+ to_date = demisto.args().get('to_date')
+
+ query_id = create_query_request(query, from_date, to_date)
+
+ context_entries = {
+ 'Query': query,
+ 'FromDate': from_date,
+ 'ToDate': to_date,
+ 'QueryID': query_id
+ }
+
+ context = {
+ 'SentinelOne.Query(val.QueryID && val.QueryID === obj.QueryID)': context_entries
+ }
+ return_outputs('The query ID is ' + str(query_id), context, query_id)
+
+
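+# Deep Visibility queries run asynchronously: sentinelone-create-query returns a
+# queryId, and the events/processes commands below read results from dv/events
+# using that ID once the server has finished executing the query.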
+def get_events_request(query_id=None, limit=None):
+ endpoint_url = 'dv/events'
+
+ params = {
+ 'query_id': query_id,
+ 'limit': limit
+ }
+
+ response = http_request('GET', endpoint_url, params)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def get_events():
+ """
+ Get all Deep Visibility events from query
+ """
+ contents = []
+ event_standards = []
+    headers = ['EventType', 'Endpoint', 'SiteName', 'User', 'Time', 'AgentOS', 'ProcessID', 'ProcessUID',
+ 'ProcessName', 'MD5', 'SHA256']
+ query_id = demisto.args().get('query_id')
+ limit = int(demisto.args().get('limit'))
+
+ events = get_events_request(query_id, limit)
+ if events:
+ for event in events:
+ contents.append({
+ 'EventType': event.get('eventType'),
+ 'Endpoint': event.get('agentName'),
+ 'SiteName': event.get('siteName'),
+ 'User': event.get('user'),
+ 'Time': event.get('processStartTime'),
+ 'AgentOS': event.get('agentOs'),
+ 'ProcessID': event.get('pid'),
+ 'ProcessUID': event.get('processUniqueKey'),
+ 'ProcessName': event.get('processName'),
+ 'MD5': event.get('md5'),
+ 'SHA256': event.get('sha256')
+ })
+
+ event_standards.append({
+ 'Type': event.get('eventType'),
+ 'Name': event.get('processName'),
+ 'ID': event.get('pid'),
+ })
+
+ context = {
+ 'SentinelOne.Event(val.ProcessID && val.ProcessID === obj.ProcessID)': contents,
+ 'Event': event_standards
+ }
+
+ return_outputs(tableToMarkdown('SentinelOne Events', contents, headers, removeNull=True), context, events)
+ else:
+ demisto.results('No events were found.')
+
+
+def get_processes_request(query_id=None, limit=None):
+ endpoint_url = 'dv/events/process'
+
+ params = {
+ 'query_id': query_id,
+ 'limit': limit
+ }
+
+ response = http_request('GET', endpoint_url, params)
+ if response.get('errors'):
+ return_error(response.get('errors'))
+ if 'data' in response:
+ return response.get('data')
+ return {}
+
+
+def get_processes():
+ """
+ Get Deep Visibility events from query by event type - process
+ """
+ contents = []
+    headers = ['EventType', 'Endpoint', 'SiteName', 'User', 'Time', 'ParentProcessID', 'ParentProcessUID',
+ 'ProcessName', 'ParentProcessName', 'ProcessDisplayName', 'ProcessID', 'ProcessUID',
+ 'SHA1', 'CMD', 'SubsystemType', 'IntegrityLevel', 'ParentProcessStartTime']
+ query_id = demisto.args().get('query_id')
+ limit = int(demisto.args().get('limit'))
+
+    processes = get_processes_request(query_id, limit)
+ if processes:
+ for process in processes:
+ contents.append({
+ 'EventType': process.get('eventType'),
+ 'Endpoint': process.get('agentName'),
+ 'SiteName': process.get('siteName'),
+ 'User': process.get('user'),
+ 'Time': process.get('processStartTime'),
+ 'ParentProcessID': process.get('parentPid'),
+ 'ParentProcessUID': process.get('parentProcessUniqueKey'),
+ 'ParentProcessName': process.get('parentProcessName'),
+ 'ProcessID': process.get('pid'),
+ 'ProcessUID': process.get('processUniqueKey'),
+ 'ProcessName': process.get('processName'),
+ 'ProcessDisplayName': process.get('processDisplayName'),
+ 'SHA1': process.get('processImageSha1Hash'),
+                'CMD': process.get('processCmd'),
+ 'SubsystemType': process.get('processSubSystem'),
+ 'IntegrityLevel': process.get('processIntegrityLevel'),
+ 'ParentProcessStartTime': process.get('parentProcessStartTime')
+ })
+
+ context = {
+ 'SentinelOne.Event(val.ProcessID && val.ProcessID === obj.ProcessID)': contents
+ }
+
+ return_outputs(tableToMarkdown('SentinelOne Processes', contents, headers, removeNull=True), context, processes)
+
+ else:
+ demisto.results('No processes were found.')
+
+
+def fetch_incidents():
+ last_run = demisto.getLastRun()
+ last_fetch = last_run.get('time')
+
+ # handle first time fetch
+ if last_fetch is None:
+ last_fetch, _ = parse_date_range(FETCH_TIME, to_timestamp=True)
+
+ current_fetch = last_fetch
+ incidents = []
+ last_fetch_date_string = timestamp_to_datestring(last_fetch, '%Y-%m-%dT%H:%M:%S.%fZ')
+ threats = get_threats_request(limit=FETCH_LIMIT, created_after=last_fetch_date_string)
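+    # Only threats strictly newer than the saved timestamp become incidents;
+    # current_fetch tracks the newest occurrence seen so the next run resumes
+    # where this one stopped.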
+ for threat in threats:
+ rank = threat.get('rank')
+ try:
+ rank = int(rank)
+        except (ValueError, TypeError):
+ rank = 0
+        # If no fetch threat rank is set, bring everything; otherwise only fetch threats at or above the threshold
+        if not FETCH_THREAT_RANK or rank >= FETCH_THREAT_RANK:
+ incident = threat_to_incident(threat)
+ incident_date = date_to_timestamp(incident['occurred'], '%Y-%m-%dT%H:%M:%S.%fZ')
+ # update last run
+ if incident_date > last_fetch:
+ incidents.append(incident)
+
+ if incident_date > current_fetch:
+ current_fetch = incident_date
+
+ demisto.setLastRun({'time': current_fetch})
+ demisto.incidents(incidents)
+
+
+def threat_to_incident(threat):
+ incident = {}
+ incident['name'] = 'Sentinel One Threat: ' + threat.get('classification')
+ incident['occurred'] = threat.get('createdDate')
+ incident['rawJSON'] = json.dumps(threat)
+ return incident
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('command is %s' % (demisto.command()))
+
+try:
+ handle_proxy()
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+ demisto.results('ok')
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+ elif demisto.command() == 'sentinelone-get-activities':
+ get_activities_command()
+ elif demisto.command() == 'sentinelone-get-threats':
+ get_threats_command()
+ elif demisto.command() == 'sentinelone-mark-as-threat':
+ mark_as_threat_command()
+ elif demisto.command() == 'sentinelone-mitigate-threat':
+ mitigate_threat_command()
+ elif demisto.command() == 'sentinelone-resolve-threat':
+ resolve_threat_command()
+ elif demisto.command() == 'sentinelone-threat-summary':
+ get_threat_summary_command()
+ elif demisto.command() == 'sentinelone-get-hash':
+ get_hash_command()
+ elif demisto.command() == 'sentinelone-get-white-list':
+ get_white_list_command()
+ elif demisto.command() == 'sentinelone-create-white-list-item':
+ create_white_item_command()
+ elif demisto.command() == 'sentinelone-get-sites':
+ get_sites_command()
+ elif demisto.command() == 'sentinelone-get-site':
+ get_site_command()
+ elif demisto.command() == 'sentinelone-reactivate-site':
+ reactivate_site_command()
+ elif demisto.command() == 'sentinelone-list-agents':
+ list_agents_command()
+ elif demisto.command() == 'sentinelone-get-agent':
+ get_agent_command()
+ elif demisto.command() == 'sentinelone-get-groups':
+ get_groups_command()
+ elif demisto.command() == 'sentinelone-move-agent':
+ move_agent_to_group_command()
+ elif demisto.command() == 'sentinelone-delete-group':
+ delete_group()
+ elif demisto.command() == 'sentinelone-agent-processes':
+ get_agent_processes()
+ elif demisto.command() == 'sentinelone-connect-agent':
+ connect_agent_to_network()
+ elif demisto.command() == 'sentinelone-disconnect-agent':
+ disconnect_agent_from_network()
+ elif demisto.command() == 'sentinelone-broadcast-message':
+ broadcast_message()
+ elif demisto.command() == 'sentinelone-get-events':
+ get_events()
+ elif demisto.command() == 'sentinelone-create-query':
+ create_query()
+ elif demisto.command() == 'sentinelone-get-processes':
+ get_processes()
+ elif demisto.command() == 'sentinelone-shutdown-agent':
+ shutdown_agents()
+ elif demisto.command() == 'sentinelone-uninstall-agent':
+ uninstall_agent()
+
+except Exception as e:
+ if demisto.command() == 'fetch-incidents':
+ LOG(str(e))
+ raise
+ else:
+ return_error(e)
diff --git a/Integrations/SentinelOne-V2/SentinelOne-V2.yml b/Integrations/SentinelOne-V2/SentinelOne-V2.yml
new file mode 100644
index 000000000000..16e5926492a8
--- /dev/null
+++ b/Integrations/SentinelOne-V2/SentinelOne-V2.yml
@@ -0,0 +1,1499 @@
+category: Endpoint
+commonfields:
+ id: SentinelOne V2
+ version: -1
+configuration:
+- display: Server URL (e.g., https://usea1.sentinelone.net)
+ name: url
+ required: true
+ type: 0
+- display: API Token
+ name: token
+ required: true
+ type: 4
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: 3 days
+  display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days, 3
+    months, 1 year)
+ name: fetch_time
+ required: false
+ type: 0
+- defaultvalue: '5'
+ display: Minimum risk score for importing incidents (0-10), where 0 is low risk
+ and 10 is high risk
+ name: fetch_threat_rank
+ required: false
+ type: 0
+- defaultvalue: '10'
+ display: 'Fetch limit: the maximum number of incidents to fetch'
+ name: fetch_limit
+ required: false
+ type: 0
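+# The fetch_time, fetch_threat_rank and fetch_limit parameters above feed the
+# FETCH_TIME, FETCH_THREAT_RANK and FETCH_LIMIT values that drive
+# fetch_incidents() in the integration script.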
+description: Endpoint protection
+display: SentinelOne V2
+name: SentinelOne V2
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Filter by computer name.
+ isArray: false
+ name: computer_name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'CSV list of scan statuses by which to filter the results, for
+ example: "started,aborted".'
+ isArray: false
+ name: scan_status
+ predefined:
+ - started
+ - none
+ - finished
+ - aborted
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'Included OS types, for example: "windows".'
+ isArray: true
+ name: os_type
+ predefined:
+ - windows
+ - windows_legacy
+ - macos
+ - linux
+ required: false
+ secret: false
+ - default: false
+ description: 'Endpoint created at timestamp, for example: "2018-02-27T04:49:26.257525Z".'
+ isArray: false
+ name: created_at
+ required: false
+ secret: false
+ - default: false
+ description: Minimum number of threats for an agent.
+ isArray: false
+ name: min_active_threats
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns all agents that match the specified criteria.
+ execution: false
+ name: sentinelone-list-agents
+ outputs:
+ - contextPath: SentinelOne.Agents.NetworkStatus
+ description: The agent network status.
+ type: string
+ - contextPath: SentinelOne.Agents.ID
+ description: The agent ID.
+ type: string
+ - contextPath: SentinelOne.Agents.AgentVersion
+ description: The agent software version.
+ type: string
+ - contextPath: SentinelOne.Agents.IsDecomissioned
+ description: Whether the agent is decommissioned.
+ type: boolean
+ - contextPath: SentinelOne.Agents.IsActive
+ description: Whether the agent is active.
+ type: boolean
+ - contextPath: SentinelOne.Agents.LastActiveDate
+ description: The last active date of the agent
+ type: date
+ - contextPath: SentinelOne.Agents.RegisteredAt
+ description: The registration date of the agent.
+ type: date
+ - contextPath: SentinelOne.Agents.ExternalIP
+ description: The agent IP address.
+ type: string
+ - contextPath: SentinelOne.Agents.ThreatCount
+ description: Number of active threats.
+ type: number
+ - contextPath: SentinelOne.Agents.EncryptedApplications
+ description: Whether disk encryption is enabled.
+ type: boolean
+ - contextPath: SentinelOne.Agents.OSName
+ description: Name of operating system.
+ type: string
+ - contextPath: SentinelOne.Agents.ComputerName
+ description: Name of agent computer.
+ type: string
+ - contextPath: SentinelOne.Agents.Domain
+ description: Domain name of the agent.
+ type: string
+ - contextPath: SentinelOne.Agents.CreatedAt
+ description: Creation time of the agent.
+ type: date
+ - contextPath: SentinelOne.Agents.SiteName
+ description: Site name associated with the agent.
+ type: string
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Exclusion item type. Can be "file_type", "path", "white_hash",
+ "certificate", or "browser".
+ isArray: false
+ name: exclusion_type
+ predefined:
+ - file_type
+ - path
+ - white_hash
+ - certificate
+ - browser
+ required: true
+ secret: false
+ - default: false
+ description: Value of the exclusion item for the exclusion list.
+ isArray: false
+ name: exclusion_value
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: OS type. Can be "windows", "windows_legacy", "macos", or "linux".
+ OS type is required for hash exclusions.
+ isArray: false
+ name: os_type
+ predefined:
+ - windows
+    - windows_legacy
+    - macos
+ - linux
+ required: true
+ secret: false
+ - default: false
+ description: Description for adding the item.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Exclusion mode (path exclusion only). Can be "suppress", "disable_in_process_monitor_deep",
+ "disable_in_process_monitor", "disable_all_monitors", or "disable_all_monitors_deep".
+ isArray: false
+ name: exclusion_mode
+ predefined:
+ - suppress
+ - disable_in_process_monitor_deep
+ - disable_in_process_monitor
+ - disable_all_monitors
+ - disable_all_monitors_deep
+ required: false
+ secret: false
+ - default: false
+ description: Excluded path for a path exclusion list.
+ isArray: false
+ name: path_exclusion_type
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of group IDs by which to filter. Can be "site_ids" or
+ "group_ids".
+ isArray: true
+ name: group_ids
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates an exclusion item that matches the specified input filter.
+ execution: false
+ name: sentinelone-create-white-list-item
+ outputs:
+ - contextPath: SentinelOne.Exclusions.ID
+ description: The whitelisted entity ID.
+ type: string
+ - contextPath: SentinelOne.Exclusions.Type
+ description: The whitelisted item type.
+ type: string
+ - contextPath: SentinelOne.Exclusions.CreatedAt
+ description: Time when the whitelist item was created.
+ type: date
+ - arguments:
+ - default: false
+ description: 'List of IDs by which to filter, for example: "225494730938493804,225494730938493915".'
+ isArray: true
+ name: item_ids
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'CSV list of OS types by which to filter, for example: "windows,
+ linux".'
+ isArray: true
+ name: os_types
+ predefined:
+ - windows
+ - windows_legacy
+ - macos
+ - linux
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Exclusion type. Can be "file_type", "path", "white_hash", "certificate",
+ "browser".
+ isArray: false
+ name: exclusion_type
+ predefined:
+ - file_type
+ - path
+ - white_hash
+ - certificate
+ - browser
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: The maximum number of items to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Lists all exclusion items that match the specified input filter.
+ execution: false
+ name: sentinelone-get-white-list
+ outputs:
+ - contextPath: SentinelOne.Exclusions.ID
+ description: The item ID.
+ type: string
+ - contextPath: SentinelOne.Exclusions.Type
+ description: The exclusion item type.
+ type: string
+ - contextPath: SentinelOne.Exclusions.CreatedAt
+ description: Timestamp when the item was added.
+ type: date
+ - contextPath: SentinelOne.Exclusions.Value
+ description: Value of the added item.
+ type: string
+ - contextPath: SentinelOne.Exclusions.Source
+ description: Source of the added item.
+ type: string
+ - contextPath: SentinelOne.Exclusions.UserID
+ description: User ID of the user that added the item.
+ type: string
+ - contextPath: SentinelOne.Exclusions.UpdatedAt
+ description: Timestamp when the item was updated
+ type: date
+ - contextPath: SentinelOne.Exclusions.OsType
+ description: OS type.
+ type: string
+ - contextPath: SentinelOne.Exclusions.UserName
+ description: User name of the user that added the item.
+ type: string
+ - contextPath: SentinelOne.Exclusions.Mode
+    description: 'Exclusion mode (path exclusions only), for example: "suppress".'
+ type: string
+ - arguments:
+ - default: false
+ description: The content hash.
+ isArray: false
+ name: hash
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets the reputation of a hash.
+ execution: false
+ name: sentinelone-get-hash
+ outputs:
+ - contextPath: SentinelOne.Hash.Rank
+ description: The hash reputation (1-10).
+ type: Number
+ - contextPath: SentinelOne.Hash.Hash
+ description: The content hash.
+ type: String
+ - contextPath: SentinelOne.Hash.Classification
+ description: The hash classification.
+ type: String
+ - contextPath: SentinelOne.Hash.Classification Source
+ description: The hash classification source.
+ type: String
+ - arguments:
+ - default: false
+ description: The content hash of the threat.
+ isArray: false
+ name: content_hash
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: CSV list of mitigation statuses. Can be "mitigated", "active",
+ "blocked", "suspicious", "pending", or "suspicious_resolved".
+ isArray: true
+ name: mitigation_status
+ predefined:
+ - mitigated
+ - active
+ - blocked
+ - suspicious
+ - pending
+ - suspicious_resolved
+ required: false
+ secret: false
+ - default: false
+ description: 'Searches for threats created before this date, for example: "2018-02-27T04:49:26.257525Z".'
+ isArray: false
+ name: created_before
+ required: false
+ secret: false
+ - default: false
+ description: 'Searches for threats created after this date, for example: "2018-02-27T04:49:26.257525Z".'
+ isArray: false
+ name: created_after
+ required: false
+ secret: false
+ - default: false
+ description: 'Searches for threats created on or before this date, for example:
+ "2018-02-27T04:49:26.257525Z".'
+ isArray: false
+ name: created_until
+ required: false
+ secret: false
+ - default: false
+    description: 'Searches for threats created on or after this date, for example:
+ "2018-02-27T04:49:26.257525Z".'
+ isArray: false
+ name: created_from
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Whether to only return resolved threats.
+ isArray: false
+ name: resolved
+ predefined:
+ - 'false'
+ - 'true'
+ required: false
+ secret: false
+ - default: false
+ description: Threat display name. Can be a partial display name, not an exact
+ match.
+ isArray: false
+ name: display_name
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '20'
+ description: The maximum number of threats to return. Default is 20.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: Full free-text search for fields. Can be "content_hash", "file_display_name",
+ "file_path", "computer_name", or "uuid".
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: 'CSV list of threat IDs, for example: "225494730938493804,225494730938493915".'
+ isArray: true
+ name: threat_ids
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: 'CSV list of threat classifications to search, for example: "Malware",
+ "Network", "Benign".'
+ isArray: false
+ name: classifications
+ predefined:
+ - Engine
+ - Static
+ - Cloud
+ - Behavioral
+ required: false
+ secret: false
+ - default: false
+ description: Risk level threshold to retrieve (1-10).
+ isArray: false
+ name: rank
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns threats according to specified filters.
+ execution: false
+ name: sentinelone-get-threats
+ outputs:
+ - contextPath: SentinelOne.Threat.ID
+ description: The threat ID.
+ type: String
+ - contextPath: SentinelOne.Threat.AgentComputerName
+ description: The agent computer name.
+ type: String
+ - contextPath: SentinelOne.Threat.CreatedDate
+ description: File created date.
+ type: Date
+ - contextPath: SentinelOne.Threat.SiteID
+ description: The site ID.
+ type: String
+ - contextPath: SentinelOne.Threat.Classification
+ description: Classification name.
+ type: string
+ - contextPath: SentinelOne.Threat.MitigationStatus
+ description: The agent status.
+ type: String
+ - contextPath: SentinelOne.Threat.AgentID
+ description: The agent ID.
+ type: String
+ - contextPath: SentinelOne.Threat.Rank
+ description: Number representing cloud reputation (1-10).
+ type: Number
+ - contextPath: SentinelOne.Threat.MarkedAsBenign
+ description: Whether the threat is marked as benign.
+ type: Boolean
+ - arguments:
+ - default: false
+ description: 'CSV list of group IDs by which to filter, for example: "225494730938493804,225494730938493915".'
+ isArray: true
+ name: group_ids
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a dashboard threat summary.
+ execution: false
+ name: sentinelone-threat-summary
+ outputs:
+ - contextPath: SentinelOne.Threat.Active
+ description: Number of active threats in the system.
+ type: Number
+ - contextPath: SentinelOne.Threat.Total
+ description: Total number of threats in the system.
+ type: Number
+ - contextPath: SentinelOne.Threat.Mitigated
+ description: Number of mitigated threats in the system.
+ type: Number
+ - contextPath: SentinelOne.Threat.Suspicious
+ description: Number of suspicious threats in the system.
+ type: Number
+ - contextPath: SentinelOne.Threat.Blocked
+ description: Number of blocked threats in the system.
+ type: Number
+ - arguments:
+ - default: false
+ description: CSV list of threat IDs.
+ isArray: true
+ name: threat_ids
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Scope to use for exclusions. Can be "site" or "tenant".
+ isArray: false
+ name: target_scope
+ predefined:
+ - site
+ - tenant
+ required: true
+ secret: false
+ deprecated: false
+  description: Marks suspicious threats as threats.
+ execution: false
+ name: sentinelone-mark-as-threat
+ outputs:
+ - contextPath: SentinelOne.Threat.ID
+ description: The threat ID.
+ type: String
+ - contextPath: SentinelOne.Threat.MarkedAsThreat
+ description: Whether the suspicious threat was successfully marked as a threat.
+ type: Boolean
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: Mitigation action. Can be "kill", "quarantine", "un-quarantine",
+ "remediate", or "rollback-remediation".
+ isArray: false
+ name: action
+ predefined:
+ - kill
+ - quarantine
+ - un-quarantine
+ - remediate
+ - rollback-remediation
+ required: true
+ secret: false
+ - default: false
+ description: CSV list of threat IDs.
+ isArray: true
+ name: threat_ids
+ required: true
+ secret: false
+ deprecated: false
+ description: Applies a mitigation action to a group of threats that match the
+ specified input filter.
+ execution: false
+ name: sentinelone-mitigate-threat
+ outputs:
+ - contextPath: SentinelOne.Threat.ID
+ description: The threat ID.
+ type: String
+ - contextPath: SentinelOne.Threat.Mitigated
+ description: Whether the threat was successfully mitigated.
+ type: Boolean
+ - contextPath: SentinelOne.Threat.Mitigation.Action
+ description: Number of threats affected.
+ type: Number
+ - arguments:
+ - default: false
+ description: CSV list of threat IDs.
+ isArray: true
+ name: threat_ids
+ required: true
+ secret: false
+ deprecated: false
+ description: Resolves threat using the threat ID.
+ execution: false
+ name: sentinelone-resolve-threat
+ outputs:
+ - contextPath: SentinelOne.Threat.ID
+ description: The threat ID.
+ type: String
+ - contextPath: SentinelOne.Threat.Resolved
+ description: Whether the threat was successfully resolved.
+ type: Boolean
+ - arguments:
+ - default: false
+ description: The agent ID.
+ isArray: false
+ name: agent_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns details of an agent, by agent ID.
+ execution: false
+ name: sentinelone-get-agent
+ outputs:
+ - contextPath: SentinelOne.Agent.NetworkStatus
+ description: The agent network status.
+ type: string
+ - contextPath: SentinelOne.Agent.ID
+ description: The agent ID.
+ type: string
+ - contextPath: SentinelOne.Agent.AgentVersion
+ description: The agent software version.
+ type: string
+ - contextPath: SentinelOne.Agent.IsDecomissioned
+ description: Whether the agent is decommissioned.
+ type: boolean
+ - contextPath: SentinelOne.Agent.IsActive
+ description: Whether the agent is active.
+ type: boolean
+ - contextPath: SentinelOne.Agent.LastActiveDate
+ description: The last active date of the agent.
+ type: date
+ - contextPath: SentinelOne.Agent.RegisteredAt
+ description: The registration date of the agent.
+ type: date
+ - contextPath: SentinelOne.Agent.ExternalIP
+ description: The agent IP address.
+ type: string
+ - contextPath: SentinelOne.Agent.ThreatCount
+ description: Number of active threats.
+ type: number
+ - contextPath: SentinelOne.Agent.EncryptedApplications
+ description: Whether disk encryption is enabled.
+ type: boolean
+ - contextPath: SentinelOne.Agent.OSName
+ description: Name of the operating system.
+ type: string
+ - contextPath: SentinelOne.Agent.ComputerName
+ description: Name of the agent computer.
+ type: string
+ - contextPath: SentinelOne.Agent.Domain
+ description: Domain name of the agent.
+ type: string
+ - contextPath: SentinelOne.Agent.CreatedAt
+ description: Agent creation time.
+ type: date
+ - contextPath: SentinelOne.Agent.SiteName
+ description: Site name associated with the agent.
+ type: string
+ - arguments:
+ - default: false
+ description: 'Timestamp of last update, for example: "2018-02-27T04:49:26.257525Z".'
+ isArray: false
+ name: updated_at
+ required: false
+ secret: false
+ - default: false
+ description: 'Full-text search for fields: name, account_name.'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Site type. Can be "Trial", "Paid", "POC", "DEV", or "NFR".
+ isArray: false
+ name: site_type
+ predefined:
+ - Trial
+ - Paid
+ - POC
+ - DEV
+ - NFR
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Returns sites that support the specified features. Can be "firewall-control",
+ "device-control", or "ioc".
+ isArray: false
+ name: features
+ predefined:
+ - firewall-control
+ - device-control
+ - ioc
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Site state. Can be "active", "deleted", or "expired".
+ isArray: false
+ name: state
+ predefined:
+ - active
+ - deleted
+ - expired
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The suite of product features active for this site. Can be "Core"
+ or "Complete".
+ isArray: false
+ name: suite
+ predefined:
+ - Core
+ - Complete
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Sites to which the user has Admin privileges.
+ isArray: false
+ name: admin_only
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: 'Account ID, for example: "225494730938493804".'
+ isArray: false
+ name: account_id
+ required: false
+ secret: false
+ - default: false
+ description: 'Site name, for example: "My Site".'
+ isArray: false
+ name: site_name
+ required: false
+ secret: false
+ - default: false
+ description: 'Timestamp of site creation, for example: "2018-02-27T04:49:26.257525Z".'
+ isArray: false
+ name: created_at
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '50'
+ description: Maximum number of results to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns all sites that match the specified criteria.
+ execution: false
+ name: sentinelone-get-sites
+ outputs:
+ - contextPath: SentinelOne.Site.Creator
+ description: The creator name.
+ type: string
+ - contextPath: SentinelOne.Site.Name
+ description: The site name.
+ type: string
+ - contextPath: SentinelOne.Site.Type
+ description: The site type.
+ type: string
+ - contextPath: SentinelOne.Site.AccountName
+ description: The account name.
+ type: string
+ - contextPath: SentinelOne.Site.State
+ description: The site state.
+ type: string
+ - contextPath: SentinelOne.Site.HealthStatus
+ description: The health status of the site.
+ type: boolean
+ - contextPath: SentinelOne.Site.Suite
+ description: The suite to which the site belongs.
+ type: string
+ - contextPath: SentinelOne.Site.ActiveLicenses
+ description: Number of active licenses on the site.
+ type: number
+ - contextPath: SentinelOne.Site.ID
+ description: ID of the site.
+ type: string
+ - contextPath: SentinelOne.Site.TotalLicenses
+ description: Number of total licenses on the site.
+ type: number
+ - contextPath: SentinelOne.Site.CreatedAt
+ description: Timestamp when the site was created.
+ type: date
+ - contextPath: SentinelOne.Site.Expiration
+ description: Timestamp when the site will expire.
+ type: string
+ - contextPath: SentinelOne.Site.UnlimitedLicenses
+ description: Whether the site has unlimited licenses.
+ type: boolean
+ - arguments:
+ - default: false
+ description: ID of the site.
+ isArray: false
+ name: site_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns a site, by site ID.
+ execution: false
+ name: sentinelone-get-site
+ outputs:
+ - contextPath: SentinelOne.Site.Creator
+ description: The creator name.
+ type: string
+ - contextPath: SentinelOne.Site.Name
+ description: The site name.
+ type: string
+ - contextPath: SentinelOne.Site.Type
+ description: The site type.
+ type: string
+ - contextPath: SentinelOne.Site.AccountName
+ description: The account name.
+ type: string
+ - contextPath: SentinelOne.Site.State
+ description: The site state.
+ type: string
+ - contextPath: SentinelOne.Site.HealthStatus
+ description: The health status of the site.
+ type: boolean
+ - contextPath: SentinelOne.Site.Suite
+ description: The suite to which the site belongs.
+ type: string
+ - contextPath: SentinelOne.Site.ActiveLicenses
+ description: Number of active licenses on the site.
+ type: number
+ - contextPath: SentinelOne.Site.ID
+ description: ID of the site.
+ type: string
+ - contextPath: SentinelOne.Site.TotalLicenses
+ description: Number of total licenses on the site.
+ type: number
+ - contextPath: SentinelOne.Site.CreatedAt
+ description: Timestamp when the site was created.
+ type: date
+ - contextPath: SentinelOne.Site.Expiration
+ description: Timestamp when the site will expire.
+ type: string
+ - contextPath: SentinelOne.Site.UnlimitedLicenses
+    description: Whether the site has unlimited licenses.
+ type: boolean
+ - contextPath: SentinelOne.Site.AccountID
+ description: Account ID.
+ type: string
+ - contextPath: SentinelOne.Site.IsDefault
+ description: Whether the site is the default site.
+ type: boolean
+ - arguments:
+ - default: false
+ description: 'Site ID. Example: "225494730938493804".'
+ isArray: false
+ name: site_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Reactivates an expired site.
+ execution: false
+ name: sentinelone-reactivate-site
+ outputs:
+ - contextPath: SentinelOne.Site.ID
+ description: Site ID.
+ type: string
+ - contextPath: SentinelOne.Site.Reactivated
+ description: Whether the site was reactivated.
+ type: boolean
+ - arguments:
+ - default: false
+ description: 'Site ID of the site to expire, for example: "225494730938493804".'
+ isArray: false
+ name: site_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Expires a site.
+ execution: false
+ name: sentinelone-expire-site
+ outputs:
+ - contextPath: SentinelOne.Site.ID
+ description: ID of the site.
+ type: String
+ - contextPath: SentinelOne.Site.Expired
+    description: Whether the site was expired.
+ type: Boolean
+ - arguments:
+ - default: false
+ description: 'Return activities created after this timestamp, for example: "2018-02-27T04:49:26.257525Z".'
+ isArray: false
+ name: created_after
+ required: false
+ secret: false
+ - default: false
+ description: Email address of the user who invoked the activity (if applicable).
+ isArray: false
+ name: user_emails
+ required: false
+ secret: false
+ - default: false
+ description: 'List of Group IDs by which to filter, for example: "225494730938493804,225494730938493915".'
+ isArray: true
+ name: group_ids
+ required: false
+ secret: false
+ - default: false
+ description: 'Return activities created on or before this timestamp, for example:
+ "2018-02-27T04:49:26.257525Z".'
+ isArray: false
+ name: created_until
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'Include internal activities hidden from display, for example:
+ "False".'
+ isArray: false
+ name: include_hidden
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: 'CSV list of activity IDs by which to filter, for example: "225494730938493804,225494730938493915".'
+ isArray: true
+ name: activities_ids
+ required: false
+ secret: false
+ - default: false
+ description: 'Return activities created before this timestamp, for example:
+ "2018-02-27T04:49:26.257525Z".'
+ isArray: false
+ name: created_before
+ required: false
+ secret: false
+ - default: false
+ description: 'CSV list of threat IDs for which to return activities, for example:
+ "225494730938493804,225494730938493915".'
+ isArray: true
+ name: threats_ids
+ required: false
+ secret: false
+ - default: false
+ description: 'CSV of activity codes to return, for example: "52,53,71,72".'
+ isArray: true
+ name: activity_types
+ required: false
+ secret: false
+ - default: false
+ description: 'CSV list of user IDs for users that invoked the activity (if applicable),
+ for example: "225494730938493804,225494730938493915".'
+ isArray: true
+ name: user_ids
+ required: false
+ secret: false
+ - default: false
+ description: 'Return activities created on or after this timestamp, for example:
+ "2018-02-27T04:49:26.257525Z".'
+ isArray: false
+ name: created_from
+ required: false
+ secret: false
+ - default: false
+ description: 'Return activities created within this range (inclusive), for example:
+ "1514978764288-1514978999999".'
+ isArray: false
+ name: created_between
+ required: false
+ secret: false
+ - default: false
+ description: 'Return activities related to specified agents. Example: "225494730938493804,225494730938493915".'
+ isArray: true
+ name: agent_ids
+ required: false
+ secret: false
+ - default: false
+ description: Maximum number of items to return (1-100).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of activities.
+ execution: false
+ name: sentinelone-get-activities
+ outputs:
+ - contextPath: SentinelOne.Activity.AgentID
+ description: Related agent (if applicable).
+ type: String
+ - contextPath: SentinelOne.Activity.AgentUpdatedVersion
+ description: Agent's new version (if applicable).
+ type: String
+ - contextPath: SentinelOne.Activity.SiteID
+ description: Related site (if applicable).
+ type: String
+ - contextPath: SentinelOne.Activity.UserID
+ description: The user who invoked the activity (if applicable).
+ type: String
+ - contextPath: SentinelOne.Activity.SecondaryDescription
+ description: Secondary description.
+ type: String
+ - contextPath: SentinelOne.Activity.OsFamily
+ description: Agent's OS type (if applicable). Can be "linux", "macos", "windows",
+ or "windows_legacy".
+ type: String
+ - contextPath: SentinelOne.Activity.ActivityType
+ description: Activity type.
+ type: Number
+ - contextPath: SentinelOne.Activity.data.SiteID
+ description: The site ID.
+ type: String
+ - contextPath: SentinelOne.Activity.data.SiteName
+ description: The site name.
+ type: String
+ - contextPath: SentinelOne.Activity.data.username
+ description: The name of the site creator.
+ type: String
+ - contextPath: SentinelOne.Activity.Hash
+ description: Threat file hash (if applicable).
+ type: String
+ - contextPath: SentinelOne.Activity.UpdatedAt
+ description: Activity last updated time (UTC).
+ type: Date
+ - contextPath: SentinelOne.Activity.Comments
+ description: Comments for the activity.
+ type: String
+ - contextPath: SentinelOne.Activity.ThreatID
+ description: Related threat (if applicable).
+ type: String
+ - contextPath: SentinelOne.Activity.PrimaryDescription
+ description: Primary description for the activity.
+ type: String
+ - contextPath: SentinelOne.Activity.GroupID
+ description: Related group (if applicable).
+ type: String
+ - contextPath: SentinelOne.Activity.ID
+ description: Activity ID.
+ type: String
+ - contextPath: SentinelOne.Activity.CreatedAt
+ description: Activity creation time (UTC).
+ type: Date
+ - contextPath: SentinelOne.Activity.Description
+ description: Extra activity information.
+ type: String
+ - arguments:
+ - default: false
+ description: 'Group type, for example: "static".'
+ isArray: false
+ name: group_type
+ required: false
+ secret: false
+ - default: false
+ description: 'CSV list of group IDs by which to filter, for example: "225494730938493804,225494730938493915".'
+ isArray: true
+ name: group_ids
+ required: false
+ secret: false
+ - default: false
+ description: 'Group ID by which to filter, for example: "225494730938493804".'
+ isArray: false
+ name: group_id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether this is the default group.
+ isArray: false
+ name: is_default
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: The name of the group.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+    description: Free-text search on the name field.
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: The rank sets the priority of a dynamic group over others, for
+ example, "1", which is the highest priority.
+ isArray: false
+ name: rank
+ required: false
+ secret: false
+ - default: false
+ description: Maximum number of items to return (1-200).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns data for the specified group.
+ execution: false
+ name: sentinelone-get-groups
+ outputs:
+ - contextPath: SentinelOne.Group.siteId
+ description: The ID of the site of which this group is a member.
+ type: String
+ - contextPath: SentinelOne.Group.filterName
+ description: If the group is dynamic, the name of the filter which is used to
+ associate agents.
+ type: String
+ - contextPath: SentinelOne.Group.creatorId
+ description: The ID of the user that created the group.
+ type: String
+ - contextPath: SentinelOne.Group.name
+ description: The name of the group.
+ type: String
+ - contextPath: SentinelOne.Group.creator
+ description: The user that created the group.
+ type: String
+ - contextPath: SentinelOne.Group.rank
+ description: The rank, which sets the priority of a dynamic group over others.
+ type: Number
+ - contextPath: SentinelOne.Group.updatedAt
+ description: Timestamp of the last update.
+ type: Date
+ - contextPath: SentinelOne.Group.totalAgents
+ description: Number of agents in the group.
+ type: Number
+ - contextPath: SentinelOne.Group.filterId
+ description: If the group is dynamic, the group ID of the filter that is used
+ to associate agents.
+ type: String
+ - contextPath: SentinelOne.Group.isDefault
+    description: Whether the group is the default group of the site.
+ type: Boolean
+ - contextPath: SentinelOne.Group.inherits
+ description: Whether the policy is inherited from a site. "False" if the group
+ has its own edited policy.
+ type: Boolean
+ - contextPath: SentinelOne.Group.type
+    description: Group type. Can be "static" or "dynamic".
+ type: String
+ - contextPath: SentinelOne.Group.id
+ description: The ID of the group.
+ type: String
+ - contextPath: SentinelOne.Group.createdAt
+ description: Timestamp of group creation.
+ type: Date
+ - arguments:
+ - default: false
+ description: The ID of the group to move the agent to.
+ isArray: false
+ name: group_id
+ required: true
+ secret: false
+ - default: false
+    description: A list of agent IDs.
+ isArray: true
+ name: agents_ids
+ required: false
+ secret: false
+ deprecated: false
+ description: Moves agents to a new group.
+ execution: false
+ name: sentinelone-move-agent
+ outputs:
+ - contextPath: SentinelOne.Agent.AgentsMoved
+ description: The number of agents that were moved to another group.
+ type: Number
+ - arguments:
+ - default: false
+ description: The ID of the group to delete.
+ isArray: false
+ name: group_id
+ required: true
+ secret: false
+ deprecated: false
+  description: Deletes a group, specified by the group ID.
+ execution: false
+ name: sentinelone-delete-group
+ - arguments:
+ - default: false
+ description: The ID of the agent from which to retrieve the processes.
+ isArray: false
+ name: agents_ids
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves running processes for a specific agent.
+ execution: false
+ name: sentinelone-agent-processes
+ outputs:
+ - contextPath: SentinelOne.Agent.memoryUsage
+ description: Memory usage (MB).
+ type: Number
+ - contextPath: SentinelOne.Agent.startTime
+ description: The process start time.
+ type: Date
+ - contextPath: SentinelOne.Agent.pid
+ description: The process ID.
+ type: Number
+ - contextPath: SentinelOne.Agent.processName
+ description: The name of the process.
+ type: String
+ - contextPath: SentinelOne.Agent.cpuUsage
+ description: CPU usage (%).
+ type: Number
+ - contextPath: SentinelOne.Agent.executablePath
+ description: Executable path.
+ type: String
+ - arguments:
+ - default: false
+ description: A CSV list of agent IDs to connect to the network. Run the list-agents
+ command to get a list of agent IDs.
+ isArray: false
+ name: agent_id
+ required: true
+ secret: false
+ deprecated: false
+  description: Connects agents to the network.
+ execution: false
+ name: sentinelone-connect-agent
+ outputs:
+ - contextPath: SentinelOne.Agent.AgentsAffected
+ description: The number of affected agents.
+ type: Number
+ - contextPath: SentinelOne.Agent.ID
+ description: The IDs of the affected agents.
+ type: String
+ - arguments:
+ - default: false
+ description: A CSV list of agent IDs to disconnect from the network. Run the
+ list-agents command to get a list of agent IDs.
+ isArray: false
+ name: agent_id
+ required: true
+ secret: false
+ deprecated: false
+  description: Disconnects agents from the network.
+ execution: false
+ name: sentinelone-disconnect-agent
+ outputs:
+ - contextPath: SentinelOne.Agent.NetworkStatus
+ description: Agent network status.
+ type: String
+ - contextPath: SentinelOne.Agent.ID
+ description: The IDs of the affected agents.
+ type: String
+ - arguments:
+ - default: false
+    description: The message to broadcast to agents.
+ isArray: false
+ name: message
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether to only include active agents. Default is "false".
+ isArray: false
+ name: active_agent
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+    description: A list of group IDs by which to filter the results.
+ isArray: false
+ name: group_id
+ required: false
+ secret: false
+ - default: false
+    description: A list of agent IDs by which to filter the results.
+ isArray: false
+ name: agent_id
+ required: false
+ secret: false
+ - default: false
+ description: Included network domains.
+ isArray: false
+ name: domain
+ required: false
+ secret: false
+ deprecated: false
+ description: Broadcasts a message to all agents that match the input filters.
+ execution: false
+ name: sentinelone-broadcast-message
+ - arguments:
+ - default: false
+ defaultValue: '50'
+ description: Maximum number of items to return (1-100). Default is "50".
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: 'QueryId obtained when creating a query in the sentinelone-create-query
+ command. Example: "q1xx2xx3".'
+ isArray: false
+ name: query_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns all Deep Visibility events that match the query.
+ execution: false
+ name: sentinelone-get-events
+ outputs:
+ - contextPath: SentinelOne.Event.ProcessUID
+ description: Process unique identifier.
+ type: String
+ - contextPath: SentinelOne.Event.SHA256
+ description: SHA256 hash of the file.
+ type: String
+ - contextPath: SentinelOne.Event.AgentOS
+    description: OS type. Can be "windows", "linux", "macos", or "windows_legacy".
+ type: String
+ - contextPath: SentinelOne.Event.ProcessID
+ description: The process ID.
+ type: Number
+ - contextPath: SentinelOne.Event.User
+ description: User assigned to the event.
+ type: String
+ - contextPath: SentinelOne.Event.Time
+ description: Process start time.
+ type: Date
+ - contextPath: SentinelOne.Event.Endpoint
+ description: The agent name.
+ type: String
+ - contextPath: SentinelOne.Event.SiteName
+ description: Site name.
+ type: String
+ - contextPath: SentinelOne.Event.EventType
+ description: Event type. Can be "events", "file", "ip", "url", "dns", "process",
+ "registry", "scheduled_task", or "logins".
+ type: String
+ - contextPath: SentinelOne.Event.ProcessName
+ description: The name of the process.
+ type: String
+ - contextPath: SentinelOne.Event.MD5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: Event.ID
+ description: Event process ID.
+ type: String
+ - contextPath: Event.Name
+ description: Event name.
+ type: String
+ - contextPath: Event.Type
+ description: Event type.
+ type: String
+ - arguments:
+ - default: false
+ description: The query string for which to return events.
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ - default: false
+ description: Query start date, for example, "2019-08-03T04:49:26.257525Z".
+ isArray: false
+ name: from_date
+ required: true
+ secret: false
+ - default: false
+ description: Query end date, for example, "2019-08-03T04:49:26.257525Z".
+ isArray: false
+ name: to_date
+ required: true
+ secret: false
+ deprecated: false
+ description: Runs a Deep Visibility Query and returns the queryId. You can use
+ the queryId for all other commands, such as the sentinelone-get-events command.
+ execution: false
+ name: sentinelone-create-query
+ outputs:
+ - contextPath: SentinelOne.Query.FromDate
+ description: Query start date.
+ type: Date
+ - contextPath: SentinelOne.Query.Query
+ description: The search query string.
+ type: String
+ - contextPath: SentinelOne.Query.QueryID
+ description: The query ID.
+ type: String
+ - contextPath: SentinelOne.Query.ToDate
+ description: Query end date.
+ type: Date
+ - arguments:
+ - default: false
+    description: 'The queryId that is returned when creating a query, for example:
+      "q1xx2xx3". Run the sentinelone-create-query command to get the query_id.'
+ isArray: false
+ name: query_id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '50'
+ description: Maximum number of items to return (1-100). Default is "50".
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+  description: Returns a list of Deep Visibility events of event type "process"
+    that match the query.
+ execution: false
+ name: sentinelone-get-processes
+ outputs:
+ - contextPath: SentinelOne.Event.ParentProcessID
+ description: Parent process ID.
+ type: Number
+ - contextPath: SentinelOne.Event.ProcessUID
+ description: The process unique identifier.
+ type: String
+ - contextPath: SentinelOne.Event.SHA1
+ description: SHA1 hash of the process image.
+ type: String
+ - contextPath: SentinelOne.Event.SubsystemType
+ description: Process sub-system.
+ type: String
+ - contextPath: SentinelOne.Event.ParentProcessStartTime
+ description: The parent process start time.
+ type: Date
+ - contextPath: SentinelOne.Event.ProcessID
+ description: The process ID.
+ type: Number
+ - contextPath: SentinelOne.Event.ParentProcessUID
+ description: Parent process unique identifier.
+ type: String
+ - contextPath: SentinelOne.Event.User
+ description: User assigned to the event.
+ type: String
+ - contextPath: SentinelOne.Event.Time
+ description: Start time of the process.
+ type: Date
+ - contextPath: SentinelOne.Event.ParentProcessName
+ description: Parent process name.
+ type: String
+ - contextPath: SentinelOne.Event.SiteName
+ description: Site name.
+ type: String
+ - contextPath: SentinelOne.Event.EventType
+ description: The event type.
+ type: String
+ - contextPath: SentinelOne.Event.Endpoint
+ description: The agent name (endpoint).
+ type: String
+ - contextPath: SentinelOne.Event.IntegrityLevel
+ description: Process integrity level.
+ type: String
+ - contextPath: SentinelOne.Event.CMD
+ description: Process CMD.
+ type: String
+ - contextPath: SentinelOne.Event.ProcessName
+ description: Process name.
+ type: String
+ - contextPath: SentinelOne.Event.ProcessDisplayName
+ description: Process display name.
+ type: String
+ - arguments:
+ - default: false
+    description: 'A free-text search term that matches applicable attributes (substring
+      match). Note: A device''s physical addresses are only matched if they start
+      with the search term (not if they merely contain it).'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+    description: A CSV list of agent IDs to shut down.
+ isArray: false
+ name: agent_id
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the network group.
+ isArray: false
+ name: group_id
+ required: false
+ secret: false
+ deprecated: false
+ description: Sends a shutdown command to all agents that match the input filter.
+ execution: false
+ name: sentinelone-shutdown-agent
+ outputs:
+ - contextPath: SentinelOne.Agent.ID
+    description: The ID of the agent that was shut down.
+ type: String
+ - arguments:
+ - default: false
+    description: 'A free-text search term that matches applicable attributes (substring
+      match). Note: A device''s physical addresses are only matched if they start
+      with the search term (not if they merely contain it).'
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+    description: A CSV list of agent IDs to shut down.
+ isArray: false
+ name: agent_id
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the network group.
+ isArray: false
+ name: group_id
+ required: false
+ secret: false
+ deprecated: false
+ description: Sends an uninstall command to all agents that match the input filter.
+ execution: false
+ name: sentinelone-uninstall-agent
+ dockerimage: demisto/python3:3.7.3.286
+ isfetch: true
+ longRunning: false
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- SentinelOne V2 - test
diff --git a/Integrations/SentinelOne-V2/SentinelOne-V2_description.md b/Integrations/SentinelOne-V2/SentinelOne-V2_description.md
new file mode 100644
index 000000000000..1acc694215d9
--- /dev/null
+++ b/Integrations/SentinelOne-V2/SentinelOne-V2_description.md
@@ -0,0 +1,4 @@
+1. To get your API token, log in to your SentinelOne console.
+2. Click your user name in the upper-right corner, and select *My User*.
+3. Click the *Generate* link, located near the *API Token* field.
+4. Save your API token in a safe place, as you will not be able to view it again.
\ No newline at end of file
diff --git a/Integrations/SentinelOne-V2/SentinelOne-V2_image.png b/Integrations/SentinelOne-V2/SentinelOne-V2_image.png
new file mode 100644
index 000000000000..78d52c443eb6
Binary files /dev/null and b/Integrations/SentinelOne-V2/SentinelOne-V2_image.png differ
diff --git a/Integrations/ServiceNow/CHANGELOG.md b/Integrations/ServiceNow/CHANGELOG.md
new file mode 100644
index 000000000000..caa88ed6e0b4
--- /dev/null
+++ b/Integrations/ServiceNow/CHANGELOG.md
@@ -0,0 +1,8 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+ -
+
+## [19.10.0] - 2019-10-03
+ - Fixed an issue with the **servicenow-upload-file** command when the uploaded file is an info file.
diff --git a/Integrations/ServiceNow/ServiceNow.py b/Integrations/ServiceNow/ServiceNow.py
new file mode 100644
index 000000000000..3879e1cb8660
--- /dev/null
+++ b/Integrations/ServiceNow/ServiceNow.py
@@ -0,0 +1,1545 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import re
+import requests
+import json
+from datetime import datetime
+import shutil
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+if not demisto.params().get('proxy', False):
+    # pop() avoids a KeyError when a proxy variable is not set in the environment
+    for env_var in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
+        os.environ.pop(env_var, None)
+
+
+def get_server_url():
+    url = demisto.params()['url']
+    # Strip any trailing slashes from the configured URL
+    url = re.sub(r'/+$', '', url)
+    return url
+
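+# Illustrative sketch (hypothetical value): a configured URL of
+# 'https://company.service-now.com/' is normalized to
+# 'https://company.service-now.com', so SERVER_URL below resolves to
+# 'https://company.service-now.com/api/now/'.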
+
+''' GLOBAL VARIABLES '''
+
+DEFAULTS = {
+ 'limit': 10,
+ 'offset': 0,
+ 'fetch_limit': 10,
+ 'fetch_time': '10 minutes',
+ 'ticket_type': 'incident'
+}
+
+USERNAME = demisto.params()['credentials']['identifier']
+PASSWORD = demisto.params()['credentials']['password']
+VERIFY_SSL = not demisto.params().get('insecure', False)
+API = '/api/now/'
+VERSION = demisto.params().get('api_version')
+PARAMS_TICKET_TYPE = demisto.params().get('ticket_type', DEFAULTS['ticket_type'])
+FETCH_TIME = demisto.params().get('fetch_time', '').strip()
+SYSPARM_QUERY = demisto.params().get('sysparm_query')
+SYSPARM_LIMIT = demisto.params().get('fetch_limit', DEFAULTS['fetch_limit'])
+TIMESTAMP_FIELD = demisto.params().get('timestamp_field', 'opened_at')
+TICKET_TYPE = demisto.params().get('ticket_type', DEFAULTS['ticket_type'])
+GET_ATTACHMENTS = demisto.params().get('get_attachments', False)
+
+if VERSION:
+ API += VERSION + '/'
+
+SERVER_URL = get_server_url() + API
+
+TICKET_STATES = {
+ 'incident': {
+ '1': '1 - New',
+ '2': '2 - In Progress',
+ '3': '3 - On Hold',
+ '4': '4 - Awaiting Caller',
+ '5': '5 - Awaiting Evidence',
+ '6': '6 - Resolved',
+ '7': '7 - Closed',
+ '8': '8 - Canceled'
+ },
+ 'problem': {
+ '1': '1 - Open',
+ '2': '2 - Known Error',
+ '3': '3 - Pending Change',
+ '4': '4 - Closed/Resolved'
+ },
+ 'change_request': {
+ '-5': '-5 - New',
+ '-4': '-4 - Assess',
+ '-3': '-3 - Authorize',
+ '-2': '-2 - Scheduled',
+ '-1': '-1 - Implement',
+ '0': '0 - Review',
+ '3': '3 - Closed',
+ '4': '4 - Canceled'
+ },
+ 'sc_task': {
+ '-5': '-5 - Pending',
+ '1': '1 - Open',
+ '2': '2 - Work In Progress',
+ '3': '3 - Closed Complete',
+ '4': '4 - Closed Incomplete',
+ '7': '7 - Closed Skipped'
+ },
+ 'sc_request': {
+ '1': '1 - Approved',
+ '3': '3 - Closed',
+ '4': '4 - Rejected'
+ }
+}
+
+TICKET_SEVERITY = {
+ '1': '1 - High',
+ '2': '2 - Medium',
+ '3': '3 - Low'
+}
+
+TICKET_PRIORITY = {
+ '1': '1 - Critical',
+ '2': '2 - High',
+ '3': '3 - Moderate',
+ '4': '4 - Low',
+ '5': '5 - Planning'
+}
+
+COMPUTER_STATUS = {
+ '1': 'In use',
+ '2': 'On order',
+ '3': 'On maintenance',
+ '6': 'In stock/In transit',
+ '7': 'Retired',
+ '100': 'Missing'
+}
+
+# Map SNOW severity to Demisto severity for incident creation
+SEVERITY_MAP = {
+ '1': 3,
+ '2': 2,
+ '3': 1
+}
+
+SNOW_ARGS = ['active', 'activity_due', 'opened_at', 'short_description', 'additional_assignee_list', 'approval_history',
+ 'approval_set', 'assigned_to', 'assignment_group',
+ 'business_duration', 'business_service', 'business_stc', 'calendar_duration', 'calendar_stc', 'caller_id',
+ 'caused_by', 'close_code', 'close_notes',
+ 'closed_at', 'closed_by', 'cmdb_ci', 'comments', 'comments_and_work_notes', 'company', 'contact_type',
+ 'correlation_display', 'correlation_id',
+ 'delivery_plan', 'delivery_task', 'description', 'due_date', 'expected_start', 'follow_up', 'group_list',
+ 'hold_reason', 'impact', 'incident_state',
+ 'knowledge', 'location', 'made_sla', 'notify', 'order', 'parent', 'parent_incident', 'priority',
+ 'problem_id', 'resolved_at', 'resolved_by', 'rfc',
+ 'severity', 'sla_due', 'state', 'subcategory', 'sys_tags', 'time_worked', 'urgency', 'user_input',
+ 'watch_list', 'work_end', 'work_notes', 'work_notes_list',
+ 'work_start', 'impact', 'incident_state', 'title', 'type', 'change_type', 'category', 'state', 'caller']
+
+# Every table in ServiceNow should have those fields
+DEFAULT_RECORD_FIELDS = {
+ 'sys_id': 'ID',
+ 'sys_updated_by': 'UpdatedBy',
+ 'sys_updated_on': 'UpdatedAt',
+ 'sys_created_by': 'CreatedBy',
+ 'sys_created_on': 'CreatedAt'
+}
+
+DEPRECATED_COMMANDS = ['servicenow-get', 'servicenow-incident-get',
+ 'servicenow-create', 'servicenow-incident-create',
+ 'servicenow-update', 'servicenow-query',
+ 'servicenow-incidents-query', 'servicenow-incident-update']
+
+''' HELPER FUNCTIONS '''
+
+
+def send_request(path, method='get', body=None, params=None, headers=None, file=None):
+ body = body if body is not None else {}
+ params = params if params is not None else {}
+
+ url = '{}{}'.format(SERVER_URL, path)
+ if not headers:
+ headers = {
+ 'Accept': 'application/json',
+ 'Content-Type': 'application/json'
+ }
+ if file:
+ # Not supported in v2
+ url = url.replace('v2', 'v1')
+ try:
+ file_entry = file['id']
+ file_name = file['name']
+ shutil.copy(demisto.getFilePath(file_entry)['path'], file_name)
+ with open(file_name, 'rb') as f:
+ files = {'file': f}
+ res = requests.request(method, url, headers=headers, params=params, data=body, files=files,
+ auth=(USERNAME, PASSWORD), verify=VERIFY_SSL)
+ shutil.rmtree(demisto.getFilePath(file_entry)['name'], ignore_errors=True)
+ except Exception as e:
+ raise Exception('Failed to upload file - ' + str(e))
+ else:
+ res = requests.request(method, url, headers=headers, data=json.dumps(body) if body else {}, params=params,
+ auth=(USERNAME, PASSWORD), verify=VERIFY_SSL)
+
+ try:
+ obj = res.json()
+ except Exception as e:
+ if not res.content:
+ return ''
+ raise Exception('Error parsing reply - {} - {}'.format(res.content, str(e)))
+
+ if 'error' in obj:
+ message = obj.get('error', {}).get('message')
+ details = obj.get('error', {}).get('detail')
+ if message == 'No Record found':
+ return {
+ # Return an empty results array
+ 'result': []
+ }
+ raise Exception('ServiceNow Error: {}, details: {}'.format(message, details))
+
+ if res.status_code < 200 or res.status_code >= 300:
+ raise Exception('Got status code {} with url {} with body {} with headers {}'
+ .format(str(res.status_code), url, str(res.content), str(res.headers)))
+
+ return obj
+
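+# Usage sketch (illustrative arguments): every API call in this integration goes
+# through send_request, e.g.
+#   send_request('table/incident', 'get', params={'sysparm_limit': 1})
+# returns the parsed JSON body, or raises if ServiceNow reports an error.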
+
+def get_table_name(ticket_type=None):
+ if ticket_type:
+ return ticket_type
+ else:
+ if PARAMS_TICKET_TYPE:
+ return PARAMS_TICKET_TYPE
+ else:
+ return 'incident'
+
+
+def create_ticket_context(data, ticket_type):
+ context = {
+ 'ID': data.get('sys_id'),
+ 'Summary': data.get('short_description'),
+ 'Number': data.get('number'),
+ 'CreatedOn': data.get('sys_created_on'),
+ 'Active': data.get('active'),
+ 'AdditionalComments': data.get('comments'),
+ 'CloseCode': data.get('close_code'),
+ 'OpenedAt': data.get('opened_at')
+ }
+
+ # These fields refer to records in the database, the value is their system ID.
+ if 'closed_by' in data:
+ context['ResolvedBy'] = data['closed_by']['value'] if 'value' in data['closed_by'] else ''
+ if 'opened_by' in data:
+ context['OpenedBy'] = data['opened_by']['value'] if 'value' in data['opened_by'] else ''
+ context['Creator'] = data['opened_by']['value'] if 'value' in data['opened_by'] else ''
+ if 'assigned_to' in data:
+ context['Assignee'] = data['assigned_to']['value'] if 'value' in data['assigned_to'] else ''
+
+ # Try to map fields
+ if 'priority' in data:
+ # Backward compatibility
+ if demisto.command() in DEPRECATED_COMMANDS:
+ context['Priority'] = data['priority']
+ else:
+ context['Priority'] = TICKET_PRIORITY.get(data['priority'], data['priority'])
+ if 'state' in data:
+ mapped_state = data['state']
+ # Backward compatibility
+ if demisto.command() not in DEPRECATED_COMMANDS:
+ if ticket_type in TICKET_STATES:
+ mapped_state = TICKET_STATES[ticket_type].get(data['state'], mapped_state)
+ context['State'] = mapped_state
+
+ return createContext(context, removeNull=True)
+
+
+def get_ticket_context(data, ticket_type):
+ if not isinstance(data, list):
+ return create_ticket_context(data, ticket_type)
+
+ tickets = []
+ for d in data:
+ tickets.append(create_ticket_context(d, ticket_type))
+ return tickets
+
+
+def get_ticket_human_readable(tickets, ticket_type):
+ if not isinstance(tickets, list):
+ tickets = [tickets]
+
+ result = []
+ for ticket in tickets:
+
+ hr = {
+ 'Number': ticket.get('number'),
+ 'System ID': ticket['sys_id'],
+ 'Created On': ticket.get('sys_created_on'),
+ 'Created By': ticket.get('sys_created_by'),
+ 'Active': ticket.get('active'),
+ 'Close Notes': ticket.get('close_notes'),
+ 'Close Code': ticket.get('close_code'),
+ 'Description': ticket.get('description'),
+ 'Opened At': ticket.get('opened_at'),
+ 'Due Date': ticket.get('due_date'),
+ # This field refers to a record in the database, the value is its system ID.
+ 'Resolved By': ticket.get('closed_by', {}).get('value') if isinstance(ticket.get('closed_by'), dict)
+ else ticket.get('closed_by'),
+ 'Resolved At': ticket.get('resolved_at'),
+ 'SLA Due': ticket.get('sla_due'),
+ 'Short Description': ticket.get('short_description'),
+ 'Additional Comments': ticket.get('comments')
+ }
+
+ # Try to map the fields
+ if 'impact' in ticket:
+ hr['Impact'] = TICKET_SEVERITY.get(ticket['impact'], ticket['impact'])
+ if 'urgency' in ticket:
+ hr['Urgency'] = TICKET_SEVERITY.get(ticket['urgency'], ticket['urgency'])
+ if 'severity' in ticket:
+ hr['Severity'] = TICKET_SEVERITY.get(ticket['severity'], ticket['severity'])
+ if 'priority' in ticket:
+ hr['Priority'] = TICKET_PRIORITY.get(ticket['priority'], ticket['priority'])
+ if 'state' in ticket:
+ mapped_state = ticket['state']
+ if ticket_type in TICKET_STATES:
+ mapped_state = TICKET_STATES[ticket_type].get(ticket['state'], mapped_state)
+ hr['State'] = mapped_state
+ result.append(hr)
+ return result
+
+
+def get_ticket_fields(template, ticket_type):
+ # Inverse the keys and values of those dictionaries to map the arguments to their corresponding values in ServiceNow
+ args = unicode_to_str_recur(demisto.args())
+ inv_severity = {v: k for k, v in TICKET_SEVERITY.iteritems()}
+ inv_priority = {v: k for k, v in TICKET_PRIORITY.iteritems()}
+ states = TICKET_STATES.get(ticket_type)
+ inv_states = {v: k for k, v in states.iteritems()} if states else {}
+
+ body = {}
+ for arg in SNOW_ARGS:
+ input_arg = args.get(arg)
+ if input_arg:
+ if arg in ['impact', 'urgency', 'severity']:
+ body[arg] = inv_severity.get(input_arg, input_arg)
+ elif arg == 'priority':
+ body[arg] = inv_priority.get(input_arg, input_arg)
+ elif arg == 'state':
+ body[arg] = inv_states.get(input_arg, input_arg)
+ else:
+ body[arg] = input_arg
+ elif template and arg in template:
+ body[arg] = template[arg]
+
+ return body
+
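+# Example (illustrative argument values): with ticket_type 'incident' and a command
+# argument of severity='2 - Medium', get_ticket_fields maps the display value back
+# to {'severity': '2'} via the inverted TICKET_SEVERITY table.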
+
+def get_body(fields, custom_fields):
+ body = {}
+
+ if fields:
+ for field in fields:
+ body[field] = fields[field]
+
+ if custom_fields:
+ for field in custom_fields:
+ # custom fields begin with "u_"
+ if field.startswith('u_'):
+ body[field] = custom_fields[field]
+ else:
+ body['u_' + field] = custom_fields[field]
+
+ return body
+
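+# Example (illustrative values): get_body({'impact': '2'}, {'environment': 'test'})
+# returns {'impact': '2', 'u_environment': 'test'} -- custom fields are prefixed
+# with "u_" unless the caller already included the prefix.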
+
+def split_fields(fields):
+ dic_fields = {}
+
+ if fields:
+ # As received by the command
+ arr_fields = fields.split(';')
+
+ for f in arr_fields:
+ field = f.split('=')
+ if len(field) > 1:
+ dic_fields[field[0]] = field[1]
+
+ return dic_fields
+
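+# Example (illustrative input): split_fields('impact=2;urgency=2') returns
+# {'impact': '2', 'urgency': '2'}; items without '=' are silently dropped.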
+
+# Converts unicode elements of obj (incl. dictionary and list) to string recursively
+def unicode_to_str_recur(obj):
+ if isinstance(obj, dict):
+ obj = {unicode_to_str_recur(k): unicode_to_str_recur(v) for k, v in obj.iteritems()}
+ elif isinstance(obj, list):
+ obj = map(unicode_to_str_recur, obj)
+ elif isinstance(obj, unicode):
+ obj = obj.encode('utf-8')
+ return obj
+
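+# Example (illustrative): unicode_to_str_recur({u'key': [u'value']}) returns
+# {'key': ['value']}, with every unicode string UTF-8 encoded to str.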
+
+# Converts an object to a str
+def convert_to_str(obj):
+ if isinstance(obj, unicode):
+ return obj.encode('utf-8')
+ try:
+ return str(obj)
+ except ValueError:
+ return obj
+
+
+''' FUNCTIONS '''
+
+
+def get_template(name):
+ query_params = {'sysparm_limit': 1, 'sysparm_query': 'name=' + name}
+
+ ticket_type = 'sys_template'
+ path = 'table/' + ticket_type
+    res = send_request(path, 'get', params=query_params)
+
+ if len(res['result']) == 0:
+ raise ValueError("Incorrect template name")
+
+ template = res['result'][0]['template'].split('^')
+ dic_template = {}
+
+ for i in range(len(template) - 1):
+ template_value = template[i].split('=')
+ if len(template_value) > 1:
+ dic_template[template_value[0]] = template_value[1]
+
+ return dic_template
+
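+# Parsing sketch (the exact template format is an assumption based on ServiceNow
+# sys_template records): a stored template of 'impact=2^urgency=2^EOF' is parsed
+# into {'impact': '2', 'urgency': '2'}.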
+
+def get_ticket_command():
+ args = unicode_to_str_recur(demisto.args())
+ ticket_type = get_table_name(args.get('ticket_type'))
+ ticket_id = args.get('id')
+ number = args.get('number')
+ get_attachments = args.get('get_attachments', 'false')
+
+ res = get(ticket_type, ticket_id, number)
+ if not res or 'result' not in res:
+ return 'Cannot find ticket'
+
+ if isinstance(res['result'], list):
+ if len(res['result']) == 0:
+ return 'Cannot find ticket'
+ ticket = res['result'][0]
+ else:
+ ticket = res['result']
+
+ entries = [] # type: List[Dict]
+
+ if get_attachments.lower() != 'false':
+ entries = get_ticket_attachment_entries(ticket['sys_id'])
+
+ hr = get_ticket_human_readable(ticket, ticket_type)
+ context = get_ticket_context(ticket, ticket_type)
+
+ headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Created On', 'Created By',
+ 'Active', 'Close Notes', 'Close Code',
+ 'Description', 'Opened At', 'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description',
+ 'Additional Comments']
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow ticket', hr, headers=headers, removeNull=True),
+ 'EntryContext': {
+ 'Ticket(val.ID===obj.ID)': context,
+ 'ServiceNow.Ticket(val.ID===obj.ID)': context
+ }
+ }
+
+ entries.append(entry)
+
+ return entries
+
+
+def get_record_command():
+ args = unicode_to_str_recur(demisto.args())
+ table_name = args['table_name']
+ record_id = args['id']
+ fields = args.get('fields')
+
+ res = get(table_name, record_id)
+
+ if not res or 'result' not in res:
+ return 'Cannot find record'
+
+ if isinstance(res['result'], list):
+ if len(res['result']) == 0:
+ return 'Cannot find record'
+ record = res['result'][0]
+ else:
+ record = res['result']
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json']
+ }
+
+ if fields:
+ fields = argToList(fields)
+ if 'sys_id' not in fields:
+ # ID is added by default
+ fields.append('sys_id')
+ # filter the record for the required fields
+ record = dict(filter(lambda kv_pair: kv_pair[0] in fields, record.items()))
+ for k, v in record.iteritems():
+ if isinstance(v, dict):
+ # For objects that refer to a record in the database, take their value(system ID).
+ record[k] = v.get('value', record[k])
+ record['ID'] = record.pop('sys_id')
+ entry['ReadableContentsFormat'] = formats['markdown']
+ entry['HumanReadable'] = tableToMarkdown('ServiceNow record', record, removeNull=True)
+ entry['EntryContext'] = {
+ 'ServiceNow.Record(val.ID===obj.ID)': createContext(record)
+ }
+ else:
+ mapped_record = {DEFAULT_RECORD_FIELDS[k]: record[k] for k in DEFAULT_RECORD_FIELDS if k in record}
+ entry['ReadableContentsFormat'] = formats['markdown']
+ entry['HumanReadable'] = tableToMarkdown('ServiceNow record' + record_id, mapped_record, removeNull=True)
+ entry['EntryContext'] = {
+ 'ServiceNow.Record(val.ID===obj.ID)': createContext(mapped_record)
+ }
+
+ return entry
+
+
+def get(table_name, record_id, number=None):
+ path = None
+ query_params = {} # type: Dict
+ if record_id:
+ path = 'table/' + table_name + '/' + record_id
+ elif number:
+ path = 'table/' + table_name
+ query_params = {
+ 'number': number
+ }
+ else:
+ # Only in cases where the table is of type ticket
+ raise ValueError('servicenow-get-ticket requires either ticket ID (sys_id) or ticket number')
+
+ return send_request(path, 'get', params=query_params)
+
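+# Usage sketch (hypothetical ticket number): get('incident', None, number='INC0000001')
+# issues GET table/incident?number=INC0000001, while passing a record_id addresses
+# the record directly via table/incident/<sys_id>.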
+
+def get_ticket_attachments(ticket_id):
+ path = 'attachment'
+ query_params = {
+ 'sysparm_query': 'table_sys_id=' + ticket_id
+ }
+
+ return send_request(path, 'get', params=query_params)
+
+
+def get_ticket_attachment_entries(ticket_id):
+ entries = []
+ links = [] # type: List[Tuple[str, str]]
+ attachments_res = get_ticket_attachments(ticket_id)
+ if 'result' in attachments_res and len(attachments_res['result']) > 0:
+ attachments = attachments_res['result']
+ links = [(attachment['download_link'], attachment['file_name']) for attachment in attachments]
+
+ for link in links:
+ file_res = requests.get(link[0], auth=(USERNAME, PASSWORD), verify=VERIFY_SSL)
+ if file_res is not None:
+ entries.append(fileResult(link[1], file_res.content))
+
+ return entries
+
+
+def update_ticket_command():
+ args = unicode_to_str_recur(demisto.args())
+ custom_fields = split_fields(args.get('custom_fields'))
+ template = args.get('template')
+ ticket_type = get_table_name(args.get('ticket_type'))
+ ticket_id = args['id']
+
+ if template:
+ template = get_template(template)
+ fields = get_ticket_fields(template, ticket_type)
+
+ res = update(ticket_type, ticket_id, fields, custom_fields)
+
+ if not res or 'result' not in res:
+ return_error('Unable to retrieve response')
+
+ hr = get_ticket_human_readable(res['result'], ticket_type)
+ context = get_ticket_context(res['result'], ticket_type)
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow ticket updated successfully\nTicket type: ' + ticket_type,
+ hr, removeNull=True),
+ 'EntryContext': {
+ 'ServiceNow.Ticket(val.ID===obj.ID)': context
+ }
+ }
+
+ return entry
+
+
+def update_record_command():
+ args = unicode_to_str_recur(demisto.args())
+ table_name = args['table_name']
+ record_id = args['id']
+ fields = args.get('fields', {})
+ custom_fields = args.get('custom_fields')
+
+ if fields:
+ fields = split_fields(fields)
+ if custom_fields:
+ custom_fields = split_fields(custom_fields)
+
+ res = update(table_name, record_id, fields, custom_fields)
+
+ if not res or 'result' not in res:
+ return 'Could not retrieve record'
+
+ result = res['result']
+
+ mapped_record = {DEFAULT_RECORD_FIELDS[k]: result[k] for k in DEFAULT_RECORD_FIELDS if k in result}
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow record updated successfully', mapped_record, removeNull=True),
+ 'EntryContext': {
+ 'ServiceNow.Record(val.ID===obj.ID)': createContext(mapped_record)
+ }
+ }
+
+ return entry
+
+
+def update(table_name, record_id, fields, custom_fields):
+ body = get_body(fields, custom_fields)
+ path = 'table/' + table_name + '/' + record_id
+
+ return send_request(path, 'patch', body=body)
+
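+# Usage sketch (hypothetical sys_id): update('incident', '<sys_id>', {'impact': '2'}, {})
+# sends PATCH table/incident/<sys_id> with the JSON body {"impact": "2"}.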
+
+def create_ticket_command():
+ args = unicode_to_str_recur(demisto.args())
+ custom_fields = split_fields(args.get('custom_fields'))
+ template = args.get('template')
+ ticket_type = get_table_name(args.get('ticket_type'))
+
+ if template:
+ template = get_template(template)
+ fields = get_ticket_fields(template, ticket_type)
+
+ res = create(ticket_type, fields, custom_fields)
+
+ if not res or 'result' not in res:
+ return_error('Unable to retrieve response')
+
+ hr = get_ticket_human_readable(res['result'], ticket_type)
+ context = get_ticket_context(res['result'], ticket_type)
+
+ headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Created On', 'Created By',
+ 'Active', 'Close Notes', 'Close Code',
+ 'Description', 'Opened At', 'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description',
+ 'Additional Comments']
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow ticket created successfully', hr,
+ headers=headers, removeNull=True),
+ 'EntryContext': {
+ 'Ticket(val.ID===obj.ID)': context,
+ 'ServiceNow.Ticket(val.ID===obj.ID)': context
+ }
+ }
+
+ return entry
+
+
+def create_record_command():
+ args = unicode_to_str_recur(demisto.args())
+ table_name = args['table_name']
+ fields = args.get('fields')
+ custom_fields = args.get('custom_fields')
+
+ if fields:
+ fields = split_fields(fields)
+ if custom_fields:
+ custom_fields = split_fields(custom_fields)
+
+ res = create(table_name, fields, custom_fields)
+
+ if not res or 'result' not in res:
+ return 'Could not retrieve record'
+
+ result = res['result']
+
+ mapped_record = {DEFAULT_RECORD_FIELDS[k]: result[k] for k in DEFAULT_RECORD_FIELDS if k in result}
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow record created successfully', mapped_record, removeNull=True),
+ 'EntryContext': {
+ 'ServiceNow.Record(val.ID===obj.ID)': createContext(mapped_record)
+ }
+ }
+
+ return entry
+
+
+def create(table_name, fields, custom_fields):
+ body = get_body(fields, custom_fields)
+ path = 'table/' + table_name
+
+ return send_request(path, 'post', body=body)
+
+
+def delete_ticket_command():
+ args = unicode_to_str_recur(demisto.args())
+ ticket_id = args['id']
+ ticket_type = get_table_name(args.get('ticket_type'))
+
+ res = delete(ticket_type, ticket_id)
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Ticket with ID ' + ticket_id + ' was successfully deleted.'
+ }
+
+ return entry
+
+
+def delete_record_command():
+ args = unicode_to_str_recur(demisto.args())
+ record_id = args['id']
+ table_name = args.get('table_name')
+
+ res = delete(table_name, record_id)
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Record with ID ' + record_id + ' was successfully deleted.'
+ }
+
+ return entry
+
+
+def delete(table_name, record_id):
+ path = 'table/' + table_name + '/' + record_id
+
+ return send_request(path, 'delete')
+
+
+def add_link_command():
+ args = unicode_to_str_recur(demisto.args())
+ ticket_id = args['id']
+ key = 'comments' if args.get('post-as-comment', 'false').lower() == 'true' else 'work_notes'
+ text = args.get('text', args['link'])
+    link = '[code]<a class="web" target="_blank" href="' + args['link'] + '" >' + text + '</a>[/code]'
+ ticket_type = get_table_name(args.get('ticket_type'))
+
+ res = add_link(ticket_id, ticket_type, key, link)
+
+ if not res or 'result' not in res:
+ return_error('Unable to retrieve response')
+
+ headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Created On', 'Created By',
+ 'Active', 'Close Notes', 'Close Code',
+ 'Description', 'Opened At', 'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description',
+ 'Additional Comments']
+
+ hr = get_ticket_human_readable(res['result'], ticket_type)
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Link successfully added to ServiceNow ticket', hr,
+ headers=headers, removeNull=True)
+ }
+
+ return entry
+
+
+def add_link(ticket_id, ticket_type, key, link):
+ body = {}
+ body[key] = link
+ path = 'table/' + ticket_type + '/' + ticket_id
+
+ return send_request(path, 'patch', body=body)
+
+
+def add_comment_command():
+ args = unicode_to_str_recur(demisto.args())
+ ticket_id = args['id']
+ key = 'comments' if args.get('post-as-comment', 'false').lower() == 'true' else 'work_notes'
+ text = args['comment']
+ ticket_type = get_table_name(args.get('ticket_type'))
+
+ res = add_comment(ticket_id, ticket_type, key, text)
+
+ if not res or 'result' not in res:
+ return_error('Unable to retrieve response')
+
+ headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Created On', 'Created By',
+ 'Active', 'Close Notes', 'Close Code',
+ 'Description', 'Opened At', 'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description',
+ 'Additional Comments']
+
+ hr = get_ticket_human_readable(res['result'], ticket_type)
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Comment successfully added to ServiceNow ticket', hr,
+ headers=headers, removeNull=True)
+ }
+
+ return entry
+
+
+def add_comment(ticket_id, ticket_type, key, text):
+ body = {}
+ body[key] = text
+ path = 'table/' + ticket_type + '/' + ticket_id
+
+ return send_request(path, 'patch', body=body)
+
+
+def get_ticket_notes_command():
+ args = unicode_to_str_recur(demisto.args())
+ ticket_id = args['id']
+ limit = args.get('limit')
+ offset = args.get('offset')
+
+ comments_query = 'element_id=' + ticket_id + '^element=comments^ORelement=work_notes'
+
+ res = query('sys_journal_field', limit, offset, comments_query)
+
+ if not res or 'result' not in res:
+ return 'No results found'
+
+ headers = ['Value', 'CreatedOn', 'CreatedBy', 'Type']
+
+ mapped_notes = [{
+ 'Value': n.get('value'),
+ 'CreatedOn': n.get('sys_created_on'),
+ 'CreatedBy': n.get('sys_created_by'),
+ 'Type': 'Work Note' if n.get('element', '') == 'work_notes' else 'Comment'
+ } for n in res['result']]
+
+ if not mapped_notes:
+ return 'No results found'
+
+ ticket = {
+ 'ID': ticket_id,
+ 'Note': mapped_notes
+ }
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow notes for ticket ' + ticket_id, mapped_notes, headers=headers,
+ headerTransform=pascalToSpace, removeNull=True),
+ 'EntryContext': {
+ 'ServiceNow.Ticket(val.ID===obj.ID)': createContext(ticket, removeNull=True)
+ }
+ }
+
+ return entry
+
+
+def query_tickets_command():
+ args = unicode_to_str_recur(demisto.args())
+ sysparm_limit = args.get('limit', DEFAULTS['limit'])
+ sysparm_query = args.get('query')
+ sysparm_offset = args.get('offset', DEFAULTS['offset'])
+
+ if not sysparm_query:
+ # backward compatibility
+ sysparm_query = args.get('sysparm_query')
+ ticket_type = get_table_name(args.get('ticket_type'))
+
+ res = query(ticket_type, sysparm_limit, sysparm_offset, sysparm_query)
+
+ if not res or 'result' not in res or len(res['result']) == 0:
+ return 'No results found'
+
+ hr = get_ticket_human_readable(res['result'], ticket_type)
+ context = get_ticket_context(res['result'], ticket_type)
+
+ headers = ['System ID', 'Number', 'Impact', 'Urgency', 'Severity', 'Priority', 'State', 'Created On', 'Created By',
+ 'Active', 'Close Notes', 'Close Code',
+ 'Description', 'Opened At', 'Due Date', 'Resolved By', 'Resolved At', 'SLA Due', 'Short Description',
+ 'Additional Comments']
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow tickets', hr, headers=headers, removeNull=True),
+ 'EntryContext': {
+ 'Ticket(val.ID===obj.ID)': context,
+ 'ServiceNow.Ticket(val.ID===obj.ID)': context
+ }
+ }
+
+ return entry
+
+
+def query_table_command():
+ args = unicode_to_str_recur(demisto.args())
+ table_name = args['table_name']
+ sysparm_limit = args.get('limit', DEFAULTS['limit'])
+ sysparm_query = args.get('query')
+ sysparm_offset = args.get('offset', DEFAULTS['offset'])
+ fields = args.get('fields')
+
+ res = query(table_name, sysparm_limit, sysparm_offset, sysparm_query)
+
+ if not res or 'result' not in res or len(res['result']) == 0:
+ return 'No results found'
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json']
+ }
+
+ result = res['result']
+
+ if fields:
+ fields = argToList(fields)
+ if 'sys_id' not in fields:
+ # ID is added by default
+ fields.append('sys_id')
+ # Filter the records according to the given fields
+ records = [dict(filter(lambda kv_pair: kv_pair[0] in fields, r.iteritems())) for r in res['result']]
+ for r in records:
+ r['ID'] = r.pop('sys_id')
+ for k, v in r.iteritems():
+ if isinstance(v, dict):
+ # For objects that refer to a record in the database, take their value (system ID).
+ r[k] = v.get('value', v)
+ entry['ReadableContentsFormat'] = formats['markdown']
+ entry['HumanReadable'] = tableToMarkdown('ServiceNow records', records, removeNull=True)
+ entry['EntryContext'] = {
+ 'ServiceNow.Record(val.ID===obj.ID)': createContext(records)
+ }
+ else:
+ mapped_records = [{DEFAULT_RECORD_FIELDS[k]: r[k] for k in DEFAULT_RECORD_FIELDS if k in r} for r in result]
+ entry['ReadableContentsFormat'] = formats['markdown']
+ entry['HumanReadable'] = tableToMarkdown('ServiceNow records', mapped_records, removeNull=True)
+ entry['EntryContext'] = {
+ 'ServiceNow.Record(val.ID===obj.ID)': createContext(mapped_records)
+ }
+
+ return entry
+
+
+def query(table_name, sysparm_limit, sysparm_offset, sysparm_query):
+ query_params = {}
+ query_params['sysparm_limit'] = sysparm_limit
+ query_params['sysparm_offset'] = sysparm_offset
+ if sysparm_query:
+ query_params['sysparm_query'] = sysparm_query
+
+ path = 'table/' + table_name
+
+ return send_request(path, 'get', params=query_params)
+
+
+def upload_file_command():
+ args = unicode_to_str_recur(demisto.args())
+ ticket_type = get_table_name(args.get('ticket_type'))
+ ticket_id = args['id']
+ file_id = args['file_id']
+ file_name = args.get('file_name', demisto.dt(demisto.context(), "File(val.EntryID=='" + file_id + "').Name"))
+
+ # in case of info file
+ if not file_name:
+ file_name = demisto.dt(demisto.context(), "InfoFile(val.EntryID=='" + file_id + "').Name")
+
+ if not file_name:
+ return_error('Could not find the file')
+
+ file_name = file_name[0] if isinstance(file_name, list) else file_name
+
+ res = upload_file(ticket_id, file_id, file_name, ticket_type)
+
+ if not res or 'result' not in res or not res['result']:
+ return_error('Unable to retrieve response')
+
+ hr = {
+ 'Filename': res['result'].get('file_name'),
+ 'Download link': res['result'].get('download_link'),
+ 'System ID': res['result'].get('sys_id')
+ }
+
+ context = {
+ 'ID': ticket_id,
+ 'File': {}
+ }
+ context['File']['Filename'] = res['result'].get('file_name')
+ context['File']['Link'] = res['result'].get('download_link')
+ context['File']['SystemID'] = res['result'].get('sys_id')
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('File uploaded successfully', hr),
+ 'EntryContext': {
+ 'ServiceNow.Ticket(val.ID===obj.ID)': context,
+ 'Ticket(val.ID===obj.ID)': context
+ }
+ }
+
+ return entry
+
+
+def upload_file(ticket_id, file_id, file_name, ticket_type):
+ headers = {
+ 'Accept': 'application/json'
+ }
+
+ body = {
+ 'table_name': ticket_type,
+ 'table_sys_id': ticket_id,
+ 'file_name': file_name
+ }
+
+ path = 'attachment/upload'
+
+ return send_request(path, 'post', headers=headers, body=body, file={'id': file_id, 'name': file_name})
+
+
+# Deprecated
+def get_computer_command():
+ args = unicode_to_str_recur(demisto.args())
+ table_name = 'cmdb_ci_computer'
+ computer_name = args['computerName']
+
+ res = query(table_name, None, 0, 'u_code=' + computer_name)
+
+ if not res or 'result' not in res:
+ return 'Cannot find computer'
+ elif isinstance(res['result'], list):
+ if len(res['result']) == 0:
+ return 'Cannot find computer'
+ computer = res['result'][0]
+ else:
+ computer = res['result']
+
+ if computer['u_code'] != computer_name:
+ return 'Computer not found'
+
+ hr = {
+ 'ID': computer['sys_id'],
+ 'u_code (computer name)': computer['u_code'],
+ 'Support group': computer['support_group'],
+ 'Operating System': computer['os'],
+ 'Comments': computer['comments']
+ }
+
+ ec = createContext(computer, removeNull=True)
+ if 'support_group' in computer:
+ ec['support_group'] = computer['support_group']['value'] if 'value' in computer['support_group'] else ''
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': computer,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow Computer', hr),
+ 'EntryContext': {
+ 'ServiceNowComputer(val.sys_id==obj.sys_id)': ec,
+ }
+ }
+
+ return entry
+
+
+def query_computers_command():
+ args = unicode_to_str_recur(demisto.args())
+ table_name = 'cmdb_ci_computer'
+ computer_id = args.get('computer_id')
+ computer_name = args.get('computer_name')
+ asset_tag = args.get('asset_tag')
+ computer_query = args.get('query', {})
+ offset = args.get('offset', DEFAULTS['offset'])
+ limit = args.get('limit', DEFAULTS['limit'])
+
+ if computer_id:
+ res = get(table_name, computer_id)
+ else:
+ if computer_name:
+ computer_query = 'name=' + computer_name
+ elif asset_tag:
+ computer_query = 'asset_tag=' + asset_tag
+
+ res = query(table_name, limit, offset, computer_query)
+
+ if not res or 'result' not in res:
+ return 'No computers found'
+
+ computers = res['result']
+ if not isinstance(computers, list):
+ computers = [computers]
+
+ if len(computers) == 0:
+ return 'No computers found'
+
+ headers = ['ID', 'AssetTag', 'Name', 'DisplayName', 'SupportGroup', 'OperatingSystem', 'Company', 'AssignedTo',
+ 'State', 'Cost', 'Comments']
+
+ mapped_computers = [{
+ 'ID': computer.get('sys_id'),
+ 'AssetTag': computer.get('asset_tag'),
+ 'Name': computer.get('name'),
+ 'DisplayName': '{} - {}'.format(computer.get('asset_tag', ''), computer.get('name', '')),
+ 'SupportGroup': computer.get('support_group'),
+ 'OperatingSystem': computer.get('os'),
+ 'Company': computer.get('company', {}).get('value')
+ if isinstance(computer.get('company'), dict) else computer.get('company'),
+ 'AssignedTo': computer.get('assigned_to', {}).get('value')
+ if isinstance(computer.get('assigned_to'), dict) else computer.get('assigned_to'),
+ 'State': COMPUTER_STATUS.get(computer.get('install_status', ''), computer.get('install_status')),
+ 'Cost': '{} {}'.format(computer.get('cost', ''), computer.get('cost_cc', '')).rstrip(),
+ 'Comments': computer.get('comments')
+ } for computer in computers]
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow Computers', mapped_computers, headers=headers,
+ removeNull=True, headerTransform=pascalToSpace),
+ 'EntryContext': {
+ 'ServiceNow.Computer(val.ID===obj.ID)': createContext(mapped_computers, removeNull=True),
+ }
+ }
+
+ return entry
+
+
+def query_groups_command():
+ args = unicode_to_str_recur(demisto.args())
+ table_name = 'sys_user_group'
+ group_id = args.get('group_id')
+ group_name = args.get('group_name')
+ group_query = args.get('query', {})
+ offset = args.get('offset', DEFAULTS['offset'])
+ limit = args.get('limit', DEFAULTS['limit'])
+
+ if group_id:
+ res = get(table_name, group_id)
+ else:
+ if group_name:
+ group_query = 'name=' + group_name
+ res = query(table_name, limit, offset, group_query)
+
+ if not res or 'result' not in res:
+ return 'No groups found'
+
+ groups = res['result']
+ if not isinstance(groups, list):
+ groups = [groups]
+
+ if len(groups) == 0:
+ return 'No groups found'
+
+ headers = ['ID', 'Description', 'Name', 'Active', 'Manager', 'Updated']
+
+ mapped_groups = [{
+ 'ID': group.get('sys_id'),
+ 'Description': group.get('description'),
+ 'Name': group.get('name'),
+ 'Active': group.get('active'),
+ 'Manager': group.get('manager', {}).get('value')
+ if isinstance(group.get('manager'), dict) else group.get('manager'),
+ 'Updated': group.get('sys_updated_on'),
+ } for group in groups]
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow Groups', mapped_groups, headers=headers,
+ removeNull=True, headerTransform=pascalToSpace),
+ 'EntryContext': {
+ 'ServiceNow.Group(val.ID===obj.ID)': createContext(mapped_groups, removeNull=True),
+ }
+ }
+
+ return entry
+
+
+def query_users_command():
+ args = unicode_to_str_recur(demisto.args())
+ table_name = 'sys_user'
+ user_id = args.get('user_id')
+ user_name = args.get('user_name')
+ user_query = args.get('query', {})
+ offset = args.get('offset', DEFAULTS['offset'])
+ limit = args.get('limit', DEFAULTS['limit'])
+
+ if user_id:
+ res = get(table_name, user_id)
+ else:
+ if user_name:
+ user_query = 'user_name=' + user_name
+ res = query(table_name, limit, offset, user_query)
+
+ if not res or 'result' not in res:
+ return 'No users found'
+ res = unicode_to_str_recur(res)
+
+ users = res['result']
+ if not isinstance(users, list):
+ users = [users]
+
+ if len(users) == 0:
+ return 'No users found'
+
+ headers = ['ID', 'Name', 'UserName', 'Email', 'Created', 'Updated']
+
+ mapped_users = [{
+ 'ID': user.get('sys_id'),
+ 'Name': '{} {}'.format(user.get('first_name', ''), user.get('last_name', '')).rstrip(),
+ 'UserName': user.get('user_name'),
+ 'Email': user.get('email'),
+ 'Created': user.get('sys_created_on'),
+ 'Updated': user.get('sys_updated_on'),
+ } for user in users]
+ mapped_users = unicode_to_str_recur(mapped_users)
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow Users', mapped_users, headers=headers, removeNull=True,
+ headerTransform=pascalToSpace),
+ 'EntryContext': {
+ 'ServiceNow.User(val.ID===obj.ID)': createContext(mapped_users, removeNull=True),
+ }
+ }
+
+ return entry
+
+
+# Deprecated
+def get_groups_command():
+ args = unicode_to_str_recur(demisto.args())
+ table_name = 'sys_user_group'
+ group_name = args['name']
+ res = query(table_name, None, 0, 'name=' + group_name)
+
+ if not res or 'result' not in res:
+ return 'No groups found'
+
+ hr_groups = []
+ context_groups = []
+
+ for group in res['result']:
+ if group['name'] == group_name:
+ hr_groups.append({
+ 'ID': group['sys_id'],
+ 'Name': group['name'],
+ 'Description': group['description'],
+ 'Email': group['email'],
+ 'Active': group['active'],
+                'Manager': group.get('manager')
+ })
+ context_groups.append({
+ 'GroupId': group['sys_id'],
+ 'GroupName': group['name']
+ })
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow Group', hr_groups),
+ 'EntryContext': {
+ 'ServiceNowGroups(val.GroupId==obj.GroupId)': context_groups,
+ }
+ }
+
+ return entry
+
+
+def list_table_fields_command():
+ args = unicode_to_str_recur(demisto.args())
+ table_name = args['table_name']
+
+ res = get_table_fields(table_name)
+
+ if not res or 'result' not in res:
+ return 'Cannot find table'
+
+ if len(res['result']) == 0:
+ return 'Table contains no records'
+
+ fields = [{'Name': k} for k, v in res['result'][0].iteritems()]
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow Table fields - ' + table_name, fields),
+ 'EntryContext': {
+ 'ServiceNow.Field': createContext(fields),
+ }
+ }
+
+ return entry
+
+
+def get_table_fields(table_name):
+ # Get one record
+ path = 'table/' + table_name + '?sysparm_limit=1'
+ res = send_request(path, 'GET')
+
+ return res
+
+
+def get_table_name_command():
+ args = unicode_to_str_recur(demisto.args())
+ label = args['label']
+ offset = args.get('offset', DEFAULTS['offset'])
+ limit = args.get('limit', DEFAULTS['limit'])
+
+ table_query = 'label=' + label
+
+ res = query('sys_db_object', limit, offset, table_query)
+
+ if not res or 'result' not in res:
+ return 'Cannot find table'
+
+ tables = res['result']
+
+ if len(tables) == 0:
+ return 'Cannot find table'
+
+ headers = ['ID', 'Name', 'SystemName']
+
+ mapped_tables = [{
+ 'ID': table.get('sys_id'),
+ 'Name': table.get('name'),
+ 'SystemName': table.get('sys_name')
+ } for table in tables]
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ServiceNow Tables for label - ' + label, mapped_tables,
+ headers=headers, headerTransform=pascalToSpace),
+ 'EntryContext': {
+ 'ServiceNow.Table(val.ID===obj.ID)': createContext(mapped_tables),
+ }
+ }
+
+ return entry
+
+
+def fetch_incidents():
+ query_params = {}
+ incidents = []
+ if FETCH_TIME:
+ fetch_time = FETCH_TIME
+ else:
+ fetch_time = DEFAULTS['fetch_time']
+
+ last_run = demisto.getLastRun()
+ if 'time' not in last_run:
+ snow_time, _ = parse_date_range(fetch_time, '%Y-%m-%d %H:%M:%S')
+ else:
+ snow_time = last_run['time']
+
+ query = ''
+ if SYSPARM_QUERY:
+ query += SYSPARM_QUERY + '^'
+ query += 'ORDERBY{0}^{0}>{1}'.format(TIMESTAMP_FIELD, snow_time)
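+    # With the default instance parameters this builds a query like (illustrative
+    # timestamp): stateNOT IN6,7^ORDERBYopened_at^opened_at>2019-10-15 10:00:00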
+
+ if query:
+ query_params['sysparm_query'] = query
+
+ query_params['sysparm_limit'] = SYSPARM_LIMIT
+
+ path = 'table/' + TICKET_TYPE
+ res = send_request(path, 'get', params=query_params)
+
+ count = 0
+ parsed_snow_time = datetime.strptime(snow_time, '%Y-%m-%d %H:%M:%S')
+
+ for result in res.get('result', []):
+ labels = []
+
+ if TIMESTAMP_FIELD not in result:
+ raise ValueError("The timestamp field [{}]"
+ " does not exist in the ticket".format(TIMESTAMP_FIELD))
+
+        if count >= int(SYSPARM_LIMIT):
+ break
+
+ try:
+ if datetime.strptime(result[TIMESTAMP_FIELD], '%Y-%m-%d %H:%M:%S') < parsed_snow_time:
+ continue
+ except Exception:
+ pass
+
+ for k, v in result.iteritems():
+ if isinstance(v, basestring):
+ labels.append({
+ 'type': k,
+ 'value': v
+ })
+ else:
+ labels.append({
+ 'type': k,
+ 'value': json.dumps(v)
+ })
+
+ severity = SEVERITY_MAP.get(result.get('severity', ''), 0)
+
+ file_names = []
+ if GET_ATTACHMENTS:
+ file_entries = get_ticket_attachment_entries(result['sys_id'])
+ for file_result in file_entries:
+ if file_result['Type'] == entryTypes['error']:
+ raise Exception('Error getting attachment: ' + str(file_result['Contents']))
+ file_names.append({
+ 'path': file_result['FileID'],
+ 'name': file_result['File']
+ })
+
+ incidents.append({
+ 'name': 'ServiceNow Incident ' + result.get('number'),
+ 'labels': labels,
+ 'details': json.dumps(result),
+ 'severity': severity,
+ 'attachment': file_names,
+ 'rawJSON': json.dumps(result)
+ })
+
+ count += 1
+ snow_time = result[TIMESTAMP_FIELD]
+
+ demisto.incidents(incidents)
+ demisto.setLastRun({'time': snow_time})
+
+
+def test_module():
+ # Validate fetch_time parameter is valid (if not, parse_date_range will raise the error message)
+ parse_date_range(FETCH_TIME, '%Y-%m-%d %H:%M:%S')
+
+ path = 'table/' + TICKET_TYPE + '?sysparm_limit=1'
+ res = send_request(path, 'GET')
+ if 'result' not in res:
+ return_error('ServiceNow error: ' + str(res))
+ ticket = res['result']
+ if ticket and demisto.params().get('isFetch'):
+ if isinstance(ticket, list):
+ ticket = ticket[0]
+ if TIMESTAMP_FIELD not in ticket:
+ raise ValueError("The timestamp field [{}]"
+ " does not exist in the ticket".format(TIMESTAMP_FIELD))
+
+
+LOG('Executing command ' + demisto.command())
+raise_exception = False
+try:
+ if demisto.command() == 'test-module':
+ test_module()
+ demisto.results('ok')
+ elif demisto.command() == 'fetch-incidents':
+ raise_exception = True
+ fetch_incidents()
+    elif demisto.command() == 'servicenow-get' or \
+            demisto.command() == 'servicenow-incident-get' or demisto.command() == 'servicenow-get-ticket':
+        demisto.results(get_ticket_command())
+ elif demisto.command() == 'servicenow-update' or \
+ demisto.command() == 'servicenow-incident-update' or demisto.command() == 'servicenow-update-ticket':
+ demisto.results(update_ticket_command())
+ elif demisto.command() == 'servicenow-create' or \
+ demisto.command() == 'servicenow-incident-create' or demisto.command() == 'servicenow-create-ticket':
+ demisto.results(create_ticket_command())
+ elif demisto.command() == 'servicenow-delete-ticket':
+ demisto.results(delete_ticket_command())
+ elif demisto.command() == 'servicenow-add-link' or demisto.command() == 'servicenow-incident-add-link':
+ demisto.results(add_link_command())
+ elif demisto.command() == 'servicenow-add-comment' or demisto.command() == 'servicenow-incident-add-comment':
+ demisto.results(add_comment_command())
+ elif demisto.command() == 'servicenow-query' or \
+ demisto.command() == 'servicenow-incidents-query' or demisto.command() == 'servicenow-query-tickets':
+ demisto.results(query_tickets_command())
+ elif demisto.command() == 'servicenow-upload-file' or demisto.command() == 'servicenow-incident-upload-file':
+ demisto.results(upload_file_command())
+ elif demisto.command() == 'servicenow-query-table':
+ demisto.results(query_table_command())
+ elif demisto.command() == 'servicenow-get-computer':
+ demisto.results(get_computer_command())
+ elif demisto.command() == 'servicenow-query-computers':
+ demisto.results(query_computers_command())
+ elif demisto.command() == 'servicenow-query-groups':
+ demisto.results(query_groups_command())
+ elif demisto.command() == 'servicenow-query-users':
+ demisto.results(query_users_command())
+ elif demisto.command() == 'servicenow-get-groups':
+ demisto.results(get_groups_command())
+ elif demisto.command() == 'servicenow-get-record':
+ demisto.results(get_record_command())
+ elif demisto.command() == 'servicenow-update-record':
+ demisto.results(update_record_command())
+ elif demisto.command() == 'servicenow-create-record':
+ demisto.results(create_record_command())
+ elif demisto.command() == 'servicenow-delete-record':
+ demisto.results(delete_record_command())
+    elif demisto.command() == 'servicenow-list-table-fields':
+        demisto.results(list_table_fields_command())
+    elif demisto.command() == 'servicenow-get-table-name':
+        demisto.results(get_table_name_command())
+    elif demisto.command() == 'servicenow-get-ticket-notes':
+        demisto.results(get_ticket_notes_command())
+except Exception as e:
+ LOG(e)
+ LOG.print_log()
+ if not raise_exception:
+ return_error(str(e))
+ else:
+ raise
diff --git a/Integrations/ServiceNow/ServiceNow.yml b/Integrations/ServiceNow/ServiceNow.yml
new file mode 100644
index 000000000000..edd4486ec5a2
--- /dev/null
+++ b/Integrations/ServiceNow/ServiceNow.yml
@@ -0,0 +1,4147 @@
+category: Case Management
+commonfields:
+ id: ServiceNow
+ version: -1
+configuration:
+- display: ServiceNow URL, in the format https://company.service-now.com/
+ name: url
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: false
+ type: 9
+- defaultvalue: ''
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- defaultvalue: 'false'
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: incident
+ display: Default ticket type for running ticket commands and fetching incidents
+ name: ticket_type
+ required: false
+ type: 0
+- display: ServiceNow API Version (e.g. 'v1')
+ name: api_version
+ required: false
+ type: 0
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- defaultvalue: stateNOT IN6,7
+ display: The query to use when fetching incidents
+ name: sysparm_query
+ required: false
+ type: 0
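+# Illustrative note: the default query "stateNOT IN6,7" excludes tickets in
+# states 6 and 7 (typically Resolved and Closed); conditions can be chained
+# with "^", e.g. stateNOT IN6,7^priority=1.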
+- defaultvalue: '10'
+ display: How many incidents to fetch each time
+ name: fetch_limit
+ required: false
+ type: 0
+- defaultvalue: 10 minutes
+  display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days,
+    3 months, 1 year)
+ name: fetch_time
+ required: false
+ type: 0
+- defaultvalue: opened_at
+ display: |-
+    Timestamp field to filter by (e.g., `opened_at`).
+    This is how the filter is applied to the query: "ORDERBYopened_at^opened_at>[Last Run]". To prevent duplicate incidents, this field is mandatory for fetching incidents.
+ name: timestamp_field
+ required: false
+ type: 0
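+# Illustrative example: with timestamp_field "opened_at" and a last run of
+# 2019-01-01 00:00:00, incidents are fetched with:
+# sysparm_query=ORDERBYopened_at^opened_at>2019-01-01 00:00:00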
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- display: Get incident attachments
+ name: get_attachments
+ required: false
+ type: 8
+description: IT service management. Create, update, and query ServiceNow tickets and records.
+display: ServiceNow
+name: ServiceNow
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: Ticket System ID
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ - sc_request
+ - sc_task
+ required: false
+ secret: false
+ - default: false
+ description: Ticket number to retrieve
+ isArray: false
+ name: number
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+      description: Whether to retrieve ticket attachments. Default is false.
+ isArray: false
+ name: get_attachments
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieve ticket information by specific ticket ID
+ execution: false
+ name: servicenow-get-ticket
+ outputs:
+ - contextPath: ServiceNow.Ticket.ID
+ description: ServiceNow ticket ID
+ type: string
+ - contextPath: ServiceNow.Ticket.OpenedBy
+ description: ServiceNow ticket opener ID
+ type: string
+ - contextPath: ServiceNow.Ticket.CreatedOn
+ description: ServiceNow ticket creation date.
+ type: date
+ - contextPath: ServiceNow.Ticket.Assignee
+ description: ServiceNow ticket assignee ID
+ type: string
+ - contextPath: ServiceNow.Ticket.State
+ description: ServiceNow ticket state
+ type: string
+ - contextPath: ServiceNow.Ticket.Summary
+ description: ServiceNow ticket short summary
+ type: string
+ - contextPath: ServiceNow.Ticket.Number
+ description: ServiceNow ticket number
+ type: string
+ - contextPath: ServiceNow.Ticket.Active
+ description: ServiceNow ticket active
+ type: boolean
+ - contextPath: ServiceNow.Ticket.AdditionalComments
+ description: ServiceNow ticket comments
+ type: string
+ - contextPath: ServiceNow.Ticket.Priority
+ description: ServiceNow ticket priority
+ type: string
+ - contextPath: ServiceNow.Ticket.OpenedAt
+ description: ServiceNow ticket opening time
+ type: date
+ - contextPath: ServiceNow.Ticket.ResolvedBy
+ description: ServiceNow ticket resolver ID
+ type: string
+ - contextPath: ServiceNow.Ticket.CloseCode
+ description: ServiceNow ticket close code
+ type: string
+ - contextPath: File.Info
+ description: Attachment file info
+ type: string
+ - contextPath: File.Name
+ description: Attachment file name
+ type: string
+ - contextPath: File.Size
+ description: Attachment file size
+ type: number
+ - contextPath: File.SHA1
+ description: Attachment file SHA1
+ type: string
+ - contextPath: File.SHA256
+ description: Attachment file SHA256
+ type: string
+ - contextPath: File.EntryID
+ description: Attachment file entry ID
+ type: string
+ - contextPath: File.Type
+ description: Attachment file type
+ type: string
+ - contextPath: File.MD5
+ description: Attachment file MD5
+ type: string
+ - arguments:
+ - default: true
+ description: Ticket System ID to retrieve
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ - sc_request
+ - sc_task
+ required: false
+ secret: false
+ - default: false
+ description: Ticket number to retrieve
+ isArray: false
+ name: number
+ required: false
+ secret: false
+ - default: false
+      description: Whether to retrieve ticket attachments. Default is false.
+ isArray: false
+ name: get_attachments
+ required: false
+ secret: false
+ deprecated: true
+ description: Deprecated. Use servicenow-get-ticket or servicenow-get-record instead
+ execution: false
+ name: servicenow-get
+ outputs:
+ - contextPath: Ticket.ID
+      description: The unique ticket identifier (sys_id).
+ type: string
+ - contextPath: Ticket.Creator
+ description: A string field that indicates the user who created the ticket.
+ type: string
+ - contextPath: Ticket.CreatedOn
+ description: The date and time when the ticket was created.
+ type: date
+ - contextPath: Ticket.Assignee
+ description: Specifies the user assigned to complete the ticket. By default,
+ this field uses a reference qualifier to only display users with the itil
+ role.
+ type: string
+ - contextPath: Ticket.State
+ description: Status of the ticket.
+ type: string
+ - contextPath: Ticket.Summary
+ description: A human-readable title for the record.
+ type: string
+ - contextPath: Ticket.Number
+ description: The display value of the ticket.
+ type: string
+ - contextPath: Ticket.Active
+ description: Specifies whether work is still being done on a task or whether
+ the work for the task is complete.
+ type: boolean
+ - contextPath: Ticket.AdditionalComments
+ description: Comments about the task record.
+ type: Unknown
+ - contextPath: Ticket.Priority
+ description: Specifies how high a priority the ticket should be for the assignee.
+ type: string
+ - contextPath: Ticket.OpenedAt
+ description: The date and time when the ticket was opened for the first time.
+ type: date
+ - contextPath: File.Info
+ description: Attachment file info
+ type: string
+ - contextPath: File.Name
+ description: Attachment file name
+ type: string
+ - contextPath: File.Size
+ description: Attachment file size
+ type: number
+ - contextPath: File.SHA1
+ description: Attachment file SHA1
+ type: string
+ - contextPath: File.SHA256
+ description: Attachment file SHA256
+ type: string
+ - contextPath: File.EntryID
+ description: Attachment file entry ID
+ type: string
+ - contextPath: File.Type
+ description: Attachment file type
+ type: string
+ - contextPath: File.MD5
+ description: Attachment file MD5
+ type: string
+ - arguments:
+ - default: true
+ description: Ticket System ID to retrieve
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ required: false
+ secret: false
+ - default: false
+ description: Ticket number to retrieve
+ isArray: false
+ name: number
+ required: false
+ secret: false
+ deprecated: true
+ description: Deprecated. Use servicenow-get-ticket or servicenow-get-record instead
+ execution: false
+ name: servicenow-incident-get
+ outputs:
+ - contextPath: Ticket.ID
+ description: ServiceNow ticket System ID
+ type: string
+ - contextPath: Ticket.Creator
+ description: ServiceNow ticket creator
+ type: string
+ - contextPath: Ticket.Assignee
+ description: ServiceNow ticket assignee
+ type: string
+ - contextPath: Ticket.State
+ description: ServiceNow ticket state
+ type: string
+ - contextPath: Ticket.Summary
+ description: ServiceNow ticket short summary
+ type: string
+ - contextPath: Ticket.Number
+ description: ServiceNow ticket number
+ type: string
+ - arguments:
+ - default: false
+ description: Short description of the ticket
+ isArray: false
+ name: short_description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ - sc_request
+ - sc_task
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket urgency
+ isArray: false
+ name: urgency
+ predefined:
+ - 3 - Low
+ - 2 - Medium
+ - 1 - High
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket severity
+ isArray: false
+ name: severity
+ predefined:
+ - 3 - Low
+ - 2 - Medium
+ - 1 - High
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket impact
+ isArray: false
+ name: impact
+ predefined:
+ - 3 - Low
+ - 2 - Medium
+ - 1 - High
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Set ticket as Active
+ isArray: false
+ name: active
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Set ticket ActivityDue - format "2016-07-02 21:51:11"
+ isArray: false
+ name: activity_due
+ required: false
+ secret: false
+ - default: false
+      description: List of users assigned to the ticket
+ isArray: false
+ name: additional_assignee_list
+ required: false
+ secret: false
+ - default: false
+      description: Ticket approval history
+ isArray: false
+ name: approval_history
+ required: false
+ secret: false
+ - default: false
+ description: Set ticket ApprovalSet - format "2016-07-02 21:51:11"
+ isArray: false
+ name: approval_set
+ required: false
+ secret: false
+ - default: false
+ description: To whom the ticket is assigned
+ isArray: false
+ name: assigned_to
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: business_duration
+ required: false
+ secret: false
+ - default: false
+ description: Business service
+ isArray: false
+ name: business_service
+ required: false
+ secret: false
+ - default: false
+ description: Business source
+ isArray: false
+ name: business_stc
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: calendar_duration
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: caller_id
+ required: false
+ secret: false
+ - default: false
+ description: Category of ticket
+ isArray: false
+ name: category
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: caused_by
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket's close code
+ isArray: false
+ name: close_code
+ predefined:
+ - Solved (Work Around)
+ - Solved (Permanently)
+ - Solved Remotely (Work Around)
+ - Solved Remotely (Permanently)
+ - Not Solved (Not Reproducible)
+ - Not Solved (Too Costly)
+ - Closed/Resolved by Caller
+ required: false
+ secret: false
+ - default: false
+ description: Close notes of the ticket
+ isArray: false
+ name: close_notes
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: closed_at
+ required: false
+ secret: false
+ - default: false
+ description: User who closed the ticket
+ isArray: false
+ name: closed_by
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: cmdb_ci
+ required: false
+ secret: false
+ - default: false
+      description: Journal-type input
+ isArray: false
+ name: comments
+ required: false
+ secret: false
+ - default: false
+      description: Journal-type input
+ isArray: false
+ name: comments_and_work_notes
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: company
+ required: false
+ secret: false
+ - default: false
+ description: Contact type
+ isArray: false
+ name: contact_type
+ required: false
+ secret: false
+ - default: false
+ description: Correlation display
+ isArray: false
+ name: correlation_display
+ required: false
+ secret: false
+ - default: false
+ description: Correlation id
+ isArray: false
+ name: correlation_id
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: delivery_plan
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Whether to display comments, work notes, and other journal fields
+ isArray: false
+ name: display
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Ticket description
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: due_date
+ required: false
+ secret: false
+ - default: false
+ description: Escalation
+ isArray: false
+ name: escalation
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: expected_start
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: follow_up
+ required: false
+ secret: false
+ - default: false
+ description: UID format list
+ isArray: false
+ name: group_list
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Is the ticket solved in the knowledge base
+ isArray: false
+ name: knowledge
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Location of the ticket
+ isArray: false
+ name: location
+ required: false
+ secret: false
+ - default: false
+ description: SLA of the ticket
+ isArray: false
+ name: made_sla
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Notify about this ticket
+ isArray: false
+ name: notify
+ predefined:
+ - '1'
+ - '0'
+ required: false
+ secret: false
+ - default: false
+ description: Order number
+ isArray: false
+ name: order
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: parent
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: parent_incident
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: problem_id
+ required: false
+ secret: false
+ - default: false
+      description: The number of times the ticket has been reassigned
+ isArray: false
+ name: reassignment_count
+ required: false
+ secret: false
+ - default: false
+      description: The number of times the ticket has been reopened
+ isArray: false
+ name: reopen_count
+ required: false
+ secret: false
+ - default: false
+      description: 'Resolution time. Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: resolved_at
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: resolved_by
+ required: false
+ secret: false
+ - default: false
+ description: UID
+ isArray: false
+ name: rfc
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: sla_due
+ required: false
+ secret: false
+ - default: false
+ description: Subcategory
+ isArray: false
+ name: subcategory
+ required: false
+ secret: false
+ - default: false
+ description: Last updated by
+ isArray: false
+ name: sys_updated_by
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: sys_updated_on
+ required: false
+ secret: false
+ - default: false
+ description: Input from the end user
+ isArray: false
+ name: user_input
+ required: false
+ secret: false
+ - default: false
+      description: A list of users watching the ticket
+ isArray: false
+ name: watch_list
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: work_end
+ required: false
+ secret: false
+ - default: false
+      description: Work notes (journal-type input)
+ isArray: false
+ name: work_notes
+ required: false
+ secret: false
+ - default: false
+      description: List of UIDs
+ isArray: false
+ name: work_notes_list
+ required: false
+ secret: false
+ - default: false
+      description: Date when work on the ticket started
+ isArray: false
+ name: work_start
+ required: false
+ secret: false
+ - default: false
+ description: Set AssignmentGroup - sys_id of group
+ isArray: false
+ name: assignment_group
+ required: false
+ secret: false
+ - default: false
+      description: Incident state (integer)
+ isArray: false
+ name: incident_state
+ required: false
+ secret: false
+ - default: false
+ description: Ticket number
+ isArray: false
+ name: number
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Priority of the ticket
+ isArray: false
+ name: priority
+ predefined:
+ - 5 - Planning
+ - 4 - Low
+ - 3 - Moderate
+ - 2 - High
+ - 1 - Critical
+ required: false
+ secret: false
+ - default: false
+ description: Template name to use as a base to create new tickets.
+ isArray: false
+ name: template
+ required: false
+ secret: false
+ - default: false
+      description: 'Custom (user-defined) fields in the format: fieldname1=value;fieldname2=value;
+        custom field names start with a "u_" prefix.'
+ isArray: false
+ name: custom_fields
+ required: false
+ secret: false
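+      # Illustrative example (hypothetical field names):
+      # custom_fields="u_environment=production;u_business_service=email"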
+ - auto: PREDEFINED
+ default: false
+ defaultValue: normal
+ description: Type of Change Request ticket
+ isArray: false
+ name: change_type
+ predefined:
+ - normal
+ - standard
+ - emergency
+ required: false
+ secret: false
+ - default: false
+ description: State of the ticket, e.g., "Closed" or "7" or "7 - Closed".
+ isArray: false
+ name: state
+ required: false
+ secret: false
+ - default: false
+      description: 'Ticket opening time. Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: opened_at
+ required: false
+ secret: false
+ - default: false
+ description: Caller system ID
+ isArray: false
+ name: caller
+ required: false
+ secret: false
+ deprecated: false
+    description: Create a new ServiceNow ticket
+ execution: false
+ name: servicenow-create-ticket
+ outputs:
+ - contextPath: ServiceNow.Ticket.ID
+ description: ServiceNow ticket ID
+ type: string
+ - contextPath: ServiceNow.Ticket.OpenedBy
+ description: ServiceNow ticket opener ID
+ type: string
+ - contextPath: ServiceNow.Ticket.CreatedOn
+ description: ServiceNow ticket creation date.
+ type: date
+ - contextPath: ServiceNow.Ticket.Assignee
+ description: ServiceNow ticket assignee ID
+ type: string
+ - contextPath: ServiceNow.Ticket.State
+ description: ServiceNow ticket state
+ type: string
+ - contextPath: ServiceNow.Ticket.Summary
+ description: ServiceNow ticket short summary
+ type: string
+ - contextPath: ServiceNow.Ticket.Number
+ description: ServiceNow ticket number
+ type: string
+ - contextPath: ServiceNow.Ticket.Active
+ description: ServiceNow ticket active
+ type: boolean
+ - contextPath: ServiceNow.Ticket.AdditionalComments
+ description: ServiceNow ticket comments
+ type: string
+ - contextPath: ServiceNow.Ticket.Priority
+ description: ServiceNow ticket priority
+ type: string
+ - contextPath: ServiceNow.Ticket.OpenedAt
+ description: ServiceNow ticket opening time
+ type: date
+ - contextPath: ServiceNow.Ticket.ResolvedBy
+ description: ServiceNow ticket resolver ID
+ type: string
+ - contextPath: ServiceNow.Ticket.CloseCode
+ description: ServiceNow ticket close code
+ type: string
+ - arguments:
+ - default: false
+ description: Short description of the ticket
+ isArray: false
+ name: short_description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ - sc_request
+ - sc_task
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket urgency
+ isArray: false
+ name: urgency
+ predefined:
+ - 3 - Low
+ - 2 - Medium
+ - 1 - High
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket severity
+ isArray: false
+ name: severity
+ predefined:
+ - 3 - Low
+ - 2 - Medium
+ - 1 - High
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket impact
+ isArray: false
+ name: impact
+ predefined:
+ - 3 - Low
+ - 2 - Medium
+ - 1 - High
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Set ticket as Active
+ isArray: false
+ name: active
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Set ticket ActivityDue - format "2016-07-02 21:51:11" glide_date_time
+ isArray: false
+ name: activity_due
+ required: false
+ secret: false
+ - default: false
+      description: List of users assigned to the ticket
+ isArray: false
+ name: additional_assignee_list
+ required: false
+ secret: false
+ - default: false
+      description: Ticket approval history
+ isArray: false
+ name: approval_history
+ required: false
+ secret: false
+ - default: false
+ description: Set ticket ApprovalSet - format "2016-07-02 21:51:11" glide_date_time
+ isArray: false
+ name: approval_set
+ required: false
+ secret: false
+ - default: false
+ description: To whom the ticket is assigned
+ isArray: false
+ name: assigned_to
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: business_duration
+ required: false
+ secret: false
+ - default: false
+ description: Business service
+ isArray: false
+ name: business_service
+ required: false
+ secret: false
+ - default: false
+ description: Business source
+ isArray: false
+ name: business_stc
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: calendar_duration
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: caller_id
+ required: false
+ secret: false
+ - default: false
+ description: Category of ticket
+ isArray: false
+ name: category
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: caused_by
+ required: false
+ secret: false
+ - default: false
+ description: Ticket's close code
+ isArray: false
+ name: close_code
+ required: false
+ secret: false
+ - default: false
+ description: Close notes of the ticket
+ isArray: false
+ name: close_notes
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: closed_at
+ required: false
+ secret: false
+ - default: false
+ description: User who closed the ticket
+ isArray: false
+ name: closed_by
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: cmdb_ci
+ required: false
+ secret: false
+ - default: false
+      description: Journal-type input
+ isArray: false
+ name: comments
+ required: false
+ secret: false
+ - default: false
+      description: Journal-type input
+ isArray: false
+ name: comments_and_work_notes
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: company
+ required: false
+ secret: false
+ - default: false
+ description: Contact type
+ isArray: false
+ name: contact_type
+ required: false
+ secret: false
+ - default: false
+ description: Correlation display
+ isArray: false
+ name: correlation_display
+ required: false
+ secret: false
+ - default: false
+ description: Correlation id
+ isArray: false
+ name: correlation_id
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: delivery_plan
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Whether to display comments, work notes, and other journal fields
+ isArray: false
+ name: display
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Ticket description
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: due_date
+ required: false
+ secret: false
+ - default: false
+ description: Escalation
+ isArray: false
+ name: escalation
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: expected_start
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: follow_up
+ required: false
+ secret: false
+ - default: false
+ description: UID format list
+ isArray: false
+ name: group_list
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Is the ticket solved in the knowledge base
+ isArray: false
+ name: knowledge
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Location of the ticket
+ isArray: false
+ name: location
+ required: false
+ secret: false
+ - default: false
+ description: SLA of the ticket
+ isArray: false
+ name: made_sla
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Notify about this ticket
+ isArray: false
+ name: notify
+ predefined:
+ - '1'
+ - '0'
+ required: false
+ secret: false
+ - default: false
+ description: Order number
+ isArray: false
+ name: order
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: parent
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: parent_incident
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: problem_id
+ required: false
+ secret: false
+ - default: false
+      description: The number of times the ticket has been reassigned
+ isArray: false
+ name: reassignment_count
+ required: false
+ secret: false
+ - default: false
+      description: The number of times the ticket has been reopened
+ isArray: false
+ name: reopen_count
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: resolved_at
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: resolved_by
+ required: false
+ secret: false
+ - default: false
+ description: UID
+ isArray: false
+ name: rfc
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: sla_due
+ required: false
+ secret: false
+ - default: false
+ description: Subcategory
+ isArray: false
+ name: subcategory
+ required: false
+ secret: false
+ - default: false
+ description: Last updated by
+ isArray: false
+ name: sys_updated_by
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: sys_updated_on
+ required: false
+ secret: false
+ - default: false
+ description: Input from the end user
+ isArray: false
+ name: user_input
+ required: false
+ secret: false
+ - default: false
+      description: A list of users watching the ticket
+ isArray: false
+ name: watch_list
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: work_end
+ required: false
+ secret: false
+ - default: false
+      description: Work notes (journal-type input)
+ isArray: false
+ name: work_notes
+ required: false
+ secret: false
+ - default: false
+      description: List of UIDs
+ isArray: false
+ name: work_notes_list
+ required: false
+ secret: false
+ - default: false
+      description: Date when work on the ticket started
+ isArray: false
+ name: work_start
+ required: false
+ secret: false
+ - default: false
+      description: Set AssignmentGroup - sys_id of the group, e.g., 46b87022a9fe198101a78787e40d7547
+ isArray: false
+ name: assignment_group
+ required: false
+ secret: false
+ - default: false
+      description: Incident state (integer)
+ isArray: false
+ name: incident_state
+ required: false
+ secret: false
+ - default: false
+ description: Ticket number
+ isArray: false
+ name: number
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Priority of the ticket (number)
+ isArray: false
+ name: priority
+ predefined:
+ - 5 - Planning
+ - 4 - Low
+ - 3 - Moderate
+ - 2 - High
+ - 1 - Critical
+ required: false
+ secret: false
+ - default: false
+ description: Template name to use as a base to create new tickets.
+ isArray: false
+ name: template
+ required: false
+ secret: false
+ - default: false
+      description: 'Custom (user-defined) fields in the format: fieldname1=value;fieldname2=value;...'
+ isArray: false
+ name: custom_fields
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: normal
+ description: Type of Change Request ticket
+ isArray: false
+ name: type
+ predefined:
+ - normal
+ - standard
+ - emergency
+ required: false
+ secret: false
+ - default: false
+ description: State of the ticket
+ isArray: false
+ name: state
+ required: false
+ secret: false
+ deprecated: true
+ description: Deprecated. Use servicenow-create-ticket or servicenow-create-record
+ instead
+ execution: false
+ name: servicenow-create
+ outputs:
+ - contextPath: Ticket.ID
+ description: ServiceNow ticket System ID
+ type: string
+ - contextPath: Ticket.Creator
+ description: ServiceNow ticket creator
+ type: string
+ - contextPath: Ticket.Assignee
+ description: ServiceNow ticket assignee
+ type: string
+ - contextPath: Ticket.State
+ description: ServiceNow ticket state
+ type: string
+ - contextPath: Ticket.Summary
+ description: ServiceNow ticket short summary
+ type: string
+ - contextPath: Ticket.Number
+ description: ServiceNow ticket number
+ type: string
+ - arguments:
+ - default: false
+ description: Short description of the ticket
+ isArray: false
+ name: short_description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ required: false
+ secret: false
+ - default: false
+ description: Ticket urgency
+ isArray: false
+ name: urgency
+ required: false
+ secret: false
+ - default: false
+ description: Ticket severity
+ isArray: false
+ name: severity
+ required: false
+ secret: false
+ - default: false
+ description: Ticket impact
+ isArray: false
+ name: impact
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Set ticket as Active
+ isArray: false
+ name: active
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Set ticket ActivityDue - format "2016-07-02 21:51:11" glide_date_time
+ isArray: false
+ name: activity_due
+ required: false
+ secret: false
+ - default: false
+      description: List of users assigned to the ticket
+ isArray: false
+ name: additional_assignee_list
+ required: false
+ secret: false
+ - default: false
+      description: Ticket approval history
+ isArray: false
+ name: approval_history
+ required: false
+ secret: false
+ - default: false
+ description: Set ticket ApprovalSet - format "2016-07-02 21:51:11" glide_date_time
+ isArray: false
+ name: approval_set
+ required: false
+ secret: false
+ - default: false
+ description: To whom the ticket is assigned
+ isArray: false
+ name: assigned_to
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: business_duration
+ required: false
+ secret: false
+ - default: false
+ description: Business service
+ isArray: false
+ name: business_service
+ required: false
+ secret: false
+ - default: false
+ description: Business source
+ isArray: false
+ name: business_stc
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: calendar_duration
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: caller_id
+ required: false
+ secret: false
+ - default: false
+ description: Category name
+ isArray: false
+ name: category
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: caused_by
+ required: false
+ secret: false
+ - default: false
+ description: Ticket's close code
+ isArray: false
+ name: close_code
+ required: false
+ secret: false
+ - default: false
+ description: Close notes of the ticket
+ isArray: false
+ name: close_notes
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: closed_at
+ required: false
+ secret: false
+ - default: false
+ description: User who closed the ticket
+ isArray: false
+ name: closed_by
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: cmdb_ci
+ required: false
+ secret: false
+ - default: false
+      description: Journal-type input
+ isArray: false
+ name: comments
+ required: false
+ secret: false
+ - default: false
+      description: Journal-type input
+ isArray: false
+ name: comments_and_work_notes
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: company
+ required: false
+ secret: false
+ - default: false
+ description: Contact type
+ isArray: false
+ name: contact_type
+ required: false
+ secret: false
+ - default: false
+ description: Correlation display
+ isArray: false
+ name: correlation_display
+ required: false
+ secret: false
+ - default: false
+ description: Correlation id
+ isArray: false
+ name: correlation_id
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: delivery_plan
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Whether to display comments, work notes, and other journal fields
+ isArray: false
+ name: display
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Ticket description
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: due_date
+ required: false
+ secret: false
+ - default: false
+ description: Escalation
+ isArray: false
+ name: escalation
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: expected_start
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: follow_up
+ required: false
+ secret: false
+ - default: false
+ description: UID format list
+ isArray: false
+ name: group_list
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Is the ticket solved in the knowledge base
+ isArray: false
+ name: knowledge
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Location of the ticket
+ isArray: false
+ name: location
+ required: false
+ secret: false
+ - default: false
+ description: SLA of the ticket
+ isArray: false
+ name: made_sla
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Notify about this ticket
+ isArray: false
+ name: notify
+ predefined:
+ - '1'
+ - '0'
+ required: false
+ secret: false
+ - default: false
+ description: Order number
+ isArray: false
+ name: order
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: parent
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: parent_incident
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: problem_id
+ required: false
+ secret: false
+ - default: false
+      description: The number of times the ticket has been reassigned
+ isArray: false
+ name: reassignment_count
+ required: false
+ secret: false
+ - default: false
+      description: The number of times the ticket has been reopened
+ isArray: false
+ name: reopen_count
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: resolved_at
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: resolved_by
+ required: false
+ secret: false
+ - default: false
+ description: UID
+ isArray: false
+ name: rfc
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: sla_due
+ required: false
+ secret: false
+ - default: false
+ description: Subcategory
+ isArray: false
+ name: subcategory
+ required: false
+ secret: false
+ - default: false
+ description: Last updated by
+ isArray: false
+ name: sys_updated_by
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: sys_updated_on
+ required: false
+ secret: false
+ - default: false
+ description: Input from the end user
+ isArray: false
+ name: user_input
+ required: false
+ secret: false
+ - default: false
+      description: A list of users watching the ticket
+ isArray: false
+ name: watch_list
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: work_end
+ required: false
+ secret: false
+ - default: false
+      description: Work notes (journal-type input)
+ isArray: false
+ name: work_notes
+ required: false
+ secret: false
+ - default: false
+      description: List of UIDs
+ isArray: false
+ name: work_notes_list
+ required: false
+ secret: false
+ - default: false
+      description: Date when work on the ticket started
+ isArray: false
+ name: work_start
+ required: false
+ secret: false
+ - default: false
+      description: Set AssignmentGroup - sys_id of the group, e.g., 46b87022a9fe198101a78787e40d7547
+ isArray: false
+ name: assignment_group
+ required: false
+ secret: false
+ - default: false
+      description: Incident state (integer)
+ isArray: false
+ name: incident_state
+ required: false
+ secret: false
+ - default: false
+ description: Ticket number
+ isArray: false
+ name: number
+ required: false
+ secret: false
+ - default: false
+ description: Priority of the ticket (number)
+ isArray: false
+ name: priority
+ required: false
+ secret: false
+ - default: false
+ description: Template name to use as a base to create new tickets.
+ isArray: false
+ name: template
+ required: false
+ secret: false
+ deprecated: true
+    description: Deprecated. Use servicenow-create-ticket or servicenow-create-record
+      instead
+ execution: false
+ name: servicenow-incident-create
+ outputs:
+ - contextPath: Ticket.ID
+ description: ServiceNow ticket System ID
+ type: string
+ - contextPath: Ticket.Creator
+ description: ServiceNow ticket creator
+ type: string
+ - contextPath: Ticket.Assignee
+ description: ServiceNow ticket assignee
+ type: string
+ - contextPath: Ticket.State
+ description: ServiceNow ticket state
+ type: string
+ - contextPath: Ticket.Summary
+ description: ServiceNow ticket short summary
+ type: string
+ - contextPath: Ticket.Number
+ description: ServiceNow ticket number
+ type: string
+ - arguments:
+ - default: false
+ description: Short description of the ticket
+ isArray: false
+ name: short_description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ - sc_request
+ - sc_task
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket urgency
+ isArray: false
+ name: urgency
+ predefined:
+ - 3 - Low
+ - 2 - Medium
+ - 1 - High
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket severity
+ isArray: false
+ name: severity
+ predefined:
+ - 3 - Low
+ - 2 - Medium
+ - 1 - High
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket impact
+ isArray: false
+ name: impact
+ predefined:
+ - 3 - Low
+ - 2 - Medium
+ - 1 - High
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Whether the ticket is active (true/false)
+ isArray: false
+ name: active
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: activity_due
+ required: false
+ secret: false
+ - default: false
+      description: List of users assigned to the ticket
+ isArray: false
+ name: additional_assignee_list
+ required: false
+ secret: false
+ - default: false
+      description: Ticket approval history
+ isArray: false
+ name: approval_history
+ required: false
+ secret: false
+ - default: false
+ description: Set ticket ApprovalSet - format "2016-07-02 21:51:11"
+ isArray: false
+ name: approval_set
+ required: false
+ secret: false
+ - default: false
+ description: To whom the ticket is assigned
+ isArray: false
+ name: assigned_to
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: business_duration
+ required: false
+ secret: false
+ - default: false
+ description: Business service
+ isArray: false
+ name: business_service
+ required: false
+ secret: false
+ - default: false
+ description: Business source
+ isArray: false
+ name: business_stc
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: calendar_duration
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: caller_id
+ required: false
+ secret: false
+ - default: false
+ description: Category name
+ isArray: false
+ name: category
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: caused_by
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket's close code
+ isArray: false
+ name: close_code
+ predefined:
+ - Solved (Work Around)
+ - Solved (Permanently)
+ - Solved Remotely (Work Around)
+ - Solved Remotely (Permanently)
+ - Not Solved (Not Reproducible)
+ - Not Solved (Too Costly)
+ - Closed/Resolved by Caller
+ required: false
+ secret: false
+ - default: false
+ description: Close notes of the ticket
+ isArray: false
+ name: close_notes
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: closed_at
+ required: false
+ secret: false
+ - default: false
+ description: User who closed the ticket
+ isArray: false
+ name: closed_by
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: cmdb_ci
+ required: false
+ secret: false
+ - default: false
+      description: Journal-type input
+ isArray: false
+ name: comments
+ required: false
+ secret: false
+ - default: false
+      description: Journal-type input
+ isArray: false
+ name: comments_and_work_notes
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: company
+ required: false
+ secret: false
+ - default: false
+ description: Contact type
+ isArray: false
+ name: contact_type
+ required: false
+ secret: false
+ - default: false
+ description: Correlation display
+ isArray: false
+ name: correlation_display
+ required: false
+ secret: false
+ - default: false
+ description: Correlation id
+ isArray: false
+ name: correlation_id
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: delivery_plan
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Whether to display comments, work notes, and other journal fields
+ isArray: false
+ name: display
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Ticket description
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: due_date
+ required: false
+ secret: false
+ - default: false
+ description: Escalation
+ isArray: false
+ name: escalation
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: expected_start
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: follow_up
+ required: false
+ secret: false
+ - default: false
+ description: UID format list
+ isArray: false
+ name: group_list
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Is the ticket solved in the knowledge base
+ isArray: false
+ name: knowledge
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Location of the ticket
+ isArray: false
+ name: location
+ required: false
+ secret: false
+ - default: false
+ description: SLA of the ticket
+ isArray: false
+ name: made_sla
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Notify about this ticket
+ isArray: false
+ name: notify
+ predefined:
+ - '1'
+ - '0'
+ required: false
+ secret: false
+ - default: false
+ description: Order number
+ isArray: false
+ name: order
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: parent
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: parent_incident
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: problem_id
+ required: false
+ secret: false
+ - default: false
+      description: The number of times the ticket has been reassigned
+ isArray: false
+ name: reassignment_count
+ required: false
+ secret: false
+ - default: false
+      description: The number of times the ticket has been reopened
+ isArray: false
+ name: reopen_count
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: resolved_at
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: resolved_by
+ required: false
+ secret: false
+ - default: false
+ description: UID
+ isArray: false
+ name: rfc
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: sla_due
+ required: false
+ secret: false
+ - default: false
+ description: Subcategory
+ isArray: false
+ name: subcategory
+ required: false
+ secret: false
+ - default: false
+ description: Last updated by
+ isArray: false
+ name: sys_updated_by
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: sys_updated_on
+ required: false
+ secret: false
+ - default: false
+ description: Input from the end user
+ isArray: false
+ name: user_input
+ required: false
+ secret: false
+ - default: false
+      description: A list of users watching the ticket
+ isArray: false
+ name: watch_list
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: work_end
+ required: false
+ secret: false
+ - default: false
+      description: Work notes (journal-type input)
+ isArray: false
+ name: work_notes
+ required: false
+ secret: false
+ - default: false
+      description: List of UIDs
+ isArray: false
+ name: work_notes_list
+ required: false
+ secret: false
+ - default: false
+      description: Date when work on the ticket started
+ isArray: false
+ name: work_start
+ required: false
+ secret: false
+ - default: false
+ description: UID
+ isArray: false
+ name: assignment_group
+ required: false
+ secret: false
+ - default: false
+      description: Incident state (integer)
+ isArray: false
+ name: incident_state
+ required: false
+ secret: false
+ - default: false
+ description: Ticket number
+ isArray: false
+ name: number
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Priority of the ticket
+ isArray: false
+ name: priority
+ predefined:
+ - 5 - Planning
+ - 4 - Low
+ - 3 - Moderate
+ - 2 - High
+ - 1 - Critical
+ required: false
+ secret: false
+ - default: false
+ description: System ID of the ticket to update
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+      description: 'Custom (user-defined) fields in the format: fieldname1=value;fieldname2=value;
+        custom field names start with a "u_" prefix.'
+ isArray: false
+ name: custom_fields
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: normal
+ description: Type of Change Request ticket
+ isArray: false
+ name: change_type
+ predefined:
+ - normal
+ - standard
+ - emergency
+ required: false
+ secret: false
+ - default: false
+ description: State of the ticket, e.g., "Closed" or "7" or "7 - Closed".
+ isArray: false
+ name: state
+ required: false
+ secret: false
+ - default: false
+ description: Caller system ID
+ isArray: false
+ name: caller
+ required: false
+ secret: false
+ deprecated: false
+ description: Update specific ticket
+ execution: false
+ name: servicenow-update-ticket
+ - arguments:
+ - default: false
+ description: Short description of the ticket
+ isArray: false
+ name: short_description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ - sc_request
+ - sc_task
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket urgency
+ isArray: false
+ name: urgency
+ predefined:
+ - 3 - Low
+ - 2 - Medium
+ - 1 - High
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket severity
+ isArray: false
+ name: severity
+ predefined:
+ - 3 - Low
+ - 2 - Medium
+ - 1 - High
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket impact
+ isArray: false
+ name: impact
+ predefined:
+ - 3 - Low
+ - 2 - Medium
+ - 1 - High
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Whether the ticket is active (true/false)
+ isArray: false
+ name: active
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: activity_due
+ required: false
+ secret: false
+ - default: false
+      description: List of users assigned to the ticket
+ isArray: false
+ name: additional_assignee_list
+ required: false
+ secret: false
+ - default: false
+      description: Ticket approval history
+ isArray: false
+ name: approval_history
+ required: false
+ secret: false
+ - default: false
+ description: Set ticket ApprovalSet - format "2016-07-02 21:51:11" glide_date_time
+ isArray: false
+ name: approval_set
+ required: false
+ secret: false
+ - default: false
+ description: To whom the ticket is assigned
+ isArray: false
+ name: assigned_to
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: business_duration
+ required: false
+ secret: false
+ - default: false
+ description: Business service
+ isArray: false
+ name: business_service
+ required: false
+ secret: false
+ - default: false
+ description: Business source
+ isArray: false
+ name: business_stc
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: calendar_duration
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: caller_id
+ required: false
+ secret: false
+ - default: false
+ description: Category name
+ isArray: false
+ name: category
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: caused_by
+ required: false
+ secret: false
+ - default: false
+ description: Ticket's close code
+ isArray: false
+ name: close_code
+ required: false
+ secret: false
+ - default: false
+ description: Close notes of the ticket
+ isArray: false
+ name: close_notes
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: closed_at
+ required: false
+ secret: false
+ - default: false
+ description: User who closed the ticket
+ isArray: false
+ name: closed_by
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: cmdb_ci
+ required: false
+ secret: false
+ - default: false
+      description: Journal-type input
+ isArray: false
+ name: comments
+ required: false
+ secret: false
+ - default: false
+      description: Journal-type input
+ isArray: false
+ name: comments_and_work_notes
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: company
+ required: false
+ secret: false
+ - default: false
+ description: Contact type
+ isArray: false
+ name: contact_type
+ required: false
+ secret: false
+ - default: false
+ description: Correlation display
+ isArray: false
+ name: correlation_display
+ required: false
+ secret: false
+ - default: false
+ description: Correlation id
+ isArray: false
+ name: correlation_id
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: delivery_plan
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Whether to display comments, work notes, and other journal fields
+ isArray: false
+ name: display
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Ticket description
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: due_date
+ required: false
+ secret: false
+ - default: false
+ description: Escalation
+ isArray: false
+ name: escalation
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: expected_start
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: follow_up
+ required: false
+ secret: false
+ - default: false
+ description: UID format list
+ isArray: false
+ name: group_list
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Is the ticket solved in the knowledge base
+ isArray: false
+ name: knowledge
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Location of the ticket
+ isArray: false
+ name: location
+ required: false
+ secret: false
+ - default: false
+ description: SLA of the ticket
+ isArray: false
+ name: made_sla
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Notify about this ticket
+ isArray: false
+ name: notify
+ predefined:
+ - '1'
+ - '0'
+ required: false
+ secret: false
+ - default: false
+ description: Order number
+ isArray: false
+ name: order
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: parent
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: parent_incident
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: problem_id
+ required: false
+ secret: false
+ - default: false
+ description: The number of users previously included in the ticket
+ isArray: false
+ name: reassignment_count
+ required: false
+ secret: false
+ - default: false
+ description: The number of times the ticket has been reopened
+ isArray: false
+ name: reopen_count
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: resolved_at
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: resolved_by
+ required: false
+ secret: false
+ - default: false
+ description: UID
+ isArray: false
+ name: rfc
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: sla_due
+ required: false
+ secret: false
+ - default: false
+ description: Subcategory
+ isArray: false
+ name: subcategory
+ required: false
+ secret: false
+ - default: false
+ description: Last updated by
+ isArray: false
+ name: sys_updated_by
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: sys_updated_on
+ required: false
+ secret: false
+ - default: false
+ description: Input from the end user
+ isArray: false
+ name: user_input
+ required: false
+ secret: false
+ - default: false
+ description: A list of watched tickets
+ isArray: false
+ name: watch_list
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: work_end
+ required: false
+ secret: false
+ - default: false
+ description: Format journal list
+ isArray: false
+ name: work_notes
+ required: false
+ secret: false
+ - default: false
+ description: List with UIDs
+ isArray: false
+ name: work_notes_list
+ required: false
+ secret: false
+ - default: false
+ description: Date when work on the ticket started
+ isArray: false
+ name: work_start
+ required: false
+ secret: false
+ - default: false
+ description: UID
+ isArray: false
+ name: assignment_group
+ required: false
+ secret: false
+ - default: false
+ description: Incident state (integer)
+ isArray: false
+ name: incident_state
+ required: false
+ secret: false
+ - default: false
+ description: Ticket number
+ isArray: false
+ name: number
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Priority of the ticket
+ isArray: false
+ name: priority
+ predefined:
+ - 5 - Planning
+ - 4 - Low
+ - 3 - Moderate
+ - 2 - High
+ - 1 - Critical
+ required: false
+ secret: false
+ - default: false
+ description: System ID of the ticket to update
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ description: 'Custom (user-defined) fields, in the format: fieldname1=value;fieldname2=value;...'
+ isArray: false
+ name: custom_fields
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: normal
+ description: Type of Change Request ticket
+ isArray: false
+ name: type
+ predefined:
+ - normal
+ - standard
+ - emergency
+ required: false
+ secret: false
+ - default: false
+ description: State of the ticket
+ isArray: false
+ name: state
+ required: false
+ secret: false
+ deprecated: true
+ description: Deprecated. Use servicenow-update-ticket or servicenow-update-record
+ instead.
+ execution: false
+ name: servicenow-update
+ - arguments:
+ - default: false
+ description: Short description of the ticket
+ isArray: false
+ name: short_description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ required: false
+ secret: false
+ - default: false
+ description: Ticket urgency
+ isArray: false
+ name: urgency
+ required: false
+ secret: false
+ - default: false
+ description: Ticket severity
+ isArray: false
+ name: severity
+ required: false
+ secret: false
+ - default: false
+ description: Ticket impact
+ isArray: false
+ name: impact
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether the ticket is active (true/false)
+ isArray: false
+ name: active
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: activity_due
+ required: false
+ secret: false
+ - default: false
+ description: List of assigned users to the ticket
+ isArray: false
+ name: additional_assignee_list
+ required: false
+ secret: false
+ - default: false
+ description: Ticket history approval
+ isArray: false
+ name: approval_history
+ required: false
+ secret: false
+ - default: false
+ description: Set the ticket approval set - glide_date_time format, e.g. "2016-07-02 21:51:11"
+ isArray: false
+ name: approval_set
+ required: false
+ secret: false
+ - default: false
+ description: To whom the ticket is assigned
+ isArray: false
+ name: assigned_to
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: business_duration
+ required: false
+ secret: false
+ - default: false
+ description: Business service
+ isArray: false
+ name: business_service
+ required: false
+ secret: false
+ - default: false
+ description: Business source
+ isArray: false
+ name: business_stc
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: calendar_duration
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: caller_id
+ required: false
+ secret: false
+ - default: false
+ description: Category name
+ isArray: false
+ name: category
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: caused_by
+ required: false
+ secret: false
+ - default: false
+ description: Ticket's close code
+ isArray: false
+ name: close_code
+ required: false
+ secret: false
+ - default: false
+ description: Close notes of the ticket
+ isArray: false
+ name: close_notes
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: closed_at
+ required: false
+ secret: false
+ - default: false
+ description: User who closed the ticket
+ isArray: false
+ name: closed_by
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: cmdb_ci
+ required: false
+ secret: false
+ - default: false
+ description: Format type journal input
+ isArray: false
+ name: comments
+ required: false
+ secret: false
+ - default: false
+ description: Format type journal input
+ isArray: false
+ name: comments_and_work_notes
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: company
+ required: false
+ secret: false
+ - default: false
+ description: Contact type
+ isArray: false
+ name: contact_type
+ required: false
+ secret: false
+ - default: false
+ description: Correlation display
+ isArray: false
+ name: correlation_display
+ required: false
+ secret: false
+ - default: false
+ description: Correlation ID
+ isArray: false
+ name: correlation_id
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: delivery_plan
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether to display comments and work notes (true/false)
+ isArray: false
+ name: display
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Ticket description
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: due_date
+ required: false
+ secret: false
+ - default: false
+ description: Escalation
+ isArray: false
+ name: escalation
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: expected_start
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: follow_up
+ required: false
+ secret: false
+ - default: false
+ description: UID format list
+ isArray: false
+ name: group_list
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Is the ticket solved in the knowledge base
+ isArray: false
+ name: knowledge
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Location of the ticket
+ isArray: false
+ name: location
+ required: false
+ secret: false
+ - default: false
+ description: SLA of the ticket
+ isArray: false
+ name: made_sla
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Notify about this ticket
+ isArray: false
+ name: notify
+ predefined:
+ - '1'
+ - '0'
+ required: false
+ secret: false
+ - default: false
+ description: Order number
+ isArray: false
+ name: order
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: parent
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: parent_incident
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: problem_id
+ required: false
+ secret: false
+ - default: false
+ description: The number of users previously included in the ticket
+ isArray: false
+ name: reassignment_count
+ required: false
+ secret: false
+ - default: false
+ description: The number of times the ticket has been reopened
+ isArray: false
+ name: reopen_count
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: resolved_at
+ required: false
+ secret: false
+ - default: false
+ description: UID Format
+ isArray: false
+ name: resolved_by
+ required: false
+ secret: false
+ - default: false
+ description: UID
+ isArray: false
+ name: rfc
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: sla_due
+ required: false
+ secret: false
+ - default: false
+ description: Subcategory
+ isArray: false
+ name: subcategory
+ required: false
+ secret: false
+ - default: false
+ description: Last updated by
+ isArray: false
+ name: sys_updated_by
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: sys_updated_on
+ required: false
+ secret: false
+ - default: false
+ description: Input from the end user
+ isArray: false
+ name: user_input
+ required: false
+ secret: false
+ - default: false
+ description: A list of watched tickets
+ isArray: false
+ name: watch_list
+ required: false
+ secret: false
+ - default: false
+ description: 'Format: YYYY-MM-DD HH:MM:SS'
+ isArray: false
+ name: work_end
+ required: false
+ secret: false
+ - default: false
+ description: Format journal list
+ isArray: false
+ name: work_notes
+ required: false
+ secret: false
+ - default: false
+ description: List with UIDs
+ isArray: false
+ name: work_notes_list
+ required: false
+ secret: false
+ - default: false
+ description: Date when work on the ticket started
+ isArray: false
+ name: work_start
+ required: false
+ secret: false
+ - default: false
+ description: UID
+ isArray: false
+ name: assignment_group
+ required: false
+ secret: false
+ - default: false
+ description: Incident state (integer)
+ isArray: false
+ name: incident_state
+ required: false
+ secret: false
+ - default: false
+ description: Ticket number
+ isArray: false
+ name: number
+ required: false
+ secret: false
+ - default: false
+ description: Priority of the ticket (number)
+ isArray: false
+ name: priority
+ required: false
+ secret: false
+ - default: false
+ description: System ID of the ticket to update
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: true
+ description: Deprecated. Use servicenow-update-ticket or servicenow-update-record
+ instead.
+ execution: false
+ name: servicenow-incident-update
+ - arguments:
+ - default: false
+ description: Ticket System ID
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ - sc_request
+ - sc_task
+ required: false
+ secret: false
+ deprecated: false
+ description: Delete a ticket from ServiceNow
+ execution: false
+ name: servicenow-delete-ticket
+ - arguments:
+ - default: false
+ description: Ticket System ID
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ - sc_request
+ - sc_task
+ required: false
+ secret: false
+ - default: false
+ description: The link to publish in the ServiceNow ticket, in valid URL format,
+ e.g. http://www.demisto.com
+ isArray: false
+ name: link
+ required: true
+ secret: false
+ - default: false
+ description: Whether to publish the link as a comment on the ticket (boolean).
+ If false, the link is published as a work note
+ isArray: false
+ name: post-as-comment
+ required: false
+ secret: false
+ - default: false
+ description: The text to represent the link
+ isArray: false
+ name: text
+ required: false
+ secret: false
+ deprecated: false
+ description: Add a link to a specific ticket
+ execution: false
+ name: servicenow-add-link
+ - arguments:
+ - default: false
+ description: Ticket System ID
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ required: false
+ secret: false
+ - default: false
+ description: The link to publish in the ServiceNow ticket, in valid URL format,
+ e.g. http://www.demisto.com
+ isArray: false
+ name: link
+ required: true
+ secret: false
+ - default: false
+ description: Whether to publish the link as a comment on the ticket (boolean,
+ default true). If false, the link is published as a work note
+ isArray: false
+ name: post-as-comment
+ required: false
+ secret: false
+ - default: false
+ description: The text to represent the link
+ isArray: false
+ name: text
+ required: false
+ secret: false
+ deprecated: true
+ description: Deprecated. Use servicenow-add-link instead.
+ execution: false
+ name: servicenow-incident-add-link
+ - arguments:
+ - default: false
+ description: Ticket System ID
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ - sc_request
+ - sc_task
+ required: false
+ secret: false
+ - default: false
+ description: Comment to add
+ isArray: false
+ name: comment
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Whether to publish the note as a comment on the ticket.
+ isArray: false
+ name: post-as-comment
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Add a comment to a specific ticket, by ticket ID
+ execution: false
+ name: servicenow-add-comment
+ - arguments:
+ - default: false
+ description: Ticket System ID
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ required: false
+ secret: false
+ - default: false
+ description: Comment to add
+ isArray: false
+ name: comment
+ required: true
+ secret: false
+ - default: false
+ description: Whether to publish the comment on the ticket (boolean). If false,
+ it is added as a work note
+ isArray: false
+ name: post-as-comment
+ required: false
+ secret: false
+ deprecated: true
+ description: Deprecated. Use servicenow-add-comment instead.
+ execution: false
+ name: servicenow-incident-add-comment
+ - arguments:
+ - default: false
+ defaultValue: '10'
+ description: The maximum number of tickets to retrieve
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ - sc_request
+ - sc_task
+ required: false
+ secret: false
+ - default: false
+ description: The query to run. To learn about querying in ServiceNow, see https://docs.servicenow.com/bundle/istanbul-servicenow-platform/page/use/common-ui-elements/reference/r_OpAvailableFiltersQueries.html
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: Starting record index to begin retrieving records from
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieve ticket info with a query
+ execution: false
+ name: servicenow-query-tickets
+ outputs:
+ - contextPath: Ticket.ID
+ description: The unique ticket identifier.
+ type: string
+ - contextPath: Ticket.Creator
+ description: A string field that indicates the user who created the ticket.
+ type: string
+ - contextPath: Ticket.CreatedOn
+ description: The date and time when the ticket was created.
+ type: date
+ - contextPath: Ticket.Assignee
+ description: Specifies the user assigned to complete the ticket. By default,
+ this field uses a reference qualifier to only display users with the itil
+ role.
+ type: string
+ - contextPath: Ticket.State
+ description: Status of the ticket.
+ type: string
+ - contextPath: Ticket.Summary
+ description: A human-readable title for the record.
+ type: string
+ - contextPath: Ticket.Number
+ description: The display value of the ticket.
+ type: string
+ - contextPath: Ticket.Active
+ description: Specifies whether work is still being done on a task or whether
+ the work for the task is complete.
+ type: boolean
+ - contextPath: Ticket.AdditionalComments
+ description: Comments about the task record.
+ type: Unknown
+ - contextPath: Ticket.Priority
+ description: Specifies how high a priority the ticket should be for the assignee.
+ type: string
+ - contextPath: Ticket.OpenedAt
+ description: The date and time when the ticket was opened for the first time.
+ type: date
+ - contextPath: Ticket.Escalation
+ description: Indicates how long the ticket has been open.
+ type: string
+ - arguments:
+ - default: false
+ defaultValue: '10'
+ description: The maximum number of tickets to retrieve
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: Query
+ isArray: false
+ name: sysparm_query
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ - sc_request
+ - sc_task
+ required: false
+ secret: false
+ - default: false
+ description: The query to run. To learn about querying in ServiceNow, see https://docs.servicenow.com/bundle/istanbul-servicenow-platform/page/use/common-ui-elements/reference/r_OpAvailableFiltersQueries.html
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ deprecated: true
+ description: Deprecated. Use servicenow-query-table or servicenow-query-tickets
+ instead.
+ execution: false
+ name: servicenow-query
+ outputs:
+ - contextPath: Ticket.ID
+ description: ServiceNow ticket System ID
+ type: string
+ - contextPath: Ticket.Creator
+ description: ServiceNow ticket creator
+ type: string
+ - contextPath: Ticket.Assignee
+ description: ServiceNow ticket assignee
+ type: string
+ - contextPath: Ticket.State
+ description: ServiceNow ticket state
+ type: string
+ - contextPath: Ticket.Summary
+ description: ServiceNow ticket short summary
+ type: string
+ - contextPath: Ticket.Number
+ description: ServiceNow ticket number
+ type: string
+ - arguments:
+ - default: false
+ defaultValue: '10'
+ description: The maximum number of tickets to retrieve
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: Query
+ isArray: false
+ name: sysparm_query
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ required: false
+ secret: false
+ deprecated: true
+ description: Deprecated. Use servicenow-query-table or servicenow-query-tickets
+ instead.
+ execution: false
+ name: servicenow-incidents-query
+ outputs:
+ - contextPath: Ticket.ID
+ description: ServiceNow ticket System ID
+ type: string
+ - contextPath: Ticket.Creator
+ description: ServiceNow ticket creator
+ type: string
+ - contextPath: Ticket.Assignee
+ description: ServiceNow ticket assignee
+ type: string
+ - contextPath: Ticket.State
+ description: ServiceNow ticket state
+ type: string
+ - contextPath: Ticket.Summary
+ description: ServiceNow ticket short summary
+ type: string
+ - contextPath: Ticket.Number
+ description: ServiceNow ticket number
+ type: string
+ - arguments:
+ - default: false
+ description: Ticket System ID
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ - sc_request
+ - sc_task
+ required: false
+ secret: false
+ - default: false
+ description: War-room entry ID that includes the file
+ isArray: false
+ name: file_id
+ required: true
+ secret: false
+ - default: false
+ description: File name for the uploaded file, overriding the existing file name
+ in the entry
+ isArray: false
+ name: file_name
+ required: false
+ secret: false
+ deprecated: false
+ description: Upload a file to a specific ticket
+ execution: false
+ name: servicenow-upload-file
+ outputs:
+ - contextPath: ServiceNow.Ticket.File.Filename
+ description: Name of the file
+ type: string
+ - contextPath: ServiceNow.Ticket.File.Link
+ description: Download link for the file
+ type: string
+ - contextPath: ServiceNow.Ticket.File.SystemID
+ description: System ID of the file
+ type: string
+ - arguments:
+ - default: false
+ description: Ticket System ID
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: incident
+ description: Ticket type
+ isArray: false
+ name: ticket_type
+ predefined:
+ - incident
+ - problem
+ - change_request
+ required: false
+ secret: false
+ - default: false
+ description: War-room entry ID that includes the file
+ isArray: false
+ name: file_id
+ required: true
+ secret: false
+ - default: false
+ description: File name for the uploaded file, overriding the existing file name
+ in the entry
+ isArray: false
+ name: file_name
+ required: false
+ secret: false
+ deprecated: true
+ description: Deprecated. Use servicenow-upload-file instead.
+ execution: false
+ name: servicenow-incident-upload-file
+ outputs:
+ - contextPath: Ticket.File.Filename
+ description: Name of the file
+ type: string
+ - contextPath: Ticket.File.Link
+ description: Download link for the file
+ type: string
+ - contextPath: Ticket.File.SystemID
+ description: System ID of the file
+ type: string
+ - arguments:
+ - default: false
+ description: ServiceNow group name
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: true
+ description: Deprecated. Use servicenow-query-groups instead.
+ execution: false
+ name: servicenow-get-groups
+ outputs:
+ - contextPath: ServiceNowGroups.GroupId
+ description: Group ID
+ type: string
+ - contextPath: ServiceNowGroups.GroupName
+ description: Group name
+ type: string
+ - arguments:
+ - default: false
+ description: Machine name
+ isArray: false
+ name: computerName
+ required: true
+ secret: false
+ deprecated: true
+ description: Deprecated. Use servicenow-query-computers instead.
+ execution: false
+ name: servicenow-get-computer
+ outputs:
+ - contextPath: ServiceNowComputer.sys_id
+ description: ID
+ type: string
+ - contextPath: ServiceNowComputer.u_code
+ description: Code
+ type: string
+ - contextPath: ServiceNowComputer.support_group
+ description: Support group
+ type: string
+ - contextPath: ServiceNowComputer.os
+ description: Operating System
+ type: string
+ - contextPath: ServiceNowComputer.comments
+ description: Comments
+ type: string
+ - arguments:
+ - default: false
+ description: Record system ID
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ description: Comma-separated list of table fields to display and output to the
+ context, e.g. name,tag,company. The ID field is added by default.
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: The name of the table to get the record from
+ isArray: false
+ name: table_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieve record information by specific record ID
+ execution: false
+ name: servicenow-get-record
+ outputs:
+ - contextPath: ServiceNow.Record.ID
+ description: The unique record identifier for the record.
+ type: string
+ - contextPath: ServiceNow.Record.UpdatedBy
+ description: A string field that indicates the user who most recently updated
+ the record.
+ type: string
+ - contextPath: ServiceNow.Record.UpdatedAt
+ description: A time-stamp field that indicates the date and time of the most
+ recent update.
+ type: date
+ - contextPath: ServiceNow.Record.CreatedBy
+ description: A string field that indicates the user who created the record.
+ type: string
+ - contextPath: ServiceNow.Record.CreatedOn
+ description: A time-stamp field that indicates when the record was created.
+ type: date
+ - arguments:
+ - default: false
+ description: The name of the table to query
+ isArray: false
+ name: table_name
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: The maximum number of records to retrieve
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: The query to run. For more information about querying in ServiceNow,
+ see https://docs.servicenow.com/bundle/istanbul-servicenow-platform/page/use/common-ui-elements/reference/r_OpAvailableFiltersQueries.html
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: Comma-separated list of table fields to display and output to the
+ context, e.g. name,tag,company. The ID field is added by default.
+ isArray: true
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: Starting record index to begin retrieving records from
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ deprecated: false
+ description: Query a specified table in ServiceNow
+ execution: false
+ name: servicenow-query-table
+ outputs:
+ - contextPath: ServiceNow.Results.ID
+ description: The unique record identifier for the record.
+ type: string
+ - contextPath: ServiceNow.Results.UpdatedBy
+ description: A string field that indicates the user who most recently updated
+ the record.
+ type: string
+ - contextPath: ServiceNow.Results.UpdatedAt
+ description: A time-stamp field that indicates the date and time of the most
+ recent update.
+ type: date
+ - contextPath: ServiceNow.Results.CreatedBy
+ description: A string field that indicates the user who created the record.
+ type: string
+ - contextPath: ServiceNow.Results.CreatedOn
+ description: A time-stamp field that indicates when the record was created.
+ type: date
+ - arguments:
+ - default: false
+ description: The name of the table to create a record in.
+ isArray: false
+ name: table_name
+ required: true
+ secret: false
+ - default: false
+ description: 'Fields and their values to create the record with, in the format:
+ fieldname1=value;fieldname2=value;...'
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: 'Custom (user-defined) fields, in the format: fieldname1=value;fieldname2=value;...'
+ isArray: false
+ name: custom_fields
+ required: false
+ secret: false
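+ # Illustrative usage only (field names are hypothetical): fields="name=my-server;company=ACME" custom_fields="u_environment=production"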
+ deprecated: false
+ description: Create a new record in a specified ServiceNow table
+ execution: false
+ name: servicenow-create-record
+ outputs:
+ - contextPath: ServiceNow.Record.ID
+ description: The unique record identifier for the record.
+ type: string
+ - contextPath: ServiceNow.Record.UpdatedBy
+ description: A string field that indicates the user who most recently updated
+ the record.
+ type: string
+ - contextPath: ServiceNow.Record.UpdatedAt
+ description: A time-stamp field that indicates the date and time of the most
+ recent update.
+ type: date
+ - contextPath: ServiceNow.Record.CreatedBy
+ description: A string field that indicates the user who created the record.
+ type: string
+ - contextPath: ServiceNow.Record.CreatedOn
+ description: A time-stamp field that indicates when the record was created.
+ type: date
+ - arguments:
+ - default: false
+ description: The name of the table to update the record in
+ isArray: false
+ name: table_name
+ required: true
+ secret: false
+ - default: false
+ description: The system ID of the record to update
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ description: 'Fields and their values to update in the record, in the format:
+ fieldname1=value;fieldname2=value;...'
+ isArray: false
+ name: fields
+ required: false
+ secret: false
+ - default: false
+ description: 'Custom (user-defined) fields and their values to update in the
+ record, in the format: fieldname1=value;fieldname2=value;...'
+ isArray: false
+ name: custom_fields
+ required: false
+ secret: false
+ deprecated: false
+ description: Update a record in a specified ServiceNow table
+ execution: false
+ name: servicenow-update-record
+ outputs:
+ - contextPath: ServiceNow.Record.ID
+ description: The unique record identifier for the record.
+ type: string
+ - contextPath: ServiceNow.Record.UpdatedBy
+ description: A string field that indicates the user who most recently updated
+ the record.
+ type: string
+ - contextPath: ServiceNow.Record.UpdatedAt
+ description: A time-stamp field that indicates the date and time of the most
+ recent update.
+ type: date
+ - contextPath: ServiceNow.Record.CreatedBy
+ description: A string field that indicates the user who created the record.
+ type: string
+ - contextPath: ServiceNow.Record.CreatedOn
+ description: A time-stamp field that indicates when the record was created.
+ type: date
+ - arguments:
+ - default: false
+ description: The table name
+ isArray: false
+ name: table_name
+ required: true
+ secret: false
+ - default: false
+ description: The system ID of the record to delete
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Delete a record in a specified ServiceNow table
+ execution: false
+ name: servicenow-delete-record
+ - arguments:
+ - default: false
+ description: Table name
+ isArray: false
+ name: table_name
+ required: true
+ secret: false
+ deprecated: false
+ description: List API fields for a specified ServiceNow table
+ execution: false
+ name: servicenow-list-table-fields
+ outputs:
+ - contextPath: ServiceNow.Field
+ description: Table API field name
+ type: string
+ - arguments:
+ - default: false
+ description: Query by computer sys_id
+ isArray: false
+ name: computer_id
+ required: false
+ secret: false
+ - default: false
+ description: Query by computer name
+ isArray: false
+ name: computer_name
+ required: false
+ secret: false
+ - default: false
+ description: Query by a specified query string. For more information about querying in
+ ServiceNow, see https://docs.servicenow.com/bundle/istanbul-servicenow-platform/page/use/common-ui-elements/reference/r_OpAvailableFiltersQueries.html
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ description: Query by asset tag
+ isArray: false
+ name: asset_tag
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Query results limit
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: Starting record index to begin retrieving records from
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ deprecated: false
+ description: Query the cmdb_ci_computer table in ServiceNow
+ execution: false
+ name: servicenow-query-computers
+ outputs:
+ - contextPath: ServiceNow.Computer.ID
+ description: Computer sys_id
+ type: string
+ - contextPath: ServiceNow.Computer.AssetTag
+ description: Computer Asset tag
+ type: string
+ - contextPath: ServiceNow.Computer.Name
+ description: Computer name
+ type: string
+ - contextPath: ServiceNow.Computer.DisplayName
+ description: Computer display name
+ type: string
+ - contextPath: ServiceNow.Computer.SupportGroup
+ description: Computer support group
+ type: string
+ - contextPath: ServiceNow.Computer.OperatingSystem
+ description: Computer operating system
+ type: string
+ - contextPath: ServiceNow.Computer.Company
+ description: Computer company sys_id
+ type: string
+ - contextPath: ServiceNow.Computer.AssignedTo
+ description: Computer assigned to user sys_id
+ type: string
+ - contextPath: ServiceNow.Computer.State
+ description: Computer state
+ type: string
+ - contextPath: ServiceNow.Computer.Cost
+ description: Computer cost
+ type: string
+ - contextPath: ServiceNow.Computer.Comments
+ description: Computer comments
+ type: string
+ - arguments:
+ - default: false
+ description: Query by group sys_id
+ isArray: false
+ name: group_id
+ required: false
+ secret: false
+ - default: false
+ description: Query by group name
+ isArray: false
+ name: group_name
+ required: false
+ secret: false
+ - default: false
+ description: Query by a specified query string. For more information about querying in
+ ServiceNow, see https://docs.servicenow.com/bundle/istanbul-servicenow-platform/page/use/common-ui-elements/reference/r_OpAvailableFiltersQueries.html
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Query results limit
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: Starting record index to begin retrieving records from
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ deprecated: false
+ description: Query the sys_user_group table in ServiceNow
+ execution: false
+ name: servicenow-query-groups
+ outputs:
+ - contextPath: ServiceNow.Group.ID
+ description: Group sys_id
+ type: string
+ - contextPath: ServiceNow.Group.Description
+ description: Group description
+ type: string
+ - contextPath: ServiceNow.Group.Name
+ description: Group name
+ type: string
+ - contextPath: ServiceNow.Group.Manager
+ description: Group manager sys_id
+ type: string
+ - contextPath: ServiceNow.Group.Updated
+ description: Group update time
+ type: date
+ - arguments:
+ - default: false
+ description: Query by user sys_id
+ isArray: false
+ name: user_id
+ required: false
+ secret: false
+ - default: false
+ description: Query by username
+ isArray: false
+ name: user_name
+ required: false
+ secret: false
+ - default: false
+ description: Query by a specified query string. For more information about querying in
+ ServiceNow, see https://docs.servicenow.com/bundle/istanbul-servicenow-platform/page/use/common-ui-elements/reference/r_OpAvailableFiltersQueries.html
+ isArray: false
+ name: query
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Query results limit
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: Starting record index to begin retrieving records from
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ deprecated: false
+ description: Query the sys_user table in ServiceNow
+ execution: false
+ name: servicenow-query-users
+ outputs:
+ - contextPath: ServiceNow.User.ID
+ description: User sys_id
+ type: string
+ - contextPath: ServiceNow.User.Name
+ description: User name (first + last)
+ type: string
+ - contextPath: ServiceNow.User.UserName
+ description: Username
+ type: string
+ - contextPath: ServiceNow.User.Email
+ description: User email
+ type: string
+ - contextPath: ServiceNow.User.Created
+ description: User creation time
+ type: date
+ - contextPath: ServiceNow.User.Updated
+ description: User update time
+ type: date
+ - arguments:
+ - default: false
+ description: The table label, e.g. Asset, Incident, IP address.
+ isArray: false
+ name: label
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Results limit
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: Starting record index to begin retrieving records from
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ deprecated: false
+ description: Get table names by label, for use in other commands
+ execution: false
+ name: servicenow-get-table-name
+ outputs:
+ - contextPath: ServiceNow.Table.ID
+ description: Table system ID
+ type: string
+ - contextPath: ServiceNow.Table.Name
+ description: Table name to use in commands, e.g. alm_asset
+ type: string
+ - contextPath: ServiceNow.Table.SystemName
+ description: Table system name, e.g. Asset
+ type: string
+ - arguments:
+ - default: true
+ description: Ticket System ID
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: The maximum number of ticket notes to retrieve
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: Offset of the ticket notes
+ isArray: false
+ name: offset
+ required: false
+ secret: false
+ deprecated: false
+ description: Get notes from the specified ServiceNow ticket - Read permissions
+ are required for the sys_journal_field table.
+ execution: false
+ name: servicenow-get-ticket-notes
+ outputs:
+ - contextPath: ServiceNow.Ticket.ID
+ description: Ticket ID
+ type: string
+ - contextPath: ServiceNow.Ticket.Note.Value
+ description: Ticket note value
+ type: unknown
+ - contextPath: ServiceNow.Ticket.Note.CreatedOn
+ description: Ticket note created on
+ type: date
+ - contextPath: ServiceNow.Ticket.Note.CreatedBy
+ description: Ticket note created by
+ type: string
+ - contextPath: ServiceNow.Ticket.Note.Type
+ description: Ticket note type
+ type: string
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- No test - Hibernating instance
diff --git a/Integrations/ServiceNow/ServiceNow_description.md b/Integrations/ServiceNow/ServiceNow_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/ServiceNow/ServiceNow_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/ServiceNow/ServiceNow_image.png b/Integrations/ServiceNow/ServiceNow_image.png
new file mode 100644
index 000000000000..8d977e51a9f8
Binary files /dev/null and b/Integrations/ServiceNow/ServiceNow_image.png differ
diff --git a/Integrations/Shodan_v2/CHANGELOG.md b/Integrations/Shodan_v2/CHANGELOG.md
new file mode 100644
index 000000000000..511b579dfd6f
--- /dev/null
+++ b/Integrations/Shodan_v2/CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+Display name clarification.
+
+## [19.9.1] - 2019-09-18
+#### New Integration
+A search engine used for searching Internet-connected devices
\ No newline at end of file
diff --git a/Integrations/Shodan_v2/Shodan_description.md b/Integrations/Shodan_v2/Shodan_description.md
new file mode 100644
index 000000000000..91d0c9dcb52f
--- /dev/null
+++ b/Integrations/Shodan_v2/Shodan_description.md
@@ -0,0 +1,437 @@
+## Overview
+---
+
+A search engine for Internet-connected devices.
+This integration was integrated and tested with version xx of Shodan_v2
+## Shodan_v2 Playbook
+---
+
+## Use Cases
+---
+
+## Configure Shodan_v2 on Demisto
+---
+
+1. Navigate to __Settings__ > __Integrations__ > __Servers & Services__.
+2. Search for Shodan_v2.
+3. Click __Add instance__ to create and configure a new integration instance.
+ * __Name__: a textual name for the integration instance.
+ * __Api Key__
+ * __Base url to Shodan API__
+ * __Trust self-signed certificate (insecure)__
+ * __Use system proxy settings__
+4. Click __Test__ to validate the URLs, token, and connection.
+## Fetched Incidents Data
+---
+
+## Commands
+---
+You can execute these commands from the Demisto CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+1. search
+2. ip
+3. shodan-search-count
+4. shodan-scan-ip
+5. shodan-scan-internet
+6. shodan-scan-status
+7. shodan-create-network-alert
+8. shodan-network-get-alert-by-id
+9. shodan-network-get-alerts
+10. shodan-network-delete-alert
+11. shodan-network-alert-set-trigger
+12. shodan-network-alert-remove-trigger
+13. shodan-network-alert-whitelist-service
+14. shodan-network-alert-remove-service-from-whitelist
+### 1. search
+---
+Search Shodan using the same query syntax as the website and use facets to get summary information for different properties.
+##### Base Command
+
+`search`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| query | Shodan search query. The provided string is used to search the database of banners in Shodan, with the additional option to provide filters inside the search query using a "filter:value" format. For example, the following search query would find Apache webservers located in Germany: "apache country:DE" | Required |
+| facets | A comma-separated list of properties to get summary information on. Property names can also be in the format of "property:count", where "count" is the number of facets that will be returned for a property (e.g. "country:100" to get the top 100 countries for a search query) | Optional |
+| page | Result page number to be fetched. Each page contains up to 100 results. | Optional |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Shodan.Banner.Org | String | The name of the organization that is assigned the IP space for this device |
+| Shodan.Banner.Isp | String | The ISP that is providing the organization with the IP space for this device. Consider this the "parent" of the organization in terms of IP ownership |
+| Shodan.Banner.Transport | String | Either "udp" or "tcp" to indicate which IP transport protocol was used to fetch the information |
+| Shodan.Banner.Asn | String | The autonomous system number (ex. "AS4837"). |
+| Shodan.Banner.IP | String | The IP address of the host as a string |
+| Shodan.Banner.Port | Number | The port number that the service is operating on |
+| Shodan.Banner.Ssl.versions | String | A list of SSL versions that are supported by the server. If a version isn't supported, the value is prefixed with a "-". Example: ["TLSv1", "-SSLv2"] means that the server supports TLSv1 but doesn't support SSLv2. |
+| Shodan.Banner.Hostnames | String | An array of strings containing all of the hostnames that have been assigned to the IP address for this device. |
+| Shodan.Banner.Location.City | String | The name of the city where the device is located |
+| Shodan.Banner.Location.Longitude | Number | The longitude for the geolocation of the device |
+| Shodan.Banner.Location.Latitude | Number | The latitude for the geolocation of the device |
+| Shodan.Banner.Location.Country | String | The name of the country where the device is located |
+| Shodan.Banner.Timestamp | Date | The timestamp for when the banner was fetched from the device in the UTC timezone |
+| Shodan.Banner.Domains | String | An array of strings containing the top-level domains for the hostnames of the device. This is a utility property in case you want to filter by TLD instead of subdomain. It is smart enough to handle global TLDs with several dots in the domain (ex. "co.uk") |
+| Shodan.Banner.OS | String | The operating system that powers the device |
+
+
+##### Command Example
+```!search query="country:HK product:Apache"```
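+
+A hypothetical invocation combining a filtered query with facets and paging (argument values are illustrative):
+```!search query="apache country:DE" facets="country:10,org" page=2```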
+
+##### Human Readable Output
+
+
+### 2. ip
+---
+Returns all services that have been found on the given host IP.
+##### Base Command
+
+`ip`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| ip | Host IP address | Required |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| IP.ASN | Unknown | Autonomous System Number (ASN) of the IP owner |
+| IP.Address | Unknown | IP Address |
+| IP.Geo.Country | Unknown | Country of given IP |
+| IP.Geo.Description | Unknown | Description of location |
+| IP.Geo.Location | Unknown | Latitude and longitude of given IP |
+| IP.Hostname | Unknown | Hostname |
+| Shodan.IP.Tags | String | The tags related to the IP |
+| Shodan.IP.Latitude | Number | The latitude for the geolocation of the device |
+| Shodan.IP.Org | String | The name of the organization that is assigned the IP space for this device |
+| Shodan.IP.ASN | String | The autonomous system number (ex. "AS4837"). |
+| Shodan.IP.ISP | String | The ISP that is providing the organization with the IP space for this device. Consider this the "parent" of the organization in terms of IP ownership |
+| Shodan.IP.Longitude | Number | The Longitude for the geolocation of the device |
+| Shodan.IP.LastUpdate | Date | The timestamp for when the banner was fetched from the device in the UTC timezone |
+| Shodan.IP.CountryName | String | The name of the country where the device is located |
+| Shodan.IP.OS | String | The operating system that powers the device |
+| Shodan.IP.Port | Number | The port number that the service is operating on |
+| Shodan.IP.Address | String | The IP address of the host as a string |
+
+
+##### Command Example
+```!ip ip="8.8.8.8"```
+
+##### Human Readable Output
+
+
+### 3. shodan-search-count
+---
+This method behaves identically to the "search" command, with the only difference that it does not return any host results; it only returns the total number of results that matched the query, and any facet information that was requested. As a result, this method does not consume query credits.
+##### Base Command
+
+`shodan-search-count`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| query | Shodan search query. The provided string is used to search the database of banners in Shodan, with the additional option to provide filters inside the search query using a "filter:value" format. | Required |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Shodan.Search.ResultCount | Number | Number of results generated by the search query |
+
+
+##### Command Example
+```!shodan-search-count query="country:HK product:Apache"```
+
+##### Human Readable Output
+
+
+### 4. shodan-scan-ip
+---
+Use this method to request Shodan to crawl a network.
+##### Base Command
+
+`shodan-scan-ip`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| ips | A comma-separated list of IPs or netblocks (in CIDR notation) that should get crawled. | Required |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Shodan.Scan.ID | String | The unique scan ID that was returned by shodan-scan-ip. |
+| Shodan.Scan.Status | String | The status of the scan job |
+
+
+##### Command Example
+```!shodan-scan-ip ips="1.1.1.69"```
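+
+A hypothetical call using the comma-separated/CIDR form of `ips` (addresses are illustrative):
+```!shodan-scan-ip ips="198.51.100.0/24,203.0.113.5"```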
+
+##### Human Readable Output
+
+
+### 5. shodan-scan-internet
+---
+This method is restricted to security researchers and companies with a Shodan Enterprise Data license.
+##### Base Command
+
+`shodan-scan-internet`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| port | The port that Shodan should crawl the Internet for | Required |
+| protocol | The name of the protocol that should be used to interrogate the port. | Required |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Shodan.Scan.ID | String | The ID of the scan job |
+
+
+##### Command Example
+```!shodan-scan-internet port="80" protocol="http"```
+
+##### Human Readable Output
+
+
+### 6. shodan-scan-status
+---
+Check the progress of a previously submitted scan request
+##### Base Command
+
+`shodan-scan-status`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| scanID | The unique scan ID that was returned by shodan-scan-ip. | Required |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Shodan.Scan.Id | String | The unique scan ID that was returned by shodan-scan-ip |
+| Shodan.Scan.Status | String | The status of the scan job |
+
+
+##### Command Example
+```!shodan-scan-status scanID="fnFNYGzNGJFNE8lQ"```
+
+##### Human Readable Output
+
+
+### 7. shodan-create-network-alert
+---
+Use this method to create a network alert for a defined IP/netblock, which can be used to subscribe to changes/events that are discovered within that range.
+##### Base Command
+
+`shodan-create-network-alert`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| alertName | The name to describe the network alert | Required |
+| ip | A list of IPs or network ranges defined using CIDR notation | Required |
+| expires | Number of seconds that the alert should be active | Optional |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Shodan.Alert.ID | String | The ID of the alert subscription |
+| Shodan.Alert.Expires | String | Number of seconds that the alert should be active |
+
+
+##### Command Example
+```!shodan-create-network-alert alertName="test_alert" ip="1.1.1.1"```
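+
+A hypothetical call that also sets the optional `expires` argument (values are illustrative):
+```!shodan-create-network-alert alertName="dmz_watch" ip="198.51.100.0/24" expires="86400"```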
+
+##### Human Readable Output
+
+
+### 8. shodan-network-get-alert-by-id
+---
+Get the details for a network alert
+##### Base Command
+
+`shodan-network-get-alert-by-id`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| alertID | AlertID | Required |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Shodan.Alert.ID | String | The ID of the alert subscription |
+| Shodan.Alert.Expires | String | Number of seconds that the alert should be active |
+
+
+##### Command Example
+```!shodan-network-get-alert-by-id alertID="Y6KRMXWQ8FPNSHHY"```
+
+##### Human Readable Output
+
+
+### 9. shodan-network-get-alerts
+---
+Get a list of all the created alerts
+##### Base Command
+
+`shodan-network-get-alerts`
+##### Input
+
+There are no input arguments for this command.
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| Shodan.Alert.ID | String | The ID of the alert subscription |
+| Shodan.Alert.Expires | String | Number of seconds that the alert should be active |
+
+
+##### Command Example
+```!shodan-network-get-alerts```
+
+##### Human Readable Output
+
+
+### 10. shodan-network-delete-alert
+---
+Remove the specified network alert.
+##### Base Command
+
+`shodan-network-delete-alert`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| alertID | AlertID | Required |
+
+
+##### Context Output
+
+There is no context output for this command.
+
+##### Command Example
+```!shodan-network-delete-alert alertID="Y6KRMXWQ8FPNSHHY"```
+
+##### Human Readable Output
+
+
+### 11. shodan-network-alert-set-trigger
+---
+Get notifications when the specified trigger is met.
+##### Base Command
+
+`shodan-network-alert-set-trigger`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| alertID | AlertID | Required |
+| Trigger | Trigger name | Required |
+
+
+##### Context Output
+
+There is no context output for this command.
+
+##### Command Example
+```!shodan-network-alert-set-trigger alertID="Y6KRMXWQ8FPNSHHY" Trigger="any"```
+
+##### Human Readable Output
+
+
+### 12. shodan-network-alert-remove-trigger
+---
+Stop getting notifications for the specified trigger.
+##### Base Command
+
+`shodan-network-alert-remove-trigger`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| alertID | AlertID | Required |
+| Trigger | Trigger name | Required |
+
+
+##### Context Output
+
+There is no context output for this command.
+
+##### Command Example
+```!shodan-network-alert-remove-trigger alertID="Y6KRMXWQ8FPNSHHY" Trigger="any"```
+
+##### Human Readable Output
+
+
+### 13. shodan-network-alert-whitelist-service
+---
+Ignore the specified service when it is matched for the trigger.
+##### Base Command
+
+`shodan-network-alert-whitelist-service`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| alertID | AlertID | Required |
+| trigger | Trigger name | Required |
+| service | Service specified in the format "ip:port" (ex. "1.1.1.1:80") | Required |
+
+
+##### Context Output
+
+There is no context output for this command.
+
+##### Command Example
+```!shodan-network-alert-whitelist-service alertID="Y6KRMXWQ8FPNSHHY" trigger="any" service="1.1.1.1:80"```
+
+##### Human Readable Output
+
+
+### 14. shodan-network-alert-remove-service-from-whitelist
+---
+Remove the specified service from the whitelist, so that notifications for the trigger resume.
+##### Base Command
+
+`shodan-network-alert-remove-service-from-whitelist`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| alertID | AlertID | Required |
+| trigger | Trigger name | Required |
+| service | Service specified in the format "ip:port" (ex. "1.1.1.1:80") | Required |
+
+
+##### Context Output
+
+There is no context output for this command.
+
+##### Command Example
+```!shodan-network-alert-remove-service-from-whitelist alertID="Y6KRMXWQ8FPNSHHY" trigger="any" service="1.1.1.1:80"```
+
+##### Human Readable Output
+
+
+
diff --git a/Integrations/Shodan_v2/Shodan_image.png b/Integrations/Shodan_v2/Shodan_image.png
new file mode 100644
index 000000000000..88b7f18e7a65
Binary files /dev/null and b/Integrations/Shodan_v2/Shodan_image.png differ
diff --git a/Integrations/Shodan_v2/Shodan_v2.py b/Integrations/Shodan_v2/Shodan_v2.py
new file mode 100644
index 000000000000..e8e549c01d9c
--- /dev/null
+++ b/Integrations/Shodan_v2/Shodan_v2.py
@@ -0,0 +1,453 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import json
+import requests
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+API_KEY = demisto.params()['api_key']
+
+# Remove trailing slash to prevent wrong URL path to service
+API_URL = demisto.params()['api_url'].rstrip('/')
+
+# Should we use SSL
+USE_SSL = not demisto.params().get('insecure', False)
+
+# Remove proxy if not set to true in params
+if not demisto.params().get('proxy'):
+ os.environ.pop('HTTP_PROXY', None)
+ os.environ.pop('HTTPS_PROXY', None)
+ os.environ.pop('http_proxy', None)
+ os.environ.pop('https_proxy', None)
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, uri, params=None, data=None, headers=None):
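+    """Makes a request to the Shodan REST API.
+
+    The API key is always appended as the 'key' query parameter. Returns the parsed
+    JSON body on HTTP 200; any other status code is reported via return_error().
+    """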
+ if params is None:
+ params = {}
+
+ params.update({
+ 'key': API_KEY
+ })
+
+ url = f'{API_URL}{uri}'
+ res = requests.request(method,
+ url,
+ params=params,
+ data=data,
+ headers=headers,
+ verify=USE_SSL)
+
+ if res.status_code != 200:
+ error_msg = f'Error in API call {url} [{res.status_code}] - {res.reason}'
+        if 'application/json' in res.headers.get('content-type', '') and 'error' in res.json():
+ error_msg += f': {res.json()["error"]}'
+
+ return_error(error_msg)
+
+ return res.json()
+
+
+def alert_to_demisto_result(alert):
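+    """Converts a raw Shodan alert object into a Demisto entry with Shodan.Alert context."""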
+ ec = {
+ 'Shodan': {
+ 'Alert': {
+ 'ID': alert.get('id', ''),
+ 'Expires': alert.get('expires', 0)
+ }
+ }
+ }
+
+ human_readable = tableToMarkdown(f'Alert ID {ec["Shodan"]["Alert"]["ID"]}', {
+ 'Name': alert.get('name', ''),
+        'IP': alert.get('filters', {}).get('ip', ''),
+ 'Expires': ec['Shodan']['Alert']['Expires']
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': alert,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def get_scan_status(scan_id):
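+    """Fetches the status of a submitted scan and returns it as a Demisto entry with Shodan.Scan context."""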
+ res = http_request("GET", f'/shodan/scan/{scan_id}')
+
+ ec = {
+ 'Shodan': {
+ 'Scan': {
+ 'ID': res.get('id', ''),
+ 'Status': res.get('status', '')
+ }
+ }
+ }
+
+ human_readable = tableToMarkdown(f'Scanning results for scan {scan_id}', {
+ 'ID': ec['Shodan']['Scan']['ID'],
+ 'Status': ec['Shodan']['Scan']['Status']
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+
+
+def test_module():
+ """
+ Performs basic get request to get item samples
+ """
+ http_request('GET', '/shodan/ports', {'query': 'test'})
+
+
+def search_command():
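+    """Searches the Shodan banner database, returning one entry per match with Shodan.Banner context."""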
+ query = demisto.args()['query']
+ facets = demisto.args().get('facets')
+ page = int(demisto.args().get('page', 1))
+
+ params = {'query': query}
+ if facets:
+ params['facets'] = facets
+ if page:
+ params['page'] = page
+
+ res = http_request('GET', '/shodan/host/search', params)
+
+ matches = res.get('matches', [])
+ for match in matches:
+ location = match.get('location', {'city': '', 'country_name': '', 'longitude': 0, 'latitude': 0})
+ ec = {
+ 'Shodan': {
+ 'Banner': {
+ 'Org': match.get('org', ''),
+ 'Isp': match.get('isp', ''),
+ 'Transport': match.get('transport', ''),
+ 'Asn': match.get('asn', ''),
+ 'IP': match.get('ip_str', ''),
+ 'Port': match.get('port', 0),
+ 'Ssl': {
+ 'versions': match.get('ssl', {'versions': []})['versions']
+ },
+ 'Hostnames': match.get('hostnames', []),
+ 'Location': {
+ 'City': location['city'],
+ 'Longitude': location['longitude'],
+ 'Latitude': location['latitude'],
+ 'Country': location['country_name']
+ },
+ 'Timestamp': match.get('timestamp', ''),
+ 'Domains': match.get('domains', []),
+ 'OS': match.get('os', '')
+ }
+ }
+ }
+
+ human_readable = tableToMarkdown(f'Search results for query "{query}" - page {page}, facets: {facets}',
+ {
+ 'IP': ec['Shodan']['Banner']['IP'],
+ 'Port': ec['Shodan']['Banner']['Port'],
+ 'Timestamp': ec['Shodan']['Banner']['Timestamp']
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': match,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+
+
+def ip_command():
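+    """Returns all services found on the given host IP, populating the standard IP and Shodan.IP contexts."""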
+ ip = demisto.args()['ip']
+
+ res = http_request('GET', f'/shodan/host/{ip}')
+
+ hostnames = res.get('hostnames')
+    hostname = hostnames[0] if hostnames else ''  # hostnames is a list; take the first entry only if it is non-empty
+
+ location = f'{round(res.get("latitude", 0.0), 3)},{round(res.get("longitude", 0.0), 3)}'
+
+ ip_details = {
+ 'ASN': res.get('asn', ''),
+ 'Address': ip,
+ 'Hostname': hostname,
+ 'Geo': {
+ 'Country': res.get('country_name', ''),
+ 'Location': location
+ }
+ }
+
+ shodan_ip_details = {
+        'Tags': res.get('tags', []),
+ 'Latitude': res.get('latitude', 0.0),
+ 'Longitude': res.get('longitude', 0.0),
+ 'Org': res.get('org', ''),
+ 'ASN': res.get('asn', ''),
+ 'ISP': res.get('isp', ''),
+ 'LastUpdate': res.get('last_update', ''),
+ 'CountryName': res.get('country_name', ''),
+ 'Address': ip,
+ 'OS': res.get('os', ''),
+ 'Port': res.get('ports', [])
+ }
+
+ ec = {
+ outputPaths['ip']: ip_details,
+ 'Shodan': {
+ 'IP': shodan_ip_details
+ }
+ }
+
+ human_readable = tableToMarkdown(f'Shodan details for IP {ip}', {
+ 'Country': ec[outputPaths['ip']]['Geo']['Country'],
+ 'Location': ec[outputPaths['ip']]['Geo']['Location'],
+ 'ASN': ec[outputPaths['ip']]['ASN'],
+ 'ISP': ec['Shodan']['IP']['ISP'],
+ 'Ports': ', '.join([str(x) for x in ec['Shodan']['IP']['Port']]),
+ 'Hostname': ec[outputPaths['ip']]['Hostname']
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+
+
+def shodan_search_count_command():
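+    """Returns only the total number of results for a query; this does not consume query credits."""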
+ query = demisto.args()['query']
+
+ res = http_request('GET', '/shodan/host/count', {'query': query})
+
+ ec = {
+ 'Shodan': {
+ 'Search': {
+ 'ResultCount': res.get('total', 0)
+ }
+ }
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': f'## {ec["Shodan"]["Search"]["ResultCount"]} results for query "{query}"',
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+
+
+def shodan_scan_ip_command():
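+    """Requests Shodan to crawl the given IP addresses or netblocks and reports the resulting scan status."""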
+ ips = demisto.args()['ips']
+
+ res = http_request('POST', '/shodan/scan', data={'ips': ips})
+
+    if 'id' not in res:
+        demisto.results({
+            'Type': entryTypes['error'],
+            'Contents': res,
+            'ContentsFormat': formats['json'],
+            'HumanReadable': '## Unknown answer format, no "id" field in response',
+            'HumanReadableFormat': formats['markdown'],
+        })
+        return  # avoid a KeyError below when the response has no scan ID
+
+    get_scan_status(res['id'])
+
+
+def shodan_scan_internet_command():
+ port = demisto.args()['port']
+
+ try:
+ port = int(port)
+ except ValueError:
+        return_error(f'Port must be a number, not {port}')
+
+ protocol = demisto.args()['protocol']
+
+ res = http_request('POST', '/shodan/scan/internet', data={
+ 'port': port,
+ 'protocol': protocol
+ })
+
+ ec = {
+ 'Shodan': {
+ 'Scan': {
+ 'ID': res.get('id', '')
+ }
+ }
+ }
+
+    human_readable = tableToMarkdown(f'Internet scanning results for port {port} and protocol {protocol}', {
+ 'ID': ec['Shodan']['Scan']['ID'],
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+
+
+def shodan_scan_status_command():
+ scan_id = demisto.args()['scanID']
+
+ get_scan_status(scan_id)
+
+
+def shodan_create_network_alert_command():
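+    """Creates a network alert for an IP address or netblock to subscribe to changes within its range."""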
+    alert_name = demisto.args()['alertName']
+    ip = demisto.args()['ip']
+    expires = demisto.args().get('expires', 0)
+    try:
+        expires = int(expires)
+    except ValueError:
+        return_error(f'Expires must be a number, not {expires}')
+
+ res = http_request('POST', '/shodan/alert', data=json.dumps({
+ 'name': alert_name,
+ 'filters': {
+ 'ip': ip
+ },
+ 'expires': expires
+ }), headers={'content-type': 'application/json'})
+
+ alert_to_demisto_result(res)
+
+
+def shodan_network_get_alert_by_id_command():
+ alert_id = demisto.args()['alertID']
+
+ res = http_request('GET', f'/shodan/alert/{alert_id}/info')
+
+ alert_to_demisto_result(res)
+
+
+def shodan_network_get_alerts_command():
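+    """Lists all created network alerts, returning one entry per alert."""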
+ res = http_request('GET', '/shodan/alert/info')
+
+ if len(res) == 0:
+ demisto.results('No alerts')
+ else:
+ for alert in res:
+ alert_to_demisto_result(alert)
+
+
+def shodan_network_delete_alert_command():
+ alert_id = demisto.args()['alertID']
+
+ http_request('DELETE', f'/shodan/alert/{alert_id}')
+
+ demisto.results(f'Deleted alert {alert_id}')
+
+
+def shodan_network_alert_set_trigger_command():
+ alert_id = demisto.args()['alertID']
+ trigger = demisto.args()['Trigger']
+
+ res = http_request('PUT', f'/shodan/alert/{alert_id}/trigger/{trigger}')
+
+ if not res.get('success', False):
+ return_error(f'Failed setting trigger {trigger} for alert {alert_id}')
+
+ demisto.results(f'Set trigger "{trigger}" for alert {alert_id}')
+
+
+def shodan_network_alert_remove_trigger_command():
+ alert_id = demisto.args()['alertID']
+ trigger = demisto.args()['Trigger']
+
+ res = http_request('DELETE', f'/shodan/alert/{alert_id}/trigger/{trigger}')
+
+ if not res.get('success', False):
+ return_error(f'Failed deleting trigger {trigger} for alert {alert_id}')
+
+ demisto.results(f'Deleted trigger "{trigger}" for alert {alert_id}')
+
+
+def shodan_network_alert_whitelist_service_command():
+ alert_id = demisto.args()['alertID']
+ trigger = demisto.args()['trigger']
+ service = demisto.args()['service']
+
+ res = http_request('PUT', f'/shodan/alert/{alert_id}/trigger/{trigger}/ignore/{service}')
+
+ if not res.get('success', False):
+ return_error(f'Failed whitelisting service "{service}" for trigger {trigger} in alert {alert_id}')
+
+ demisto.results(f'Whitelisted service "{service}" for trigger {trigger} in alert {alert_id}')
+
+
+def shodan_network_alert_remove_service_from_whitelist_command():
+ alert_id = demisto.args()['alertID']
+ trigger = demisto.args()['trigger']
+ service = demisto.args()['service']
+
+ res = http_request('DELETE', f'/shodan/alert/{alert_id}/trigger/{trigger}/ignore/{service}')
+
+ if not res.get('success', False):
+ return_error(
+ f'Failed removing service "{service}" for trigger {trigger} in alert {alert_id} from the whitelist')
+
+ demisto.results(f'Removed service "{service}" for trigger {trigger} in alert {alert_id} from the whitelist')
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+ demisto.results('ok')
+elif demisto.command() == 'search':
+ search_command()
+elif demisto.command() == 'ip':
+ ip_command()
+elif demisto.command() == 'shodan-search-count':
+ shodan_search_count_command()
+elif demisto.command() == 'shodan-scan-ip':
+ shodan_scan_ip_command()
+elif demisto.command() == 'shodan-scan-internet':
+ shodan_scan_internet_command()
+elif demisto.command() == 'shodan-scan-status':
+ shodan_scan_status_command()
+elif demisto.command() == 'shodan-create-network-alert':
+ shodan_create_network_alert_command()
+elif demisto.command() == 'shodan-network-get-alert-by-id':
+ shodan_network_get_alert_by_id_command()
+elif demisto.command() == 'shodan-network-get-alerts':
+ shodan_network_get_alerts_command()
+elif demisto.command() == 'shodan-network-delete-alert':
+ shodan_network_delete_alert_command()
+elif demisto.command() == 'shodan-network-alert-set-trigger':
+ shodan_network_alert_set_trigger_command()
+elif demisto.command() == 'shodan-network-alert-remove-trigger':
+ shodan_network_alert_remove_trigger_command()
+elif demisto.command() == 'shodan-network-alert-whitelist-service':
+ shodan_network_alert_whitelist_service_command()
+elif demisto.command() == 'shodan-network-alert-remove-service-from-whitelist':
+ shodan_network_alert_remove_service_from_whitelist_command()
diff --git a/Integrations/Shodan_v2/Shodan_v2.yml b/Integrations/Shodan_v2/Shodan_v2.yml
new file mode 100644
index 000000000000..7a1c89e240aa
--- /dev/null
+++ b/Integrations/Shodan_v2/Shodan_v2.yml
@@ -0,0 +1,390 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: Shodan_v2
+ version: -1
+configuration:
+- display: API Key
+ name: api_key
+ required: true
+ type: 4
+- defaultvalue: https://api.shodan.io
+  display: Base URL of the Shodan API
+ name: api_url
+ required: true
+ type: 0
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: A search engine used for searching Internet-connected devices
+display: Shodan v2
+name: Shodan_v2
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: The query for searching the database of banners. The search query supports filtering using the "filter:value" format to narrow your search. For example, the query "apache country:DE" returns Apache web servers located in Germany.
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ - default: false
+ description: A CSV list of properties on which to get summary information. The search query supports filtering using the "property:count" format to define the number of facets to return for a property. For example, the query "country:100" returns the top 100 countries.
+ isArray: false
+ name: facets
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '1'
+ description: The page number of the fetched results. Each page contains a maximum of 100 results.
+ isArray: false
+ name: page
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches Shodan using facets to get summary information on properties.
+ execution: false
+ name: search
+ outputs:
+ - contextPath: Shodan.Banner.Org
+      description: The name of the organization to which the IP address space for the searched device is assigned.
+ type: String
+ - contextPath: Shodan.Banner.Isp
+ description: The Internet Service Provider that provides the organization with the IP address space for the searched device.
+ type: String
+ - contextPath: Shodan.Banner.Transport
+ description: The IP address transport protocol used to fetch the summary information. Can be "UDP" or "TCP".
+ type: String
+ - contextPath: Shodan.Banner.Asn
+ description: The Autonomous System Number. For example, "AS4837".
+ type: String
+ - contextPath: Shodan.Banner.IP
+ description: The IP address of the host as a string.
+ type: String
+ - contextPath: Shodan.Banner.Port
+ description: The port number on which the service is operating.
+ type: Number
+ - contextPath: Shodan.Banner.Ssl.versions
+ description: The list of SSL versions that are supported by the server. Unsupported versions are prefixed with a "-". For example, ["TLSv1", "-SSLv2"] means that the server supports TLSv1, but does not support SSLv2.
+ type: String
+ - contextPath: Shodan.Banner.Hostnames
+ description: An array of strings containing all of the host names that have been assigned to the IP address for the searched device.
+ type: String
+ - contextPath: Shodan.Banner.Location.City
+ description: The city in which the searched device is located.
+ type: String
+ - contextPath: Shodan.Banner.Location.Longitude
+ description: The longitude of the geolocation of the searched device.
+ type: Number
+ - contextPath: Shodan.Banner.Location.Latitude
+ description: The latitude of the geolocation of the searched device.
+ type: Number
+ - contextPath: Shodan.Banner.Location.Country
+ description: The country in which the searched device is located.
+ type: String
+ - contextPath: Shodan.Banner.Timestamp
+ description: The timestamp in UTC format indicating when the banner was fetched from the searched device.
+ type: Date
+ - contextPath: Shodan.Banner.Domains
+ description: An array of strings containing the top-level domains for the host names of the searched device. It is a utility property for filtering by a top-level domain instead of a subdomain. It supports handling global top-level domains that have several dots in the domain. For example, "co.uk".
+ type: String
+ - contextPath: Shodan.Banner.OS
+ description: The operating system that powers the searched device.
+ type: String
+ - arguments:
+ - default: true
+ description: The IP address of the host.
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns all services that have been found on the IP address of the searched host.
+ execution: false
+ name: ip
+ outputs:
+ - contextPath: IP.ASN
+ description: The Autonomous System Number.
+ type: Unknown
+ - contextPath: IP.Address
+ description: The IP address.
+ type: Unknown
+ - contextPath: IP.Geo.Country
+ description: The country of a given IP address.
+ type: Unknown
+ - contextPath: IP.Geo.Description
+ description: The description of the location.
+ type: Unknown
+ - contextPath: IP.Geo.Location
+ description: The latitude and longitude of an IP address.
+ type: Unknown
+ - contextPath: IP.Hostname
+ description: The hostname of the IP address.
+ type: Unknown
+ - contextPath: Shodan.IP.Tags
+ description: The tags associated with the IP address.
+ type: String
+ - contextPath: Shodan.IP.Latitude
+ description: The latitude of the geolocation of the searched device.
+ type: Number
+ - contextPath: Shodan.IP.Org
+ description: The name of the organization to which the IP space for the searched device is assigned.
+ type: String
+ - contextPath: Shodan.IP.ASN
+ description: The Autonomous System Number. For example, "AS4837".
+ type: String
+ - contextPath: Shodan.IP.ISP
+ description: The Internet Service Provider that provides the organization with the IP space for the searched device.
+ type: String
+ - contextPath: Shodan.IP.Longitude
+ description: The longitude of the geolocation of the searched device.
+ type: Number
+ - contextPath: Shodan.IP.LastUpdate
+ description: The timestamp in UTC format indicating when the banner was fetched from the searched device.
+ type: Date
+ - contextPath: Shodan.IP.CountryName
+ description: The country in which the searched device is located.
+ type: String
+ - contextPath: Shodan.IP.OS
+ description: The operating system on which the searched device is running.
+ type: String
+ - contextPath: Shodan.IP.Port
+ description: The port number on which the service is operating.
+ type: Number
+ - contextPath: Shodan.IP.Address
+ description: The IP address of the host as a string.
+ type: String
+ - arguments:
+ - default: false
+ description: The query for searching the database of banners. The search query supports filtering using the "filter:value" format to narrow your search. For example, the query "apache country:DE" returns Apache web servers located in Germany.
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the total number of results that match only the specified query or facet settings. This command does not return host results. This command does not consume query credits.
+ execution: false
+ name: shodan-search-count
+ outputs:
+ - contextPath: Shodan.Search.ResultCount
+ description: The number of results matched in the search query.
+ type: Number
+ - arguments:
+ - default: false
+      description: A CSV list of IP addresses or netblocks (in CIDR notation) for Shodan to crawl.
+ isArray: false
+ name: ips
+ required: true
+ secret: false
+ deprecated: false
+ description: Requests Shodan to crawl a network.
+ execution: false
+ name: shodan-scan-ip
+ outputs:
+ - contextPath: Shodan.Scan.ID
+ description: The unique ID of the scan.
+ type: String
+ - contextPath: Shodan.Scan.Status
+ description: The status of the scan.
+ type: String
+ - arguments:
+ - default: false
+ description: The port for which Shodan crawls the Internet.
+ isArray: false
+ name: port
+ required: true
+ secret: false
+ - default: false
+ description: The name of the protocol used to interrogate the port.
+ isArray: false
+ name: protocol
+ required: true
+ secret: false
+ deprecated: false
+    description: Requests that Shodan perform a scan on the specified port and protocol.
+ execution: false
+ name: shodan-scan-internet
+ outputs:
+ - contextPath: Shodan.Scan.ID
+ description: The ID of the initial scan.
+ type: String
+ - arguments:
+ - default: false
+ description: The unique ID of the initial scan.
+ isArray: false
+ name: scanID
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks the progress of a previously submitted scan request on the specified port and protocol.
+ execution: false
+ name: shodan-scan-status
+ outputs:
+    - contextPath: Shodan.Scan.ID
+ description: The unique ID of the scan request checked for progress.
+ type: String
+ - contextPath: Shodan.Scan.Status
+ description: The status of the scan job checked for progress.
+ type: String
+ - arguments:
+ - default: false
+ description: The name of the network alert.
+ isArray: false
+ name: alertName
+ required: true
+ secret: false
+ - default: false
+ description: A list of IP addresses or network ranges defined in CIDR notation.
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ - default: false
+ description: The number of seconds for the network alert to remain active.
+ isArray: false
+ name: expires
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a network alert for a defined IP address or netblock used for subscribing to changes or events that are discovered within the netblock's range.
+ execution: false
+ name: shodan-create-network-alert
+ outputs:
+ - contextPath: Shodan.Alert.ID
+ description: The ID of the subscription of the specified network alert.
+ type: String
+ - contextPath: Shodan.Alert.Expires
+ description: The number of seconds that the specified network alert remains active.
+ type: String
+ - arguments:
+ - default: false
+ description: The ID of the network alert.
+ isArray: false
+ name: alertID
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets the details of a network alert.
+ execution: false
+ name: shodan-network-get-alert-by-id
+ outputs:
+ - contextPath: Shodan.Alert.ID
+ description: The ID of the subscription of the network alert.
+ type: String
+ - contextPath: Shodan.Alert.Expires
+ description: The number of seconds that the network alert remains active.
+ type: String
+ - deprecated: false
+ description: Gets a list of all created network alerts.
+ execution: false
+ name: shodan-network-get-alerts
+ outputs:
+ - contextPath: Shodan.Alert.ID
+ description: The IDs of the subscriptions of the network alerts.
+ type: String
+ - contextPath: Shodan.Alert.Expires
+ description: The number of seconds that the network alerts remain active.
+ type: String
+ - arguments:
+ - default: false
+ description: The ID of the network alert to remove.
+ isArray: false
+ name: alertID
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes the specified network alert.
+ execution: false
+ name: shodan-network-delete-alert
+ - arguments:
+ - default: false
+ description: The ID of the network alert for which to enable notifications.
+ isArray: false
+ name: alertID
+ required: true
+ secret: false
+ - default: false
+ description: The name of the trigger.
+ isArray: false
+ name: Trigger
+ required: true
+ secret: false
+ deprecated: false
+ description: Enables receiving notifications for network alerts that are set off by the specified triggers.
+ execution: false
+ name: shodan-network-alert-set-trigger
+ - arguments:
+ - default: false
+ description: The ID of the network alert for which to disable notifications.
+ isArray: false
+ name: alertID
+ required: true
+ secret: false
+ - default: false
+ description: The name of the trigger.
+ isArray: false
+ name: Trigger
+ required: true
+ secret: false
+ deprecated: false
+ description: Disables receiving notifications for network alerts that are set off by the specified triggers.
+ execution: false
+ name: shodan-network-alert-remove-trigger
+ - arguments:
+ - default: false
+ description: The ID of the network alert for which to ignore the specified services.
+ isArray: false
+ name: alertID
+ required: true
+ secret: false
+ - default: false
+ description: The name of the trigger.
+ isArray: false
+ name: trigger
+ required: true
+ secret: false
+ - default: false
+ description: The service specified in the "ip:port" format. For example, "1.1.1.1:80".
+ isArray: false
+ name: service
+ required: true
+ secret: false
+ deprecated: false
+ description: Ignores the specified services for network alerts that are set off by the specified triggers.
+ execution: false
+ name: shodan-network-alert-whitelist-service
+ - arguments:
+ - default: false
+ description: The ID of the alert for which to resume the specified services.
+ isArray: false
+ name: alertID
+ required: true
+ secret: false
+ - default: false
+ description: The name of the trigger.
+ isArray: false
+ name: trigger
+ required: true
+ secret: false
+ - default: false
+ description: The service specified in the "ip:port" format. For example, "1.1.1.1:80".
+ isArray: false
+ name: service
+ required: true
+ secret: false
+ deprecated: false
+ description: Resumes receiving notifications for network alerts that are set off by the specified triggers.
+ execution: false
+ name: shodan-network-alert-remove-service-from-whitelist
+ dockerimage: demisto/python3:3.7.3.286
+ subtype: python3
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- "Test-Shodan_v2"
diff --git a/Integrations/SignalSciences/SignalSciences.py b/Integrations/SignalSciences/SignalSciences.py
new file mode 100644
index 000000000000..03242ad3e372
--- /dev/null
+++ b/Integrations/SignalSciences/SignalSciences.py
@@ -0,0 +1,1421 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+import json
+import requests
+
+''' GLOBAL VARS '''
+USE_SSL = not demisto.params().get('insecure', False)
+
+EMAIL = demisto.params()['Email']
+TOKEN = demisto.params()['Token']
+CORPNAME = demisto.params()['corpName']
+FETCH_INTERVAL = demisto.params()['fetch_interval']
+SITES_TO_FETCH = demisto.params().get('sites_to_fetch', None)
+
+SERVER_URL = 'https://dashboard.signalsciences.net/api/v0/'
+
+'''SUFFIX ENDPOINTS'''
+GET_SITES_SUFFIX = 'corps/{0}/sites'
+WHITELIST_SUFFIX = 'corps/{0}/sites/{1}/whitelist'
+BLACKLIST_SUFFIX = 'corps/{0}/sites/{1}/blacklist'
+DELETE_WHITELIST_IP_SUFFIX = 'corps/{0}/sites/{1}/whitelist/{2}'
+DELETE_BLACKLIST_IP_SUFFIX = 'corps/{0}/sites/{1}/blacklist/{2}'
+SITE_CREATE_LIST_SUFFIX = 'corps/{0}/sites/{1}/lists'
+SITE_ACCESS_LIST_SUFFIX = 'corps/{0}/sites/{1}/lists/{2}'
+SITE_CREATE_ALERT_SUFFIX = 'corps/{0}/sites/{1}/alerts'
+SITE_ACCESS_ALERT_SUFFIX = 'corps/{0}/sites/{1}/alerts/{2}'
+CREATE_CORP_LIST_SUFFIX = 'corps/{0}/lists'
+ACCESS_CORP_LIST_SUFFIX = 'corps/{0}/lists/{1}'
+GET_EVENTS_SUFFIX = '/corps/{0}/sites/{1}/events'
+ACCESS_EVENT_SUFFIX = '/corps/{0}/sites/{1}/events/{2}'
+EXPIRE_EVENT_SUFFIX = '/corps/{0}/sites/{1}/events/{2}/expire'
+GET_REQUESTS_SUFFIX = '/corps/{0}/sites/{1}/requests'
+ACCESS_REQUEST_SUFFIX = '/corps/{0}/sites/{1}/requests/{2}'
+
+
+'''TABLE TITLES'''
+WHITELIST_TITLE = 'Signal Sciences - Whitelist'
+BLACKLIST_TITLE = 'Signal Sciences - Blacklist'
+SITES_LIST_TITLE = "Sites list"
+ADD_IP_TO_WHITELIST_TITLE = 'Signal Sciences - Adding an IP to Whitelist'
+ADD_IP_TO_BLACKLIST_TITLE = 'Signal Sciences - Adding an IP to Blacklist'
+ADD_ALERT_TITLE = 'Signal Sciences - Adding a new custom alert'
+UPDATE_LIST_TITLE = 'Signal Sciences - Updating a list'
+ALERT_LIST_TITLE = 'Signal Sciences - Alert list'
+LIST_OF_SITE_LISTS_TITLE = 'Signal Sciences - list of site lists'
+LIST_OF_CORP_LISTS_TITLE = 'Signal Sciences - list of corp lists'
+LIST_OF_EVENTS_TITLE = 'Signal Sciences - list of events'
+LIST_OF_REQUESTS_TITLE = 'Signal Sciences - list of requests'
+CREATE_SITE_LIST_TITLE = "Signal Sciences - creating a new site list \n\n List {0} has been successfully created"
+CREATE_CORP_LIST_TITLE = "Signal Sciences - creating a new corp list \n\n List {0} has been successfully created"
+DELETE_CORP_LIST_TITLE = "### Signal Sciences - deleting corp list \n\n List {0} has been successfully removed"
+EXPIRE_EVENT_TITLE = "### Signal Sciences - expiring event \n\n Event {0} has been successfully expired"
+WHITELIST_REMOVE_IP_TITLE = '### Signal Sciences - Removing an IP from Whitelist \n\n ' \
+ 'The IP {0} has been successfully removed from Whitelist.'
+DELETE_SITE_LIST_TITLE = "### Signal Sciences - deleting site list \n\n The list has been successfully removed"
+BLACKLIST_REMOVE_IP_TITLE = '### Signal Sciences - Removing an IP from Blacklist \n\n ' \
+ 'The IP {0} has been successfully removed from Blacklist.'
+IP_ADDED_TO_WHITELIST_TITLE = "The IP {0} has been successfully added to whitelist."
+IP_ADDED_TO_BLACKLIST_TITLE = "The IP {0} has been successfully added to blacklist."
+
+
+'''TABLE HEADERS'''
+ADD_IP_HEADERS = ['Source', 'Note', 'Expiration date']
+WHITELIST_OR_BLACKLIST_HEADERS = ['ID', 'Source', 'Expiry Date', 'Note', 'Created Date', 'Created By']
+LIST_HEADERS = ['Name', 'ID', 'Type', 'Entries', 'Description', 'Created By', 'Created Date', 'Updated Date']
+GET_SITE_HEADERS = ['Name', 'Created Date']
+EVENT_HEADERS = ['ID', 'Timestamp', 'Source', 'Remote Country Code', 'Action', 'Reasons', 'Remote Hostname',
+ 'User Agents', 'Request Count', 'Tag Count', 'Window', 'Date Expires', 'Expired By']
+REQUEST_HEADER = ['ID', 'Timestamp', 'Remote Country Code', 'Remote Hostname', 'Remote IP', 'User Agent',
+ 'Method', 'Server Name', 'Protocol', 'Path', 'URI', 'Response Code', 'Response Size',
+ 'Response Millis', 'Agent Response Code', 'Tags']
+ALERT_HEADERS = ['ID', 'Site ID', 'Created Date', 'Tag Name', 'Action', 'Long Name', 'Interval (In Minutes)',
+ 'Threshold', 'Block Duration Seconds', 'Skip Notifications', 'Enabled']
+
+
+'''List Types dict'''
+
+LEGAL_SIGSCI_LIST_TYPES = {
+ 'ip',
+ 'country',
+ 'string',
+ 'wildcard'
+}
+
+
+''' HELPER FUNCTIONS '''
+
+
+def camel_case_to_spaces(string_in_camel_case):
+ """Given a string in camelcase, will turn it into spaces
+
+ Args:
+ string_in_camel_case(String): the string in camel case
+
+ Returns:
+        A new string with words separated by spaces, each word starting with a capital letter
+ """
+ string_with_underscores = camel_case_to_underscore(string_in_camel_case)
+ new_string_with_spaces = string_with_underscores.replace('_', ' ')
+ return new_string_with_spaces.title()
+
+
+def dict_keys_from_camelcase_to_spaces(dict_with_camelcase_keys):
+ """Given a dict with keys in camelcase, returns a copy of it with keys in spaces (helloWorld becomes Hello World)
+
+ Args:
+ dict_with_camelcase_keys(Dictionary): the original dictionary, with keys in camelcase
+
+ Returns:
+ A new dictionary, with keys separated by spaces
+ """
+ dict_with_spaces_in_keys = {}
+ for key in dict_with_camelcase_keys:
+ key_with_spaces = camel_case_to_spaces(key)
+ dict_with_spaces_in_keys[key_with_spaces] = dict_with_camelcase_keys[key]
+ return dict_with_spaces_in_keys
+
+
+def return_list_of_dicts_with_spaces(list_of_camelcase_dicts):
+ """Given a list of dicts, iterates over it and for each dict makes all the keys with spaces instead of camelcase
+
+ Args:
+ list_of_camelcase_dicts(List): array of dictionaries
+
+ Returns:
+ A new array of dictionaries, with keys including spaces instead of camelcase
+ """
+ dicts_with_spaces = []
+ for dict_camelcase in list_of_camelcase_dicts:
+ dict_with_spaces = dict_keys_from_camelcase_to_spaces(dict_camelcase)
+ dicts_with_spaces.append(dict_with_spaces)
+
+ return dicts_with_spaces
+
+
+def has_api_call_failed(res):
+ """
+ Note: In SigSci, if an API call fails it returns a json with only 'message' in it.
+ """
+ if 'message' in res:
+ return True
+ return False
+
+
+def is_error_status(status):
+ if int(status) >= 400:
+ return True
+ return False
+
+
+def return_error_message(results_json):
+ error_message = results_json.get("message", None)
+ if error_message is None:
+ return_error("Error: An error occured")
+ return_error("Error: {0}".format(error_message))
+
+
+def http_request(method, url, params_dict=None, data=None, use_format_instead_of_raw=False):
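+    """Makes a request to the Signal Sciences API.
+
+    When use_format_instead_of_raw is True, the body is sent as a pre-serialized JSON
+    string via requests' 'data' argument; otherwise requests serializes it via 'json'.
+    Returns the parsed JSON body, or {} for empty, deletion, or failed responses.
+    """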
+ LOG('running %s request with url=%s\nparams=%s' % (method, url, json.dumps(params_dict)))
+
+ headers = {
+ 'Content-Type': 'application/json',
+ 'x-api-user': EMAIL,
+ 'x-api-token': TOKEN
+ }
+
+ try:
+        # Some Signal Sciences endpoints expect the body pre-serialized as a JSON string ("format"),
+        # while others expect requests to serialize it ("raw"). For "format" we pass json.dumps(data)
+        # via requests' 'data' argument; for "raw" we pass data via the 'json' argument.
+ if use_format_instead_of_raw:
+ res = requests.request(method,
+ url,
+ verify=USE_SSL,
+ params=params_dict,
+ headers=headers,
+ data=json.dumps(data))
+ else:
+ res = requests.request(method,
+ url,
+ verify=USE_SSL,
+ params=params_dict,
+ headers=headers,
+ json=data)
+
+ if is_error_status(res.status_code):
+ return_error_message(res.json())
+
+ # references to delete from whitelist/blacklist only
+ if 'whitelist/' in url or 'blacklist/' in url:
+ return {}
+ if res.status_code == 204:
+ return {}
+ res_json = res.json()
+ if has_api_call_failed(res_json):
+ return {}
+ return res_json
+
+ except Exception as e:
+ LOG(e)
+        raise
+
+
+def is_legal_list_type(list_type):
+ return list_type.lower() in LEGAL_SIGSCI_LIST_TYPES
+
+
+def represents_int(string_var):
+ if '.' in string_var:
+ return False
+ if string_var[0] in ('-', '+'):
+ return string_var[1:].isdigit()
+ return string_var.isdigit()
+
+
+def is_legal_interval_for_alert(interval):
+ """
+ Note: legal values for the interval on an alert are only 1, 10 or 60.
+ This function verifies the value given is compatible with this demand.
+ """
+ if not represents_int(interval):
+ return False
+ interval_int = int(interval)
+ if not (interval_int == 1 or interval_int == 10 or interval_int == 60):
+ return False
+ return True
+
+
+def validate_list_description_length(description):
+ if description is not None:
+ if len(description) > 140:
+ return_error("Error: Description given is too long. Description must be 140 characters or shorter")
+
+
+def validate_update_list_args(method, description):
+ if not (method == "Add" or method == "Remove"):
+ return_error("Error: Method given is illegal. Method must be 'Add' or 'Remove'")
+ validate_list_description_length(description)
+
+
+def validate_create_list_args(list_type, description):
+ if not is_legal_list_type(list_type):
+ return_error("Error: {0} is not a legal type for a list. Legal types are IP, String, "
+ "Country or Wildcard".format(list_type))
+ validate_list_description_length(description)
+
+
+def validate_alert_args(siteName, long_name, tag_name, interval, threshold, enabled, action):
+ if not represents_int(threshold):
+ return_error("Error: {0} is not a valid threshold value. Threshold must be an integer".format(threshold))
+ if not is_legal_interval_for_alert(interval):
+ return_error("Error: {0} is not a valid interval value. Interval value must be 1, 10 or 60".format(interval))
+ if len(long_name) < 3 or len(long_name) > 25:
+ return_error("Error: Illegal value for long_name argument - long_name must be between 3 and 25 characters long")
+ if not (enabled.lower() == 'true' or enabled.lower() == 'false'):
+ return_error("Error: Illegal value for 'enabled' argument - value must be 'True' or 'False'")
+ if not (action == 'info' or action == 'flagged'):
+ return_error("Error: Illegal value for 'action' argument - value must be 'info' or 'flagged'")
+
+
+def validate_get_events_args(from_time, until_time, sort, limit, page, action, ip, status):
+ if from_time is not None and not represents_int(str(from_time)):
+ return_error("Error: from_time must be an integer.")
+ if until_time is not None and not represents_int(str(until_time)):
+ return_error("Error: until_time must be an integer.")
+ if sort is not None and not (sort == "asc" or sort == "desc"):
+ return_error("Error: sort value must be 'asc' or 'desc'.")
+ if limit is not None and (not represents_int(str(limit)) or int(limit) < 0 or int(limit) > 1000):
+ return_error("Error: limit must be an integer, larger than 0 and at most 1000")
+ if action is not None and not (action == "flagged" or action == "info"):
+ return_error("Error: action value must be 'flagged' or 'info'")
+ if ip is not None and not is_ip_valid(str(ip)):
+ return_error("Error: illegal value for 'ip' argument. Must be a valid ip address")
+ if status is not None and not (status == 'active' or status == 'expired'):
+ return_error("Error: status value must be 'active' or 'expired'")
+ if page is not None and not represents_int(str(page)):
+ return_error("Error: page must be an integer.")
+
+
+def create_get_event_data_from_args(from_time, until_time, sort, since_id, max_id,
+ limit, page, action, tag, ip, status):
+ get_events_request_data = {}
+ if from_time is not None:
+ get_events_request_data['from'] = int(from_time)
+ if until_time is not None:
+ get_events_request_data['until'] = int(until_time)
+ if sort is not None:
+ get_events_request_data['sort'] = sort
+ if since_id is not None:
+ get_events_request_data['since_id'] = since_id
+ if max_id is not None:
+ get_events_request_data['max_id'] = max_id
+ if limit is not None:
+ get_events_request_data['limit'] = int(limit)
+ if page is not None:
+ get_events_request_data['page'] = int(page)
+ if action is not None:
+ get_events_request_data['action'] = action
+ if tag is not None:
+ get_events_request_data['tag'] = tag
+ if ip is not None:
+ get_events_request_data['ip'] = ip
+ if status is not None:
+ get_events_request_data['status'] = status
+ return get_events_request_data
+
+
+def event_entry_context_from_response(response_data):
+ entry_context = {
+ 'ID': response_data.get('id', ''),
+ 'Timestamp': response_data.get('timestamp', ''),
+ 'Source': response_data.get('source', ''),
+ 'Action': response_data.get('action', ''),
+ 'Reasons': response_data.get('reasons', ''),
+ 'RemoteCountryCode': response_data.get('remoteCountryCode', ''),
+        'RemoteHostname': response_data.get('remoteHostname', ''),
+ 'UserAgents': response_data.get('userAgents', ''),
+ 'RequestCount': response_data.get('requestCount', ''),
+ 'TagCount': response_data.get('tagCount', ''),
+ 'Window': response_data.get('window', ''),
+ 'DateExpires': response_data.get('expires', ''),
+ 'ExpiredBy': response_data.get('expiredBy', ''),
+ }
+ return entry_context
+
+
+def adjust_event_human_readable(entry_context_with_spaces, entry_context):
+ """Change keys in human readable data to match the headers.
+ """
+ entry_context_with_spaces["ID"] = entry_context.get("ID", "")
+
+
+def validate_fetch_requests_args(page, limit):
+ if limit is not None and (not represents_int(limit) or int(limit) < 0 or int(limit) > 1000):
+ return_error("Error: limit must be an integer, larger than 0 and at most 1000")
+ if page is not None and not represents_int(page):
+ return_error("Error: page must be an integer")
+
+
+def request_entry_context_from_response(response_data):
+ entry_context = {
+ 'ID': response_data.get('id', ''),
+ 'ServerHostName': response_data.get('serverHostName', ''),
+ 'RemoteIP': response_data.get('remoteIP', ''),
+        'RemoteHostname': response_data.get('remoteHostname', ''),
+ 'RemoteCountryCode': response_data.get('remoteCountryCode', ''),
+ 'UserAgent': response_data.get('userAgent', ''),
+ 'Timestamp': response_data.get('timestamp', ''),
+ 'Method': response_data.get('method', ''),
+ 'ServerName': response_data.get('serverName', ''),
+ 'Protocol': response_data.get('protocol', ''),
+ 'Path': response_data.get('path', ''),
+ 'URI': response_data.get('uri', ''),
+ 'ResponseCode': response_data.get('responseCode', ''),
+ 'ResponseSize': response_data.get('responseSize', ''),
+ 'ResponseMillis': response_data.get('responseMillis', ''),
+ 'AgentResponseCode': response_data.get('agentResponseCode', ''),
+ 'Tags': response_data.get('tags', ''),
+ }
+ return entry_context
+
+
+def adjust_request_human_readable(entry_context_with_spaces, entry_context):
+ """Change keys in human readable data to match the headers.
+ """
+ entry_context_with_spaces["ID"] = entry_context.get("ID", "")
+ entry_context_with_spaces["URI"] = entry_context.get("URI", "")
+ entry_context_with_spaces["Remote IP"] = entry_context.get("RemoteIP", "")
+
+
+def list_entry_context_from_response(response_data):
+ entry_context = {
+ 'ID': response_data.get('id', ''),
+ 'Name': response_data.get('name', ''),
+ 'Type': response_data.get('type', ''),
+ 'Entries': response_data.get('entries', ''),
+ 'Description': response_data.get('description', ''),
+ 'CreatedBy': response_data.get('createdBy', ''),
+ 'CreatedDate': response_data.get('created', ''),
+ 'UpdatedDate': response_data.get('updated', '')
+ }
+ return entry_context
+
+
+def adjust_list_human_readable(entry_context_with_spaces, entry_context):
+ """Change keys in human readable data to match the headers.
+ """
+ entry_context_with_spaces["ID"] = entry_context.get("ID", "")
+
+
+def alert_entry_context_from_response(response_data):
+ entry_context = {
+ 'ID': response_data.get('id', ''),
+ 'LongName': response_data.get('longName', ''),
+ 'SiteID': response_data.get('siteId', ''),
+ 'TagName': response_data.get('tagName', ''),
+ 'Interval': response_data.get('interval', ''),
+ 'Threshold': response_data.get('threshold', ''),
+ 'BlockDurationSeconds': response_data.get('blockDurationSeconds', ''),
+ 'SkipNotifications': response_data.get('skipNotifications', ''),
+ 'Enabled': response_data.get('enabled', ''),
+ 'Action': response_data.get('action', ''),
+ 'CreatedDate': response_data.get('created', ''),
+ }
+ return entry_context
+
+
+def adjust_alert_human_readable(entry_context_with_spaces, entry_context):
+ """Change keys in human readable data to match the headers.
+ """
+ entry_context_with_spaces["Interval (In Minutes)"] = entry_context_with_spaces.get("Interval", "")
+ entry_context_with_spaces["ID"] = entry_context.get("ID", "")
+ entry_context_with_spaces["Site ID"] = entry_context.get("siteID", "")
+
+
+def check_ip_is_valid(ip):
+ if not is_ip_valid(ip):
+ return_error("Error: IP argument is invalid. Please enter a valid IP address")
+
+
+def gen_entries_data_for_update_list_request(entries_list, method):
+ """Using the recieved args, generates the data object required by the API
+ in order to update a list (site or corp alike).
+ Args:
+ entries_list (list): a list containing IP addresses
+ method (string): The method we want to apply on the entries, either 'Add' or 'Remove'.
+ States if the IPs should be added or removed to the site/corp list.
+
+ Returns:
+ dict. Contains additions and deletions list with the entries we want to act on.
+ """
+ entries = {
+ "additions": [],
+ "deletions": []
+ } # type: Dict
+ entries_list_in_list_format = entries_list.split(',')
+ if method == "Add":
+ entries["additions"] = entries_list_in_list_format
+ else:
+ entries["deletions"] = entries_list_in_list_format
+ return entries
+
+
+def gen_context_for_add_to_whitelist_or_blacklist(response_data):
+ ip_context = {}
+ ip_context['ID'] = response_data.get('id', '')
+ ip_context['Note'] = response_data.get('note', '')
+ ip_context['Source'] = response_data.get('source', '')
+ ip_context['CreatedBy'] = response_data.get('createdBy', '')
+ ip_context['CreatedDate'] = response_data.get('created', '')
+ ip_context['ExpiryDate'] = response_data.get('expires', '')
+ return ip_context
+
+
+def generate_whitelist_or_blacklist_ip_context(response_data):
+ ips_contexts = []
+ for ip_data in response_data:
+ cur_ip_context = gen_context_for_add_to_whitelist_or_blacklist(ip_data)
+ ips_contexts.append(cur_ip_context)
+ return ips_contexts
+
+
+def gen_human_readable_for_add_to_whitelist_or_blacklist(ip_context):
+ human_readable = {}
+ human_readable['Note'] = ip_context['Note']
+ human_readable['Source'] = ip_context['Source']
+ human_readable['Expiration date'] = ip_context['ExpiryDate'] if ip_context['ExpiryDate'] else "Not Set"
+ return human_readable
+
+
+def add_ip_to_whitelist_or_blacklist(url, ip, note, expires=None):
+ check_ip_is_valid(ip)
+ data = {
+ 'source': ip,
+ 'note': note
+ }
+ if expires is not None:
+ data['expires'] = expires
+ res = http_request('PUT', url, data=data)
+ return res
+
+
+def get_all_sites_in_corp():
+ get_sites_request_response = get_sites()
+ data_of_sites_in_corp = get_sites_request_response.get('data', [])
+ return data_of_sites_in_corp
+
+
+def get_list_of_all_site_names_in_corp():
+ data_of_sites_in_corp = get_all_sites_in_corp()
+ list_of_all_sites_names_in_corp = []
+ for site_data in data_of_sites_in_corp:
+ site_name = site_data['name']
+ list_of_all_sites_names_in_corp.append(site_name)
+ return list_of_all_sites_names_in_corp
+
+
+def get_list_of_site_names_to_fetch():
+ list_of_site_names_to_fetch = None
+ if SITES_TO_FETCH:
+ list_of_site_names_to_fetch = SITES_TO_FETCH.split(',')
+ else:
+ list_of_site_names_to_fetch = get_list_of_all_site_names_in_corp()
+ return list_of_site_names_to_fetch
+
+
+def remove_milliseconds_from_iso(date_in_iso_format):
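+    """Strips the fractional-seconds part (everything after the '.') from an ISO 8601 timestamp."""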
+ date_parts_arr = date_in_iso_format.split('.')
+ date_in_iso_without_milliseconds = date_parts_arr[0]
+ return date_in_iso_without_milliseconds
+
+
+def get_events_from_given_sites(list_of_site_names_to_fetch, desired_from_time_in_posix):
+ events_from_given_sites = [] # type: List[Any]
+ for site_name in list_of_site_names_to_fetch:
+ fetch_from_site_response_json = get_events(siteName=site_name, from_time=desired_from_time_in_posix)
+
+ events_fetched_from_site = fetch_from_site_response_json.get('data', [])
+ events_from_given_sites.extend(events_fetched_from_site)
+ return events_from_given_sites
+
+
+def datetime_to_posix_without_milliseconds(datetime_object):
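+    """Converts a datetime object to a POSIX timestamp string in whole seconds."""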
+    timestamp_in_unix_milliseconds = date_to_timestamp(datetime_object, 'datetime.datetime')
+    # date_to_timestamp returns milliseconds; drop them to get a whole-second POSIX timestamp
+    posix_without_ms = str(timestamp_in_unix_milliseconds // 1000)
+    return posix_without_ms
+
+
+'''COMMANDS'''
+
+
+def test_module():
+ try:
+ url = SERVER_URL + 'corps'
+ http_request('GET', url)
+ except Exception as e:
+        raise Exception(str(e))
+ demisto.results("ok")
+
+
+def create_corp_list(list_name, list_type, entries_list, description=None):
+ """This method sends a request to the Signal Sciences API to create a new corp list.
+ Note:
+ Illegal entries (not compatible with the type) will result in a 404.
+ They will be handled by the http_request function.
+
+ Args:
+ list_name (string): A name for the newly created list.
+ list_type (string): The desired type for the newly created list.
+ entries_list (list): A list of entries, consistent with the given type.
+ description (string): A description for the newly created list.
+
+ Returns:
+ dict. The data returned from the Signal Sciences API in response to the request, loaded into a json.
+ """
+ validate_create_list_args(list_type, description)
+
+ url = SERVER_URL + CREATE_CORP_LIST_SUFFIX.format(CORPNAME)
+ entries_list_in_list_format = entries_list.split(',')
+ data_for_request = {
+ 'name': list_name.lower(),
+ 'type': list_type.lower(),
+ 'entries': entries_list_in_list_format
+ }
+ if description is not None:
+ data_for_request['description'] = description
+ new_list_data = http_request('POST', url, data=data_for_request)
+ return new_list_data
+
+
+def create_corp_list_command():
+ args = demisto.args()
+ response_data = create_corp_list(args['list_name'], args['list_type'], args['entries_list'],
+ args.get('description', None))
+ entry_context = list_entry_context_from_response(response_data)
+ entry_context_with_spaces = dict_keys_from_camelcase_to_spaces(entry_context)
+ human_readable = tableToMarkdown(CREATE_CORP_LIST_TITLE.format(args['list_name']), entry_context_with_spaces,
+ headers=LIST_HEADERS, removeNull=True)
+
+ adjust_list_human_readable(entry_context_with_spaces, entry_context)
+
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.List(val.ID==obj.ID)': entry_context,
+ }
+ )
+
+
+def get_corp_list(list_id):
+ url = SERVER_URL + ACCESS_CORP_LIST_SUFFIX.format(CORPNAME, list_id)
+ list_data = http_request('GET', url)
+ return list_data
+
+
+def get_corp_list_command():
+ args = demisto.args()
+ response_data = get_corp_list(args['list_id'])
+ entry_context = list_entry_context_from_response(response_data)
+ title = "Found data about list with ID: {0}".format(args['list_id'])
+ entry_context_with_spaces = dict_keys_from_camelcase_to_spaces(entry_context)
+ adjust_list_human_readable(entry_context_with_spaces, entry_context)
+ human_readable = tableToMarkdown(title, entry_context_with_spaces, headers=LIST_HEADERS, removeNull=True)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.List(val.ID==obj.ID)': entry_context,
+ }
+ )
+
+
+def delete_corp_list(list_id):
+ url = SERVER_URL + ACCESS_CORP_LIST_SUFFIX.format(CORPNAME, list_id)
+ list_data = http_request('DELETE', url)
+ return list_data
+
+
+def delete_corp_list_command():
+ args = demisto.args()
+ response_data = delete_corp_list(args['list_id'])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': response_data,
+ 'HumanReadable': DELETE_CORP_LIST_TITLE.format(args['list_id'])
+ })
+
+
+def update_corp_list(list_id, method, entries_list, description=None):
+ validate_update_list_args(method, description)
+ entries_in_update_format = gen_entries_data_for_update_list_request(entries_list, method)
+ url = SERVER_URL + ACCESS_CORP_LIST_SUFFIX.format(CORPNAME, list_id)
+ data_for_request = {
+ 'entries': entries_in_update_format
+ }
+ if description is not None:
+ data_for_request['description'] = description
+ response_data = http_request('PATCH', url, data=data_for_request)
+ return response_data
+
+
+def update_corp_list_command():
+ args = demisto.args()
+ response_data = update_corp_list(args['list_id'], args['method'], args['entries_list'],
+ args.get('description', None))
+ entry_context = list_entry_context_from_response(response_data)
+ entry_context_with_spaces = dict_keys_from_camelcase_to_spaces(entry_context)
+ adjust_list_human_readable(entry_context_with_spaces, entry_context)
+ human_readable = tableToMarkdown(UPDATE_LIST_TITLE, entry_context_with_spaces,
+ headers=LIST_HEADERS, removeNull=True)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.List(val.ID==obj.ID)': entry_context,
+ }
+ )
+
+
+def get_all_corp_lists():
+ url = SERVER_URL + CREATE_CORP_LIST_SUFFIX.format(CORPNAME)
+ response_data = http_request('GET', url)
+ return response_data
+
+
+def get_all_corp_lists_command():
+ response_data = get_all_corp_lists()
+ list_of_corp_lists = response_data.get('data', [])
+
+ corp_lists_contexts = []
+ for corp_list_data in list_of_corp_lists:
+ cur_corp_list_context = list_entry_context_from_response(corp_list_data)
+ corp_lists_contexts.append(cur_corp_list_context)
+
+ sidedata = "Number of corp lists in corp: {0}".format(len(list_of_corp_lists))
+ corp_lists_contexts_with_spaces = return_list_of_dicts_with_spaces(corp_lists_contexts)
+
+ for i in range(len(corp_lists_contexts)):
+ adjust_list_human_readable(corp_lists_contexts_with_spaces[i], corp_lists_contexts[i])
+
+ human_readable = tableToMarkdown(LIST_OF_CORP_LISTS_TITLE, corp_lists_contexts_with_spaces, headers=LIST_HEADERS,
+ removeNull=True, metadata=sidedata)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.List(val.ID==obj.ID)': corp_lists_contexts,
+ }
+ )
+
+
+def get_events(siteName, from_time=None, until_time=None, sort=None, since_id=None, max_id=None, limit=None, page=None,
+ action=None, tag=None, ip=None, status=None):
+
+ validate_get_events_args(from_time, until_time, sort, limit, page, action, ip, status)
+ url = SERVER_URL + GET_EVENTS_SUFFIX.format(CORPNAME, siteName)
+ data_for_request = create_get_event_data_from_args(from_time, until_time, sort, since_id, max_id,
+ limit, page, action, tag, ip, status)
+ events_data_response = http_request('GET', url, data=data_for_request)
+
+ return events_data_response
+
+
+def get_events_command():
+ args = demisto.args()
+ response_data = get_events(args['siteName'], args.get('from_time', None),
+ args.get('until_time', None), args.get('sort', None),
+ args.get('since_id', None), args.get('max_id', None),
+ args.get('limit', None), args.get('page', None),
+ args.get('action', None), args.get('tag', None),
+ args.get('ip', None), args.get('status', None))
+
+ list_of_events = response_data.get('data', [])
+ events_contexts = []
+ for event_data in list_of_events:
+ cur_event_context = event_entry_context_from_response(event_data)
+ events_contexts.append(cur_event_context)
+
+ events_contexts_with_spaces = return_list_of_dicts_with_spaces(events_contexts)
+
+ for i in range(len(events_contexts)):
+        adjust_event_human_readable(events_contexts_with_spaces[i], events_contexts[i])
+
+ sidedata = "Number of events in site: {0}".format(len(list_of_events))
+ human_readable = tableToMarkdown(LIST_OF_EVENTS_TITLE, events_contexts_with_spaces, removeNull=True,
+ headers=EVENT_HEADERS, metadata=sidedata)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.Site.Event(val.ID==obj.ID)': events_contexts,
+ }
+ )
+
+
+def get_event_by_id(siteName, event_id):
+ url = SERVER_URL + ACCESS_EVENT_SUFFIX.format(CORPNAME, siteName, event_id)
+ event_data_response = http_request('GET', url)
+ return event_data_response
+
+
+def get_event_by_id_command():
+ args = demisto.args()
+ response_data = get_event_by_id(args['siteName'], args['event_id'])
+ entry_context = event_entry_context_from_response(response_data)
+ title = "Found data about event with ID: {0}".format(args['event_id'])
+
+ entry_context_with_spaces = dict_keys_from_camelcase_to_spaces(entry_context)
+ adjust_event_human_readable(entry_context_with_spaces, entry_context)
+
+ human_readable = tableToMarkdown(title, entry_context_with_spaces, headers=EVENT_HEADERS, removeNull=True)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.Site.Event(val.ID==obj.ID)': entry_context,
+ }
+ )
+
+
+def expire_event(siteName, event_id):
+ url = SERVER_URL + EXPIRE_EVENT_SUFFIX.format(CORPNAME, siteName, event_id)
+ event_data_response = http_request('POST', url)
+ return event_data_response
+
+
+def expire_event_command():
+ args = demisto.args()
+ response_data = expire_event(args['siteName'], args['event_id'])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': response_data,
+ 'HumanReadable': EXPIRE_EVENT_TITLE.format(args['event_id'])
+ })
+
+
+def get_requests(siteName, page, limit, query):
+ url = SERVER_URL + GET_REQUESTS_SUFFIX.format(CORPNAME, siteName)
+ validate_fetch_requests_args(page, limit)
+ data_for_request = {}
+ if page is not None:
+ data_for_request['page'] = page
+ if limit is not None:
+ data_for_request['limit'] = limit
+ if query is not None:
+ data_for_request['q'] = query
+
+ requests_data_response = http_request('GET', url, data=data_for_request)
+ return requests_data_response
+
+
+def get_requests_command():
+ args = demisto.args()
+ response_data = get_requests(args['siteName'], args.get('page', None), args.get('limit', None),
+ args.get('query', None))
+ list_of_requests = response_data.get('data', [])
+ requests_contexts = []
+ for request_data in list_of_requests:
+ cur_request_context = request_entry_context_from_response(request_data)
+ requests_contexts.append(cur_request_context)
+
+ requests_contexts_with_spaces = return_list_of_dicts_with_spaces(requests_contexts)
+
+ for i in range(len(requests_contexts)):
+        adjust_request_human_readable(requests_contexts_with_spaces[i], requests_contexts[i])
+
+ sidedata = "Number of requests in site: {0}".format(len(list_of_requests))
+ human_readable = tableToMarkdown(LIST_OF_REQUESTS_TITLE, requests_contexts_with_spaces, headers=REQUEST_HEADER,
+ removeNull=True, metadata=sidedata)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.Site.Request(val.ID==obj.ID)': requests_contexts,
+ }
+ )
+
+
+def get_request_by_id(siteName, request_id):
+ url = SERVER_URL + ACCESS_REQUEST_SUFFIX.format(CORPNAME, siteName, request_id)
+ request_data_response = http_request('GET', url)
+ return request_data_response
+
+
+def get_request_by_id_command():
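+    """Get the data of the request with the given ID in siteName"""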
+ args = demisto.args()
+ response_data = get_request_by_id(args['siteName'], args['request_id'])
+ entry_context = request_entry_context_from_response(response_data)
+ title = "Found data about request with ID: {0}".format(args['request_id'])
+
+ entry_context_with_spaces = dict_keys_from_camelcase_to_spaces(entry_context)
+ adjust_request_human_readable(entry_context_with_spaces, entry_context)
+
+ human_readable = tableToMarkdown(title, entry_context_with_spaces, headers=REQUEST_HEADER, removeNull=True)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.Site.Request(val.ID==obj.ID)': entry_context,
+ }
+ )
+
+
+def create_site_list(siteName, list_name, list_type, entries_list, description=None):
+ validate_create_list_args(list_type, description)
+ url = SERVER_URL + SITE_CREATE_LIST_SUFFIX.format(CORPNAME, siteName)
+    # Split the CSV input and strip surrounding whitespace from each entry
+    entries_list_in_list_format = [entry.strip() for entry in entries_list.split(',')]
+ data_for_request = {
+ 'name': list_name.lower(),
+ 'type': list_type.lower(),
+ 'entries': entries_list_in_list_format
+ }
+ if description is not None:
+ data_for_request['description'] = description
+
+ new_list_data = http_request('POST', url, data=data_for_request)
+ return new_list_data
+
+
+def create_site_list_command():
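+    """Create a new list in siteName"""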
+ args = demisto.args()
+ response_data = create_site_list(args['siteName'], args['list_name'],
+ args['list_type'], args['entries_list'], args.get('description', None))
+ entry_context = list_entry_context_from_response(response_data)
+ entry_context_with_spaces = dict_keys_from_camelcase_to_spaces(entry_context)
+ adjust_list_human_readable(entry_context_with_spaces, entry_context)
+
+ human_readable = tableToMarkdown(CREATE_SITE_LIST_TITLE.format(args['list_name']), entry_context_with_spaces,
+ headers=LIST_HEADERS, removeNull=True)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.Site.List(val.ID==obj.ID)': entry_context,
+ }
+ )
+
+
+def get_site_list(siteName, list_id):
+ url = SERVER_URL + SITE_ACCESS_LIST_SUFFIX.format(CORPNAME, siteName, list_id)
+ list_data = http_request('GET', url)
+ return list_data
+
+
+def get_site_list_command():
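+    """Get the data of the list with the given ID in siteName"""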
+ args = demisto.args()
+ response_data = get_site_list(args['siteName'], args['list_id'])
+ entry_context = list_entry_context_from_response(response_data)
+ entry_context_with_spaces = dict_keys_from_camelcase_to_spaces(entry_context)
+ adjust_list_human_readable(entry_context_with_spaces, entry_context)
+
+ title = "Found data about list with ID: {0}".format(args['list_id'])
+ human_readable = tableToMarkdown(title, entry_context_with_spaces, headers=LIST_HEADERS, removeNull=True)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.Site.List(val.ID==obj.ID)': entry_context,
+ }
+ )
+
+
+def delete_site_list(siteName, list_id):
+ url = SERVER_URL + SITE_ACCESS_LIST_SUFFIX.format(CORPNAME, siteName, list_id)
+ list_data = http_request('DELETE', url)
+ return list_data
+
+
+def delete_site_list_command():
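+    """Delete the list with the given ID from siteName"""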
+ args = demisto.args()
+ response_data = delete_site_list(args['siteName'], args['list_id'])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': response_data,
+ 'HumanReadable': DELETE_SITE_LIST_TITLE.format(args['list_id'])
+ })
+
+
+def update_site_list(siteName, list_id, method, entries_list, description=None):
+ validate_update_list_args(method, description)
+
+ entries_in_update_format = gen_entries_data_for_update_list_request(entries_list, method)
+ url = SERVER_URL + SITE_ACCESS_LIST_SUFFIX.format(CORPNAME, siteName, list_id)
+ data_for_request = {
+ 'entries': entries_in_update_format
+ }
+ if description is not None:
+ data_for_request['description'] = description
+ response_data = http_request('PATCH', url, data=data_for_request)
+ return response_data
+
+
+def update_site_list_command():
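+    """Add or remove entries of the list with the given ID in siteName"""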
+ args = demisto.args()
+ response_data = update_site_list(args['siteName'], args['list_id'],
+ args['method'], args['entries_list'], args.get('description', None))
+ entry_context = list_entry_context_from_response(response_data)
+ entry_context_with_spaces = dict_keys_from_camelcase_to_spaces(entry_context)
+ adjust_list_human_readable(entry_context_with_spaces, entry_context)
+
+ human_readable = tableToMarkdown(UPDATE_LIST_TITLE, entry_context_with_spaces,
+ headers=LIST_HEADERS, removeNull=True)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.Site.List(val.ID==obj.ID)': entry_context,
+ }
+ )
+
+
+def get_all_site_lists(siteName):
+ url = SERVER_URL + SITE_CREATE_LIST_SUFFIX.format(CORPNAME, siteName)
+ response_data = http_request('GET', url)
+ return response_data
+
+
+def get_all_site_lists_command():
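+    """Get all the lists defined in siteName"""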
+ args = demisto.args()
+ response_data = get_all_site_lists(args['siteName'])
+ list_of_site_lists = response_data.get('data', [])
+
+ site_lists_contexts = []
+ for site_list_data in list_of_site_lists:
+ cur_site_context = list_entry_context_from_response(site_list_data)
+ site_lists_contexts.append(cur_site_context)
+
+ site_lists_contexts_with_spaces = return_list_of_dicts_with_spaces(site_lists_contexts)
+
+ for i in range(len(site_lists_contexts)):
+ adjust_list_human_readable(site_lists_contexts_with_spaces[i], site_lists_contexts[i])
+
+ sidedata = "Number of site lists in site: {0}".format(len(list_of_site_lists))
+ human_readable = tableToMarkdown(LIST_OF_SITE_LISTS_TITLE, site_lists_contexts_with_spaces, headers=LIST_HEADERS,
+ removeNull=True, metadata=sidedata)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.Site.List(val.ID==obj.ID)': site_lists_contexts,
+ }
+ )
+
+
+def add_alert(siteName, long_name, tag_name, interval, threshold, enabled, action):
+ validate_alert_args(siteName, long_name, tag_name, interval, threshold, enabled, action)
+ url = SERVER_URL + SITE_CREATE_ALERT_SUFFIX.format(CORPNAME, siteName)
+    data_for_request = {
+        'tagName': tag_name,
+        'longName': long_name,
+        'interval': int(interval),
+        'threshold': int(threshold),
+        # enabled arrives as a string; bool('False') is True, so compare explicitly
+        'enabled': str(enabled).lower() == 'true',
+        'action': action
+    }
+ response_data = http_request('POST', url, data=data_for_request)
+ return response_data
+
+
+def add_alert_command():
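+    """Add a custom alert to siteName"""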
+ args = demisto.args()
+ response_data = add_alert(args['siteName'], args['long_name'], args['tag_name'],
+ args['interval'], args['threshold'], args['enabled'], args['action'])
+
+ entry_context = alert_entry_context_from_response(response_data)
+ entry_context_with_spaces = dict_keys_from_camelcase_to_spaces(entry_context)
+    # Rename the 'Interval' key to 'Interval (In Minutes)' for the human-readable output
+ adjust_alert_human_readable(entry_context_with_spaces, entry_context)
+
+ human_readable = tableToMarkdown(ADD_ALERT_TITLE, entry_context_with_spaces, headers=ALERT_HEADERS, removeNull=True)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.Site.Alert(val.ID==obj.ID)': entry_context,
+ }
+ )
+
+
+def get_alert(siteName, alert_id):
+ url = SERVER_URL + SITE_ACCESS_ALERT_SUFFIX.format(CORPNAME, siteName, alert_id)
+ response_data = http_request('GET', url)
+ return response_data
+
+
+def get_alert_command():
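+    """Get the data of the alert with the given ID in siteName"""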
+ args = demisto.args()
+ response_data = get_alert(args['siteName'], args['alert_id'])
+ entry_context = alert_entry_context_from_response(response_data)
+ entry_context_with_spaces = dict_keys_from_camelcase_to_spaces(entry_context)
+
+    # Rename the 'Interval' key to 'Interval (In Minutes)' for the human-readable output
+ adjust_alert_human_readable(entry_context_with_spaces, entry_context)
+
+    title = "Data found for alert ID: {0}".format(args['alert_id'])
+ human_readable = tableToMarkdown(title, entry_context_with_spaces, headers=ALERT_HEADERS, removeNull=True)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.Site.Alert(val.ID==obj.ID)': entry_context,
+ }
+ )
+
+
+def delete_alert(siteName, alert_id):
+ url = SERVER_URL + SITE_ACCESS_ALERT_SUFFIX.format(CORPNAME, siteName, alert_id)
+ response_data = http_request('DELETE', url)
+ return response_data
+
+
+def delete_alert_command():
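+    """Delete the alert with the given ID from siteName"""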
+ args = demisto.args()
+ response_data = delete_alert(args['siteName'], args['alert_id'])
+    title = "Alert {0} deleted successfully".format(args['alert_id'])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': response_data,
+ 'HumanReadable': title
+ })
+
+
+def update_alert(siteName, alert_id, tag_name, long_name, interval, threshold, enabled, action):
+ validate_alert_args(siteName, long_name, tag_name, interval, threshold, enabled, action)
+ url = SERVER_URL + SITE_ACCESS_ALERT_SUFFIX.format(CORPNAME, siteName, alert_id)
+    data_for_request = {
+        'tagName': tag_name,
+        'longName': long_name,
+        'interval': int(interval),
+        'threshold': int(threshold),
+        'action': action,
+        # enabled arrives as a string; bool('False') is True, so compare explicitly
+        'enabled': str(enabled).lower() == 'true'
+    }
+ request_response = http_request('PATCH', url, data=data_for_request)
+ return request_response
+
+
+def update_alert_command():
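+    """Update the attributes of the alert with the given ID in siteName"""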
+ args = demisto.args()
+ response_data = update_alert(args['siteName'], args['alert_id'], args['tag_name'], args['long_name'],
+ args['interval'], args['threshold'], args['enabled'], args['action'])
+    title = "Updated alert {0}. New values:".format(args['alert_id'])
+ entry_context = alert_entry_context_from_response(response_data)
+ entry_context_with_spaces = dict_keys_from_camelcase_to_spaces(entry_context)
+
+    # Rename the 'Interval' key to 'Interval (In Minutes)' for the human-readable output
+ adjust_alert_human_readable(entry_context_with_spaces, entry_context)
+
+ human_readable = tableToMarkdown(title, entry_context_with_spaces, headers=ALERT_HEADERS, removeNull=True)
+ return_outputs(
+ raw_response=response_data,
+ readable_output=human_readable,
+ outputs={
+ 'SigSciences.Corp.Site.Alert(val.ID==obj.ID)': entry_context,
+ }
+ )
+
+
+def get_all_alerts(siteName):
+ url = SERVER_URL + SITE_CREATE_ALERT_SUFFIX.format(CORPNAME, siteName)
+ response_data = http_request('GET', url)
+ return response_data
+
+
+def get_all_alerts_command():
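+    """Get all the alerts defined in siteName"""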
+ args = demisto.args()
+ response_data = get_all_alerts(args['siteName'])
+ alerts_list = response_data.get('data', [])
+ alerts_contexts = []
+ for alert_data in alerts_list:
+ cur_alert_context = alert_entry_context_from_response(alert_data)
+ alerts_contexts.append(cur_alert_context)
+
+ alerts_contexts_with_spaces = return_list_of_dicts_with_spaces(alerts_contexts)
+
+    # Rename the 'Interval' key to 'Interval (In Minutes)' in every alert's human-readable output
+ for i in range(len(alerts_contexts)):
+ adjust_alert_human_readable(alerts_contexts_with_spaces[i], alerts_contexts[i])
+
+ sidedata = "Number of alerts in site: {0}".format(len(alerts_list))
+ return_outputs(
+ raw_response=response_data,
+ readable_output=tableToMarkdown(ALERT_LIST_TITLE, alerts_contexts_with_spaces,
+ headers=ALERT_HEADERS, removeNull=True, metadata=sidedata),
+ outputs={
+ 'SigSciences.Corp.Site.Alert(val.ID==obj.ID)': alerts_contexts,
+ }
+ )
+
+
+def get_whitelist(siteName):
+ url = SERVER_URL + WHITELIST_SUFFIX.format(CORPNAME, siteName)
+ site_whitelist = http_request('GET', url)
+ return site_whitelist
+
+
+def get_whitelist_command():
+ """Get the whitelist data for siteName"""
+ args = demisto.args()
+ site_whitelist = get_whitelist(args['siteName'])
+ data = site_whitelist.get('data', [])
+ whitelist_ips_contexts = generate_whitelist_or_blacklist_ip_context(data)
+ whitelist_ips_contexts_with_spaces = return_list_of_dicts_with_spaces(whitelist_ips_contexts)
+
+ sidedata = "Number of IPs in the Whitelist {0}".format(len(data))
+ return_outputs(
+ raw_response=site_whitelist,
+ readable_output=tableToMarkdown(WHITELIST_TITLE, whitelist_ips_contexts_with_spaces,
+ WHITELIST_OR_BLACKLIST_HEADERS, removeNull=True, metadata=sidedata),
+ outputs={
+ 'SigSciences.Corp.Site.Whitelist(val.ID==obj.ID)': whitelist_ips_contexts,
+ }
+ )
+
+
+def get_blacklist(siteName):
+ url = SERVER_URL + BLACKLIST_SUFFIX.format(CORPNAME, siteName)
+ site_blacklist = http_request('GET', url)
+ return site_blacklist
+
+
+def get_blacklist_command():
+ """Get blacklist data for siteName"""
+ args = demisto.args()
+ site_blacklist = get_blacklist(args['siteName'])
+ data = site_blacklist.get('data', [])
+ blacklist_ips_contexts = generate_whitelist_or_blacklist_ip_context(data)
+ blacklist_ips_contexts_with_spaces = return_list_of_dicts_with_spaces(blacklist_ips_contexts)
+
+ sidedata = "Number of IPs in the Blacklist {0}".format(len(data))
+ return_outputs(
+ raw_response=site_blacklist,
+ readable_output=tableToMarkdown(BLACKLIST_TITLE, blacklist_ips_contexts_with_spaces,
+ WHITELIST_OR_BLACKLIST_HEADERS, removeNull=True, metadata=sidedata),
+ outputs={
+ 'SigSciences.Corp.Site.Blacklist(val.ID==obj.ID)': blacklist_ips_contexts,
+ }
+ )
+
+
+def add_ip_to_whitelist(siteName, ip, note, expires=None):
+ url = SERVER_URL + WHITELIST_SUFFIX.format(CORPNAME, siteName)
+ res = add_ip_to_whitelist_or_blacklist(url, ip, note, expires)
+ return res
+
+
+def add_ip_to_whitelist_command():
+ """Add an ip to the whitelist"""
+ args = demisto.args()
+ response_data = add_ip_to_whitelist(args['siteName'], args['ip'], args['note'], args.get('expires', None))
+ whitelist_ip_context = gen_context_for_add_to_whitelist_or_blacklist(response_data)
+ human_readable = gen_human_readable_for_add_to_whitelist_or_blacklist(whitelist_ip_context)
+
+ return_outputs(
+ raw_response=response_data,
+ readable_output=tableToMarkdown(ADD_IP_TO_WHITELIST_TITLE, human_readable, headers=ADD_IP_HEADERS,
+ removeNull=True, metadata=IP_ADDED_TO_WHITELIST_TITLE.format(args['ip'])),
+ outputs={
+ 'SigSciences.Corp.Site.Whitelist(val.ID==obj.ID)': whitelist_ip_context,
+ }
+ )
+
+
+def add_ip_to_blacklist(siteName, ip, note, expires=None):
+ url = SERVER_URL + BLACKLIST_SUFFIX.format(CORPNAME, siteName)
+ res = add_ip_to_whitelist_or_blacklist(url, ip, note, expires)
+ return res
+
+
+def add_ip_to_blacklist_command():
+ """Add an ip to the blacklist"""
+ args = demisto.args()
+ response_data = add_ip_to_blacklist(args['siteName'], args['ip'], args['note'], args.get('expires', None))
+ blacklist_ip_context = gen_context_for_add_to_whitelist_or_blacklist(response_data)
+ human_readable = gen_human_readable_for_add_to_whitelist_or_blacklist(blacklist_ip_context)
+
+ return_outputs(
+ raw_response=response_data,
+ readable_output=tableToMarkdown(ADD_IP_TO_BLACKLIST_TITLE, human_readable,
+ headers=ADD_IP_HEADERS, removeNull=True,
+ metadata=IP_ADDED_TO_BLACKLIST_TITLE.format(args['ip'])),
+ outputs={
+ 'SigSciences.Corp.Site.Blacklist(val.ID==obj.ID)': blacklist_ip_context,
+ }
+ )
+
+
+def whitelist_remove_ip(siteName, ip):
+    check_ip_is_valid(ip)
+    site_whitelist = get_whitelist(siteName)
+    data = site_whitelist.get('data', [])
+    # Use an explicit flag rather than probing locals() for a leftover variable
+    ip_found = False
+    for item in data:
+        if item.get('source', '') == ip:
+            url = SERVER_URL + DELETE_WHITELIST_IP_SUFFIX.format(CORPNAME, siteName, item.get('id', ''))
+            http_request('DELETE', url)
+            ip_found = True
+
+    if not ip_found:
+        return_error("The IP {0} was not found on the Whitelist".format(ip))
+
+    return site_whitelist
+
+
+def whitelist_remove_ip_command():
+ """Remove an ip from the whitelist"""
+ args = demisto.args()
+ response_data = whitelist_remove_ip(args['siteName'], args['IP'])
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': response_data,
+ 'HumanReadable': WHITELIST_REMOVE_IP_TITLE.format(args['IP']),
+ })
+
+
+def blacklist_remove_ip(siteName, ip):
+    check_ip_is_valid(ip)
+    site_blacklist = get_blacklist(siteName)
+    data = site_blacklist.get('data', [])
+    # Use an explicit flag rather than probing locals() for a leftover variable
+    ip_found = False
+    for item in data:
+        if item.get('source', '') == ip:
+            url = SERVER_URL + DELETE_BLACKLIST_IP_SUFFIX.format(CORPNAME, siteName, item.get('id', ''))
+            http_request('DELETE', url)
+            ip_found = True
+
+    if not ip_found:
+        return_error("The IP {0} was not found on the Blacklist".format(ip))
+
+    return site_blacklist
+
+
+def blacklist_remove_ip_command():
+ """Remove an ip from the blacklist"""
+ args = demisto.args()
+ response_data = blacklist_remove_ip(args['siteName'], args['IP'])
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': response_data,
+ 'HumanReadable': BLACKLIST_REMOVE_IP_TITLE.format(args['IP']),
+ })
+
+
+def get_sites():
+ url = SERVER_URL + GET_SITES_SUFFIX.format(CORPNAME)
+ res = http_request('GET', url)
+ return res
+
+
+def get_sites_command():
+ """Get the sites list"""
+ res = get_sites()
+ data = res.get('data', [])
+
+ outputs = []
+ for item in data:
+ output = {}
+ output['Name'] = item.get('name', '')
+ output['CreatedDate'] = item.get('created', '')
+
+ outputs.append(output)
+
+ outputs_with_spaces = return_list_of_dicts_with_spaces(outputs)
+
+ return_outputs(
+ raw_response=res,
+ readable_output=tableToMarkdown(SITES_LIST_TITLE, outputs_with_spaces, headers=GET_SITE_HEADERS,
+ removeNull=True),
+ outputs={
+ 'SigSciences.Sites(val.Name==obj.Name)': outputs,
+ }
+ )
+
+
+def fetch_incidents():
+    now_utc = datetime.utcnow()
+
+    last_run_data = demisto.getLastRun()
+    if last_run_data:
+        last_run_time = last_run_data['time']
+    else:
+        # First fetch - start FETCH_INTERVAL minutes back
+        date_time_interval_ago = now_utc - timedelta(minutes=int(FETCH_INTERVAL))
+        last_run_time = datetime_to_posix_without_milliseconds(date_time_interval_ago)
+
+    # Initialize to the last run time so a fetch with no new events
+    # does not overwrite the stored cursor with None
+    most_recent_event_time = last_run_time
+
+    list_of_sites_to_fetch = get_list_of_site_names_to_fetch()
+    events_array = get_events_from_given_sites(list_of_sites_to_fetch, last_run_time)
+    incidents = []
+    for event in events_array:
+        # Event timestamps arrive in RFC3339 format with a trailing 'Z' that strptime can't parse
+        event_time = event['timestamp']
+        event_time = datetime.strptime(event_time[:-1], "%Y-%m-%dT%H:%M:%S")
+        event_time = datetime_to_posix_without_milliseconds(event_time)
+        if event_time > last_run_time:
+            incidents.append({
+                'name': str(event['id']) + " - SignalSciences",
+                'occurred': event['timestamp'],
+                'rawJSON': json.dumps(event)
+            })
+        if event_time > most_recent_event_time:
+            most_recent_event_time = event_time
+
+    demisto.incidents(incidents)
+    demisto.setLastRun({'time': most_recent_event_time})
+
+
+''' EXECUTION CODE '''
+
+LOG('command is %s' % (demisto.command(),))
+
+try:
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+ elif demisto.command() == 'sigsci-get-whitelist':
+ get_whitelist_command()
+ elif demisto.command() == 'sigsci-get-blacklist':
+ get_blacklist_command()
+ elif demisto.command() == 'sigsci-whitelist-add-ip':
+ add_ip_to_whitelist_command()
+ elif demisto.command() == 'sigsci-blacklist-add-ip':
+ add_ip_to_blacklist_command()
+ elif demisto.command() == 'sigsci-whitelist-remove-ip':
+ whitelist_remove_ip_command()
+ elif demisto.command() == 'sigsci-blacklist-remove-ip':
+ blacklist_remove_ip_command()
+ elif demisto.command() == 'sigsci-get-sites':
+ get_sites_command()
+ elif demisto.command() == 'sigsci-create-corp-list':
+ create_corp_list_command()
+ elif demisto.command() == 'sigsci-get-corp-list':
+ get_corp_list_command()
+ elif demisto.command() == 'sigsci-delete-corp-list':
+ delete_corp_list_command()
+ elif demisto.command() == 'sigsci-update-corp-list':
+ update_corp_list_command()
+ elif demisto.command() == 'sigsci-get-all-corp-lists':
+ get_all_corp_lists_command()
+ elif demisto.command() == 'sigsci-create-site-list':
+ create_site_list_command()
+ elif demisto.command() == 'sigsci-get-site-list':
+ get_site_list_command()
+ elif demisto.command() == 'sigsci-delete-site-list':
+ delete_site_list_command()
+ elif demisto.command() == 'sigsci-update-site-list':
+ update_site_list_command()
+ elif demisto.command() == 'sigsci-get-all-site-lists':
+ get_all_site_lists_command()
+ elif demisto.command() == 'sigsci-add-alert':
+ add_alert_command()
+ elif demisto.command() == 'sigsci-get-alert':
+ get_alert_command()
+ elif demisto.command() == 'sigsci-delete-alert':
+ delete_alert_command()
+ elif demisto.command() == 'sigsci-update-alert':
+ update_alert_command()
+ elif demisto.command() == 'sigsci-get-all-alerts':
+ get_all_alerts_command()
+ elif demisto.command() == 'sigsci-get-events':
+ get_events_command()
+ elif demisto.command() == 'sigsci-expire-event':
+ expire_event_command()
+ elif demisto.command() == 'sigsci-get-event-by-id':
+ get_event_by_id_command()
+ elif demisto.command() == 'sigsci-get-requests':
+ get_requests_command()
+ elif demisto.command() == 'sigsci-get-request-by-id':
+ get_request_by_id_command()
+
+
+except Exception as e:
+    return_error(str(e))
diff --git a/Integrations/SignalSciences/SignalSciences.yml b/Integrations/SignalSciences/SignalSciences.yml
new file mode 100644
index 000000000000..e8a9fbdfd75e
--- /dev/null
+++ b/Integrations/SignalSciences/SignalSciences.yml
@@ -0,0 +1,963 @@
+commonfields:
+ id: Signal Sciences WAF
+ version: -1
+name: Signal Sciences WAF
+display: Signal Sciences WAF
+category: Network Security
+description: Protect your web application using Signal Sciences.
+configuration:
+- display: Email
+ name: Email
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Token
+ name: Token
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Corporation Name
+ name: corpName
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+- display: Fetch Interval (in minutes)
+ name: fetch_interval
+ defaultvalue: "720"
+ type: 0
+ required: false
+- display: CSV list of sites to fetch from.
+    If no sites are specified, events will be fetched from all the corporation's sites.
+ name: sites_to_fetch
+ defaultvalue: ""
+ type: 12
+ required: false
+script:
+ script: ''
+ type: python
+ commands:
+ - name: sigsci-get-whitelist
+ arguments:
+ - name: siteName
+ required: true
+ description: The site that holds the whitelist you want to get.
+ outputs:
+    - contextPath: SigSciences.Corp.Site.Whitelist.ID
+      description: The ID for this entry.
+      type: string
+    - contextPath: SigSciences.Corp.Site.Whitelist.Source
+      description: The IP address on the whitelist.
+      type: string
+    - contextPath: SigSciences.Corp.Site.Whitelist.ExpiryDate
+      description: The expiration timestamp.
+      type: date
+    - contextPath: SigSciences.Corp.Site.Whitelist.Note
+      description: The note associated with the tag.
+      type: string
+    - contextPath: SigSciences.Corp.Site.Whitelist.CreatedDate
+      description: The created date timestamp.
+      type: date
+    - contextPath: SigSciences.Corp.Site.Whitelist.CreatedBy
+      description: The user who added this source.
+      type: string
+    description: Fetches a site's whitelist, which resides on the Signal Sciences platform.
+ - name: sigsci-get-blacklist
+ arguments:
+ - name: siteName
+ required: true
+      description: The site that holds the blacklist you want to get.
+ outputs:
+ - contextPath: SigSciences.Corp.Site.Blacklist.ID
+ description: The ID for this entry.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Blacklist.Source
+ description: The IP address on the blacklist.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Blacklist.ExpiryDate
+ description: The expiration timestamp.
+ type: date
+ - contextPath: SigSciences.Corp.Site.Blacklist.Note
+ description: The note associated with the tag.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Blacklist.CreatedDate
+      description: The created date timestamp.
+ type: date
+ - contextPath: SigSciences.Corp.Site.Blacklist.CreatedBy
+ description: The user who added this source.
+ type: string
+    description: Fetches a site's blacklist, which resides on the Signal Sciences platform.
+ - name: sigsci-whitelist-add-ip
+ arguments:
+ - name: siteName
+ required: true
+ description: The site that holds the whitelist you want to add an IP address to.
+ - name: ip
+ required: true
+ description: The IP address to add to the site's whitelist.
+ - name: note
+ required: true
+ description: The note associated with the tag.
+ - name: expires
+      description: The datetime for the IP address to be removed from the site's whitelist (in RFC3339 format). To keep the IP address on the site's whitelist indefinitely, do not specify this argument.
+ outputs:
+ - contextPath: SigSciences.Corp.Site.Whitelist.Source
+ description: The IP address on the whitelist.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Whitelist.Note
+ description: The note associated with the tag.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Whitelist.ID
+ description: The ID for this entry.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Whitelist.CreatedBy
+ description: The user who added this source.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Whitelist.CreatedDate
+      description: The created date timestamp.
+ type: date
+ - contextPath: SigSciences.Corp.Site.Whitelist.ExpiryDate
+ description: The expiration timestamp.
+ type: date
+    description: Adds an IP address to a site's whitelist, which resides on the Signal Sciences platform.
+ - name: sigsci-blacklist-add-ip
+ arguments:
+ - name: siteName
+ required: true
+      description: The site that holds the blacklist you want to add an IP address to.
+ - name: ip
+ required: true
+ description: The IP address to add to the site's blacklist.
+ - name: note
+ required: true
+ description: The note associated with the tag.
+ - name: expires
+      description: The datetime for the IP address to be removed from the site's blacklist (in RFC3339 format). To keep the IP address on the site's blacklist indefinitely, do not specify this argument.
+ outputs:
+ - contextPath: SigSciences.Corp.Site.Blacklist.Source
+ description: The IP address on the blacklist.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Blacklist.Note
+ description: The note associated with the tag.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Blacklist.ID
+ description: The ID for this entry.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Blacklist.CreatedBy
+ description: The user who added this source.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Blacklist.CreatedDate
+ description: The created date timestamp.
+ type: date
+ - contextPath: SigSciences.Corp.Site.Blacklist.ExpiryDate
+ description: The expiration timestamp.
+ type: date
+    description: Adds an IP address to a site's blacklist, which resides on the Signal Sciences platform.
+ - name: sigsci-whitelist-remove-ip
+ arguments:
+ - name: siteName
+ required: true
+ description: The site that contains the whitelist you want to remove an IP address from.
+ - name: IP
+ required: true
+ description: The IP address entry to remove.
+    description: Removes an IP address from a site's whitelist, which resides on the Signal Sciences platform.
+ - name: sigsci-blacklist-remove-ip
+ arguments:
+ - name: siteName
+ required: true
+ description: The site that contains the blacklist you want to remove an IP address from.
+ - name: IP
+ required: true
+ description: The IP address entry to remove.
+    description: Removes an IP address from a site's blacklist, which resides on the Signal Sciences platform.
+ - name: sigsci-get-sites
+ arguments: []
+ outputs:
+ - contextPath: SigSciences.Site.Name
+ description: The site name.
+ type: string
+ - contextPath: SigSciences.Site.CreatedDate
+ description: The site creation date.
+ type: date
+ description: Returns all site names from the Signal Sciences platform.
+ - name: sigsci-create-site-list
+ arguments:
+ - name: siteName
+ required: true
+ description: The name of the site in Signal Sciences you want to add a list to.
+ - name: list_name
+ required: true
+ description: The name of the list to create on Signal Sciences.
+ - name: list_type
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - IP
+ - String
+ - Country
+ - Wildcard
+      description: The type for the list to create on Signal Sciences. Legal
+        types are "IP", "String", "Country", or "Wildcard".
+ - name: entries_list
+ required: true
+ description: A CSV list of values, consistent with the list's type.
+ isArray: true
+ - name: description
+ description: Short text that describes the new list.
+ outputs:
+ - contextPath: SigSciences.Corp.Site.List.Name
+ description: The name of the list.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.Type
+ description: The type of the list.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.Entries
+ description: The entry list of the list.
+ - contextPath: SigSciences.Corp.Site.List.ID
+ description: The ID of the list.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.Description
+ description: The description of the list.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.CreatedBy
+ description: The creator of the list.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.CreatedDate
+ description: The creation date of the list.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.UpdatedDate
+ description: The last update date of the list.
+ type: string
+ description: Creates a new list for a given site on the Signal Sciences platform.
+ - name: sigsci-get-site-list
+ arguments:
+ - name: siteName
+ required: true
+ description: The name of the site in Signal Sciences that the list you're searching
+ for belongs to.
+ - name: list_id
+ required: true
+ description: The ID of the list.
+ outputs:
+ - contextPath: SigSciences.Corp.Site.List.Name
+ description: The name of the list.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.Type
+ description: The type of the list.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.Entries
+ description: The entry list of the list.
+ - contextPath: SigSciences.Corp.Site.List.ID
+ description: The ID of the list.
+ - contextPath: SigSciences.Corp.Site.List.Description
+ description: The description of the list.
+ - contextPath: SigSciences.Corp.Site.List.CreatedBy
+      description: The creator of the list.
+ - contextPath: SigSciences.Corp.Site.List.CreatedDate
+ description: The creation date of the list.
+ - contextPath: SigSciences.Corp.Site.List.UpdatedDate
+ description: The last update date of the list.
+ description: Returns all the data about a site list.
+ - name: sigsci-delete-site-list
+ arguments:
+ - name: siteName
+ required: true
+ description: The name of the site in Signal Sciences you want to delete a list from.
+ - name: list_id
+ required: true
+ description: The ID of the list in Signal Sciences to delete.
+ description: Deletes a site list in Signal Sciences.
+ - name: sigsci-update-site-list
+ arguments:
+ - name: siteName
+ required: true
+ description: The name of the site in Signal Sciences that the list to update belongs to.
+ - name: list_id
+ required: true
+ description: The ID of the list to update.
+ - name: method
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - Add
+ - Remove
+      description: The method to use, either "Add" or "Remove". The method
+        determines whether the given entries are added to or removed from the list.
+ - name: entries_list
+ required: true
+ description: A CSV list of values, consistent with the list's type.
+ isArray: true
+ - name: description
+ description: A description for the updated list.
+ outputs:
+ - contextPath: SigSciences.Corp.Site.List.Name
+ description: The name of the list in Signal Sciences.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.Type
+ description: The type of the list in Signal Sciences.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.Entries
+ description: The entry list of the list in Signal Sciences.
+ type: unknown
+ - contextPath: SigSciences.Corp.Site.List.ID
+ description: The ID of the list in Signal Sciences.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.Description
+ description: The description of the list in Signal Sciences. Maximum is 140 characters.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.CreatedBy
+ description: The creator of the list in Signal Sciences.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.CreatedDate
+ description: The creation date of the list in Signal Sciences.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.UpdatedDate
+ description: The last update date of the list in Signal Sciences.
+ type: string
+ description: Updates a site list in Signal Sciences.
+ - name: sigsci-add-alert
+ arguments:
+ - name: siteName
+ required: true
+ description: The name of the site to add an alert to.
+ - name: long_name
+ required: true
+      description: A human-readable description of the alert. Must be between 3 and
+        25 characters.
+ - name: interval
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - "1"
+ - "10"
+ - "60"
+ description: The number of minutes of past traffic to examine. Must be 1, 10
+ or 60.
+ - name: tag_name
+ required: true
+ description: The name of the tag whose occurrences the alert is watching. Must
+ match an existing tag.
+ - name: threshold
+ required: true
+      description: The number of occurrences of the tag in the specified interval
+        that are required to trigger the alert.
+ - name: enabled
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - "True"
+ - "False"
+ description: A flag to toggle this alert ("True" or "False").
+ - name: action
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - info
+ - flagged
+ description: A flag that describes what happens when the alert is triggered.
+ "info" creates an incident in the dashboard. "flagged" creates an incident
+ and blocks traffic for 24 hours.
+ outputs:
+ - contextPath: SigSciences.Corp.Site.Alert.ID
+ description: The unique ID of the alert.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.SiteID
+ description: The ID of the site.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.TagName
+ description: The name of the tag whose occurrences the alert is watching.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.LongName
+ description: A short description of the alert.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.Interval
+ description: The number of minutes of past traffic to examine.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Alert.Threshold
+ description: The number of occurrences of the tag in the specified interval that are required to
+ trigger the alert.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Alert.BlockDurationSeconds
+      description: The number of seconds that this alert is active.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Alert.SkipNotifications
+ description: A flag to disable external notifications - Slack, webhooks, emails, and so on.
+ type: boolean
+ - contextPath: SigSciences.Corp.Site.Alert.Enabled
+ description: A flag to toggle this alert.
+ type: boolean
+ - contextPath: SigSciences.Corp.Site.Alert.Action
+ description: A flag that describes what happens when the alert is triggered.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.CreatedDate
+      description: The timestamp of the event (RFC3339 format).
+ type: date
+ description: Adds a custom alert to a site in Signal Sciences.
+ - name: sigsci-get-alert
+ arguments:
+ - name: siteName
+ required: true
+ description: The name of the site the alert you want to get data for belongs to.
+ - name: alert_id
+ required: true
+ description: The ID of the alert to retrieve.
+ outputs:
+ - contextPath: SigSciences.Corp.Site.Alert.ID
+ description: The unique ID of the alert.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.SiteID
+ description: The ID of the site.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.TagName
+ description: The name of the tag whose occurrences the alert is watching.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.LongName
+ description: A short description of the alert.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.Interval
+ description: The number of minutes of past traffic to examine.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Alert.Threshold
+ description: The number of occurrences of the tag in the specified interval that are required to
+ trigger the alert.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Alert.BlockDurationSeconds
+      description: The number of seconds this alert is active.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Alert.SkipNotifications
+ description: A flag to disable external notifications - Slack, webhooks, emails, and so on.
+ type: boolean
+ - contextPath: SigSciences.Corp.Site.Alert.Enabled
+ description: A flag to toggle this alert.
+ type: boolean
+ - contextPath: SigSciences.Corp.Site.Alert.Action
+ description: A flag that describes what happens when the alert is triggered.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.CreatedDate
+ description: The timestamp of the event (RFC3339 format).
+ type: date
+ description: Retrieves data for an alert.
+ - name: sigsci-delete-alert
+ arguments:
+ - name: siteName
+ required: true
+ description: The name of the site to delete an alert from.
+ - name: alert_id
+ required: true
+ description: The ID of the alert to delete.
+ description: Deletes an alert from a given site in Signal Sciences.
+ - name: sigsci-update-alert
+ arguments:
+ - name: siteName
+ required: true
+ description: The site of the alert to update.
+ - name: alert_id
+ required: true
+ description: The ID of the alert to update.
+ - name: tag_name
+ required: true
+ description: The name of the tag whose occurrences the alert is watching.
+ - name: long_name
+ required: true
+      description: A human-readable description of the alert. Must be between 3 and
+        25 characters.
+ - name: interval
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - "1"
+ - "10"
+ - "60"
+ description: The number of minutes of past traffic to examine.
+ - name: threshold
+ required: true
+ description: The number of occurrences of the tag in the specified interval that are required to
+ trigger the alert.
+ - name: enabled
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - "True"
+ - "False"
+ description: A flag to toggle this alert.
+ - name: action
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - info
+ - flagged
+ description: A flag that describes what happens when the alert is triggered.
+ "info" creates an incident in the dashboard. "flagged" creates an incident
+ and blocks traffic for 24 hours.
+ outputs:
+    - contextPath: SigSciences.Corp.Site.Alert.ID
+      description: The unique ID of the alert.
+      type: string
+ - contextPath: SigSciences.Corp.Site.Alert.SiteID
+ description: The ID of the site.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.TagName
+ description: The name of the tag whose occurrences the alert is watching.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.LongName
+ description: A short description of the alert.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.Interval
+ description: The number of minutes of past traffic to examine.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Alert.Threshold
+ description: The number of occurrences of the tag in the specified interval that are required to
+ trigger the alert.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Alert.BlockDurationSeconds
+      description: The number of seconds that this alert is active.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Alert.SkipNotifications
+ description: A flag to disable external notifications - Slack, webhooks, emails, and so on.
+ type: boolean
+ - contextPath: SigSciences.Corp.Site.Alert.Enabled
+ description: A flag to toggle this alert.
+ type: boolean
+ - contextPath: SigSciences.Corp.Site.Alert.Action
+ description: A flag that describes what happens when the alert is triggered.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.CreatedDate
+      description: The timestamp of the event (RFC3339 format).
+ type: date
+ description: Updates the attributes of a given alert.
+ - name: sigsci-get-all-alerts
+ arguments:
+ - name: siteName
+ required: true
+      description: The name of the site to get alerts for.
+ outputs:
+ - contextPath: SigSciences.Corp.Site.Alert.ID
+ description: The unique ID of the alert.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.SiteID
+ description: The ID of the site.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.TagName
+ description: The name of the tag whose occurrences the alert is watching.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.LongName
+ description: A short description of the alert.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.Interval
+ description: The number of minutes of past traffic to examine.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Alert.Threshold
+ description: The number of occurrences of the tag in the specified interval that are required to
+ trigger the alert.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Alert.BlockDurationSeconds
+      description: The number of seconds this alert is active.
+ type: number
+    - contextPath: SigSciences.Corp.Site.Alert.SkipNotifications
+ description: A flag to disable external notifications - Slack, webhooks, emails, and so on.
+ type: boolean
+ - contextPath: SigSciences.Corp.Site.Alert.Enabled
+ description: A flag to toggle this alert.
+ type: boolean
+ - contextPath: SigSciences.Corp.Site.Alert.Action
+ description: A flag that describes what happens when the alert is triggered.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Alert.CreatedDate
+      description: The timestamp of the event (RFC3339 format).
+ type: date
+    description: Retrieves all alerts for a given site.
+ - name: sigsci-get-all-site-lists
+ arguments:
+ - name: siteName
+ required: true
+ description: The name of the site to retrieve lists for.
+ outputs:
+ - contextPath: SigSciences.Corp.Site.List.Name
+ description: The name of the list in Signal Sciences.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.Type
+ description: The type of the list in Signal Sciences.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.Entries
+ description: The entry list of the list in Signal Sciences.
+ - contextPath: SigSciences.Corp.Site.List.ID
+ description: The ID of the list in Signal Sciences.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.Description
+ description: The description of the list in Signal Sciences. Maximum is 140 characters.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.CreatedBy
+ description: The creator of the list in Signal Sciences.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.CreatedDate
+ description: The creation date of the list in Signal Sciences.
+ type: string
+ - contextPath: SigSciences.Corp.Site.List.UpdatedDate
+ description: The last update date of the list in Signal Sciences.
+ type: string
+ description: Retrieves all site lists for a given site.
+ - name: sigsci-create-corp-list
+ arguments:
+ - name: list_name
+ required: true
+ description: A name for the new list.
+ - name: list_type
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - IP
+ - String
+ - Country
+ - Wildcard
+ description: The type for the new list. Legal types are "IP", "String", "Country", or "Wildcard".
+ - name: entries_list
+ required: true
+ description: A CSV list of values, consistent with the list's type.
+ isArray: true
+ - name: description
+ description: Short text that describes the new list.
+ outputs:
+ - contextPath: SigSciences.Corp.List.Name
+ description: The name of the list.
+ type: string
+ - contextPath: SigSciences.Corp.List.Type
+ description: The type of the list.
+ type: string
+ - contextPath: SigSciences.Corp.List.Entries
+ description: The entry list of the list.
+ - contextPath: SigSciences.Corp.List.ID
+ description: The ID of the list.
+ type: string
+ - contextPath: SigSciences.Corp.List.Description
+ description: The description of the list.
+ type: string
+ - contextPath: SigSciences.Corp.List.CreatedBy
+ description: The creator of the list.
+ type: string
+ - contextPath: SigSciences.Corp.List.CreatedDate
+ description: The creation date of the list.
+ type: string
+ - contextPath: SigSciences.Corp.List.UpdatedDate
+ description: The last update date of the list.
+ type: string
+ description: Creates a new corp list.
+ - name: sigsci-get-corp-list
+ arguments:
+ - name: list_id
+ required: true
+ description: The ID of the list to get data for.
+ outputs:
+ - contextPath: SigSciences.Corp.List.Name
+ description: The name of the list.
+ - contextPath: SigSciences.Corp.List.Type
+ description: The type of the list.
+ - contextPath: SigSciences.Corp.List.Entries
+ description: The entry list of the list.
+ - contextPath: SigSciences.Corp.List.ID
+ description: The ID of the list.
+ - contextPath: SigSciences.Corp.List.Description
+ description: The description of the list.
+ - contextPath: SigSciences.Corp.List.CreatedBy
+ description: The creator of the list.
+ - contextPath: SigSciences.Corp.List.CreatedDate
+ description: The creation date of the list.
+ - contextPath: SigSciences.Corp.List.UpdatedDate
+ description: The last update date of the list.
+ description: Retrieves all data about a given corp list.
+ - name: sigsci-delete-corp-list
+ arguments:
+ - name: list_id
+ required: true
+ description: The ID of the list to delete.
+ description: Deletes a given corp list.
+ - name: sigsci-update-corp-list
+ arguments:
+ - name: list_id
+ required: true
+      description: The ID of the list to update.
+ - name: method
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - Add
+ - Remove
+      description: The method to use, either "Add" or "Remove". The method
+        determines whether the given entries are added to or removed from the list.
+ - name: entries_list
+ required: true
+      description: A CSV list of values, consistent with the list's type.
+ isArray: true
+ - name: description
+ description: A description for the updated list.
+ outputs:
+ - contextPath: SigSciences.Corp.List.Name
+ description: The name of the list.
+ - contextPath: SigSciences.Corp.List.Type
+ description: The type of the list.
+ - contextPath: SigSciences.Corp.List.Entries
+ description: The entry list of the list.
+ - contextPath: SigSciences.Corp.List.ID
+ description: The ID of the list.
+ - contextPath: SigSciences.Corp.List.Description
+ description: The description of the list.
+ - contextPath: SigSciences.Corp.List.CreatedBy
+ description: The creator of the list.
+ - contextPath: SigSciences.Corp.List.CreatedDate
+ description: The creation date of the list.
+ - contextPath: SigSciences.Corp.List.UpdatedDate
+ description: The last update date of the list.
+    description: Updates (adds or removes) entries of a given corp list.
+ - name: sigsci-get-all-corp-lists
+ arguments: []
+ outputs:
+ - contextPath: SigSciences.Corp.List.Name
+ description: The name of the list.
+ - contextPath: SigSciences.Corp.List.Type
+ description: The type of the list.
+ - contextPath: SigSciences.Corp.List.Entries
+ description: The entry list of the list.
+ - contextPath: SigSciences.Corp.List.ID
+ description: The ID of the list.
+ - contextPath: SigSciences.Corp.List.Description
+ description: The description of the list.
+ - contextPath: SigSciences.Corp.List.CreatedBy
+ description: The creator of the list.
+ - contextPath: SigSciences.Corp.List.CreatedDate
+ description: The creation date of the list.
+ - contextPath: SigSciences.Corp.List.UpdatedDate
+ description: The last update date of the list.
+ description: Retrieves data about all lists for the given corp.
+ - name: sigsci-get-events
+ arguments:
+ - name: siteName
+ required: true
+ description: The name of the site to fetch events from.
+ - name: from_time
+ description: The POSIX Unix time to start.
+ - name: until_time
+      description: The POSIX Unix time to end.
+ - name: sort
+ auto: PREDEFINED
+ predefined:
+ - asc
+ - desc
+ description: The sort order ("asc" or "desc").
+ - name: since_id
+ description: The ID of the first object in the set.
+ - name: max_id
+      description: The ID of the last object in the set.
+ - name: limit
+ description: The maximum number of entries to return.
+ - name: page
+ description: The page of the results.
+ - name: action
+ auto: PREDEFINED
+ predefined:
+ - flagged
+ - info
+      description: The action to filter by ("flagged" or "info").
+ - name: tag
+ description: The tag to filter by. Must be a valid tag name.
+ - name: ip
+      description: The IP address to filter by.
+ - name: status
+ auto: PREDEFINED
+ predefined:
+ - active
+ - expired
+ description: The status to filter by ("active" or "expired").
+ outputs:
+ - contextPath: SigSciences.Corp.Site.Event.ID
+ description: The unique ID of the event.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Event.Timestamp
+      description: The timestamp of the event (RFC3339 format).
+ type: date
+ - contextPath: SigSciences.Corp.Site.Event.Source
+ description: The source information, for example, "IP".
+ type: string
+ - contextPath: SigSciences.Corp.Site.Event.RemoteCountryCode
+ description: The country code.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Event.RemoteHostname
+ description: The remote hostname.
+ type: string
+ - contextPath: SigSciences.Corp.Site.Event.UserAgents
+ description: An array of user agents.
+ - contextPath: SigSciences.Corp.Site.Event.Action
+ description: If "flagged", the IP address is flagged and subsequent malicious requests
+ will be blocked. If "info", the IP address is flagged and subsequent requests will be
+ logged.
+ - contextPath: SigSciences.Corp.Site.Event.Reasons
+ description: The reason the event was triggered.
+ - contextPath: SigSciences.Corp.Site.Event.RequestCount
+ description: The total number of requests.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Event.TagCount
+ description: The total number of tags.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Event.Window
+ description: The time window (in seconds) when the items were detected.
+ type: number
+ - contextPath: SigSciences.Corp.Site.Event.DateExpires
+ description: The date the event expires (RFC3339 format).
+ type: string
+ - contextPath: SigSciences.Corp.Site.Event.ExpiredBy
+ description: The email address of the user that expired the event (if the event is expired
+ manually).
+ type: string
+ description: Fetches events from Signal Sciences.
+ - name: sigsci-get-event-by-id
+ arguments:
+ - name: siteName
+ required: true
+      description: The name of the site to get the event from.
+ - name: event_id
+ required: true
+ description: The ID of the event.
+ outputs:
+ - contextPath: SigSciences.Corp.Site.Event.ID
+ description: The unique ID of the event.
+ - contextPath: SigSciences.Corp.Site.Event.Timestamp
+      description: The timestamp of the event (RFC3339 format).
+ - contextPath: SigSciences.Corp.Site.Event.Source
+ description: Source information, for example, "IP".
+ - contextPath: SigSciences.Corp.Site.Event.RemoteCountryCode
+ description: The country code.
+ - contextPath: SigSciences.Corp.Site.Event.RemoteHostname
+ description: The remote hostname.
+ - contextPath: SigSciences.Corp.Site.Event.UserAgents
+ description: An array of user agents.
+ - contextPath: SigSciences.Corp.Site.Event.Action
+ description: If "flagged", the IP address is flagged and subsequent malicious requests
+ will be blocked. If "info", the IP address is flagged and subsequent requests will be
+ logged.
+ - contextPath: SigSciences.Corp.Site.Event.Reasons
+ description: The reason the event was triggered.
+ - contextPath: SigSciences.Corp.Site.Event.RequestCount
+ description: The total number of requests.
+ - contextPath: SigSciences.Corp.Site.Event.TagCount
+ description: The total number of tags.
+ - contextPath: SigSciences.Corp.Site.Event.Window
+ description: The time window (in seconds) when the items were detected.
+ - contextPath: SigSciences.Corp.Site.Event.DateExpires
+ description: The date the event expires (RFC3339 format).
+ - contextPath: SigSciences.Corp.Site.Event.ExpiredBy
+ description: The email address of the user that expired the event (if the event is expired
+ manually).
+ description: Retrieves an event by the event ID.
+ - name: sigsci-expire-event
+ arguments:
+ - name: siteName
+ required: true
+ description: The name of the site to expire an event from.
+ - name: event_id
+ required: true
+ description: The ID of the event to expire.
+ description: Expires a given event.
+ - name: sigsci-get-requests
+ arguments:
+ - name: siteName
+ required: true
+ description: The name of the site to get requests from.
+ - name: page
+ description: The page of the results. Maximum is 1000 requests.
+ - name: limit
+ description: The number of entries to return.
+ - name: q
+ description: The search query, in Signal Sciences syntax. If a query
+ is not supplied, no requests will be returned.
+ outputs:
+ - contextPath: SigSciences.Corp.Site.Request.ID
+ description: The unique ID of the request.
+ - contextPath: SigSciences.Corp.Site.Request.ServerHostName
+ description: The server hostname.
+ - contextPath: SigSciences.Corp.Site.Request.RemoteIP
+ description: The remote IP address.
+ - contextPath: SigSciences.Corp.Site.Request.RemoteHostName
+ description: The remote hostname.
+ - contextPath: SigSciences.Corp.Site.Request.RemoteCountryCode
+ description: The remote country code.
+ - contextPath: SigSciences.Corp.Site.Request.UserAgent
+      description: The user agent of the request.
+ - contextPath: SigSciences.Corp.Site.Request.Timestamp
+ description: The timestamp (RFC3339 format).
+ - contextPath: SigSciences.Corp.Site.Request.Method
+ description: The HTTP method, for example, "PUT".
+ - contextPath: SigSciences.Corp.Site.Request.ServerName
+ description: The server name.
+ - contextPath: SigSciences.Corp.Site.Request.Protocol
+ description: ' The HTTP protocol, for example, "HTTP/1.1" .'
+ - contextPath: SigSciences.Corp.Site.Request.Path
+ description: The path.
+ - contextPath: SigSciences.Corp.Site.Request.URI
+ description: The URI.
+ - contextPath: SigSciences.Corp.Site.Request.ResponseCode
+ description: The HTTP response code.
+ - contextPath: SigSciences.Corp.Site.Request.ResponseSize
+      description: The HTTP response size.
+ - contextPath: SigSciences.Corp.Site.Request.ResponseMillis
+      description: The response time in milliseconds.
+ - contextPath: SigSciences.Corp.Site.Request.AgentResponseCode
+ description: The agent response code.
+ - contextPath: SigSciences.Corp.Site.Request.Tags
+ description: The array of relevant tags.
+ description: Retrieves requests from a given site according to a search query.
+ - name: sigsci-get-request-by-id
+ arguments:
+ - name: siteName
+ required: true
+      description: The name of the site to get the request from.
+ - name: request_id
+ required: true
+ description: The ID of the request to get.
+ outputs:
+ - contextPath: SigSciences.Corp.Site.Request.ID
+ description: The unique ID of the request.
+ - contextPath: SigSciences.Corp.Site.Request.ServerHostName
+      description: The server hostname.
+    - contextPath: SigSciences.Corp.Site.Request.RemoteIP
+      description: The remote IP address.
+    - contextPath: SigSciences.Corp.Site.Request.RemoteHostName
+      description: The remote hostname.
+ - contextPath: SigSciences.Corp.Site.Request.RemoteCountryCode
+ description: The remote country code.
+ - contextPath: SigSciences.Corp.Site.Request.UserAgent
+ description: The user agent of the request.
+ - contextPath: SigSciences.Corp.Site.Request.Timestamp
+      description: The timestamp (RFC3339 format).
+ - contextPath: SigSciences.Corp.Site.Request.Method
+ description: The HTTP method, for example, "PUT".
+ - contextPath: SigSciences.Corp.Site.Request.ServerName
+      description: The server name.
+ - contextPath: SigSciences.Corp.Site.Request.Protocol
+ description: ' The HTTP protocol, for example "HTTP/1.1".'
+ - contextPath: SigSciences.Corp.Site.Request.Path
+ description: The path.
+ - contextPath: SigSciences.Corp.Site.Request.URI
+ description: The URI.
+ - contextPath: SigSciences.Corp.Site.Request.ResponseCode
+      description: The HTTP response code.
+ - contextPath: SigSciences.Corp.Site.Request.ResponseSize
+      description: The HTTP response size.
+ - contextPath: SigSciences.Corp.Site.Request.ResponseMillis
+ description: The response time in milliseconds.
+ - contextPath: SigSciences.Corp.Site.Request.AgentResponseCode
+ description: The agent response code.
+ - contextPath: SigSciences.Corp.Site.Request.Tags
+ description: An array of relevant tags.
+ description: Retrieves a request by request ID.
+ isfetch: true
+ runonce: false
diff --git a/Integrations/SignalSciences/SignalSciences_description.md b/Integrations/SignalSciences/SignalSciences_description.md
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/Integrations/SignalSciences/SignalSciences_image.png b/Integrations/SignalSciences/SignalSciences_image.png
new file mode 100644
index 000000000000..d787b3ad5c75
Binary files /dev/null and b/Integrations/SignalSciences/SignalSciences_image.png differ
diff --git a/Integrations/Slack/CHANGELOG.md b/Integrations/Slack/CHANGELOG.md
new file mode 100644
index 000000000000..c144e1f4b28e
--- /dev/null
+++ b/Integrations/Slack/CHANGELOG.md
@@ -0,0 +1,28 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+Added support for changing the display name and icon for the Demisto bot in Slack.
+
+## [19.10.0] - 2019-10-03
+Added support for sending blocks (graphical attachments) in messages. For more information see the integration documentation.
+
+## [19.9.1] - 2019-09-18
+Direct messages - added support for multiline JSON in incident creation.
+
+## [19.9.0] - 2019-09-04
+ - Added 6 new commands:
+ - ***close-channel*** (now with optional channel argument)
+ - ***slack-create-channel***
+ - ***slack-invite-to-channel***
+ - ***slack-kick-from-channel***
+ - ***slack-rename-channel***
+ - ***slack-get-user-details***
+ - Added support for removing the Slack admin (API token owner) when mirroring an incident.
+
+
+## [19.8.2] - 2019-08-22
+#### New Integration
+Sends messages and notifications to your Slack Team.
diff --git a/Integrations/Slack/README.md b/Integrations/Slack/README.md
new file mode 100644
index 000000000000..b5f2350a88f1
--- /dev/null
+++ b/Integrations/Slack/README.md
@@ -0,0 +1,757 @@
+Send messages and notifications to your Slack Team.
+This integration was integrated and tested with version 4.0.1 of Slack, and is available from Demisto 5.0.
+
+## Use Cases
+
+* Mirror Demisto investigation War Rooms to Slack channels and vice versa.
+* Send notifications, messages, and files from Demisto to Slack.
+* Get notified in Slack about events in Demisto.
+* Manage Demisto incidents via direct messages to the Demisto bot.
+* Manage Slack channels (create, edit, invite, kick, close).
+
+## Detailed Description
+
+To grant Demisto access to Slack, the Demisto app has to be added to the relevant workspace. Do so by clicking the following [link](https://oproxy.demisto.ninja/slack).
+After adding the Demisto app, you will receive an access token and a bot token, which should be entered in the corresponding fields of the integration instance configuration.
+
+## Configure SlackV2 on Demisto
+
+1. Navigate to **Settings** > **Integrations** > **Servers & Services**.
+2. Search for SlackV2.
+3. Click **Add instance** to create and configure a new integration instance.
+    * **Name**: a textual name for the integration instance.
+    * **Slack API access token**: a token received by adding the application (starts with `xoxp`).
+    * **Slack API bot token**: a token received by adding the application (starts with `xoxb`).
+    * **Dedicated Slack channel to receive notifications**
+    * **Send notifications about incidents to the dedicated channel**
+    * **Minimum incident severity to send messages to Slack by**
+    * **Type of incidents created in Slack**
+    * **Allow external users to create incidents via DM**
+    * **Use system proxy settings**
+    * **Trust any certificate (not secure)**
+    * **Long running instance** - required for investigation mirroring and direct messages.
+4. Click **Test** to validate the new instance.
+
+## Commands
+
+You can execute these commands from the Demisto CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+
+1. mirror-investigation
+2. send-notification
+3. close-channel
+4. slack-send-file
+5. slack-set-channel-topic
+6. slack-create-channel
+7. slack-invite-to-channel
+8. slack-kick-from-channel
+9. slack-rename-channel
+10. slack-get-user-details
+
+### 1. mirror-investigation
+
+Mirrors the investigation between Slack and the Demisto War Room.
+
+#### Base Command
+
+    mirror-investigation
+
+#### Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| type | The mirroring type. Can be "all", which mirrors everything, "chat", which mirrors only chats (not commands), or "none", which stops all mirroring. | Optional |
+| autoclose | Whether the channel is auto-closed when an investigation is closed. Can be "true" or "false". Default is "true". | Optional |
+| direction | The mirroring direction. Can be "FromDemisto", "ToDemisto", or "Both". Default is "Both". | Optional |
+| mirrorTo | The channel type. Can be "channel" or "group". Default is "group". | Optional |
+| channelName | The name of the channel. Default is "incident-" followed by the investigation ID. | Optional |
+| channelTopic | The topic of the channel. | Optional |
+| kickAdmin | Whether to remove the Slack administrator (channel creator) from the mirrored channel. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+
+#### Command Example
+
+    !mirror-investigation type=all autoclose=true direction=Both channelName=my-mirror channelTopic=my-incident
+
+#### Human Readable Output
+
+Investigation mirrored successfully, channel: my-mirror
+
+### 2. send-notification
+
+Sends a message to a user, group, or channel.
+
+#### Base Command
+
+    send-notification
+
+#### Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| message | The message content. | Optional |
+| to | The user to whom to send the message. Can be either the username or the email address. | Optional |
+| channel | The name of the Slack channel to which to send the message. | Optional |
+| entry | An entry ID to send as a link. | Optional |
+| ignoreAddURL | Whether to include a URL to the relevant component in Demisto. Can be "true" or "false". Default is "false". | Optional |
+| threadID | The ID of the thread to which to reply. Can be retrieved from a previous send-notification command. | Optional |
+| blocks | A JSON string of Slack blocks to send in the message. | Optional |
+
+#### Context Output
+
+| Path | Type | Description |
+| --- | --- | --- |
+| Slack.Thread.ID | String | The Slack thread ID. |
+
+#### Command Example
+
+    !send-notification channel=general message="I love Demisto"
+
+#### Context Example
+
+    {
+        "Slack.Thread": {
+            "ID": "1567407432.000500"
+        }
+    }
+
+#### Human Readable Output
+
+Message sent to Slack successfully.
+Thread ID is: 1567407432.000500
+
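+The blocks argument expects a JSON array of Slack block objects. A minimal sketch of such an array (the section text here is illustrative, not part of the integration):
+
+    [
+        {
+            "type": "section",
+            "text": {
+                "type": "mrkdwn",
+                "text": "Hello from Demisto"
+            }
+        }
+    ]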
+
+### 3. close-channel
+
+Archives a Slack channel.
+
+#### Base Command
+
+    close-channel
+
+#### Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| channel | The name of the channel to archive. If not provided, the mirrored investigation channel is archived (if the channel exists). | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+
+#### Command Example
+
+    !close-channel
+
+#### Human Readable Output
+
+Channel successfully archived.
+
+### 4. slack-send-file
+
+Sends a file to a user, channel, or group. If not specified, the file is sent to the mirrored investigation channel (if the channel exists).
+
+#### Base Command
+
+    slack-send-file
+
+#### Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| file | The ID of the file entry to send. | Required |
+| to | The user to whom to send the file. Can be the username or the email address. | Optional |
+| group | The name of the Slack group (private channel) to which to send the file. | Optional |
+| channel | The name of the Slack channel to which to send the file. | Optional |
+| threadID | The ID of the thread to which to reply. Can be retrieved from a previous send-notification command. | Optional |
+| comment | A comment to add to the file. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+
+#### Command Example
+
+    !slack-send-file file=1@2 channel=general
+
+#### Human Readable Output
+
+File sent to Slack successfully.
+
+### 5. slack-set-channel-topic
+
+Sets the topic for a channel.
+
+#### Base Command
+
+    slack-set-channel-topic
+
+#### Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| channel | The channel name. If not specified, the topic of the mirrored investigation channel is set (if the channel exists). | Optional |
+| topic | The topic for the channel. | Required |
+
+#### Context Output
+
+There is no context output for this command.
+
+#### Command Example
+
+    !slack-set-channel-topic channel=general topic="Demisto rocks"
+
+#### Human Readable Output
+
+Topic successfully set.
+
+### 6. slack-create-channel
+
+Creates a channel in Slack.
+
+#### Base Command
+
+    slack-create-channel
+
+#### Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| type | The channel type. Can be "private" or "public". | Optional |
+| name | The name of the channel. | Required |
+| users | A CSV list of usernames or email addresses to invite to the channel. For example: "user1, user2...". | Optional |
+| topic | The topic for the channel. | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+
+#### Command Example
+
+    !slack-create-channel name=my-channel topic=cool-topic type=private users=demisto_integration
+
+#### Human Readable Output
+
+Successfully created the channel my-channel.
+
+### 7. slack-invite-to-channel
+
+Invites users to join a channel.
+
+#### Base Command
+
+    slack-invite-to-channel
+
+#### Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| users | A CSV list of usernames or email addresses to invite to join the channel. For example: "user1, user2...". | Required |
+| channel | The name of the channel to which to invite the users. If not specified, the mirrored investigation channel is used (if the channel exists). | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+
+#### Command Example
+
+    !slack-invite-to-channel channel=my-channel users=cool-user
+
+#### Human Readable Output
+
+Successfully invited users to the channel.
+
+### 8. slack-kick-from-channel
+
+Removes users from the specified channel.
+
+#### Base Command
+
+    slack-kick-from-channel
+
+#### Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| users | A CSV list of usernames or email addresses to remove from the channel. For example: "user1, user2...". | Required |
+| channel | The name of the channel from which to remove the users. If not specified, the mirrored investigation channel is used (if the channel exists). | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+
+#### Command Example
+
+    !slack-kick-from-channel channel=my-channel users=cool-user
+
+#### Human Readable Output
+
+Successfully kicked users from the channel.
+
+### 9. slack-rename-channel
+
+Renames a channel in Slack.
+
+#### Base Command
+
+    slack-rename-channel
+
+#### Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| name | The new name of the channel. | Required |
+| channel | The current name of the channel. If not specified, the mirrored investigation channel is used (if the channel exists). | Optional |
+
+#### Context Output
+
+There is no context output for this command.
+
+#### Command Example
+
+    !slack-rename-channel channel=my-channel name=your-channel
+
+#### Human Readable Output
+
+Channel renamed successfully.
+
+### 10. slack-get-user-details
+
+Gets details about a specified user.
+
+#### Base Command
+
+    slack-get-user-details
+
+#### Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| user | The Slack user (username or email). | Required |
+
+#### Context Output
+
+| Path | Type | Description |
+| --- | --- | --- |
+| Slack.User.ID | String | The ID of the user. |
+| Slack.User.Username | String | The username of the user. |
+| Slack.User.Name | String | The actual name of the user. |
+| Slack.User.DisplayName | String | The display name of the user. |
+| Slack.User.Email | String | The email address of the user. |
+
+#### Command Example
+
+    !slack-get-user-details user="cool_user"
+
+#### Context Example
+
+    {
+        "Slack.User": {
+            "ID": "UXXXXXXXX",
+            "Name": "Cool User",
+            "Username": "cool_user",
+            "Email": "cool_user@coolorg.com"
+        }
+    }
+
+#### Human Readable Output
+
+Details for Slack user: cool_user
+
+| ID | Username | Name | Email |
+| --- | --- | --- | --- |
+| UXXXXXXXX | cool_user | Cool User | cool_user@coolorg.com |
+
+## Additional Information
+
+### Direct messages
+
+You can send direct messages to the Demisto app on Slack using the following commands (an example follows the list):
+
+* list incidents [page x]
+* list my incidents [page x]
+* list my tasks
+* list closed incidents
+* new incident [details]
+* mirror incident-id
+
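+For example, to create an incident via a direct message (the incident name and type here are illustrative):
+
+    new incident name=My Incident type=Phishing
+
+or, using JSON (multiline JSON is supported):
+
+    new incident json={"name": "My Incident", "type": "Phishing"}
+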
+### Notifications
+
+The integration sends notifications according to the notification settings configured on the user preferences page in Demisto, and additionally to the predetermined channel dedicated to incident notifications (according to the integration configuration).
+
+### Blocks and interactive components
+
+The integration supports sending "blocks" to Slack. Blocks are a series of components that can be combined to create visually rich and interactive messages. In the integration, they can be sent as a JSON array. More information about blocks, including an interactive block builder, is available on the Slack API site.
+The integration also allows some level of interactivity. When a user interacts with an element in a Slack message, Slack sends a request with the relevant information.
+This request is processed and stored by a dedicated endpoint outside of Demisto, at the address https://oproxy.demisto.ninja. The integration currently supports polling this endpoint for user interactions that contain entitlement strings (these are used to perform actions in Demisto on behalf of outside users; see the SlackAsk script for an example). This means that to enable interactivity through the integration, a connection to this endpoint must be enabled.
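+For reference, when an entitlement is embedded, the integration expects the message or blocks argument to be a JSON object of the following shape (the field values here are illustrative; see the slack_send function in Slack.py):
+
+    {
+        "entitlement": "<entitlement string>",
+        "blocks": "<the block array to send>",
+        "reply": "Thank you for your response.",
+        "expiry": "2019-10-15 10:00:00",
+        "default_response": "NoResponse"
+    }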
+
+## Known Limitations
+
+* Channels are created by the Slack user who authorized the application, so this user is a member of every channel the integration creates. You cannot kick this user, but they can leave.
+* The integration can only manage channels that the application is a member of; other channels will not be found.
+* Currently, the integration does not support working without verifying SSL certificates. The **Trust any certificate** parameter applies only to the endpoint for interactive responses.
+
+## Troubleshooting
+
+If messages are not mirrored in Demisto, or direct messages are not handled properly, check the integration status on the integration page.
+
+If you are having further issues, contact us at support@demisto.com and attach the server logs.
diff --git a/Integrations/Slack/Slack.py b/Integrations/Slack/Slack.py
new file mode 100644
index 000000000000..c02b21114fac
--- /dev/null
+++ b/Integrations/Slack/Slack.py
@@ -0,0 +1,1545 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import slack
+from slack.errors import SlackApiError
+
+from distutils.util import strtobool
+import asyncio
+import concurrent
+import requests
+from typing import Tuple
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' CONSTANTS '''
+
+
+SEVERITY_DICT = {
+ 'Unknown': 0,
+ 'Low': 1,
+ 'Medium': 2,
+ 'High': 3,
+ 'Critical': 4
+}
+
+
+USER_TAG_EXPRESSION = '<@(.*?)>'
+CHANNEL_TAG_EXPRESSION = '<#(.*?)>'
+URL_EXPRESSION = r'<(https?://.+?)(?:\|.+)?>'
+GUID_REGEX = r'(\{){0,1}[0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12}(\}){0,1}'
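+# An entitlement string has the form <guid>@<incident ID>[|<task ID>] - see extract_entitlement() below.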
+ENTITLEMENT_REGEX = r'{}@(({})|\d+)(\|\S+)?\b'.format(GUID_REGEX, GUID_REGEX)
+MESSAGE_FOOTER = '\n**From Slack**'
+MIRROR_TYPE = 'mirrorEntry'
+INCIDENT_OPENED = 'incidentOpened'
+INCIDENT_NOTIFICATION_CHANNEL = 'incidentNotificationChannel'
+PLAYGROUND_INVESTIGATION_TYPE = 9
+WARNING_ENTRY_TYPE = 11
+ENDPOINT_URL = 'https://oproxy.demisto.ninja/slack-poll'
+POLL_INTERVAL_MINUTES = 1
+DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
+
+''' GLOBALS '''
+
+
+BOT_TOKEN: str
+ACCESS_TOKEN: str
+PROXY: str
+DEDICATED_CHANNEL: str
+CLIENT: slack.WebClient
+CHANNEL_CLIENT: slack.WebClient
+ALLOW_INCIDENTS: bool
+NOTIFY_INCIDENTS: bool
+INCIDENT_TYPE: str
+SEVERITY_THRESHOLD: int
+VERIFY_CERT: bool
+QUESTION_LIFETIME: int
+BOT_NAME: str
+BOT_ICON_URL: str
+
+''' HELPER FUNCTIONS '''
+
+
+def get_bot_id() -> str:
+ """
+ Gets the app bot ID
+ :return: The app bot ID
+ """
+ response = CLIENT.auth_test()
+
+ return response.get('user_id')
+
+
+def test_module():
+ """
+ Sends a test message to the dedicated slack channel.
+ """
+ if not DEDICATED_CHANNEL:
+ return_error('A dedicated slack channel must be provided.')
+ channel = get_conversation_by_name(DEDICATED_CHANNEL)
+ if not channel:
+ return_error('Dedicated channel not found.')
+ message = 'Hi there! This is a test message.'
+
+ kwargs = {
+ 'text': message
+ }
+ if BOT_NAME:
+ kwargs['username'] = BOT_NAME
+ if BOT_ICON_URL:
+ kwargs['icon_url'] = BOT_ICON_URL
+ CLIENT.chat_postMessage(channel=channel.get('id'), **kwargs)
+
+ demisto.results('ok')
+
+
+def get_user_by_name(user_to_search: str) -> dict:
+ """
+ Gets a slack user by a user name
+ :param user_to_search: The user name or email
+ :return: A slack user object
+ """
+
+ user: dict = {}
+ users: list = []
+ integration_context = demisto.getIntegrationContext()
+
+ user_to_search = user_to_search.lower()
+ if integration_context.get('users'):
+ users = json.loads(integration_context['users'])
+ users_filter = list(filter(lambda u: u.get('name', '').lower() == user_to_search
+ or u.get('profile', {}).get('email', '').lower() == user_to_search
+ or u.get('real_name', '').lower() == user_to_search, users))
+ if users_filter:
+ user = users_filter[0]
+ if not user:
+ response = CLIENT.users_list(limit=200)
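+        # Page through the workspace member list (Slack cursor-based pagination) until a matching user is found.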
+ while True:
+ workspace_users = response['members'] if response and response.get('members', []) else []
+ cursor = response.get('response_metadata', {}).get('next_cursor')
+ users_filter = list(filter(lambda u: u.get('name', '').lower() == user_to_search
+ or u.get('profile', {}).get('email', '').lower() == user_to_search
+ or u.get('real_name', '').lower() == user_to_search, workspace_users))
+ if users_filter:
+ break
+ if not cursor:
+ break
+ response = CLIENT.users_list(limit=200, cursor=cursor)
+
+ if users_filter:
+ user = users_filter[0]
+ users.append(user)
+ set_to_latest_integration_context('users', users)
+ else:
+ return {}
+
+ return user
+
+
+def search_slack_users(users) -> list:
+ """
+ Search given users in Slack
+ :param users: The users to find
+ :return: The slack users
+ """
+ slack_users = []
+
+ if not isinstance(users, list):
+ users = [users]
+
+ for user in users:
+ slack_user = get_user_by_name(user)
+ if not slack_user:
+ demisto.results({
+ 'Type': WARNING_ENTRY_TYPE,
+ 'Contents': 'User {} not found in Slack'.format(user),
+ 'ContentsFormat': formats['text']
+ })
+ else:
+ slack_users.append(slack_user)
+ return slack_users
+
+
+def find_mirror_by_investigation() -> dict:
+ mirror: dict = {}
+ investigation = demisto.investigation()
+ if investigation:
+ integration_context = demisto.getIntegrationContext()
+ if integration_context.get('mirrors'):
+ mirrors = json.loads(integration_context['mirrors'])
+ investigation_filter = list(filter(lambda m: investigation.get('id') == m['investigation_id'],
+ mirrors))
+ if investigation_filter:
+ mirror = investigation_filter[0]
+
+ return mirror
+
+
+def set_to_latest_integration_context(key: str, value, wait: bool = False):
+ if wait:
+ time.sleep(5)
+
+ integration_context = demisto.getIntegrationContext()
+
+ integration_context[key] = json.dumps(value)
+
+ demisto.setIntegrationContext(integration_context)
+
+
+''' MIRRORING '''
+
+
+async def get_slack_name(slack_id: str, client) -> str:
+ """
+ Get the slack name of a provided user or channel by its ID
+ :param client: The slack client
+ :param slack_id: The slack user or channel ID
+ :return: The slack user or channel name
+ """
+ if not slack_id:
+ return ''
+
+ integration_context = demisto.getIntegrationContext()
+ prefix = slack_id[0]
+ slack_name = ''
+
+ if prefix in ['C', 'D', 'G']:
+ slack_id = slack_id.split('|')[0]
+ conversation: dict = {}
+ if integration_context.get('conversations'):
+ conversations = list(filter(lambda c: c['id'] == slack_id,
+ json.loads(integration_context['conversations'])))
+ if conversations:
+ conversation = conversations[0]
+ if not conversation:
+ conversation = (await client.conversations_info(channel=slack_id)).get('channel', {})
+ slack_name = conversation.get('name', '')
+ elif prefix == 'U':
+ user: dict = {}
+ if integration_context.get('users'):
+ users = list(filter(lambda u: u['id'] == slack_id, json.loads(integration_context['users'])))
+ if users:
+ user = users[0]
+ if not user:
+ user = (await client.users_info(user=slack_id)).get('user', {})
+
+ slack_name = user.get('name', '')
+
+ return slack_name
+
+
+async def clean_message(message: str, client: slack.WebClient) -> str:
+ """
+ Prettifies a slack message - replaces tags and URLs with clean expressions
+ :param message: The slack message
+ :param client: The slack client
+ :return: The clean slack message
+ """
+ matches = re.findall(USER_TAG_EXPRESSION, message)
+ matches += re.findall(CHANNEL_TAG_EXPRESSION, message)
+ message = re.sub(USER_TAG_EXPRESSION, r'\1', message)
+ message = re.sub(CHANNEL_TAG_EXPRESSION, r'\1', message)
+ for match in matches:
+ slack_name = await get_slack_name(match, client)
+ message = message.replace(match, slack_name)
+
+ resolved_message = re.sub(URL_EXPRESSION, r'\1', message)
+
+ return resolved_message
+
+
+def invite_users_to_conversation(conversation_id: str, users_to_invite: list):
+ """
+ Invites users to a provided conversation using a provided slack client with a channel token.
+ :param conversation_id: The slack conversation ID to invite the users to.
+ :param users_to_invite: The user slack IDs to invite.
+ """
+ for user in users_to_invite:
+ try:
+ CHANNEL_CLIENT.conversations_invite(channel=conversation_id, users=user)
+ except SlackApiError as e:
+ message = str(e)
+ if message.find('cant_invite_self') == -1:
+ raise
+
+
+def kick_users_from_conversation(conversation_id: str, users_to_kick: list):
+ """
+ Kicks users from a provided conversation using a provided slack client with a channel token.
+ :param conversation_id: The slack conversation ID to kick the users from.
+ :param users_to_kick: The user slack IDs to kick.
+ """
+ for user in users_to_kick:
+ try:
+ CHANNEL_CLIENT.conversations_kick(channel=conversation_id, user=user)
+ except SlackApiError as e:
+ message = str(e)
+            if message.find('cant_kick_self') == -1:
+ raise
+
+
+def mirror_investigation():
+ """
+ Updates the integration context with a new or existing mirror.
+ """
+ mirror_type = demisto.args().get('type', 'all')
+ auto_close = demisto.args().get('autoclose', 'true')
+ mirror_direction = demisto.args().get('direction', 'both')
+ mirror_to = demisto.args().get('mirrorTo', 'group')
+ channel_name = demisto.args().get('channelName', '')
+ channel_topic = demisto.args().get('channelTopic', '')
+ kick_admin = bool(strtobool(demisto.args().get('kickAdmin', 'false')))
+
+ investigation = demisto.investigation()
+
+ if investigation.get('type') == PLAYGROUND_INVESTIGATION_TYPE:
+ return_error('Can not perform this action in playground.')
+
+ integration_context = demisto.getIntegrationContext()
+
+ if not integration_context or not integration_context.get('mirrors', []):
+ mirrors: list = []
+ else:
+ mirrors = json.loads(integration_context['mirrors'])
+ if not integration_context or not integration_context.get('conversations', []):
+ conversations: list = []
+ else:
+ conversations = json.loads(integration_context['conversations'])
+
+ investigation_id = investigation.get('id')
+ users = investigation.get('users')
+ slack_users = search_slack_users(users)
+ send_first_message = False
+ users_to_invite = list(map(lambda u: u.get('id'), slack_users))
+ current_mirror = list(filter(lambda m: m['investigation_id'] == investigation_id, mirrors))
+ channel_filter: list = []
+ if channel_name:
+ channel_filter = list(filter(lambda m: m['channel_name'] == channel_name, mirrors))
+
+ if not current_mirror:
+ channel_name = channel_name or 'incident-{}'.format(investigation_id)
+
+ if not channel_filter:
+ if mirror_to == 'channel':
+ conversation = CHANNEL_CLIENT.channels_create(name=channel_name).get('channel', {})
+ else:
+ conversation = CHANNEL_CLIENT.groups_create(name=channel_name).get('group', {})
+
+ conversation_name = conversation.get('name')
+ conversation_id = conversation.get('id')
+ conversations.append(conversation)
+
+ send_first_message = True
+ else:
+ mirrored_channel = channel_filter[0]
+ conversation_id = mirrored_channel['channel_id']
+ conversation_name = mirrored_channel['channel_name']
+
+ mirror = {
+ 'channel_id': conversation_id,
+ 'channel_name': conversation_name,
+ 'investigation_id': investigation.get('id'),
+ 'mirror_type': mirror_type,
+ 'mirror_direction': mirror_direction,
+ 'mirror_to': mirror_to,
+ 'auto_close': bool(strtobool(auto_close)),
+ 'mirrored': False
+ }
+ else:
+ mirror = mirrors.pop(mirrors.index(current_mirror[0]))
+ conversation_id = mirror['channel_id']
+ if mirror_type:
+ mirror['mirror_type'] = mirror_type
+ if auto_close:
+ mirror['auto_close'] = bool(strtobool(auto_close))
+ if mirror_direction:
+ mirror['mirror_direction'] = mirror_direction
+ if mirror_to and mirror['mirror_to'] != mirror_to:
+ return_error('Cannot change the Slack channel type from Demisto.')
+ if channel_name:
+ return_error('Cannot change the Slack channel name.')
+ if channel_topic:
+ return_error('Cannot change the Slack channel topic.')
+ conversation_name = mirror['channel_name']
+ mirror['mirrored'] = False
+
+ set_topic = False
+ if channel_topic:
+ set_topic = True
+ else:
+ mirror_name = 'incident-{}'.format(investigation_id)
+ channel_filter = list(filter(lambda m: m['channel_name'] == conversation_name, mirrors))
+ if 'channel_topic' in mirror:
+ channel_topic = mirror['channel_topic']
+ elif channel_filter:
+ channel_mirror = channel_filter[0]
+ channel_topic = channel_mirror['channel_topic']
+ else:
+ channel_topic = ''
+ mirrored_investigations_ids = list(map(lambda m: 'incident-{}'
+ .format(m['investigation_id']), channel_filter))
+ if not channel_topic or channel_topic.find('incident-') != -1:
+ new_topic = ', '.join(mirrored_investigations_ids + [mirror_name])
+ if channel_topic != new_topic:
+ channel_topic = new_topic
+ set_topic = True
+
+ if set_topic:
+ CHANNEL_CLIENT.conversations_setTopic(channel=conversation_id, topic=channel_topic)
+ mirror['channel_topic'] = channel_topic
+
+ if mirror_type != 'none':
+ if integration_context.get('bot_id'):
+ bot_id = integration_context['bot_id']
+ else:
+ bot_id = get_bot_id()
+ users_to_invite += [bot_id]
+ invite_users_to_conversation(conversation_id, users_to_invite)
+
+ integration_context['bot_id'] = bot_id
+
+ mirrors.append(mirror)
+
+ set_to_latest_integration_context('mirrors', mirrors)
+ set_to_latest_integration_context('conversations', conversations)
+
+ if kick_admin:
+ CHANNEL_CLIENT.conversations_leave(channel=conversation_id)
+ if send_first_message:
+ server_links = demisto.demistoUrls()
+ server_link = server_links.get('server')
+ message = ('This channel was created to mirror incident {}. \n View it on: {}#/WarRoom/{}'
+ .format(investigation_id, server_link, investigation_id))
+ kwargs = {
+ 'text': message
+ }
+ if BOT_NAME:
+ kwargs['username'] = BOT_NAME
+ if BOT_ICON_URL:
+ kwargs['icon_url'] = BOT_ICON_URL
+ CLIENT.chat_postMessage(channel=conversation_id, **kwargs)
+
+ demisto.results('Investigation mirrored successfully, channel: {}'.format(conversation_name))
+
+
+def long_running_loop():
+ """
+ Runs in a long running container - checking for newly mirrored investigations and answered questions.
+ """
+ while True:
+ error = ''
+ try:
+ check_for_mirrors()
+ check_for_answers(datetime.utcnow())
+ except requests.exceptions.ConnectionError as e:
+ error = 'Could not connect to the Slack endpoint: {}'.format(str(e))
+ except Exception as e:
+ error = 'An error occurred: {}'.format(str(e))
+ demisto.error(error)
+ finally:
+ if error:
+ demisto.updateModuleHealth(error)
+ time.sleep(5)
+
+
+def check_for_answers(now: datetime):
+ """
+ Checks for answered questions
+ :param now: The current date.
+ """
+
+ integration_context = demisto.getIntegrationContext()
+ questions = integration_context.get('questions', [])
+ users = integration_context.get('users', [])
+ if questions:
+ questions = json.loads(questions)
+ if users:
+ users = json.loads(users)
+ now_string = datetime.strftime(now, DATE_FORMAT)
+
+ for question in questions:
+ if question.get('last_poll_time'):
+ if question.get('expiry'):
+ # Check if the question expired - if it did, answer it with the default response and remove it
+ expiry = datetime.strptime(question['expiry'], DATE_FORMAT)
+ if expiry < now:
+ answer_question(question.get('default_response'), question, questions)
+ continue
+            # Check if enough time has passed (determined by the POLL_INTERVAL_MINUTES parameter)
+            # since the last polling time. If not, continue to the next question until it has.
+ last_poll_time = datetime.strptime(question['last_poll_time'], DATE_FORMAT)
+ delta = now - last_poll_time
+ minutes = delta.total_seconds() / 60
+ if minutes < POLL_INTERVAL_MINUTES:
+ continue
+ demisto.info('Slack - polling for an answer for entitlement {}'.format(question.get('entitlement')))
+ question['last_poll_time'] = now_string
+ headers = {'Accept': 'application/json', 'Content-Type': 'application/json'}
+ body = {
+ 'token': BOT_TOKEN,
+ 'entitlement': question.get('entitlement')
+ }
+ res = requests.post(ENDPOINT_URL, data=json.dumps(body), headers=headers, verify=VERIFY_CERT)
+ if res.status_code != 200:
+ demisto.error('Slack - failed to poll for answers: {}, status code: {}'
+ .format(res.content, res.status_code))
+ continue
+ answer: dict = {}
+ try:
+ answer = res.json()
+ except Exception:
+ demisto.info('Slack - Could not parse response for entitlement {}: {}'
+ .format(question.get('entitlement'), res.content))
+ if not answer:
+ continue
+ payload_json: str = answer.get('payload', '')
+ if not payload_json:
+ continue
+ payload = json.loads(payload_json)
+
+ actions = payload.get('actions', [])
+ if actions:
+ demisto.info('Slack - received answer from user for entitlement {}.'.format(question.get('entitlement')))
+ user_id = payload.get('user', {}).get('id')
+ user_filter = list(filter(lambda u: u['id'] == user_id, users))
+ if user_filter:
+ user = user_filter[0]
+ else:
+ user = CLIENT.users_info(user=user_id).get('user', {})
+ users.append(user)
+ set_to_latest_integration_context('users', users)
+
+ answer_question(actions[0].get('text', {}).get('text'), question, questions,
+ user.get('profile', {}).get('email'))
+
+ questions = list(filter(lambda q: q.get('remove', False) is False, questions))
+ set_to_latest_integration_context('questions', questions)
+
+
+def answer_question(text: str, question: dict, questions: list, email: str = ''):
+ content, guid, incident_id, task_id = extract_entitlement(question.get('entitlement', ''), text)
+ try:
+ demisto.handleEntitlementForUser(incident_id, guid, email, content, task_id)
+ except Exception as e:
+ demisto.error('Failed handling entitlement {}: {}'.format(question.get('entitlement'), str(e)))
+ question['remove'] = True
+ set_to_latest_integration_context('questions', questions)
+
+
+def check_for_mirrors():
+ """
+ Checks for newly created mirrors and updates the server accordingly
+ """
+ integration_context = demisto.getIntegrationContext()
+ if integration_context.get('mirrors'):
+ mirrors = json.loads(integration_context['mirrors'])
+ for mirror in mirrors:
+ if not mirror['mirrored']:
+ demisto.info('Mirroring: {}'.format(mirror['investigation_id']))
+ mirror = mirrors.pop(mirrors.index(mirror))
+ if mirror['mirror_to'] and mirror['mirror_direction'] and mirror['mirror_type']:
+ investigation_id = mirror['investigation_id']
+ mirror_type = mirror['mirror_type']
+ auto_close = mirror['auto_close']
+ direction = mirror['mirror_direction']
+ if isinstance(auto_close, str):
+ auto_close = bool(strtobool(auto_close))
+ demisto.mirrorInvestigation(investigation_id, '{}:{}'.format(mirror_type, direction), auto_close)
+ mirror['mirrored'] = True
+ mirrors.append(mirror)
+ else:
+ demisto.info('Could not mirror {}'.format(mirror['investigation_id']))
+
+ set_to_latest_integration_context('mirrors', mirrors)
+
+
+def extract_entitlement(entitlement: str, text: str) -> Tuple[str, str, str, str]:
+ """
+ Extracts entitlement components from an entitlement string
+ :param entitlement: The entitlement itself
+ :param text: The actual reply text
+ :return: Entitlement components
+ """
+ parts = entitlement.split('@')
+ guid = parts[0]
+ id_and_task = parts[1].split('|')
+ incident_id = id_and_task[0]
+ task_id = ''
+ if len(id_and_task) > 1:
+ task_id = id_and_task[1]
+ content = text.replace(entitlement, '', 1)
+
+ return content, guid, incident_id, task_id
+
+
+async def slack_loop():
+ """
+ Starts a Slack RTM client while checking the connection.
+ """
+ while True:
+ loop = asyncio.get_running_loop()
+ rtm_client = None
+ try:
+ rtm_client = slack.RTMClient(
+ token=BOT_TOKEN,
+ run_async=True,
+ loop=loop,
+ auto_reconnect=False
+ )
+ client_future = rtm_client.start()
+ while True:
+ await asyncio.sleep(10)
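+                # _websocket is a private RTMClient attribute - used here to detect a dropped connection.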
+ if rtm_client._websocket is None or rtm_client._websocket.closed or client_future.done():
+ ex = client_future.exception()
+ if ex:
+ demisto.error('Slack client raised an exception: {}'.format(ex))
+ demisto.info('Slack - websocket is closed or done')
+ break
+ except Exception as e:
+ error = 'Slack client raised an exception: {}'.format(e)
+ await handle_listen_error(error)
+ finally:
+ # If we got here, the websocket is closed or the client can't connect. Will try to connect every 5 seconds.
+ if rtm_client and not rtm_client._stopped:
+ rtm_client.stop()
+ await asyncio.sleep(5)
+
+
+async def handle_listen_error(error: str):
+ """
+ Logs an error and updates the module health accordingly.
+ :param error: The error string.
+ """
+ demisto.error(error)
+ demisto.updateModuleHealth(error)
+
+
+async def start_listening():
+ """
+ Starts a Slack RTM client and checks for mirrored incidents.
+ """
+ executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
+ loop = asyncio.get_running_loop()
+ loop.run_in_executor(executor, long_running_loop)
+ await slack_loop()
+
+
+async def handle_dm(user: dict, text: str, client: slack.WebClient):
+ """
+ Handles a direct message sent to the bot
+ :param user: The user who sent the message
+ :param text: The message text
+ :param client: The Slack client
+ :return: Text to return to the user
+ """
+ demisto.info('Slack - handling direct message.')
+ message: str = text.lower()
+ if message.find('incident') != -1 and (message.find('create') != -1
+ or message.find('open') != -1
+ or message.find('new') != -1):
+ user_email = user.get('profile', {}).get('email')
+ if user_email:
+ demisto_user = demisto.findUser(email=user_email)
+ else:
+ demisto_user = demisto.findUser(username=user.get('name'))
+
+ if not demisto_user and not ALLOW_INCIDENTS:
+ data = 'You are not allowed to create incidents.'
+ else:
+ try:
+ data = await translate_create(demisto_user, text)
+ except Exception as e:
+ data = 'Failed creating incidents: {}'.format(str(e))
+ else:
+ try:
+ data = demisto.directMessage(text, user.get('name'), user.get('profile', {}).get('email'), ALLOW_INCIDENTS)
+ except Exception as e:
+ data = str(e)
+
+ if not data:
+ data = 'Sorry, I could not perform the selected operation.'
+ im = await client.im_open(user=user.get('id'))
+ channel = im.get('channel', {}).get('id')
+ kwargs = {
+ 'text': data
+ }
+ if BOT_NAME:
+ kwargs['username'] = BOT_NAME
+ if BOT_ICON_URL:
+ kwargs['icon_url'] = BOT_ICON_URL
+ await client.chat_postMessage(channel=channel, **kwargs)
+
+
+async def translate_create(demisto_user: dict, message: str) -> str:
+ """
+ Processes an incident creation message
+ :param demisto_user: The Demisto user associated with the message (if exists)
+ :param message: The creation message
+ :return: Creation result
+ """
+ json_pattern = r'(?<=json=).*'
+ name_pattern = r'(?<=name=).*'
+ type_pattern = r'(?<=type=).*'
+ message = message.replace("\n", '').replace('`', '')
+ json_match = re.search(json_pattern, message)
+ created_incident = None
+ data = ''
+ if json_match:
+ if re.search(name_pattern, message) or re.search(type_pattern, message):
+ data = 'No other properties other than json should be specified.'
+ else:
+ incidents_json = json_match.group()
+ incidents = json.loads(incidents_json.replace('“', '"').replace('â€', '"'))
+ if not isinstance(incidents, list):
+ incidents = [incidents]
+ created_incident = await create_incidents(demisto_user, incidents)
+
+ if not created_incident:
+ data = 'Failed creating incidents.'
+ else:
+ name_match = re.search(name_pattern, message)
+ if not name_match:
+ data = 'Please specify arguments in the following manner: name= type=[type] or json=.'
+ else:
+ incident_name = re.sub('type=.*', '', name_match.group()).strip()
+ incident_type = ''
+
+ type_match = re.search(type_pattern, message)
+ if type_match:
+ incident_type = re.sub('name=.*', '', type_match.group()).strip()
+
+ incident = {'name': incident_name}
+
+ incident_type = incident_type or INCIDENT_TYPE
+ if incident_type:
+ incident['type'] = incident_type
+
+ created_incident = await create_incidents(demisto_user, [incident])
+ if not created_incident:
+ data = 'Failed creating incidents.'
+
+ if created_incident:
+ if isinstance(created_incident, list):
+ created_incident = created_incident[0]
+ server_links = demisto.demistoUrls()
+ server_link = server_links.get('server')
+ data = ('Successfully created incident {}.\n View it on: {}#/WarRoom/{}'
+ .format(created_incident['name'], server_link, created_incident['id']))
+
+ return data
+
+
+async def create_incidents(demisto_user: dict, incidents: list) -> dict:
+ """
+ Creates incidents according to a provided JSON object
+ :param demisto_user: The demisto user associated with the request (if exists)
+ :param incidents: The incidents JSON
+ :return: The creation result
+ """
+ if demisto_user:
+ data = demisto.createIncidents(incidents, userID=demisto_user['id'])
+ else:
+ data = demisto.createIncidents(incidents)
+
+ return data
+
+
+@slack.RTMClient.run_on(event='message')
+async def listen(**payload):
+ """
+ Listens to Slack RTM messages
+ :param payload: The message payload
+ """
+ data: dict = payload.get('data', {})
+ data_type: str = payload.get('type', '')
+ client: slack.WebClient = payload.get('web_client')
+
+ if data_type == 'error':
+ error = payload.get('error', {})
+ await handle_listen_error('Slack API has thrown an error. Code: {}, Message: {}.'
+ .format(error.get('code'), error.get('msg')))
+ return
+ try:
+ subtype = data.get('subtype', '')
+ text = data.get('text', '')
+ user_id = data.get('user', '')
+ channel = data.get('channel', '')
+ message_bot_id = data.get('bot_id', '')
+ thread = data.get('thread_ts', '')
+ message = data.get('message', {})
+
+ if subtype == 'bot_message' or message_bot_id or message.get('subtype') == 'bot_message':
+ return
+
+ integration_context = demisto.getIntegrationContext()
+ user = await get_user_by_id_async(client, integration_context, user_id)
+ entitlement_reply = await check_and_handle_entitlement(text, user, thread)
+ if entitlement_reply:
+ kwargs = {
+ 'text': entitlement_reply,
+ 'thread_ts': thread
+ }
+ if BOT_NAME:
+ kwargs['username'] = BOT_NAME
+ if BOT_ICON_URL:
+ kwargs['icon_url'] = BOT_ICON_URL
+ await client.chat_postMessage(channel=channel, **kwargs)
+ elif channel and channel[0] == 'D':
+ # DM
+ await handle_dm(user, text, client)
+ else:
+ if not integration_context or 'mirrors' not in integration_context:
+ return
+
+ channel_id = data.get('channel')
+ mirrors = json.loads(integration_context['mirrors'])
+ mirror_filter = list(filter(lambda m: m['channel_id'] == channel_id, mirrors))
+ if not mirror_filter:
+ return
+
+ for mirror in mirror_filter:
+ if mirror['mirror_direction'] == 'FromDemisto' or mirror['mirror_type'] == 'none':
+ return
+
+ if not mirror['mirrored']:
+ # In case the investigation is not mirrored yet
+ mirror = mirrors.pop(mirrors.index(mirror))
+ if mirror['mirror_to'] and mirror['mirror_direction'] and mirror['mirror_type']:
+ investigation_id = mirror['investigation_id']
+ mirror_type = mirror['mirror_type']
+ auto_close = mirror['auto_close']
+ direction = mirror['mirror_direction']
+ if isinstance(auto_close, str):
+ auto_close = bool(strtobool(auto_close))
+ demisto.info('Mirroring: {}'.format(investigation_id))
+ demisto.mirrorInvestigation(investigation_id, '{}:{}'.format(mirror_type, direction),
+ auto_close)
+ mirror['mirrored'] = True
+ mirrors.append(mirror)
+ set_to_latest_integration_context('mirrors', mirrors)
+
+ investigation_id = mirror['investigation_id']
+ await handle_text(client, investigation_id, text, user)
+ # Reset module health
+ demisto.updateModuleHealth("")
+ except Exception as e:
+ await handle_listen_error('Error occurred while listening to Slack: {}'.format(str(e)))
+
+
+async def get_user_by_id_async(client, integration_context, user_id):
+ user: dict = {}
+ users: list = []
+ if integration_context.get('users'):
+ users = json.loads(integration_context['users'])
+ user_filter = list(filter(lambda u: u['id'] == user_id, users))
+ if user_filter:
+ user = user_filter[0]
+ if not user:
+ user = (await client.users_info(user=user_id)).get('user', {})
+ users.append(user)
+ set_to_latest_integration_context('users', users)
+
+ return user
+
+
+async def handle_text(client: slack.WebClient, investigation_id: str, text: str, user: dict):
+ """
+ Handles text received in the Slack workspace (not DM)
+ :param client: The Slack client
+ :param investigation_id: The mirrored investigation ID
+ :param text: The received text
+ :param user: The sender
+ """
+ demisto.info('Slack - adding entry to incident {}'.format(investigation_id))
+ if text:
+ demisto.addEntry(id=investigation_id,
+ entry=await clean_message(text, client),
+ username=user.get('name', ''),
+ email=user.get('profile', {}).get('email', ''),
+ footer=MESSAGE_FOOTER
+ )
+
+
+async def check_and_handle_entitlement(text: str, user: dict, thread_id: str) -> str:
+ """
+ Handles an entitlement message (a reply to a question)
+ :param text: The message text
+ :param user: The user who sent the reply
+ :param thread_id: The thread ID
+ :return: If the message contains entitlement, return a reply.
+ """
+
+ entitlement_match = re.search(ENTITLEMENT_REGEX, text)
+ if entitlement_match:
+ demisto.info('Slack - handling entitlement in message.')
+ content, guid, incident_id, task_id = extract_entitlement(entitlement_match.group(), text)
+ demisto.handleEntitlementForUser(incident_id, guid, user.get('profile', {}).get('email'), content, task_id)
+
+ return 'Thank you for your response.'
+ else:
+ integration_context = demisto.getIntegrationContext()
+ questions = integration_context.get('questions', [])
+ if questions and thread_id:
+ questions = json.loads(questions)
+ question_filter = list(filter(lambda q: q.get('thread') == thread_id, questions))
+ if question_filter:
+ demisto.info('Slack - handling entitlement in thread.')
+ entitlement = question_filter[0].get('entitlement')
+ reply = question_filter[0].get('reply', 'Thank you for your response.')
+ content, guid, incident_id, task_id = extract_entitlement(entitlement, text)
+ demisto.handleEntitlementForUser(incident_id, guid, user.get('profile', {}).get('email'), content,
+ task_id)
+ questions.remove(question_filter[0])
+ set_to_latest_integration_context('questions', questions)
+
+ return reply
+
+ return ''
+
+
+''' SEND '''
+
+
+def get_conversation_by_name(conversation_name: str) -> dict:
+ """
+ Get a slack conversation by its name
+ :param conversation_name: The conversation name
+ :return: The slack conversation
+ """
+ response = CLIENT.conversations_list(types='private_channel,public_channel', limit=200)
+ conversation: dict = {}
+ while True:
+ conversations = response['channels'] if response and response.get('channels') else []
+ cursor = response.get('response_metadata', {}).get('next_cursor')
+ conversation_filter = list(filter(lambda c: c.get('name') == conversation_name, conversations))
+ if conversation_filter:
+ break
+ if not cursor:
+ break
+ response = CLIENT.conversations_list(types='private_channel,public_channel', limit=200, cursor=cursor)
+
+ if conversation_filter:
+ conversation = conversation_filter[0]
+
+ return conversation
+
+
+def slack_send():
+ """
+ Sends a message to slack
+ """
+ message = demisto.args().get('message', '')
+ to = demisto.args().get('to')
+ channel = demisto.args().get('channel')
+ group = demisto.args().get('group')
+ message_type = demisto.args().get('messageType', '') # From server
+ original_message = demisto.args().get('originalMessage', '') # From server
+ entry = demisto.args().get('entry')
+ ignore_add_url = demisto.args().get('ignoreAddURL', False) or demisto.args().get('IgnoreAddURL', False)
+ thread_id = demisto.args().get('threadID', '')
+ severity = demisto.args().get('severity') # From server
+ blocks = demisto.args().get('blocks')
+ entitlement = ''
+
+ if message_type == MIRROR_TYPE and original_message.find(MESSAGE_FOOTER) != -1:
+ # return so there will not be a loop of messages
+ return
+
+    if to and (group or channel):
+ return_error('Only one destination can be provided.')
+
+ if severity:
+ try:
+ severity = int(severity)
+ except Exception:
+ severity = None
+
+ if channel == INCIDENT_NOTIFICATION_CHANNEL or (not channel and message_type == INCIDENT_OPENED):
+ channel = DEDICATED_CHANNEL
+
+ if channel == DEDICATED_CHANNEL and ((severity is not None and severity < SEVERITY_THRESHOLD)
+ or not NOTIFY_INCIDENTS):
+ channel = None
+
+ if not (to or group or channel):
+ return_error('Either a user, group or channel must be provided.')
+
+ reply = ''
+ expiry = ''
+ default_response = ''
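+    # An entitlement may arrive embedded in a JSON payload together with the message/blocks and question metadata (reply, expiry, default_response).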
+ if blocks:
+ entitlement_match = re.search(ENTITLEMENT_REGEX, blocks)
+ if entitlement_match:
+ try:
+ parsed_message = json.loads(blocks)
+ entitlement = parsed_message.get('entitlement')
+ blocks = parsed_message.get('blocks')
+ reply = parsed_message.get('reply')
+ expiry = parsed_message.get('expiry')
+ default_response = parsed_message.get('default_response')
+ except Exception:
+ demisto.info('Slack - could not parse JSON from entitlement blocks.')
+ elif message:
+ entitlement_match = re.search(ENTITLEMENT_REGEX, message)
+ if entitlement_match:
+ try:
+ parsed_message = json.loads(message)
+ entitlement = parsed_message.get('entitlement')
+ message = parsed_message.get('message')
+ reply = parsed_message.get('reply')
+ expiry = parsed_message.get('expiry')
+ default_response = parsed_message.get('default_response')
+ except Exception:
+ demisto.info('Slack - could not parse JSON from entitlement message.')
+
+ response = slack_send_request(to, channel, group, entry, ignore_add_url, thread_id, message=message, blocks=blocks)
+
+ if response:
+ thread = response.get('ts')
+ if entitlement:
+ save_entitlement(entitlement, thread, reply, expiry, default_response)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': 'Message sent to Slack successfully.\nThread ID is: {}'.format(thread),
+ 'ContentsFormat': formats['text'],
+ 'EntryContext': {
+ 'Slack.Thread(val.ID===obj.ID)': {
+ 'ID': thread
+ },
+ }
+ })
+ else:
+ demisto.results('Could not send the message to Slack.')
+
+
+def save_entitlement(entitlement, thread, reply, expiry, default_response):
+ """
+ Saves an entitlement with its thread
+ :param entitlement: The entitlement
+ :param thread: The thread
+ :param reply: The reply to send to the user.
+ :param expiry: The question expiration date.
+ :param default_response: The response to send if the question times out.
+ """
+ integration_context = demisto.getIntegrationContext()
+ questions = integration_context.get('questions', [])
+ if questions:
+ questions = json.loads(integration_context['questions'])
+ questions.append({
+ 'thread': thread,
+ 'entitlement': entitlement,
+ 'reply': reply,
+ 'expiry': expiry,
+ 'default_response': default_response
+ })
+
+ set_to_latest_integration_context('questions', questions)
+
+
+def slack_send_file():
+ """
+ Sends a file to slack
+ """
+ to = demisto.args().get('to')
+ channel = demisto.args().get('channel')
+ group = demisto.args().get('group')
+ entry_id = demisto.args().get('file')
+ thread_id = demisto.args().get('threadID')
+ comment = demisto.args().get('comment', '')
+
+ if not (to or channel or group):
+ mirror = find_mirror_by_investigation()
+ if mirror:
+ channel = mirror.get('channel_name')
+
+ if not (to or channel or group):
+ return_error('Either a user, group or channel must be provided.')
+
+ file_path = demisto.getFilePath(entry_id)
+ with open(file_path['path'], 'rb') as file:
+ data = file.read()
+
+ file = {
+ 'data': data,
+ 'name': file_path['name'],
+ 'comment': comment
+ }
+
+ response = slack_send_request(to, channel, group, thread_id=thread_id, file=file)
+ if response:
+ demisto.results('File sent to Slack successfully.')
+ else:
+ demisto.results('Could not send the file to Slack.')
+
+
+def send_message(destinations: list, entry: str, ignore_add_url: bool, integration_context: dict, message: str,
+ thread_id: str, blocks: str):
+ """
+ Sends a message to Slack.
+ :param destinations: The destinations to send to.
+ :param entry: A WarRoom entry to send.
+ :param ignore_add_url: Do not add a Demisto URL to the message.
+ :param integration_context: Current integration context.
+ :param message: The message to send.
+ :param thread_id: The Slack thread ID to send the message to.
+ :param blocks: Message blocks to send
+ :return: The Slack send response.
+ """
+ if not message:
+ if blocks:
+ message = 'New message from SOC Bot'
+ # This is shown in the notification bubble from Slack
+ else:
+ message = '\n'
+
+ if message and not blocks:
+ if ignore_add_url and isinstance(ignore_add_url, str):
+ ignore_add_url = bool(strtobool(ignore_add_url))
+ if not ignore_add_url:
+ investigation = demisto.investigation()
+ server_links = demisto.demistoUrls()
+ if investigation:
+ if investigation.get('type') != PLAYGROUND_INVESTIGATION_TYPE:
+ link = server_links.get('warRoom')
+ if link:
+ if entry:
+ link += '/' + entry
+ message += '\n{} {}'.format('View it on:', link)
+ else:
+ link = server_links.get('server', '')
+ if link:
+ message += '\n{} {}'.format('View it on:', link + '#/home')
+ try:
+ response = send_message_to_destinations(destinations, message, thread_id, blocks)
+ except SlackApiError as e:
+ if str(e).find('not_in_channel') == -1 and str(e).find('channel_not_found') == -1:
+ raise
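+        # The bot is not a member of the target channel - invite it and retry the send once.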
+ bot_id = integration_context.get('bot_id')
+ if not bot_id:
+ bot_id = get_bot_id()
+ for dest in destinations:
+ invite_users_to_conversation(dest, [bot_id])
+ response = send_message_to_destinations(destinations, message, thread_id, blocks)
+ return response
+
+
+def send_message_to_destinations(destinations: list, message: str, thread_id: str, blocks: str = '') -> dict:
+ """
+ Sends a message to provided destinations Slack.
+ :param destinations: Destinations to send to.
+ :param message: The message to send.
+ :param thread_id: Slack thread ID to send to.
+ :param blocks: Message blocks to send
+ :return: The Slack send response.
+ """
+ response: dict = {}
+ kwargs: dict = {}
+
+ if message:
+ kwargs['text'] = message
+ if blocks:
+ block_list = json.loads(blocks)
+ kwargs['blocks'] = block_list
+ if thread_id:
+ kwargs['thread_ts'] = thread_id
+ if BOT_NAME:
+ kwargs['username'] = BOT_NAME
+ if BOT_ICON_URL:
+ kwargs['icon_url'] = BOT_ICON_URL
+
+ for destination in destinations:
+ response = CLIENT.chat_postMessage(channel=destination, **kwargs)
+ return response
+
+
+def send_file(destinations: list, file: dict, integration_context: dict, thread_id: str) -> dict:
+ """
+ Sends a file to Slack.
+ :param destinations: Destinations to send the file to.
+ :param file: The file to send.
+ :param integration_context: The current integration context.
+ :param thread_id: A Slack thread to send to.
+ :return: The Slack send response.
+ """
+ try:
+ response = send_file_to_destinations(destinations, file, thread_id)
+ except SlackApiError as e:
+ if str(e).find('not_in_channel') == -1 and str(e).find('channel_not_found') == -1:
+ raise
+ bot_id = integration_context.get('bot_id')
+ if not bot_id:
+ bot_id = get_bot_id()
+ integration_context['bot_id'] = bot_id
+ for dest in destinations:
+ invite_users_to_conversation(dest, [bot_id])
+ response = send_file_to_destinations(destinations, file, thread_id)
+ return response
+
+
+def send_file_to_destinations(destinations: list, file: dict, thread_id: str) -> dict:
+ """
+ Sends a file to provided destinations in Slack.
+ :param destinations: The destinations to send to.
+ :param file: The file to send.
+ :param thread_id: A thread ID to send to.
+ :return: The Slack send response.
+ """
+ response: dict = {}
+ kwargs = {
+ 'filename': file['name'],
+ 'initial_comment': file['comment']
+ }
+ for destination in destinations:
+ kwargs['channels'] = destination
+ if thread_id:
+ kwargs['thread_ts'] = thread_id
+
+ response = CLIENT.files_upload(file=file['data'], **kwargs)
+ return response
+
+
+def slack_send_request(to: str, channel: str, group: str, entry: str = '', ignore_add_url: bool = False,
+ thread_id: str = '', message: str = '', blocks: str = '', file: dict = None) -> dict:
+ """
+ Requests to send a message or a file to Slack.
+ :param to: A Slack user to send to.
+ :param channel: A Slack channel to send to.
+ :param group: A Slack private channel to send to.
+ :param entry: WarRoom entry to send.
+ :param ignore_add_url: Do not add a Demisto URL to the message.
+ :param thread_id: The Slack thread ID to send to.
+ :param message: A message to send.
+ :param blocks: Blocks to send with a slack message
+ :param file: A file to send.
+ :return: The Slack send response.
+ """
+
+ integration_context = demisto.getIntegrationContext()
+ conversations: list = []
+ mirrors: list = []
+ if integration_context:
+ if 'conversations' in integration_context:
+ conversations = json.loads(integration_context['conversations'])
+ if 'mirrors' in integration_context:
+ mirrors = json.loads(integration_context['mirrors'])
+
+ destinations = []
+
+ if to:
+ if isinstance(to, list):
+ to = to[0]
+ user = get_user_by_name(to)
+ if not user:
+ demisto.error('Could not find the Slack user {}'.format(to))
+ else:
+ im = CLIENT.im_open(user=user.get('id'))
+ destinations.append(im.get('channel', {}).get('id'))
+ if channel or group:
+ if not destinations:
+ destination_name = channel or group
+ conversation_filter = list(filter(lambda c: c.get('name') == destination_name, conversations))
+ if conversation_filter:
+ conversation = conversation_filter[0]
+ conversation_id = conversation.get('id')
+ else:
+ mirrored_channel_filter = list(filter(lambda m: 'incident-{}'
+ .format(m['investigation_id']) == destination_name, mirrors))
+ if mirrored_channel_filter:
+ channel_mirror = mirrored_channel_filter[0]
+ conversation_id = channel_mirror['channel_id']
+ else:
+ conversation = get_conversation_by_name(destination_name)
+ if not conversation:
+ return_error('Could not find the Slack conversation {}'.format(destination_name))
+ conversations.append(conversation)
+ set_to_latest_integration_context('conversations', conversations)
+ conversation_id = conversation.get('id')
+
+ if conversation_id:
+ destinations.append(conversation_id)
+
+ if not destinations:
+ return_error('Could not find any destination to send to.')
+
+ if file:
+ response = send_file(destinations, file, integration_context, thread_id)
+ return response
+
+ response = send_message(destinations, entry, ignore_add_url, integration_context, message,
+ thread_id, blocks)
+
+ return response
+
+
+def set_channel_topic():
+ """
+ Sets a topic for a slack channel
+ """
+
+ channel = demisto.args().get('channel')
+ topic = demisto.args().get('topic')
+
+ channel_id = ''
+
+ if not channel:
+ mirror = find_mirror_by_investigation()
+ if mirror:
+ channel_id = mirror.get('channel_id', '')
+ # We need to update the topic in the mirror
+ integration_context = demisto.getIntegrationContext()
+ mirrors = json.loads(integration_context['mirrors'])
+ mirror = mirrors.pop(mirrors.index(mirror))
+ mirror['channel_topic'] = topic
+ mirrors.append(mirror)
+ set_to_latest_integration_context('mirrors', mirrors)
+ else:
+ channel = get_conversation_by_name(channel)
+ channel_id = channel.get('id')
+
+ if not channel_id:
+ return_error('Channel not found - the Demisto app needs to be a member of the channel in order to look it up.')
+
+ CHANNEL_CLIENT.conversations_setTopic(channel=channel_id, topic=topic)
+
+ demisto.results('Topic successfully set.')
+
+
+def rename_channel():
+ """
+    Renames a Slack channel.
+ """
+
+ channel = demisto.args().get('channel')
+ new_name = demisto.args().get('name')
+
+ channel_id = ''
+
+ if not channel:
+ mirror = find_mirror_by_investigation()
+ if mirror:
+ channel_id = mirror.get('channel_id', '')
+ # We need to update the name in the mirror
+ integration_context = demisto.getIntegrationContext()
+ mirrors = json.loads(integration_context['mirrors'])
+ mirror = mirrors.pop(mirrors.index(mirror))
+ mirror['channel_name'] = new_name
+ mirrors.append(mirror)
+ set_to_latest_integration_context('mirrors', mirrors)
+ else:
+ channel = get_conversation_by_name(channel)
+ channel_id = channel.get('id')
+
+ if not channel_id:
+ return_error('Channel not found - the Demisto app needs to be a member of the channel in order to look it up.')
+
+ CHANNEL_CLIENT.conversations_rename(channel=channel_id, name=new_name)
+
+ demisto.results('Channel renamed successfully.')
+
+
+def close_channel():
+ """
+    Archives a Slack channel by name, or the mirrored investigation channel if no name is provided.
+ """
+ channel = demisto.args().get('channel')
+ channel_id = ''
+
+ if not channel:
+ mirror = find_mirror_by_investigation()
+ if mirror:
+ channel_id = mirror.get('channel_id', '')
+            # Remove the mirror from the integration context
+ integration_context = demisto.getIntegrationContext()
+ mirrors = json.loads(integration_context['mirrors'])
+ mirror = mirrors.pop(mirrors.index(mirror))
+ channel_id = mirror['channel_id']
+ # Check for other mirrors on the archived channel
+ channel_mirrors = list(filter(lambda m: channel_id == m['channel_id'], mirrors))
+ for mirror in channel_mirrors:
+ mirrors.remove(mirror)
+
+ set_to_latest_integration_context('mirrors', mirrors)
+ else:
+ channel = get_conversation_by_name(channel)
+ channel_id = channel.get('id')
+
+ if not channel_id:
+ return_error('Channel not found - the Demisto app needs to be a member of the channel in order to look it up.')
+
+ CHANNEL_CLIENT.conversations_archive(channel=channel_id)
+
+ demisto.results('Channel successfully archived.')
+
+
+def create_channel():
+ """
+ Creates a channel in Slack using the provided arguments.
+ """
+ channel_type = demisto.args().get('type', 'private')
+ channel_name = demisto.args()['name']
+ users = argToList(demisto.args().get('users', []))
+ topic = demisto.args().get('topic')
+
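+    # channels_create handles public channels and groups_create handles private ones
+    # (legacy Slack Web API methods exposed by the slackclient WebClient).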
+ if channel_type != 'private':
+ conversation = CHANNEL_CLIENT.channels_create(name=channel_name).get('channel', {})
+ else:
+ conversation = CHANNEL_CLIENT.groups_create(name=channel_name).get('group', {})
+
+ if users:
+ slack_users = search_slack_users(users)
+ invite_users_to_conversation(conversation.get('id'), list(map(lambda u: u.get('id'), slack_users)))
+ if topic:
+ CHANNEL_CLIENT.conversations_setTopic(channel=conversation.get('id'), topic=topic)
+
+ demisto.results('Successfully created the channel {}.'.format(conversation.get('name')))
+
+
+def invite_to_channel():
+ channel = demisto.args().get('channel')
+ users = argToList(demisto.args().get('users', []))
+
+ channel_id = ''
+
+ if not channel:
+ mirror = find_mirror_by_investigation()
+ if mirror:
+ channel_id = mirror['channel_id']
+ else:
+ channel = get_conversation_by_name(channel)
+ channel_id = channel.get('id')
+
+ if not channel_id:
+ return_error('Channel not found - the Demisto app needs to be a member of the channel in order to look it up.')
+
+ slack_users = search_slack_users(users)
+ if slack_users:
+ invite_users_to_conversation(channel_id, list(map(lambda u: u.get('id'), slack_users)))
+ else:
+ return_error('No users found')
+
+ demisto.results('Successfully invited users to the channel.')
+
+
+def kick_from_channel():
+ channel = demisto.args().get('channel')
+ users = argToList(demisto.args().get('users', []))
+
+ channel_id = ''
+
+ if not channel:
+ mirror = find_mirror_by_investigation()
+ if mirror:
+ channel_id = mirror['channel_id']
+ else:
+ channel = get_conversation_by_name(channel)
+ channel_id = channel.get('id')
+
+ if not channel_id:
+ return_error('Channel not found - the Demisto app needs to be a member of the channel in order to look it up.')
+
+ slack_users = search_slack_users(users)
+ if slack_users:
+ kick_users_from_conversation(channel_id, list(map(lambda u: u.get('id'), slack_users)))
+ else:
+ return_error('No users found')
+
+ demisto.results('Successfully kicked users from the channel.')
+
+
+def get_user():
+ user = demisto.args()['user']
+
+ slack_user = get_user_by_name(user)
+ if not slack_user:
+ return_error('User not found')
+
+ profile = slack_user.get('profile', {})
+ result_user = {
+ 'ID': slack_user.get('id'),
+ 'Username': slack_user.get('name'),
+ 'Name': profile.get('real_name_normalized') or profile.get('real_name'),
+ 'DisplayName': profile.get('display_name'),
+ 'Email': profile.get('email')
+ }
+
+ hr = tableToMarkdown('Details for Slack user: ' + user, result_user,
+ headers=['ID', 'Username', 'Name', 'DisplayName', 'Email'], headerTransform=pascalToSpace,
+ removeNull=True)
+ context = {
+ 'Slack.User(val.ID === obj.ID)': createContext(result_user, removeNull=True)
+ }
+
+ return_outputs(hr, context, slack_user)
+
+
+def long_running_main():
+ """
+    Starts the long-running thread.
+ """
+ asyncio.run(start_listening())
+
+
+def init_globals():
+ """
+ Initializes global variables according to the integration parameters
+ """
+ global BOT_TOKEN, ACCESS_TOKEN, PROXY, DEDICATED_CHANNEL, CLIENT, CHANNEL_CLIENT
+ global SEVERITY_THRESHOLD, ALLOW_INCIDENTS, NOTIFY_INCIDENTS, INCIDENT_TYPE, VERIFY_CERT
+ global BOT_NAME, BOT_ICON_URL
+
+ BOT_TOKEN = demisto.params().get('bot_token')
+ ACCESS_TOKEN = demisto.params().get('access_token')
+ PROXY = handle_proxy().get('https')
+ DEDICATED_CHANNEL = demisto.params().get('incidentNotificationChannel')
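+    # Two separate clients: the bot token is used for messaging, while the access
+    # (user) token is used for channel management.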
+ CLIENT = slack.WebClient(token=BOT_TOKEN, proxy=PROXY)
+ CHANNEL_CLIENT = slack.WebClient(token=ACCESS_TOKEN, proxy=PROXY)
+ SEVERITY_THRESHOLD = SEVERITY_DICT.get(demisto.params().get('min_severity', 'Low'), 1)
+ ALLOW_INCIDENTS = demisto.params().get('allow_incidents', False)
+ NOTIFY_INCIDENTS = demisto.params().get('notify_incidents', True)
+ INCIDENT_TYPE = demisto.params().get('incidentType')
+ VERIFY_CERT = not demisto.params().get('unsecure', False)
+ BOT_NAME = demisto.params().get('bot_name')
+ BOT_ICON_URL = demisto.params().get('bot_icon')
+
+
+def main():
+ """
+ Main
+ """
+
+ init_globals()
+
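+    # Deprecated commands (e.g. slack-send) are kept as aliases that dispatch to the same handlers.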
+ commands = {
+ 'test-module': test_module,
+ 'long-running-execution': long_running_main,
+ 'slack-mirror-investigation': mirror_investigation,
+ 'mirror-investigation': mirror_investigation,
+ 'slack-send': slack_send,
+ 'send-notification': slack_send,
+ 'slack-send-file': slack_send_file,
+ 'slack-set-channel-topic': set_channel_topic,
+ 'close-channel': close_channel,
+ 'slack-close-channel': close_channel,
+ 'slack-create-channel': create_channel,
+ 'slack-invite-to-channel': invite_to_channel,
+ 'slack-kick-from-channel': kick_from_channel,
+ 'slack-rename-channel': rename_channel,
+ 'slack-get-user-details': get_user,
+ }
+
+ try:
+ command_func = commands[demisto.command()]
+ command_func()
+ except Exception as e:
+ LOG(e)
+ return_error(str(e))
+
+
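+# Demisto executes integration code with exec, so __name__ may be '__builtin__' (Python 2)
+# or 'builtins' (Python 3) rather than '__main__'.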
+if __name__ in ['__main__', '__builtin__', 'builtins']:
+ main()
diff --git a/Integrations/Slack/Slack.yml b/Integrations/Slack/Slack.yml
new file mode 100644
index 000000000000..0bd7a17e7a1e
--- /dev/null
+++ b/Integrations/Slack/Slack.yml
@@ -0,0 +1,517 @@
+category: Messaging
+commonfields:
+ id: SlackV2
+ version: -1
+configuration:
+- display: Slack API access token
+ name: access_token
+ required: true
+ type: 4
+- display: Slack API bot token
+ name: bot_token
+ required: true
+ type: 4
+- display: Dedicated Slack channel to receive notifications
+ name: incidentNotificationChannel
+ required: false
+ type: 0
+- defaultvalue: 'true'
+ display: Send notifications about incidents to the dedicated channel
+ name: notify_incidents
+ required: false
+ type: 8
+- defaultvalue: Low
+  display: Minimum incident severity by which to send messages to Slack
+ name: min_severity
+ options:
+ - Unknown
+ - Low
+ - Medium
+ - High
+ - Critical
+ required: false
+ type: 15
+- defaultvalue: Unclassified
+ display: Type of incidents created in Slack
+ name: incidentType
+ required: false
+ type: 13
+- display: Allow external users to create incidents via DM
+ name: allow_incidents
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (not secure)
+ name: unsecure
+ required: false
+ type: 8
+- defaultvalue: 'true'
+  display: Long-running instance. Required for investigation mirroring and direct
+ messages.
+ name: longRunning
+ required: false
+ type: 8
+- display: Bot display name in Slack (Demisto Integration by default)
+ name: bot_name
+ required: false
+ type: 0
+- display: Bot icon in Slack - URL Image (Demisto icon by default)
+ name: bot_icon
+ required: false
+ type: 0
+description: Send messages and notifications to your Slack team.
+display: Slack v2
+name: SlackV2
+script:
+ commands:
+ - arguments:
+ - auto: PREDEFINED
+ default: true
+ defaultValue: all
+ description: The mirroring type. Can be "all", which mirrors everything, "chat",
+ which mirrors only chats (not commands), or "none", which stops all mirroring.
+ isArray: false
+ name: type
+ predefined:
+ - all
+ - chat
+ - none
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: Whether the channel is auto-closed when an investigation is closed.
+ Can be "true" or "false". Default is "true".
+ isArray: false
+ name: autoclose
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: both
+ description: The mirroring direction. Can be "FromDemisto", "ToDemisto", or
+ "Both". Default is "Both".
+ isArray: false
+ name: direction
+ predefined:
+ - Both
+ - FromDemisto
+ - ToDemisto
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: group
+ description: The channel type. Can be "channel" (public) or "group" (private).
+ isArray: false
+ name: mirrorTo
+ predefined:
+ - channel
+ - group
+ required: false
+ secret: false
+ deprecated: true
+ description: Deprecated. Use the "mirror-investigation" command instead.
+ execution: false
+ name: slack-mirror-investigation
+ - arguments:
+ - auto: PREDEFINED
+ default: true
+ defaultValue: all
+ description: The mirroring type. Can be "all", which mirrors everything, "chat",
+ which mirrors only chats (not commands), or "none", which stops all mirroring.
+ isArray: false
+ name: type
+ predefined:
+ - all
+ - chat
+ - none
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+ description: Whether the channel is auto-closed when an investigation is closed.
+ Can be "true" or "false". Default is "true".
+ isArray: false
+ name: autoclose
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: both
+ description: The mirroring direction. Can be "FromDemisto", "ToDemisto", or
+ "Both". Default value is "Both".
+ isArray: false
+ name: direction
+ predefined:
+ - Both
+ - FromDemisto
+ - ToDemisto
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: group
+ description: The channel type. Can be "channel" or "group". The default value
+ is "group".
+ isArray: false
+ name: mirrorTo
+ predefined:
+ - channel
+ - group
+ required: false
+ secret: false
+ - default: false
+ description: The name of the channel. The default is "incident-".
+ isArray: false
+ name: channelName
+ required: false
+ secret: false
+ - default: false
+ description: The topic of the channel.
+ isArray: false
+ name: channelTopic
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Whether to remove the Slack administrator (channel creator) from
+ the mirrored channel.
+ isArray: false
+ name: kickAdmin
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Mirrors the investigation between Slack and the Demisto War Room.
+ execution: false
+ name: mirror-investigation
+ - arguments:
+ - default: true
+ description: The message content.
+ isArray: false
+ name: message
+ required: false
+ secret: false
+ - default: false
+ description: The user to whom to send the message. Can be either the username
+ or email address.
+ isArray: false
+ name: to
+ required: false
+ secret: false
+ - default: false
+ description: The name of the Slack channel to which to send the message.
+ isArray: false
+ name: channel
+ required: false
+ secret: false
+ - default: false
+ description: An entry ID to send as a link.
+ isArray: false
+ name: entry
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+      description: Whether to omit adding a URL of the relevant component in Demisto
+        to the message. Can be "true" or "false". Default value is "false".
+ isArray: false
+ name: ignoreAddURL
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the thread to which to reply - can be retrieved from
+ a previous send-notification command.
+ isArray: false
+ name: threadID
+ required: false
+ secret: false
+ - default: false
+ description: A JSON string of Slack blocks to send in the message.
+ isArray: false
+ name: blocks
+ required: false
+ secret: false
+ deprecated: false
+ description: Sends a message to a user, group, or channel.
+ execution: false
+ name: send-notification
+ outputs:
+ - contextPath: Slack.Thread.ID
+ description: The Slack thread ID.
+ type: String
+ - arguments:
+ - default: true
+ description: The text content of the message.
+ isArray: false
+ name: message
+ required: false
+ secret: false
+ - default: false
+      description: Either a username or an email address of a Slack user to send to.
+ isArray: false
+ name: to
+ required: false
+ secret: false
+ - default: false
+ description: A Slack channel name to send to.
+ isArray: false
+ name: channel
+ required: false
+ secret: false
+ - default: false
+ description: A Slack group (private channel) name to send to.
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: An entry ID to send as a link.
+ isArray: false
+ name: entry
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+      description: Whether to omit adding a URL in Slack to the relevant component
+        in Demisto. Default is "false".
+ isArray: false
+ name: IgnoreAddURL
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the thread to which to reply.
+ isArray: false
+ name: threadID
+ required: false
+ secret: false
+ deprecated: true
+ description: Deprecated. Use the "send-notification" command instead.
+ execution: false
+ name: slack-send
+ - deprecated: true
+ description: Deprecated. Use the "close-channel" command instead.
+ execution: false
+ name: slack-close-channel
+ - arguments:
+ - default: false
+ description: The name of the channel to archive. If not provided, the mirrored
+ investigation channel is archived (if the channel exists).
+ isArray: false
+ name: channel
+ required: false
+ secret: false
+ deprecated: false
+ description: Archives a Slack channel.
+ execution: false
+ name: close-channel
+ - arguments:
+ - default: true
+ description: The ID of the file entry to send.
+ isArray: false
+ name: file
+ required: true
+ secret: false
+ - default: false
+ description: The user to whom to send the file. Can be the username or the email
+ address.
+ isArray: false
+ name: to
+ required: false
+ secret: false
+ - default: false
+ description: The name of the Slack group (private channel) to which to send
+ the file.
+ isArray: false
+ name: group
+ required: false
+ secret: false
+ - default: false
+ description: The name of the Slack channel to which to send the file.
+ isArray: false
+ name: channel
+ required: false
+ secret: false
+ - default: false
+ description: The ID of the thread to which to reply - can be retrieved from
+ a previous send-notification command.
+ isArray: false
+ name: threadID
+ required: false
+ secret: false
+ - default: false
+ description: A comment to add to the file.
+ isArray: false
+ name: comment
+ required: false
+ secret: false
+ deprecated: false
+    description: Sends a file to a user, channel, or group. If a destination is not
+      specified, the file is sent to the mirrored investigation channel (if the channel exists).
+ execution: false
+ name: slack-send-file
+ - arguments:
+ - default: false
+ description: The channel name. If not specified, the topic of the mirrored investigation
+ channel is set (if the channel exists).
+ isArray: false
+ name: channel
+ required: false
+ secret: false
+ - default: true
+ description: The topic for the channel.
+ isArray: false
+ name: topic
+ required: true
+ secret: false
+ deprecated: false
+ description: Sets the topic for a channel.
+ execution: false
+ name: slack-set-channel-topic
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: private
+ description: The channel type. Can be "private" or "public".
+ isArray: false
+ name: type
+ predefined:
+ - private
+ - public
+ required: false
+ secret: false
+ - default: true
+ description: The name of the channel.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+      description: 'A CSV list of usernames or email addresses to invite to the channel.
+ For example: "user1, user2...".'
+ isArray: false
+ name: users
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a channel in Slack.
+ execution: false
+ name: slack-create-channel
+ - arguments:
+ - default: true
+ description: 'A CSV list of usernames or email addresses to invite to join the
+ channel. For example: "user1, user2...".'
+ isArray: false
+ name: users
+ required: true
+ secret: false
+ - default: false
+ description: The name of the channel to which to invite the users. If the name
+ of the channel is not specified, the name of the mirrored investigation channel
+ is used (if the channel exists).
+ isArray: false
+ name: channel
+ required: false
+ secret: false
+ deprecated: false
+ description: Invites users to join a channel.
+ execution: false
+ name: slack-invite-to-channel
+ - arguments:
+ - default: true
+ description: 'A CSV list of usernames or email addresses to remove from the
+        channel. For example: "user1, user2...".'
+ isArray: false
+ name: users
+ required: true
+ secret: false
+ - default: false
+ description: The name of the channel from which to remove the users. If the
+ name of the channel is not specified, the mirrored investigation channel is
+ used (if the channel exists).
+ isArray: false
+ name: channel
+ required: false
+ secret: false
+ deprecated: false
+ description: Removes users from the specified channel.
+ execution: false
+ name: slack-kick-from-channel
+ - arguments:
+ - default: true
+ description: The new name of the channel.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The current name of the channel. If the name of the channel is
+ not specified, the mirrored investigation channel is used (if the channel
+ exists).
+ isArray: false
+ name: channel
+ required: false
+ secret: false
+ deprecated: false
+ description: Renames a channel in Slack.
+ execution: false
+ name: slack-rename-channel
+ - arguments:
+ - default: true
+ description: The Slack user (username or email).
+ isArray: false
+ name: user
+ required: true
+ secret: false
+ deprecated: false
+    description: Gets details about a specified user.
+ execution: false
+ name: slack-get-user-details
+ outputs:
+ - contextPath: Slack.User.ID
+ description: The ID of the user.
+ type: String
+ - contextPath: Slack.User.Username
+ description: The username of the user.
+ type: String
+ - contextPath: Slack.User.Name
+ description: The actual name of the user.
+ type: String
+ - contextPath: Slack.User.DisplayName
+ description: The display name of the user.
+ type: String
+ - contextPath: Slack.User.Email
+ description: The email address of the user.
+ type: String
+ dockerimage: demisto/slack:1.0.0.2218
+ isfetch: false
+ longRunning: true
+ longRunningPort: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+fromversion: 5.0.0
diff --git a/Integrations/Slack/Slack_description.md b/Integrations/Slack/Slack_description.md
new file mode 100644
index 000000000000..0c681051b4e1
--- /dev/null
+++ b/Integrations/Slack/Slack_description.md
@@ -0,0 +1,2 @@
+ To give Demisto access to Slack, the Demisto app has to be added to the relevant workspace. You can do so by clicking the following [link](https://oproxy.demisto.ninja/slack).
+ After adding the Demisto app, you will receive an Access Token and a Bot Token, which should be entered in the corresponding fields of the integration instance configuration.
\ No newline at end of file
diff --git a/Integrations/Slack/Slack_image.png b/Integrations/Slack/Slack_image.png
new file mode 100644
index 000000000000..7396594da793
Binary files /dev/null and b/Integrations/Slack/Slack_image.png differ
diff --git a/Integrations/Slack/Slack_test.py b/Integrations/Slack/Slack_test.py
new file mode 100644
index 000000000000..8ba6c7fdd681
--- /dev/null
+++ b/Integrations/Slack/Slack_test.py
@@ -0,0 +1,3199 @@
+import slack
+import pytest
+import asyncio
+import demistomock as demisto
+import json
+import datetime
+from unittest.mock import mock_open
+
+USERS = '''[{
+ "id": "U012A3CDE",
+ "team_id": "T012AB3C4",
+ "name": "spengler",
+ "deleted": false,
+ "color": "9f69e7",
+ "real_name": "spengler",
+ "tz": "America/Los_Angeles",
+ "tz_label": "Pacific Daylight Time",
+ "tz_offset": -25200,
+ "profile": {
+ "avatar_hash": "ge3b51ca72de",
+ "status_text": "Print is dead",
+ "status_emoji": ":books:",
+ "real_name": "Egon Spengler",
+ "display_name": "spengler",
+ "real_name_normalized": "Egon Spengler",
+ "display_name_normalized": "spengler",
+ "email": "spengler@ghostbusters.example.com",
+ "team": "T012AB3C4"
+ },
+ "is_admin": true,
+ "is_owner": false,
+ "is_primary_owner": false,
+ "is_restricted": false,
+ "is_ultra_restricted": false,
+ "is_bot": false,
+ "updated": 1502138686,
+ "is_app_user": false,
+ "has_2fa": false
+},
+{
+ "id": "U07QCRPA4",
+ "team_id": "T0G9PQBBK",
+ "name": "glinda",
+ "deleted": false,
+ "color": "9f69e7",
+ "real_name": "Glinda Southgood",
+ "tz": "America/Los_Angeles",
+ "tz_label": "Pacific Daylight Time",
+ "tz_offset": -25200,
+ "profile": {
+ "avatar_hash": "8fbdd10b41c6",
+ "first_name": "Glinda",
+ "last_name": "Southgood",
+ "title": "Glinda the Good",
+ "phone": "",
+ "skype": "",
+ "real_name": "Glinda Southgood",
+ "real_name_normalized": "Glinda Southgood",
+ "display_name": "Glinda the Fairly Good",
+ "display_name_normalized": "Glinda the Fairly Good",
+ "email": "Glenda@south.oz.coven"
+ },
+ "is_admin": true,
+ "is_owner": false,
+ "is_primary_owner": false,
+ "is_restricted": false,
+ "is_ultra_restricted": false,
+ "is_bot": false,
+ "updated": 1480527098,
+ "has_2fa": false
+}]'''
+
+CONVERSATIONS = '''[{
+ "id": "C012AB3CD",
+ "name": "general",
+ "is_channel": true,
+ "is_group": false,
+ "is_im": false,
+ "created": 1449252889,
+ "creator": "U012A3CDE",
+ "is_archived": false,
+ "is_general": true,
+ "unlinked": 0,
+ "name_normalized": "general",
+ "is_shared": false,
+ "is_ext_shared": false,
+ "is_org_shared": false,
+ "pending_shared": [],
+ "is_pending_ext_shared": false,
+ "is_member": true,
+ "is_private": false,
+ "is_mpim": false,
+ "topic": {
+ "value": "Company-wide announcements and work-based matters",
+ "creator": "",
+ "last_set": 0
+ },
+ "purpose": {
+ "value": "This channel is for team-wide communication and announcements. All team members are in this channel.",
+ "creator": "",
+ "last_set": 0
+ },
+ "previous_names": [],
+ "num_members": 4
+},
+{
+ "id": "C061EG9T2",
+ "name": "random",
+ "is_channel": true,
+ "is_group": false,
+ "is_im": false,
+ "created": 1449252889,
+ "creator": "U061F7AUR",
+ "is_archived": false,
+ "is_general": false,
+ "unlinked": 0,
+ "name_normalized": "random",
+ "is_shared": false,
+ "is_ext_shared": false,
+ "is_org_shared": false,
+ "pending_shared": [],
+ "is_pending_ext_shared": false,
+ "is_member": true,
+ "is_private": false,
+ "is_mpim": false,
+ "topic": {
+ "value": "Non-work banter and water cooler conversation",
+ "creator": "",
+ "last_set": 0
+ },
+ "purpose": {
+ "value": "A place for non-work-related flimflam.",
+ "creator": "",
+ "last_set": 0
+ },
+ "previous_names": [],
+ "num_members": 4
+}]'''
+
+
+BOT = '''{
+ "ok": true,
+ "url": "https://subarachnoid.slack.com/",
+ "team": "Subarachnoid Workspace",
+ "user": "grace",
+ "team_id": "T12345678",
+ "user_id": "W12345678"
+}'''
+
+MIRRORS = '''
+ [{
+ "channel_id":"GKQ86DVPH",
+ "channel_name": "incident-681",
+ "channel_topic": "incident-681",
+ "investigation_id":"681",
+ "mirror_type":"all",
+ "mirror_direction":"both",
+ "mirror_to":"group",
+ "auto_close":true,
+ "mirrored":true
+ },
+ {
+ "channel_id":"GKB19PA3V",
+ "channel_name": "group2",
+ "channel_topic": "cooltopic",
+ "investigation_id":"684",
+ "mirror_type":"all",
+ "mirror_direction":"both",
+ "mirror_to":"group",
+ "auto_close":true,
+ "mirrored":true
+ },
+ {
+ "channel_id":"GKB19PA3V",
+ "channel_name": "group2",
+ "channel_topic": "cooltopic",
+ "investigation_id":"692",
+ "mirror_type":"all",
+ "mirror_direction":"both",
+ "mirror_to":"group",
+ "auto_close":true,
+ "mirrored":true
+ },
+ {
+ "channel_id":"GKNEJU4P9",
+ "channel_name": "group3",
+ "channel_topic": "incident-713",
+ "investigation_id":"713",
+ "mirror_type":"all",
+ "mirror_direction":"both",
+ "mirror_to":"group",
+ "auto_close":true,
+ "mirrored":true
+ },
+ {
+ "channel_id":"GL8GHC0LV",
+ "channel_name": "group5",
+ "channel_topic": "incident-734",
+ "investigation_id":"734",
+ "mirror_type":"all",
+ "mirror_direction":"both",
+ "mirror_to":"group",
+ "auto_close":true,
+ "mirrored":true
+ }]
+'''
+
+BLOCK_JSON = [{
+ 'type': 'section',
+ 'text': {
+ 'type': 'mrkdwn',
+ 'text': 'text'
+ }
+}, {
+ 'type': 'actions',
+ 'elements': [{
+ 'type': 'button',
+ 'text': {
+ 'type': 'plain_text',
+ 'emoji': True,
+ 'text': 'yes'
+ },
+ 'style': 'primary',
+ 'value': '{\"entitlement\": \"e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43\", \"reply\": \"Thanks bro\"}',
+ }, {
+ 'type': 'button',
+ 'text': {
+ 'type': 'plain_text',
+ 'emoji': True,
+ 'text': 'no'
+ },
+ 'style': 'danger',
+ 'value': '{\"entitlement\": \"e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43\", \"reply\": \"Thanks bro\"}',
+ }]}]
+
+PAYLOAD_JSON = r'''
+ {
+ "type":"block_actions",
+ "team":{
+ "id":"T9XJ4RGNQ",
+ "domain":"dombo60"
+ },
+ "user":{
+ "id":"U012A3CDE",
+ "username":"spengler",
+ "name":"spengler",
+ "team_id":"T9XJ4RGNQ"
+ },
+ "api_app_id":"AMU4M2QL8",
+ "token":"GBGG7mn61zg0a62MT9blXJnn",
+ "container":{
+ "type":"message",
+ "message_ts":"1567945126.000100",
+ "channel_id":"DMGSNFCSX",
+ "is_ephemeral":false
+ },
+ "trigger_id":"754598374743.337616866772.8c4b2dc28ca7fd4c8941247c1a01c7dd",
+ "channel":{
+ "id":"DMGSNFCSX",
+ "name":"directmessage"
+ },
+ "message":{
+ "type":"message",
+ "subtype":"bot_message",
+ "text":"This content can't be displayed.",
+ "ts":"1567945126.000100",
+ "username":"BlockTest",
+ "bot_id":"BMWFS6KSA",
+ "blocks":[
+ {
+ "type":"section",
+ "block_id":"F9iYK",
+ "text":{
+ "type":"mrkdwn",
+ "text":"Hopa this is a test. ",
+ "verbatim":false
+ },
+ "accessory":{
+ "type":"button",
+ "text":{
+ "type":"plain_text",
+ "text":"Eyy",
+ "emoji":true
+ },
+ "value":"{\"entitlement\": \"e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43\", \"reply\": \"Thanks bro\"}",
+ "action_id":"W9J"
+ }
+ }
+ ]
+ },
+ "response_url":"hooks.slack.com",
+ "actions":[
+ {
+ "action_id":"W9J",
+ "block_id":"F9iYK",
+ "text":{
+ "type":"plain_text",
+ "text":"Eyy",
+ "emoji":true
+ },
+ "value":"{\"entitlement\": \"e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43\", \"reply\": \"Thanks bro\"}",
+ "type":"button",
+ "action_ts":"1567949681.728426"
+ }
+ ]
+ }
+'''
+
+
+def get_integration_context():
+ return INTEGRATION_CONTEXT
+
+
+def set_integration_context(integration_context):
+ global INTEGRATION_CONTEXT
+ INTEGRATION_CONTEXT = integration_context
+
+
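+# Patch target for Slack.return_error, so tests can assert on reported errors instead of exiting.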
+RETURN_ERROR_TARGET = 'Slack.return_error'
+
+
+@pytest.fixture(autouse=True)
+def setup():
+ from Slack import init_globals
+
+ set_integration_context({
+ 'mirrors': MIRRORS,
+ 'users': USERS,
+ 'conversations': CONVERSATIONS,
+ 'bot_id': 'W12345678'
+ })
+
+ init_globals()
+
+
+@pytest.mark.asyncio
+async def test_get_slack_name(mocker):
+ from Slack import get_slack_name
+
+ # Set
+
+ async def users_info(user):
+ if user != 'alexios':
+ return {'user': json.loads(USERS)[0]}
+ return None
+
+ async def conversations_info(channel):
+ if channel != 'lulz':
+ return {'channel': json.loads(CONVERSATIONS)[0]}
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext')
+ mocker.patch.object(slack.WebClient, 'users_info', side_effect=users_info)
+ mocker.patch.object(slack.WebClient, 'conversations_info', side_effect=conversations_info)
+
+ # Assert
+
+ # User in integration context
+ user_id = 'U012A3CDE'
+ name = await get_slack_name(user_id, slack.WebClient)
+ assert name == 'spengler'
+ assert slack.WebClient.users_info.call_count == 0
+
+ # User not in integration context
+ unknown_user = 'USASSON'
+ name = await get_slack_name(unknown_user, slack.WebClient)
+ assert name == 'spengler'
+ assert slack.WebClient.users_info.call_count == 1
+
+ # User does not exist
+ nonexisting_user = 'alexios'
+ name = await get_slack_name(nonexisting_user, slack.WebClient)
+ assert name == ''
+ assert slack.WebClient.users_info.call_count == 1
+
+ # Channel in integration context
+ channel_id = 'C012AB3CD'
+ name = await get_slack_name(channel_id, slack.WebClient)
+ assert name == 'general'
+ assert slack.WebClient.conversations_info.call_count == 0
+
+ # Channel not in integration context
+ unknown_channel = 'CSASSON'
+ name = await get_slack_name(unknown_channel, slack.WebClient)
+ assert name == 'general'
+ assert slack.WebClient.users_info.call_count == 1
+
+ # Channel doesn't exist
+ nonexisting_channel = 'lulz'
+ name = await get_slack_name(nonexisting_channel, slack.WebClient)
+ assert name == ''
+ assert slack.WebClient.users_info.call_count == 1
+
+
+@pytest.mark.asyncio
+async def test_clean_message(mocker):
+ from Slack import clean_message
+
+ # Set
+
+ async def users_info(user):
+ return {'user': json.loads(USERS)[0]}
+
+ async def conversations_info(channel):
+ return {'channel': json.loads(CONVERSATIONS)[0]}
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(slack.WebClient, 'users_info', side_effect=users_info)
+ mocker.patch.object(slack.WebClient, 'conversations_info', side_effect=conversations_info)
+
+ user_message = 'Hello <@U012A3CDE>!'
+ channel_message = 'Check <#C012AB3CD>'
+    link_message = 'Go to <https://www.google.com/lulz>'
+
+ # Arrange
+
+ clean_user_message = await clean_message(user_message, slack.WebClient)
+ clean_channel_message = await clean_message(channel_message, slack.WebClient)
+ clean_link_message = await clean_message(link_message, slack.WebClient)
+
+ # Assert
+
+ assert clean_user_message == 'Hello spengler!'
+ assert clean_channel_message == 'Check general'
+ assert clean_link_message == 'Go to https://www.google.com/lulz'
+
+
+def test_get_user_by_name(mocker):
+ from Slack import get_user_by_name
+ # Set
+
+ def users_list(**kwargs):
+ users = {'members': json.loads(USERS)}
+ new_user = {
+ 'name': 'perikles',
+ 'profile': {
+ 'email': 'perikles@acropoli.com',
+ },
+ 'id': 'U012B3CUI'
+ }
+
+ users['members'].append(new_user)
+ return users
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+
+ # Assert
+
+ # User name exists in integration context
+ username = 'spengler'
+ user = get_user_by_name(username)
+ assert user['id'] == 'U012A3CDE'
+ assert slack.WebClient.users_list.call_count == 0
+
+ # User email exists in integration context
+ email = 'spengler@ghostbusters.example.com'
+ user = get_user_by_name(email)
+ assert user['id'] == 'U012A3CDE'
+ assert slack.WebClient.users_list.call_count == 0
+
+ # User name doesn't exist in integration context
+ username = 'perikles'
+ user = get_user_by_name(username)
+ assert user['id'] == 'U012B3CUI'
+ assert slack.WebClient.users_list.call_count == 1
+
+ set_integration_context({
+ 'mirrors': MIRRORS,
+ 'users': USERS,
+ 'conversations': CONVERSATIONS,
+ 'bot_id': 'W12345678'
+ })
+
+ # User email doesn't exist in integration context
+ email = 'perikles@acropoli.com'
+ user = get_user_by_name(email)
+ assert user['id'] == 'U012B3CUI'
+ assert slack.WebClient.users_list.call_count == 2
+
+ # User doesn't exist
+ username = 'alexios'
+ user = get_user_by_name(username)
+ assert user == {}
+ assert slack.WebClient.users_list.call_count == 3
+
+
+def test_get_user_by_name_paging(mocker):
+ from Slack import get_user_by_name
+ # Set
+
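+    # The first call (limit only) returns a page with a next_cursor; the second call
+    # (limit + cursor) returns the final page.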
+ def users_list(**kwargs):
+ if len(kwargs) == 1:
+ return {'members': json.loads(USERS), 'response_metadata': {
+ 'next_cursor': 'dGVhbTpDQ0M3UENUTks='
+ }}
+ else:
+ return {'members': [{
+ 'id': 'U248918AB',
+ 'name': 'alexios'
+ }], 'response_metadata': {
+ 'next_cursor': ''
+ }}
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+
+ # Arrange
+ user = get_user_by_name('alexios')
+ args = slack.WebClient.users_list.call_args_list
+ first_args = args[0][1]
+ second_args = args[1][1]
+
+ # Assert
+ assert len(first_args) == 1
+ assert first_args['limit'] == 200
+ assert len(second_args) == 2
+ assert second_args['cursor'] == 'dGVhbTpDQ0M3UENUTks='
+ assert user['id'] == 'U248918AB'
+ assert slack.WebClient.users_list.call_count == 2
+
+
+def test_mirror_investigation_new_mirror(mocker):
+ from Slack import mirror_investigation
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ mocker.patch.object(demisto, 'args', return_value={})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '999', 'users': ['spengler', 'alexios']})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'demistoUrls', return_value={'server': 'https://www.eizelulz.com:8443'})
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'channels_create', return_value={'channel': {
+ 'id': 'new_channel', 'name': 'incident-999'
+ }})
+ mocker.patch.object(slack.WebClient, 'groups_create', return_value={'group': {
+ 'id': 'new_group', 'name': 'incident-999'
+ }})
+ mocker.patch.object(slack.WebClient, 'conversations_invite')
+ mocker.patch.object(slack.WebClient, 'conversations_setTopic')
+ mocker.patch.object(slack.WebClient, 'chat_postMessage')
+
+ new_mirror = {
+ 'channel_id': 'new_group',
+ 'channel_name': 'incident-999',
+ 'channel_topic': 'incident-999',
+ 'investigation_id': '999',
+ 'mirror_type': 'all',
+ 'mirror_direction': 'both',
+ 'mirror_to': 'group',
+ 'auto_close': True,
+ 'mirrored': False
+ }
+ # Arrange
+
+ mirror_investigation()
+ error_results = demisto.results.call_args_list[0][0]
+ success_results = demisto.results.call_args_list[1][0]
+ message_args = slack.WebClient.chat_postMessage.call_args[1]
+
+ new_context = demisto.setIntegrationContext.call_args[0][0]
+ new_mirrors = json.loads(new_context['mirrors'])
+ new_conversations = json.loads(new_context['conversations'])
+ our_conversation_filter = list(filter(lambda c: c['id'] == 'new_group', new_conversations))
+ our_conversation = our_conversation_filter[0]
+ our_mirror_filter = list(filter(lambda m: '999' == m['investigation_id'], new_mirrors))
+ our_mirror = our_mirror_filter[0]
+
+ # Assert
+
+ assert slack.WebClient.groups_create.call_count == 1
+ assert slack.WebClient.users_list.call_count == 1
+ assert slack.WebClient.conversations_invite.call_count == 2
+ assert slack.WebClient.conversations_setTopic.call_count == 1
+ assert slack.WebClient.chat_postMessage.call_count == 1
+
+ assert error_results[0]['Contents'] == 'User alexios not found in Slack'
+ assert success_results[0] == 'Investigation mirrored successfully, channel: incident-999'
+ assert message_args['channel'] == 'new_group'
+ assert message_args['text'] == 'This channel was created to mirror incident 999.' \
+ ' \n View it on: https://www.eizelulz.com:8443#/WarRoom/999'
+
+ assert len(our_conversation_filter) == 1
+ assert len(our_mirror_filter) == 1
+ assert our_conversation == {'id': 'new_group', 'name': 'incident-999'}
+ assert our_mirror == new_mirror
+
+
+def test_mirror_investigation_new_mirror_with_name(mocker):
+ from Slack import mirror_investigation
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ mocker.patch.object(demisto, 'args', return_value={'channelName': 'coolname'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '999', 'users': ['spengler', 'alexios']})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'demistoUrls', return_value={'server': 'https://www.eizelulz.com:8443'})
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'channels_create', return_value={'channel': {
+ 'id': 'new_channel', 'name': 'coolname'
+ }})
+ mocker.patch.object(slack.WebClient, 'groups_create', return_value={'group': {
+ 'id': 'new_group', 'name': 'coolname'
+ }})
+ mocker.patch.object(slack.WebClient, 'conversations_invite')
+ mocker.patch.object(slack.WebClient, 'conversations_setTopic')
+ mocker.patch.object(slack.WebClient, 'chat_postMessage')
+
+ new_mirror = {
+ 'channel_id': 'new_group',
+ 'channel_name': 'coolname',
+ 'channel_topic': 'incident-999',
+ 'investigation_id': '999',
+ 'mirror_type': 'all',
+ 'mirror_direction': 'both',
+ 'mirror_to': 'group',
+ 'auto_close': True,
+ 'mirrored': False
+ }
+ # Arrange
+
+ mirror_investigation()
+ error_results = demisto.results.call_args_list[0][0]
+ success_results = demisto.results.call_args_list[1][0]
+ message_args = slack.WebClient.chat_postMessage.call_args[1]
+
+ new_context = demisto.setIntegrationContext.call_args[0][0]
+ new_mirrors = json.loads(new_context['mirrors'])
+ new_conversations = json.loads(new_context['conversations'])
+ our_conversation_filter = list(filter(lambda c: c['id'] == 'new_group', new_conversations))
+ our_conversation = our_conversation_filter[0]
+ our_mirror_filter = list(filter(lambda m: '999' == m['investigation_id'], new_mirrors))
+ our_mirror = our_mirror_filter[0]
+
+ # Assert
+
+ assert slack.WebClient.groups_create.call_count == 1
+ assert slack.WebClient.users_list.call_count == 1
+ assert slack.WebClient.conversations_invite.call_count == 2
+ assert slack.WebClient.conversations_setTopic.call_count == 1
+ assert slack.WebClient.chat_postMessage.call_count == 1
+
+ assert error_results[0]['Contents'] == 'User alexios not found in Slack'
+ assert success_results[0] == 'Investigation mirrored successfully, channel: coolname'
+ assert message_args['channel'] == 'new_group'
+ assert message_args['text'] == 'This channel was created to mirror incident 999.' \
+ ' \n View it on: https://www.eizelulz.com:8443#/WarRoom/999'
+
+ assert len(our_conversation_filter) == 1
+ assert len(our_mirror_filter) == 1
+ assert our_conversation == {'id': 'new_group', 'name': 'coolname'}
+ assert our_mirror == new_mirror
+
+
+def test_mirror_investigation_new_mirror_with_topic(mocker):
+ from Slack import mirror_investigation
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ mocker.patch.object(demisto, 'args', return_value={'channelName': 'coolname', 'channelTopic': 'cooltopic'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '999', 'users': ['spengler', 'alexios']})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'demistoUrls', return_value={'server': 'https://www.eizelulz.com:8443'})
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'channels_create', return_value={'channel': {
+ 'id': 'new_channel', 'name': 'coolname'
+ }})
+ mocker.patch.object(slack.WebClient, 'groups_create', return_value={'group': {
+ 'id': 'new_group', 'name': 'coolname'
+ }})
+ mocker.patch.object(slack.WebClient, 'conversations_invite')
+ mocker.patch.object(slack.WebClient, 'conversations_setTopic')
+ mocker.patch.object(slack.WebClient, 'chat_postMessage')
+
+ new_mirror = {
+ 'channel_id': 'new_group',
+ 'channel_name': 'coolname',
+ 'channel_topic': 'cooltopic',
+ 'investigation_id': '999',
+ 'mirror_type': 'all',
+ 'mirror_direction': 'both',
+ 'mirror_to': 'group',
+ 'auto_close': True,
+ 'mirrored': False
+ }
+ # Arrange
+
+ mirror_investigation()
+ topic_args = slack.WebClient.conversations_setTopic.call_args[1]
+ success_results = demisto.results.call_args_list[1][0]
+ error_results = demisto.results.call_args_list[0][0]
+ new_context = demisto.setIntegrationContext.call_args[0][0]
+ new_mirrors = json.loads(new_context['mirrors'])
+ new_conversations = json.loads(new_context['conversations'])
+ our_conversation_filter = list(filter(lambda c: c['id'] == 'new_group', new_conversations))
+ our_conversation = our_conversation_filter[0]
+ our_mirror_filter = list(filter(lambda m: '999' == m['investigation_id'], new_mirrors))
+ our_mirror = our_mirror_filter[0]
+ message_args = slack.WebClient.chat_postMessage.call_args[1]
+
+ # Assert
+
+ assert slack.WebClient.groups_create.call_count == 1
+ assert slack.WebClient.users_list.call_count == 1
+ assert slack.WebClient.conversations_invite.call_count == 2
+ assert slack.WebClient.conversations_setTopic.call_count == 1
+ assert slack.WebClient.chat_postMessage.call_count == 1
+
+ assert error_results[0]['Contents'] == 'User alexios not found in Slack'
+ assert success_results[0] == 'Investigation mirrored successfully, channel: coolname'
+ assert message_args['channel'] == 'new_group'
+ assert message_args['text'] == 'This channel was created to mirror incident 999.' \
+ ' \n View it on: https://www.eizelulz.com:8443#/WarRoom/999'
+
+ assert topic_args['channel'] == 'new_group'
+ assert topic_args['topic'] == 'cooltopic'
+ assert len(our_conversation_filter) == 1
+ assert len(our_mirror_filter) == 1
+ assert our_conversation == {'id': 'new_group', 'name': 'coolname'}
+ assert our_mirror == new_mirror
+
+
+def test_mirror_investigation_existing_mirror_error_type(mocker):
+ from Slack import mirror_investigation
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ mocker.patch.object(demisto, 'args', return_value={'type': 'chat', 'autoclose': 'false',
+ 'direction': 'FromDemisto', 'mirrorTo': 'channel'})
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET, side_effect=InterruptedError())
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681', 'users': ['spengler']})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'channels_create')
+ mocker.patch.object(slack.WebClient, 'groups_create')
+ mocker.patch.object(slack.WebClient, 'conversations_invite')
+ mocker.patch.object(slack.WebClient, 'conversations_setTopic')
+
+ # Arrange
+ with pytest.raises(InterruptedError):
+ mirror_investigation()
+
+ err_msg = return_error_mock.call_args[0][0]
+
+ # Assert
+ assert slack.WebClient.conversations_setTopic.call_count == 0
+ assert slack.WebClient.groups_create.call_count == 0
+ assert slack.WebClient.channels_create.call_count == 0
+ assert slack.WebClient.users_list.call_count == 0
+
+ assert return_error_mock.call_count == 1
+ assert err_msg == 'Cannot change the Slack channel type from Demisto.'
+
+
+def test_mirror_investigation_existing_mirror_error_name(mocker):
+ from Slack import mirror_investigation
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ mocker.patch.object(demisto, 'args', return_value={'channelName': 'eyy'})
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET, side_effect=InterruptedError())
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681', 'users': ['spengler']})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'channels_create')
+ mocker.patch.object(slack.WebClient, 'groups_create')
+ mocker.patch.object(slack.WebClient, 'conversations_invite')
+
+ # Arrange
+
+ with pytest.raises(InterruptedError):
+ mirror_investigation()
+
+ err_msg = return_error_mock.call_args[0][0]
+
+ # Assert
+
+ assert slack.WebClient.groups_create.call_count == 0
+ assert slack.WebClient.channels_create.call_count == 0
+ assert slack.WebClient.users_list.call_count == 0
+
+ assert return_error_mock.call_count == 1
+ assert err_msg == 'Cannot change the Slack channel name.'
+
+
+def test_mirror_investigation_existing_investigation(mocker):
+ from Slack import mirror_investigation
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ mocker.patch.object(demisto, 'args', return_value={'type': 'chat', 'autoclose': 'false',
+ 'direction': 'FromDemisto', 'mirrorTo': 'group'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681', 'users': ['spengler']})
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'channels_create')
+ mocker.patch.object(slack.WebClient, 'groups_create')
+ mocker.patch.object(slack.WebClient, 'conversations_invite')
+ mocker.patch.object(slack.WebClient, 'conversations_setTopic')
+
+ new_mirror = {
+ 'channel_id': 'GKQ86DVPH',
+ 'investigation_id': '681',
+ 'channel_name': 'incident-681',
+ 'channel_topic': 'incident-681',
+ 'mirror_type': 'chat',
+ 'mirror_direction': 'FromDemisto',
+ 'mirror_to': 'group',
+ 'auto_close': False,
+ 'mirrored': False
+ }
+ # Arrange
+
+ mirror_investigation()
+
+ # Assert
+
+ assert slack.WebClient.groups_create.call_count == 0
+ assert slack.WebClient.channels_create.call_count == 0
+ assert slack.WebClient.users_list.call_count == 0
+ assert slack.WebClient.conversations_invite.call_count == 2
+ assert slack.WebClient.conversations_setTopic.call_count == 0
+
+ success_results = demisto.results.call_args_list[0][0]
+ assert success_results[0] == 'Investigation mirrored successfully, channel: incident-681'
+
+ new_context = demisto.setIntegrationContext.call_args[0][0]
+ new_mirrors = json.loads(new_context['mirrors'])
+ our_mirror_filter = list(filter(lambda m: '681' == m['investigation_id'], new_mirrors))
+ our_mirror = our_mirror_filter[0]
+
+ assert len(our_mirror_filter) == 1
+ assert our_mirror == new_mirror
+
+
+def test_mirror_investigation_existing_channel(mocker):
+ from Slack import mirror_investigation
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ mocker.patch.object(demisto, 'args', return_value={'channelName': 'group3', 'type': 'chat', 'autoclose': 'false',
+ 'direction': 'FromDemisto', 'mirrorTo': 'group'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '999', 'users': ['spengler']})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'channels_create')
+ mocker.patch.object(slack.WebClient, 'groups_create')
+ mocker.patch.object(slack.WebClient, 'conversations_invite')
+ mocker.patch.object(slack.WebClient, 'conversations_setTopic')
+
+ new_mirror = {
+ 'channel_id': 'GKNEJU4P9',
+ 'channel_name': 'group3',
+ 'investigation_id': '999',
+ 'channel_topic': 'incident-713, incident-999',
+ 'mirror_type': 'chat',
+ 'mirror_direction': 'FromDemisto',
+ 'mirror_to': 'group',
+ 'auto_close': False,
+ 'mirrored': False
+ }
+ # Arrange
+
+ mirror_investigation()
+
+ # Assert
+
+ assert slack.WebClient.groups_create.call_count == 0
+ assert slack.WebClient.channels_create.call_count == 0
+ assert slack.WebClient.users_list.call_count == 0
+ assert slack.WebClient.conversations_invite.call_count == 2
+ assert slack.WebClient.conversations_setTopic.call_count == 1
+
+ success_results = demisto.results.call_args_list[0][0]
+ assert success_results[0] == 'Investigation mirrored successfully, channel: group3'
+
+ new_context = demisto.setIntegrationContext.call_args[0][0]
+ new_mirrors = json.loads(new_context['mirrors'])
+ our_mirror_filter = list(filter(lambda m: '999' == m['investigation_id'], new_mirrors))
+ our_mirror = our_mirror_filter[0]
+
+ assert len(our_mirror_filter) == 1
+ assert our_mirror == new_mirror
+
+
+def test_mirror_investigation_existing_channel_remove_mirror(mocker):
+ from Slack import mirror_investigation
+
+ # Set
+
+ mirrors = json.loads(MIRRORS)
+ mirrors.append({
+ 'channel_id': 'GKB19PA3V',
+ 'channel_name': 'group2',
+ 'channel_topic': 'cooltopic',
+ 'investigation_id': '999',
+ 'mirror_type': 'all',
+ 'mirror_direction': 'both',
+ 'mirror_to': 'group',
+ 'auto_close': True,
+ 'mirrored': True
+ })
+
+ set_integration_context({
+ 'mirrors': json.dumps(mirrors),
+ 'users': USERS,
+ 'conversations': CONVERSATIONS,
+ 'bot_id': 'W12345678'
+ })
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '999', 'users': ['spengler']})
+ mocker.patch.object(demisto, 'args', return_value={'type': 'none'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'channels_create')
+ mocker.patch.object(slack.WebClient, 'groups_create')
+ mocker.patch.object(slack.WebClient, 'conversations_invite')
+ mocker.patch.object(slack.WebClient, 'conversations_setTopic')
+
+ new_mirror = {
+ 'channel_id': 'GKB19PA3V',
+ 'channel_name': 'group2',
+ 'channel_topic': 'cooltopic',
+ 'investigation_id': '999',
+ 'mirror_type': 'none',
+ 'mirror_direction': 'both',
+ 'mirror_to': 'group',
+ 'auto_close': True,
+ 'mirrored': False
+ }
+ # Arrange
+
+ mirror_investigation()
+
+ # Assert
+
+ assert slack.WebClient.groups_create.call_count == 0
+ assert slack.WebClient.channels_create.call_count == 0
+ assert slack.WebClient.users_list.call_count == 0
+ assert slack.WebClient.conversations_invite.call_count == 0
+ assert slack.WebClient.conversations_setTopic.call_count == 0
+
+ success_results = demisto.results.call_args_list[0][0]
+ assert success_results[0] == 'Investigation mirrored successfully, channel: group2'
+
+ new_context = demisto.setIntegrationContext.call_args[0][0]
+ new_mirrors = json.loads(new_context['mirrors'])
+ our_mirror_filter = list(filter(lambda m: '999' == m['investigation_id'], new_mirrors))
+ our_mirror = our_mirror_filter[0]
+
+ assert len(our_mirror_filter) == 1
+ assert our_mirror == new_mirror
+
+
+def test_mirror_investigation_existing_channel_with_topic(mocker):
+ from Slack import mirror_investigation
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ mocker.patch.object(demisto, 'args', return_value={'channelName': 'group2', 'type': 'chat', 'autoclose': 'false',
+ 'direction': 'FromDemisto', 'mirrorTo': 'group'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '999', 'users': ['spengler']})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'channels_create')
+ mocker.patch.object(slack.WebClient, 'groups_create')
+ mocker.patch.object(slack.WebClient, 'conversations_invite')
+ mocker.patch.object(slack.WebClient, 'conversations_setTopic')
+
+ new_mirror = {
+ 'channel_id': 'GKB19PA3V',
+ 'channel_name': 'group2',
+ 'channel_topic': 'cooltopic',
+ 'investigation_id': '999',
+ 'mirror_type': 'chat',
+ 'mirror_direction': 'FromDemisto',
+ 'mirror_to': 'group',
+ 'auto_close': False,
+ 'mirrored': False,
+ }
+ # Arrange
+
+ mirror_investigation()
+
+ # Assert
+
+ assert slack.WebClient.groups_create.call_count == 0
+ assert slack.WebClient.channels_create.call_count == 0
+ assert slack.WebClient.users_list.call_count == 0
+ assert slack.WebClient.conversations_invite.call_count == 2
+ assert slack.WebClient.conversations_setTopic.call_count == 0
+
+ success_results = demisto.results.call_args_list[0][0]
+ assert success_results[0] == 'Investigation mirrored successfully, channel: group2'
+
+ new_context = demisto.setIntegrationContext.call_args[0][0]
+ new_mirrors = json.loads(new_context['mirrors'])
+ our_mirror_filter = list(filter(lambda m: '999' == m['investigation_id'], new_mirrors))
+ our_mirror = our_mirror_filter[0]
+
+ assert len(our_mirror_filter) == 1
+ assert our_mirror == new_mirror
+
+
+def test_check_for_mirrors(mocker):
+ from Slack import check_for_mirrors
+
+ # Set
+ mirrors = json.loads(MIRRORS)
+ mirrors.append({
+ 'channel_id': 'new_group',
+ 'channel_name': 'channel',
+ 'investigation_id': '999',
+ 'mirror_type': 'all',
+ 'mirror_direction': 'both',
+ 'mirror_to': 'group',
+ 'auto_close': True,
+ 'mirrored': False
+ })
+
+ set_integration_context({
+ 'mirrors': json.dumps(mirrors),
+ 'users': USERS,
+ 'conversations': CONVERSATIONS,
+ 'bot_id': 'W12345678'
+ })
+
+ new_mirror = {
+ 'channel_id': 'new_group',
+ 'channel_name': 'channel',
+ 'investigation_id': '999',
+ 'mirror_type': 'all',
+ 'mirror_direction': 'both',
+ 'mirror_to': 'group',
+ 'auto_close': True,
+ 'mirrored': True
+ }
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'mirrorInvestigation')
+
+ # Arrange
+
+ check_for_mirrors()
+
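+ # mirrorInvestigation is expected to receive the type and direction joined as 'type:direction'.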
+ mirror_id = demisto.mirrorInvestigation.call_args[0][0]
+ mirror_type = demisto.mirrorInvestigation.call_args[0][1]
+ auto_close = demisto.mirrorInvestigation.call_args[0][2]
+
+ new_context = demisto.setIntegrationContext.call_args[0][0]
+ new_mirrors = json.loads(new_context['mirrors'])
+ our_mirror_filter = list(filter(lambda m: '999' == m['investigation_id'], new_mirrors))
+ our_mirror = our_mirror_filter[0]
+
+ # Assert
+
+ assert len(our_mirror_filter) == 1
+ assert our_mirror == new_mirror
+
+ assert mirror_id == '999'
+ assert mirror_type == 'all:both'
+ assert auto_close is True
+
+
+@pytest.mark.asyncio
+async def test_slack_loop_should_exit(mocker):
+ from Slack import slack_loop
+
+ # Set
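+ # MyFuture stands in for the future returned by RTMClient.start: already done, with no exception.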
+ class MyFuture:
+ @staticmethod
+ def done():
+ return True
+
+ @staticmethod
+ def exception():
+ return None
+
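+ # Stub out asyncio.sleep so the loop never actually waits.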
+ @asyncio.coroutine
+ def yeah_im_not_going_to_run(time):
+ return "sup"
+
+ mocker.patch.object(demisto, 'info')
+ mocker.patch.object(asyncio, 'sleep', side_effect=yeah_im_not_going_to_run)
+
+ with pytest.raises(InterruptedError):
+ mocker.patch.object(slack.RTMClient, 'start', side_effect=[MyFuture()])
+ # Exits the while True
+ mocker.patch.object(slack.RTMClient, 'stop', side_effect=InterruptedError())
+
+ # Arrange
+ await slack_loop()
+
+ # Assert
+ assert slack.RTMClient.start.call_count == 1
+
+
+@pytest.mark.asyncio
+async def test_handle_dm_create_demisto_user(mocker):
+ import Slack
+
+ # Set
+
+ @asyncio.coroutine
+ def fake_translate(demisto_user, message):
+ return "sup"
+
+ @asyncio.coroutine
+ def fake_message(channel, text):
+ return "sup"
+
+ @asyncio.coroutine
+ def fake_im(user):
+ return {
+ 'channel': {
+ 'id': 'ey'
+ }
+ }
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'findUser', return_value={'id': 'demisto_id'})
+ mocker.patch.object(slack.WebClient, 'im_open', side_effect=fake_im)
+ mocker.patch.object(slack.WebClient, 'chat_postMessage', side_effect=fake_message)
+ mocker.patch.object(Slack, 'translate_create', side_effect=fake_translate)
+
+ user = json.loads(USERS)[0]
+
+ # Arrange
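+ # Each phrasing below combines an incident keyword with a create keyword, so all six should reach translate_create.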
+ await Slack.handle_dm(user, 'open 123 incident', slack.WebClient)
+ await Slack.handle_dm(user, 'new incident abu ahmad', slack.WebClient)
+ await Slack.handle_dm(user, 'incident create 817', slack.WebClient)
+ await Slack.handle_dm(user, 'incident open', slack.WebClient)
+ await Slack.handle_dm(user, 'incident new', slack.WebClient)
+ await Slack.handle_dm(user, 'create incident name=abc type=Access', slack.WebClient)
+
+ # Assert
+ assert Slack.translate_create.call_count == 6
+
+ demisto_user = Slack.translate_create.call_args[0][0]
+ incident_string = Slack.translate_create.call_args[0][1]
+ assert demisto_user == {'id': 'demisto_id'}
+ assert incident_string == 'create incident name=abc type=Access'
+
+
+@pytest.mark.asyncio
+async def test_handle_dm_nondemisto_user_shouldnt_create(mocker):
+ import Slack
+
+ # Set
+
+ @asyncio.coroutine
+ def fake_translate(demisto_user, message):
+ return "sup"
+
+ @asyncio.coroutine
+ def fake_message(channel, text):
+ return "sup"
+
+ @asyncio.coroutine
+ def fake_im(user):
+ return {
+ 'channel': {
+ 'id': 'ey'
+ }
+ }
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'findUser', return_value=None)
+ mocker.patch.object(Slack, 'translate_create', side_effect=fake_translate)
+ mocker.patch.object(slack.WebClient, 'chat_postMessage', side_effect=fake_message)
+ mocker.patch.object(slack.WebClient, 'im_open', side_effect=fake_im)
+ user = json.loads(USERS)[0]
+
+ # Arrange
+ await Slack.handle_dm(user, 'create incident abc', slack.WebClient)
+
+ # Assert
+ assert Slack.translate_create.call_count == 0
+
+
+@pytest.mark.asyncio
+async def test_handle_dm_nondemisto_user_should_create(mocker):
+ import Slack
+
+ mocker.patch.object(demisto, 'params', return_value={'allow_incidents': 'true'})
+
+ Slack.init_globals()
+
+ # Set
+
+ @asyncio.coroutine
+ def fake_translate(demisto_user, message):
+ return "sup"
+
+ @asyncio.coroutine
+ def fake_message(channel, text):
+ return "sup"
+
+ @asyncio.coroutine
+ def fake_im(user):
+ return {
+ 'channel': {
+ 'id': 'ey'
+ }
+ }
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'findUser', return_value=None)
+ mocker.patch.object(Slack, 'translate_create', side_effect=fake_translate)
+ mocker.patch.object(slack.WebClient, 'im_open', side_effect=fake_im)
+ mocker.patch.object(slack.WebClient, 'chat_postMessage', side_effect=fake_message)
+ user = json.loads(USERS)[0]
+
+ # Arrange
+ await Slack.handle_dm(user, 'create incident abc', slack.WebClient)
+
+ # Assert
+ assert Slack.translate_create.call_count == 1
+
+ demisto_user = Slack.translate_create.call_args[0][0]
+ assert demisto_user is None
+
+
+@pytest.mark.asyncio
+async def test_handle_dm_non_create_nonexisting_user(mocker):
+ from Slack import handle_dm
+
+ # Set
+
+ @asyncio.coroutine
+ def fake_message(channel, text):
+ return 'sup'
+
+ @asyncio.coroutine
+ def fake_im(user):
+ return {
+ 'channel': {
+ 'id': 'ey'
+ }
+ }
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'findUser', return_value=None)
+ mocker.patch.object(demisto, 'directMessage', return_value=None)
+ mocker.patch.object(slack.WebClient, 'im_open', side_effect=fake_im)
+ mocker.patch.object(slack.WebClient, 'chat_postMessage', side_effect=fake_message)
+ user = json.loads(USERS)[0]
+
+ # Arrange
+ await handle_dm(user, 'wazup', slack.WebClient)
+
+ message = demisto.directMessage.call_args[0][0]
+ username = demisto.directMessage.call_args[0][1]
+ email = demisto.directMessage.call_args[0][2]
+ allow = demisto.directMessage.call_args[0][3]
+
+ # Assert
+ assert message == 'wazup'
+ assert username == 'spengler'
+ assert email == 'spengler@ghostbusters.example.com'
+ assert allow is False
+
+
+@pytest.mark.asyncio
+async def test_handle_dm_empty_message(mocker):
+ from Slack import handle_dm
+
+ # Set
+ @asyncio.coroutine
+ def fake_message(channel, text):
+ if not text:
+ raise InterruptedError()
+
+ @asyncio.coroutine
+ def fake_im(user):
+ return {
+ 'channel': {
+ 'id': 'ey'
+ }
+ }
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'findUser', return_value=None)
+ mocker.patch.object(demisto, 'directMessage', return_value=None)
+ mocker.patch.object(slack.WebClient, 'im_open', side_effect=fake_im)
+ mocker.patch.object(slack.WebClient, 'chat_postMessage', side_effect=fake_message)
+ user = json.loads(USERS)[0]
+
+ # Arrange
+ await handle_dm(user, 'wazup', slack.WebClient)
+
+ message_args = slack.WebClient.chat_postMessage.call_args[1]
+
+ # Assert
+ assert message_args['text'] == 'Sorry, I could not perform the selected operation.'
+
+
+@pytest.mark.asyncio
+async def test_handle_dm_create_with_error(mocker):
+ import Slack
+
+ # Set
+
+ @asyncio.coroutine
+ def fake_translate(demisto_user, message):
+ return "sup"
+
+ @asyncio.coroutine
+ def fake_message(channel, text):
+ return "sup"
+
+ @asyncio.coroutine
+ def fake_im(user):
+ return {
+ 'channel': {
+ 'id': 'ey'
+ }
+ }
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'findUser', return_value={'id': 'demisto_id'})
+ mocker.patch.object(slack.WebClient, 'im_open', side_effect=fake_im)
+ mocker.patch.object(slack.WebClient, 'chat_postMessage', side_effect=fake_message)
+ mocker.patch.object(Slack, 'translate_create', side_effect=InterruptedError('omg'))
+
+ user = json.loads(USERS)[0]
+
+ # Arrange
+ await Slack.handle_dm(user, 'open 123 incident', slack.WebClient)
+
+ # Assert
+ assert Slack.translate_create.call_count == 1
+
+ demisto_user = Slack.translate_create.call_args[0][0]
+ incident_string = Slack.translate_create.call_args[0][1]
+ chat_args = slack.WebClient.chat_postMessage.call_args[1]
+
+ assert demisto_user == {'id': 'demisto_id'}
+ assert incident_string == 'open 123 incident'
+ assert chat_args == {'channel': 'ey', 'text': 'Failed creating incidents: omg'}
+
+
+@pytest.mark.asyncio
+async def test_translate_create(mocker):
+ # Set
+ import Slack
+
+ @asyncio.coroutine
+ def this_doesnt_create_incidents(demisto_user, incidents_json):
+ return {
+ 'id': 'new_incident',
+ 'name': 'New Incident'
+ }
+
+ mocker.patch.object(Slack, 'create_incidents', side_effect=this_doesnt_create_incidents)
+ mocker.patch.object(demisto, 'demistoUrls', return_value={'server': 'https://www.eizelulz.com:8443'})
+
+ demisto_user = {'id': 'demisto_user'}
+
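+ # The curly quotes below check that translate_create normalizes “smart quotes” before parsing the JSON.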
+ json_message = 'create incident json={“name”: “xyz”, “role”: “Analyst”}'
+ wrong_json_message = 'create incident json={"name": "xyz"} name=abc'
+ name_message = 'create incident name=eyy'
+ name_type_message = 'create incident name= eyy type= Access'
+ type_name_message = 'create incident type= Access name= eyy'
+ type_message = 'create incident type= Phishing'
+
+ success_message = 'Successfully created incident New Incident.\n' \
+ ' View it on: https://www.eizelulz.com:8443#/WarRoom/new_incident'
+
+ # Arrange
+ json_data = await Slack.translate_create(demisto_user, json_message)
+ wrong_json_data = await Slack.translate_create(demisto_user, wrong_json_message)
+ name_data = await Slack.translate_create(demisto_user, name_message)
+ name_type_data = await Slack.translate_create(demisto_user, name_type_message)
+ type_name_data = await Slack.translate_create(demisto_user, type_name_message)
+ type_data = await Slack.translate_create(demisto_user, type_message)
+
+ create_args = Slack.create_incidents.call_args_list
+ json_args = create_args[0][0][1]
+ name_args = create_args[1][0][1]
+ name_type_args = create_args[2][0][1]
+ type_name_args = create_args[3][0][1]
+
+ # Assert
+
+ assert Slack.create_incidents.call_count == 4
+
+ assert json_args == [{"name": "xyz", "role": "Analyst"}]
+ assert name_args == [{"name": "eyy"}]
+ assert name_type_args == [{"name": "eyy", "type": "Access"}]
+ assert type_name_args == [{"name": "eyy", "type": "Access"}]
+
+ assert json_data == success_message
+ assert wrong_json_data == 'No other properties other than json should be specified.'
+ assert name_data == success_message
+ assert name_type_data == success_message
+ assert type_name_data == success_message
+ assert type_data == 'Please specify arguments in the following manner: name= type=[type] or json=.'
+
+
+@pytest.mark.asyncio
+async def test_translate_create_newline_json(mocker):
+ # Set
+ import Slack
+
+ @asyncio.coroutine
+ def this_doesnt_create_incidents(demisto_user, incidents_json):
+ return {
+ 'id': 'new_incident',
+ 'name': 'New Incident'
+ }
+
+ mocker.patch.object(Slack, 'create_incidents', side_effect=this_doesnt_create_incidents)
+ mocker.patch.object(demisto, 'demistoUrls', return_value={'server': 'https://www.eizelulz.com:8443'})
+
+ demisto_user = {'id': 'demisto_user'}
+
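+ # The stray code-block backticks should be stripped wherever they appear, leaving valid JSON to parse.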
+ json_message = '''```
+ create incident json={
+ "name":"xyz",
+ "details": "1.1.1.1,8.8.8.8"
+ ```
+ }'''
+
+ success_message = 'Successfully created incident New Incident.\n' \
+ ' View it on: https://www.eizelulz.com:8443#/WarRoom/new_incident'
+
+ # Arrange
+ json_data = await Slack.translate_create(demisto_user, json_message)
+
+ create_args = Slack.create_incidents.call_args
+ json_args = create_args[0][1]
+
+ # Assert
+
+ assert Slack.create_incidents.call_count == 1
+
+ assert json_args == [{"name": "xyz", "details": "1.1.1.1,8.8.8.8"}]
+
+ assert json_data == success_message
+
+
+@pytest.mark.asyncio
+async def test_get_user_by_id_async_user_exists(mocker):
+ from Slack import get_user_by_id_async
+
+ # Set
+
+ async def users_info(user):
+ return {'user': json.loads(USERS)[0]}
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(slack.WebClient, 'users_info', side_effect=users_info)
+
+ user_id = 'U012A3CDE'
+
+ # Arrange
+ user = await get_user_by_id_async(slack.WebClient, demisto.getIntegrationContext(), user_id)
+
+ # Assert
+ assert slack.WebClient.users_info.call_count == 0
+ assert demisto.setIntegrationContext.call_count == 0
+ assert user['name'] == 'spengler'
+
+
+@pytest.mark.asyncio
+async def test_get_user_by_id_async_user_doesnt_exist(mocker):
+ from Slack import get_user_by_id_async
+
+ # Set
+
+ async def users_info(user):
+ return {'user': json.loads(USERS)[0]}
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(slack.WebClient, 'users_info', side_effect=users_info)
+
+ user_id = 'XXXXXXX'
+
+ # Arrange
+ user = await get_user_by_id_async(slack.WebClient, demisto.getIntegrationContext(), user_id)
+
+ # Assert
+
+ assert slack.WebClient.users_info.call_count == 1
+ assert demisto.setIntegrationContext.call_count == 1
+ assert user['name'] == 'spengler'
+
+
+@pytest.mark.asyncio
+async def test_handle_text(mocker):
+ import Slack
+
+ # Set
+
+ @asyncio.coroutine
+ def fake_clean(text, client):
+ return 'מה הולך'
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'addEntry')
+ mocker.patch.object(Slack, 'clean_message', side_effect=fake_clean)
+
+ user = json.loads(USERS)[0]
+ investigation_id = '999'
+ text = 'מה הולך'
+
+ # Arrange
+ await Slack.handle_text(slack.WebClient, investigation_id, text, user)
+ entry_args = demisto.addEntry.call_args[1]
+
+ # Assert
+ assert demisto.addEntry.call_count == 1
+ assert entry_args['id'] == '999'
+ assert entry_args['entry'] == 'מה הולך'
+ assert entry_args['username'] == 'spengler'
+ assert entry_args['email'] == 'spengler@ghostbusters.example.com'
+ assert entry_args['footer'] == '\n**From Slack**'
+
+
+def test_check_for_answers(mocker, requests_mock):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'handleEntitlementForUser')
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+
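+ # Mock the oproxy polling endpoint; a response with a payload means the user has answered.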
+ requests_mock.post(
+ 'https://oproxy.demisto.ninja/slack-poll',
+ json={'payload': PAYLOAD_JSON}
+ )
+
+ integration_context = get_integration_context()
+ integration_context['questions'] = json.dumps([{
+ 'thread': 'cool',
+ 'entitlement': 'e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43',
+ 'reply': 'Thanks bro',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:34:25'
+ }])
+
+ set_integration_context(integration_context)
+
+ # Arrange
+ Slack.check_for_answers(datetime.datetime(2019, 9, 26, 18, 38, 25))
+
+ result_args = demisto.handleEntitlementForUser.call_args_list[0][0]
+
+ # Assert
+ assert demisto.handleEntitlementForUser.call_count == 1
+
+ assert result_args[0] == '22'
+ assert result_args[1] == 'e95cb5a1-e394-4bc5-8ce0-508973aaf298'
+ assert result_args[2] == 'spengler@ghostbusters.example.com'
+ assert result_args[3] == 'Eyy'
+ assert result_args[4] == '43'
+
+ # Should delete the question
+ assert demisto.getIntegrationContext()['questions'] == json.dumps([])
+
+
+def test_check_for_answers_continue(mocker, requests_mock):
+ import Slack
+
+ # Set
+ mocker.patch.object(demisto, 'handleEntitlementForUser')
+ mocker.patch.object(demisto, 'error')
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+
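+ # Queue three responses, one per question below: no answer yet, an HTTP 401, and an actual answer.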
+ requests_mock.post(
+ 'https://oproxy.demisto.ninja/slack-poll',
+ [{'json': {}, 'status_code': 200},
+ {'json': 'error', 'status_code': 401},
+ {'json': {'payload': PAYLOAD_JSON}, 'status_code': 200}]
+ )
+
+ integration_context = get_integration_context()
+ integration_context['questions'] = json.dumps([{
+ 'thread': 'notcool',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@30|44',
+ 'reply': 'Thanks bro',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:34:25'
+ }, {
+ 'thread': 'notcool2',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@30|44',
+ 'reply': 'Thanks bro',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:34:25'
+ }, {
+ 'thread': 'cool',
+ 'entitlement': 'e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43',
+ 'reply': 'Thanks bro',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:34:25'
+ }])
+
+ set_integration_context(integration_context)
+
+ # Arrange
+ Slack.check_for_answers(datetime.datetime(2019, 9, 26, 18, 38, 25))
+
+ result_args = demisto.handleEntitlementForUser.call_args_list[0][0]
+
+ # Assert
+ assert demisto.handleEntitlementForUser.call_count == 1
+ assert demisto.error.call_count == 1
+
+ assert result_args[0] == '22'
+ assert result_args[1] == 'e95cb5a1-e394-4bc5-8ce0-508973aaf298'
+ assert result_args[2] == 'spengler@ghostbusters.example.com'
+ assert result_args[3] == 'Eyy'
+ assert result_args[4] == '43'
+
+ # Should delete only the answered question
+ assert demisto.getIntegrationContext()['questions'] == json.dumps([{
+ 'thread': 'notcool',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@30|44',
+ 'reply': 'Thanks bro',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:38:25'
+ }, {
+ 'thread': 'notcool2',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@30|44',
+ 'reply': 'Thanks bro',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:38:25'
+ }])
+
+
+def test_check_for_answers_no_answer(mocker, requests_mock):
+ import Slack
+
+ # Set
+ mocker.patch.object(demisto, 'handleEntitlementForUser')
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+
+ requests_mock.post(
+ 'https://oproxy.demisto.ninja/slack-poll',
+ json={}
+ )
+
+ integration_context = get_integration_context()
+ integration_context['questions'] = json.dumps([{
+ 'thread': 'cool',
+ 'entitlement': 'e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43',
+ 'reply': 'Thanks bro',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:34:25'
+ }, {
+ 'thread': 'notcool',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@30|44',
+ 'reply': 'Thanks bro',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:34:25'
+ }])
+
+ set_integration_context(integration_context)
+
+ # Arrange
+ Slack.check_for_answers(datetime.datetime(2019, 9, 26, 18, 38, 25))
+
+ # Assert
+
+ assert demisto.handleEntitlementForUser.call_count == 0
+
+ # Should not delete the question
+ assert demisto.getIntegrationContext()['questions'] == json.dumps([{
+ 'thread': 'cool',
+ 'entitlement': 'e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43',
+ 'reply': 'Thanks bro',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:38:25'
+ }, {
+ 'thread': 'notcool',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@30|44',
+ 'reply': 'Thanks bro',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:38:25'
+ }])
+
+
+def test_check_for_answers_no_answer_expires(mocker, requests_mock):
+ import Slack
+
+ # Set
+ mocker.patch.object(demisto, 'handleEntitlementForUser')
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+
+ requests_mock.post(
+ 'https://oproxy.demisto.ninja/slack-poll',
+ json={}
+ )
+
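+ # The second question below has already expired, so it should be resolved with its default response.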
+ integration_context = get_integration_context()
+ integration_context['questions'] = json.dumps([{
+ 'thread': 'cool',
+ 'entitlement': 'e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43',
+ 'reply': 'Thanks bro',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:34:25'
+ }, {
+ 'thread': 'notcool',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@30|44',
+ 'reply': 'Thanks bro',
+ 'expiry': '2019-09-26 18:35:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:34:25'
+ }])
+
+ set_integration_context(integration_context)
+
+ # Arrange
+ Slack.check_for_answers(datetime.datetime(2019, 9, 26, 18, 38, 25))
+
+ result_args = demisto.handleEntitlementForUser.call_args_list[0][0]
+
+ # Assert
+ assert demisto.handleEntitlementForUser.call_count == 1
+
+ assert result_args[0] == '30'
+ assert result_args[1] == '4404dae8-2d45-46bd-85fa-64779c12abe8'
+ assert result_args[2] == ''
+ assert result_args[3] == 'NoResponse'
+ assert result_args[4] == '44'
+
+ # Should delete only the expired question; the unexpired one remains
+ assert demisto.getIntegrationContext()['questions'] == json.dumps([{
+ 'thread': 'cool',
+ 'entitlement': 'e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43',
+ 'reply': 'Thanks bro',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:38:25'
+ }])
+
+
+def test_check_for_answers_error(mocker, requests_mock):
+ import Slack
+
+ # Set
+ mocker.patch.object(demisto, 'handleEntitlementForUser')
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'error')
+
+ requests_mock.post(
+ 'https://oproxy.demisto.ninja/slack-poll',
+ json='error',
+ status_code=401
+ )
+
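+ # Every poll returns 401, so one error should be logged per question.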
+ integration_context = get_integration_context()
+ integration_context['questions'] = json.dumps([{
+ 'thread': 'cool',
+ 'entitlement': 'e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse'
+ }, {
+ 'thread': 'notcool',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@30|44',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse'
+ }])
+
+ set_integration_context(integration_context)
+
+ # Arrange
+ Slack.check_for_answers(datetime.datetime(2019, 9, 26, 18, 38, 25))
+
+ # Assert
+
+ assert demisto.handleEntitlementForUser.call_count == 0
+ assert demisto.error.call_count == 2
+
+ # Should not delete the question
+ assert demisto.getIntegrationContext()['questions'] == json.dumps([{
+ 'thread': 'cool',
+ 'entitlement': 'e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:38:25'
+ }, {
+ 'thread': 'notcool',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@30|44',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse',
+ 'last_poll_time': '2019-09-26 18:38:25'
+ }])
+
+
+def test_check_for_answers_handle_entitlement_error(mocker, requests_mock):
+ import Slack
+
+ # Set
+ mocker.patch.object(demisto, 'handleEntitlementForUser', side_effect=InterruptedError())
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'error')
+
+ requests_mock.post(
+ 'https://oproxy.demisto.ninja/slack-poll',
+ json={'payload': PAYLOAD_JSON},
+ status_code=200
+ )
+
+ integration_context = get_integration_context()
+ integration_context['questions'] = json.dumps([{
+ 'thread': 'notcool',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@30|44',
+ 'expiry': '3000-09-26 18:38:25',
+ 'default_response': 'NoResponse'
+ }])
+
+ set_integration_context(integration_context)
+
+ # Arrange
+ Slack.check_for_answers(datetime.datetime(2019, 9, 26, 18, 38, 25))
+
+ # Assert
+
+ assert demisto.handleEntitlementForUser.call_count == 1
+ assert demisto.error.call_count == 1
+
+ # Should delete the question even though handling the entitlement failed
+ assert demisto.getIntegrationContext()['questions'] == json.dumps([])
+
+
+@pytest.mark.asyncio
+async def test_check_entitlement(mocker):
+ from Slack import check_and_handle_entitlement
+
+ # Set
+ mocker.patch.object(demisto, 'handleEntitlementForUser')
+
+ user = {
+ 'id': 'U123456',
+ 'name': 'test',
+ 'profile': {
+ 'email': 'test@demisto.com'
+ }
+ }
+
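+ # Entitlement format: <GUID>@<incident ID>|<task ID>, where the task ID is optional.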
+ message1 = 'hi test@demisto.com 4404dae8-2d45-46bd-85fa-64779c12abe8@e093ba05-3f3c-402e-81a7-149db969be5d goodbye'
+ message2 = 'hi test@demisto.com 4404dae8-2d45-46bd-85fa-64779c12abe8@22 goodbye'
+ message3 = 'hi test@demisto.com 4404dae8-2d45-46bd-85fa-64779c12abe8@e093ba05-3f3c-402e-81a7-149db969be5d|4 goodbye'
+ message4 = 'hi test@demisto.com 4404dae8-2d45-46bd-85fa-64779c12abe8@22|43 goodbye'
+ message5 = 'hi test@demisto.com 43434@e093ba05-3f3c-402e-81a7-149db969be5d goodbye'
+ message6 = 'hi test@demisto.com name-of-someone@mail-of-someone goodbye'
+
+ # Arrange
+ result1 = await check_and_handle_entitlement(message1, user, '')
+ result2 = await check_and_handle_entitlement(message2, user, '')
+ result3 = await check_and_handle_entitlement(message3, user, '')
+ result4 = await check_and_handle_entitlement(message4, user, '')
+ result5 = await check_and_handle_entitlement(message5, user, '')
+ result6 = await check_and_handle_entitlement(message6, user, '')
+
+ result1_args = demisto.handleEntitlementForUser.call_args_list[0][0]
+ result2_args = demisto.handleEntitlementForUser.call_args_list[1][0]
+ result3_args = demisto.handleEntitlementForUser.call_args_list[2][0]
+ result4_args = demisto.handleEntitlementForUser.call_args_list[3][0]
+
+ assert result1 == 'Thank you for your response.'
+ assert result2 == 'Thank you for your response.'
+ assert result3 == 'Thank you for your response.'
+ assert result4 == 'Thank you for your response.'
+ assert result5 == ''
+ assert result6 == ''
+
+ assert demisto.handleEntitlementForUser.call_count == 4
+
+ assert result1_args[0] == 'e093ba05-3f3c-402e-81a7-149db969be5d' # incident ID
+ assert result1_args[1] == '4404dae8-2d45-46bd-85fa-64779c12abe8' # GUID
+ assert result1_args[2] == 'test@demisto.com' # email
+ assert result1_args[3] == 'hi test@demisto.com goodbye' # content
+ assert result1_args[4] == '' # task id
+
+ assert result2_args[0] == '22'
+ assert result2_args[1] == '4404dae8-2d45-46bd-85fa-64779c12abe8'
+ assert result2_args[2] == 'test@demisto.com'
+ assert result2_args[3] == 'hi test@demisto.com goodbye'
+ assert result2_args[4] == ''
+
+ assert result3_args[0] == 'e093ba05-3f3c-402e-81a7-149db969be5d'
+ assert result3_args[1] == '4404dae8-2d45-46bd-85fa-64779c12abe8'
+ assert result3_args[2] == 'test@demisto.com'
+ assert result3_args[3] == 'hi test@demisto.com goodbye'
+ assert result3_args[4] == '4'
+
+ assert result4_args[0] == '22'
+ assert result4_args[1] == '4404dae8-2d45-46bd-85fa-64779c12abe8'
+ assert result4_args[2] == 'test@demisto.com'
+ assert result4_args[3] == 'hi test@demisto.com goodbye'
+ assert result4_args[4] == '43'
+
+
+@pytest.mark.asyncio
+async def test_check_entitlement_with_context(mocker):
+ from Slack import check_and_handle_entitlement
+
+ # Set
+ mocker.patch.object(demisto, 'handleEntitlementForUser')
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+
+ user = {
+ 'id': 'U123456',
+ 'name': 'test',
+ 'profile': {
+ 'email': 'test@demisto.com'
+ }
+ }
+
+ integration_context = get_integration_context()
+ integration_context['questions'] = json.dumps([{
+ 'thread': 'cool',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@22|43'
+ }, {
+ 'thread': 'notcool',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@30|44'
+ }])
+
+ set_integration_context(integration_context)
+
+ # Arrange
+ await check_and_handle_entitlement('hola', user, 'cool')
+
+ result_args = demisto.handleEntitlementForUser.call_args_list[0][0]
+
+ # Assert
+ assert demisto.handleEntitlementForUser.call_count == 1
+
+ assert result_args[0] == '22'
+ assert result_args[1] == '4404dae8-2d45-46bd-85fa-64779c12abe8'
+ assert result_args[2] == 'test@demisto.com'
+ assert result_args[3] == 'hola'
+ assert result_args[4] == '43'
+
+ # Should delete only the answered question
+ assert demisto.getIntegrationContext()['questions'] == json.dumps([{
+ 'thread': 'notcool',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@30|44'
+ }])
+
+
+def test_send_request(mocker):
+ import Slack
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ def conversations_list(**kwargs):
+ return {'channels': json.loads(CONVERSATIONS)}
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'conversations_list', side_effect=conversations_list)
+ mocker.patch.object(slack.WebClient, 'im_open', return_value={'channel': {'id': 'im_channel'}})
+ mocker.patch.object(Slack, 'send_file', return_value='neat')
+ mocker.patch.object(Slack, 'send_message', return_value='cool')
+
+ # Arrange
+
+ user_res = Slack.slack_send_request('spengler', None, None, message='Hi')
+ channel_res = Slack.slack_send_request(None, 'general', None, file='file')
+
+ user_args = Slack.send_message.call_args[0]
+ channel_args = Slack.send_file.call_args[0]
+
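+ # The captured positional args appear to be: destinations, entry, a flag, the integration context, the message and the thread id
+ # (mirroring the direct send_message call in test_send_message).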
+ # Assert
+
+ assert slack.WebClient.users_list.call_count == 0
+ assert slack.WebClient.conversations_list.call_count == 0
+ assert Slack.send_message.call_count == 1
+ assert Slack.send_file.call_count == 1
+
+ assert user_args[0] == ['im_channel']
+ assert user_args[1] == ''
+ assert user_args[2] is False
+ assert user_args[4] == 'Hi'
+ assert user_args[5] == ''
+
+ assert channel_args[0] == ['C012AB3CD']
+ assert channel_args[1] == 'file'
+ assert channel_args[3] == ''
+
+ assert user_res == 'cool'
+ assert channel_res == 'neat'
+
+
+def test_send_request_different_name(mocker):
+ import Slack
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ def conversations_list(**kwargs):
+ return {'channels': json.loads(CONVERSATIONS)}
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'conversations_list', side_effect=conversations_list)
+ mocker.patch.object(Slack, 'send_message', return_value='cool')
+
+ # Arrange
+ channel_res = Slack.slack_send_request(None, 'incident-684', None, message='Hi')
+
+ channel_args = Slack.send_message.call_args[0]
+
+ # Assert
+
+ assert slack.WebClient.users_list.call_count == 0
+ assert slack.WebClient.conversations_list.call_count == 0
+ assert Slack.send_message.call_count == 1
+
+ assert channel_args[0] == ['GKB19PA3V']
+ assert channel_args[1] == ''
+ assert channel_args[2] is False
+ assert channel_args[4] == 'Hi'
+ assert channel_args[5] == ''
+
+ assert channel_res == 'cool'
+
+
+def test_send_request_with_severity(mocker):
+ import Slack
+
+ mocker.patch.object(demisto, 'params', return_value={'incidentNotificationChannel': 'general',
+ 'min_severity': 'High', 'notify_incidents': True})
+
+ Slack.init_globals()
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ def conversations_list(**kwargs):
+ return {'channels': json.loads(CONVERSATIONS)}
+
+ mocker.patch.object(demisto, 'args', return_value={'severity': '3', 'message': '!!!',
+ 'messageType': 'incidentOpened'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'conversations_list', side_effect=conversations_list)
+ mocker.patch.object(slack.WebClient, 'im_open', return_value={'channel': {'id': 'im_channel'}})
+ mocker.patch.object(Slack, 'send_message', return_value={'ts': 'cool'})
+
+ # Arrange
+ Slack.slack_send()
+
+ send_args = Slack.send_message.call_args[0]
+
+ results = demisto.results.call_args_list[0][0]
+ # Assert
+
+ assert slack.WebClient.users_list.call_count == 0
+ assert slack.WebClient.conversations_list.call_count == 0
+ assert Slack.send_message.call_count == 1
+
+ assert send_args[0] == ['C012AB3CD']
+ assert send_args[1] is None
+ assert send_args[2] is False
+ assert send_args[4] == '!!!'
+ assert send_args[5] == ''
+
+ assert results[0]['Contents'] == 'Message sent to Slack successfully.\nThread ID is: cool'
+
+
+def test_send_request_with_notification_channel(mocker):
+ import Slack
+
+ mocker.patch.object(demisto, 'params', return_value={'incidentNotificationChannel': 'general',
+ 'min_severity': 'High', 'notify_incidents': True})
+
+ Slack.init_globals()
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ def conversations_list(**kwargs):
+ return {'channels': json.loads(CONVERSATIONS)}
+
+ mocker.patch.object(demisto, 'args', return_value={'channel': 'incidentNotificationChannel',
+ 'severity': '4', 'message': '!!!',
+ 'messageType': 'incidentOpened'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'conversations_list', side_effect=conversations_list)
+ mocker.patch.object(slack.WebClient, 'im_open', return_value={'channel': {'id': 'im_channel'}})
+ mocker.patch.object(Slack, 'send_message', return_value={'ts': 'cool'})
+
+ # Arrange
+ Slack.slack_send()
+
+ send_args = Slack.send_message.call_args[0]
+
+ results = demisto.results.call_args_list[0][0]
+ # Assert
+
+ assert slack.WebClient.users_list.call_count == 0
+ assert slack.WebClient.conversations_list.call_count == 0
+ assert Slack.send_message.call_count == 1
+
+ assert send_args[0] == ['C012AB3CD']
+ assert send_args[1] is None
+ assert send_args[2] is False
+ assert send_args[4] == '!!!'
+ assert send_args[5] == ''
+
+ assert results[0]['Contents'] == 'Message sent to Slack successfully.\nThread ID is: cool'
+
+
+def test_send_request_with_entitlement(mocker):
+ import Slack
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ def conversations_list(**kwargs):
+ return {'channels': json.loads(CONVERSATIONS)}
+
+ mocker.patch.object(demisto, 'args', return_value={
+ 'message': json.dumps({
+ 'message': 'hi test@demisto.com',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@22|43',
+ 'reply': 'Thanks bro',
+ 'expiry': '2019-09-26 18:38:25',
+ 'default_response': 'NoResponse'}),
+ 'to': 'spengler'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'conversations_list', side_effect=conversations_list)
+ mocker.patch.object(slack.WebClient, 'im_open', return_value={'channel': {'id': 'im_channel'}})
+ mocker.patch.object(Slack, 'send_message', return_value={'ts': 'cool'})
+
+ questions = [{
+ 'thread': 'cool',
+ 'entitlement': '4404dae8-2d45-46bd-85fa-64779c12abe8@22|43',
+ 'reply': 'Thanks bro',
+ 'expiry': '2019-09-26 18:38:25',
+ 'default_response': 'NoResponse'
+ }]
+
+ # Arrange
+ Slack.slack_send()
+
+ send_args = Slack.send_message.call_args[0]
+
+ results = demisto.results.call_args_list[0][0]
+ # Assert
+
+ assert slack.WebClient.users_list.call_count == 0
+ assert slack.WebClient.conversations_list.call_count == 0
+ assert Slack.send_message.call_count == 1
+
+ assert send_args[0] == ['im_channel']
+ assert send_args[1] is None
+ assert send_args[2] is False
+ assert send_args[4] == 'hi test@demisto.com'
+ assert send_args[5] == ''
+
+ assert results[0]['Contents'] == 'Message sent to Slack successfully.\nThread ID is: cool'
+
+ assert demisto.getIntegrationContext()['questions'] == json.dumps(questions)
+
+
+def test_send_request_with_entitlement_blocks(mocker):
+ import Slack
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ def conversations_list(**kwargs):
+ return {'channels': json.loads(CONVERSATIONS)}
+
+ mocker.patch.object(demisto, 'args', return_value={
+ 'blocks': json.dumps({
+ 'blocks': json.dumps(BLOCK_JSON),
+ 'entitlement': 'e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43',
+ 'reply': 'Thanks bro',
+ 'expiry': '2019-09-26 18:38:25',
+ 'default_response': 'NoResponse'}),
+ 'to': 'spengler'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'conversations_list', side_effect=conversations_list)
+ mocker.patch.object(slack.WebClient, 'im_open', return_value={'channel': {'id': 'im_channel'}})
+ mocker.patch.object(Slack, 'send_message', return_value={'ts': 'cool'})
+
+ questions = [{
+ 'thread': 'cool',
+ 'entitlement': 'e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43',
+ 'reply': 'Thanks bro',
+ 'expiry': '2019-09-26 18:38:25',
+ 'default_response': 'NoResponse'
+ }]
+
+ # Arrange
+ Slack.slack_send()
+
+ send_args = Slack.send_message.call_args[0]
+
+ results = demisto.results.call_args_list[0][0]
+ # Assert
+ assert slack.WebClient.users_list.call_count == 0
+ assert slack.WebClient.conversations_list.call_count == 0
+ assert Slack.send_message.call_count == 1
+
+ assert send_args[0] == ['im_channel']
+ assert send_args[1] is None
+ assert send_args[2] is False
+ assert send_args[4] == ''
+ assert send_args[6] == json.dumps(BLOCK_JSON)
+
+ assert results[0]['Contents'] == 'Message sent to Slack successfully.\nThread ID is: cool'
+
+ assert demisto.getIntegrationContext()['questions'] == json.dumps(questions)
+
+
+def test_send_request_with_entitlement_blocks_message(mocker):
+ import Slack
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ def conversations_list(**kwargs):
+ return {'channels': json.loads(CONVERSATIONS)}
+
+ mocker.patch.object(demisto, 'args', return_value={
+ 'message': 'wat up',
+ 'blocks': json.dumps({
+ 'blocks': json.dumps(BLOCK_JSON),
+ 'entitlement': 'e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43',
+ 'reply': 'Thanks bro',
+ 'expiry': '2019-09-26 18:38:25',
+ 'default_response': 'NoResponse'}),
+ 'to': 'spengler'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'conversations_list', side_effect=conversations_list)
+ mocker.patch.object(slack.WebClient, 'im_open', return_value={'channel': {'id': 'im_channel'}})
+ mocker.patch.object(Slack, 'send_message', return_value={'ts': 'cool'})
+
+ questions = [{
+ 'thread': 'cool',
+ 'entitlement': 'e95cb5a1-e394-4bc5-8ce0-508973aaf298@22|43',
+ 'reply': 'Thanks bro',
+ 'expiry': '2019-09-26 18:38:25',
+ 'default_response': 'NoResponse'
+ }]
+
+ # Arrange
+ Slack.slack_send()
+
+ send_args = Slack.send_message.call_args[0]
+
+ results = demisto.results.call_args_list[0][0]
+
+ # Assert
+ assert slack.WebClient.users_list.call_count == 0
+ assert slack.WebClient.conversations_list.call_count == 0
+ assert Slack.send_message.call_count == 1
+
+ assert send_args[0] == ['im_channel']
+ assert send_args[1] is None
+ assert send_args[2] is False
+ assert send_args[4] == 'wat up'
+ assert send_args[6] == json.dumps(BLOCK_JSON)
+
+ assert results[0]['Contents'] == 'Message sent to Slack successfully.\nThread ID is: cool'
+
+ assert demisto.getIntegrationContext()['questions'] == json.dumps(questions)
+
+
+def test_send_to_user_lowercase(mocker):
+ import Slack
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ def conversations_list(**kwargs):
+ return {'channels': json.loads(CONVERSATIONS)}
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'args', return_value={'to': 'glenda@south.oz.coven', 'message': 'hi'})
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'conversations_list', side_effect=conversations_list)
+ mocker.patch.object(slack.WebClient, 'im_open', return_value={'channel': {'id': 'im_channel'}})
+ mocker.patch.object(Slack, 'send_file', return_value='neat')
+ mocker.patch.object(Slack, 'send_message', return_value={'ts': 'cool'})
+
+ # Arrange
+
+ Slack.slack_send()
+
+ send_args = Slack.send_message.call_args[0]
+
+ results = demisto.results.call_args_list[0][0]
+
+ # Assert
+
+ assert slack.WebClient.users_list.call_count == 0
+ assert slack.WebClient.conversations_list.call_count == 0
+ assert Slack.send_message.call_count == 1
+
+ assert send_args[0] == ['im_channel']
+ assert send_args[1] is None
+ assert send_args[2] is False
+ assert send_args[4] == 'hi'
+ assert send_args[5] == ''
+
+ assert results[0]['Contents'] == 'Message sent to Slack successfully.\nThread ID is: cool'
+
+
+def test_send_request_with_severity_user_doesnt_exist(mocker, capfd):
+ import Slack
+
+ mocker.patch.object(demisto, 'params', return_value={'incidentNotificationChannel': 'general',
+ 'min_severity': 'High', 'notify_incidents': True})
+
+ Slack.init_globals()
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ def conversations_list(**kwargs):
+ return {'channels': json.loads(CONVERSATIONS)}
+
+ mocker.patch.object(demisto, 'args', return_value={'severity': '3', 'message': '!!!',
+ 'messageType': 'incidentOpened', 'to': 'alexios'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'conversations_list', side_effect=conversations_list)
+ mocker.patch.object(slack.WebClient, 'im_open', return_value={'channel': {'id': 'im_channel'}})
+ mocker.patch.object(Slack, 'send_message', return_value={'ts': 'cool'})
+
+ # Arrange
+ with capfd.disabled():
+ Slack.slack_send()
+
+ send_args = Slack.send_message.call_args[0]
+
+ results = demisto.results.call_args_list[0][0]
+ # Assert
+
+ assert slack.WebClient.users_list.call_count == 1
+ assert slack.WebClient.conversations_list.call_count == 0
+ assert Slack.send_message.call_count == 1
+
+ assert send_args[0] == ['C012AB3CD']
+ assert send_args[1] is None
+ assert send_args[2] is False
+ assert send_args[4] == '!!!'
+ assert send_args[5] == ''
+
+ assert results[0]['Contents'] == 'Message sent to Slack successfully.\nThread ID is: cool'
+
+
+def test_send_request_no_user(mocker, capfd):
+ import Slack
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ def conversations_list(**kwargs):
+ return {'channels': json.loads(CONVERSATIONS)}
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET, side_effect=InterruptedError())
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'conversations_list', side_effect=conversations_list)
+ mocker.patch.object(slack.WebClient, 'im_open', return_value={'channel': {'id': 'im_channel'}})
+ mocker.patch.object(Slack, 'send_file', return_value='neat')
+ mocker.patch.object(Slack, 'send_message', return_value='cool')
+
+ # Arrange
+
+ with capfd.disabled():
+ with pytest.raises(InterruptedError):
+ Slack.slack_send_request('alexios', None, None, message='Hi')
+ err_msg = return_error_mock.call_args[0][0]
+
+ # Assert
+
+ assert return_error_mock.call_count == 1
+ assert err_msg == 'Could not find any destination to send to.'
+ assert slack.WebClient.users_list.call_count == 1
+ assert Slack.send_message.call_count == 0
+ assert Slack.send_file.call_count == 0
+
+
+def test_send_request_no_severity(mocker):
+ import Slack
+
+ mocker.patch.object(demisto, 'params', return_value={'incidentNotificationChannel': 'general',
+ 'min_severity': 'High', 'notify_incidents': True})
+
+ Slack.init_globals()
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ def conversations_list(**kwargs):
+ return {'channels': json.loads(CONVERSATIONS)}
+
+ mocker.patch.object(demisto, 'args', return_value={'severity': '2', 'message': '!!!',
+ 'messageType': 'incidentOpened'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET, side_effect=InterruptedError())
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'conversations_list', side_effect=conversations_list)
+ mocker.patch.object(slack.WebClient, 'im_open', return_value={'channel': {'id': 'im_channel'}})
+ mocker.patch.object(Slack, 'send_message', return_value={'ts': 'cool'})
+
+ # Arrange
+ with pytest.raises(InterruptedError):
+ Slack.slack_send()
+
+ err_msg = return_error_mock.call_args[0][0]
+
+ # Assert
+
+ assert return_error_mock.call_count == 1
+ assert err_msg == 'Either a user, group or channel must be provided.'
+ assert slack.WebClient.users_list.call_count == 0
+ assert Slack.send_message.call_count == 0
+
+
+def test_send_request_zero_severity(mocker):
+ import Slack
+
+ mocker.patch.object(demisto, 'params', return_value={'incidentNotificationChannel': 'general',
+ 'min_severity': 'High', 'notify_incidents': True})
+
+ Slack.init_globals()
+
+ # Set
+
+ def users_list(**kwargs):
+ return {'members': json.loads(USERS)}
+
+ def conversations_list(**kwargs):
+ return {'channels': json.loads(CONVERSATIONS)}
+
+ mocker.patch.object(demisto, 'args', return_value={'severity': '0', 'message': '!!!',
+ 'messageType': 'incidentOpened'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET, side_effect=InterruptedError())
+ mocker.patch.object(slack.WebClient, 'users_list', side_effect=users_list)
+ mocker.patch.object(slack.WebClient, 'conversations_list', side_effect=conversations_list)
+ mocker.patch.object(slack.WebClient, 'im_open', return_value={'channel': {'id': 'im_channel'}})
+ mocker.patch.object(Slack, 'send_message', return_value={'ts': 'cool'})
+
+ # Arrange
+ with pytest.raises(InterruptedError):
+ Slack.slack_send()
+
+ err_msg = return_error_mock.call_args[0][0]
+
+ # Assert
+
+ assert return_error_mock.call_count == 1
+ assert err_msg == 'Either a user, group or channel must be provided.'
+ assert slack.WebClient.users_list.call_count == 0
+ assert Slack.send_message.call_count == 0
+
+
+def test_send_message(mocker):
+ import Slack
+ # Set
+
+ link = 'https://www.eizelulz.com:8443/#/WarRoom/727'
+ mocker.patch.object(demisto, 'investigation', return_value={'type': 1})
+ mocker.patch.object(demisto, 'demistoUrls', return_value={'warRoom': link})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(Slack, 'send_message_to_destinations')
+ mocker.patch.object(Slack, 'invite_users_to_conversation')
+
+ # Arrange
+ Slack.send_message(['channel'], None, None, demisto.getIntegrationContext(), 'yo', None, '')
+
+ args = Slack.send_message_to_destinations.call_args[0]
+
+ # Assert
+ assert Slack.send_message_to_destinations.call_count == 1
+
+ assert args[0] == ['channel']
+ assert args[1] == 'yo' + '\nView it on: ' + link
+ assert args[2] is None
+
+
+def test_send_message_retry(mocker):
+ import Slack
+ from slack.errors import SlackApiError
+ # Set
+
+ link = 'https://www.eizelulz.com:8443/#/WarRoom/727'
+ mocker.patch.object(demisto, 'investigation', return_value={'type': 1})
+ mocker.patch.object(demisto, 'demistoUrls', return_value={'warRoom': link})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'invite_users_to_conversation')
+
+ # Arrange
+ mocker.patch.object(Slack, 'send_message_to_destinations',
+ side_effect=[SlackApiError('not_in_channel', None), 'ok'])
+ Slack.send_message(['channel'], None, None, demisto.getIntegrationContext(), 'yo', None, '')
+
+ args = Slack.send_message_to_destinations.call_args_list[1][0]
+
+ # Assert
+ assert Slack.send_message_to_destinations.call_count == 2
+
+ assert args[0] == ['channel']
+ assert args[1] == 'yo' + '\nView it on: ' + link
+ assert args[2] is None
+
+
+def test_send_file_retry(mocker):
+ import Slack
+ from slack.errors import SlackApiError
+ # Set
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(Slack, 'invite_users_to_conversation')
+
+ # Arrange
+ mocker.patch.object(Slack, 'send_file_to_destinations',
+ side_effect=[SlackApiError('not_in_channel', None), 'ok'])
+ Slack.send_file(['channel'], 'file', demisto.getIntegrationContext(), None)
+
+ args = Slack.send_file_to_destinations.call_args_list[1][0]
+
+ # Assert
+ assert Slack.send_file_to_destinations.call_count == 2
+
+ assert args[0] == ['channel']
+ assert args[1] == 'file'
+ assert args[2] is None
+
+
+def test_close_channel_with_name(mocker):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={'channel': 'general'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'get_conversation_by_name', return_value={'id': 'C012AB3CD'})
+ mocker.patch.object(slack.WebClient, 'conversations_archive')
+ mocker.patch.object(demisto, 'results')
+
+ # Arrange
+ Slack.close_channel()
+
+ close_args = slack.WebClient.conversations_archive.call_args
+ success_results = demisto.results.call_args[0]
+
+ # Assert
+ assert Slack.get_conversation_by_name.call_count == 1
+ assert slack.WebClient.conversations_archive.call_count == 1
+ assert success_results[0] == 'Channel successfully archived.'
+ assert close_args[1]['channel'] == 'C012AB3CD'
+
+
+def test_close_channel_should_delete_mirror(mocker):
+ from Slack import close_channel
+ # Set
+
+ mirrors = json.loads(MIRRORS)
+ mirrors.pop(0)
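+ # Expected context: every mirror except the first one, which belongs to this investigation (681).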
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681'})
+ mocker.patch.object(slack.WebClient, 'conversations_archive')
+
+ # Arrange
+ close_channel()
+
+ archive_args = slack.WebClient.conversations_archive.call_args[1]
+ context_args = demisto.setIntegrationContext.call_args[0][0]
+ context_args_mirrors = json.loads(context_args['mirrors'])
+
+ # Assert
+ assert archive_args['channel'] == 'GKQ86DVPH'
+ assert context_args_mirrors == mirrors
+
+
+def test_close_channel_should_delete_mirrors(mocker):
+ from Slack import close_channel
+ # Set
+
+ mirrors = json.loads(MIRRORS)
+ mirrors.pop(1)
+ mirrors.pop(1)
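+ # Expected context: both mirrors of this investigation (684) are removed.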
+
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '684'})
+ mocker.patch.object(slack.WebClient, 'conversations_archive')
+
+ # Arrange
+ close_channel()
+
+ archive_args = slack.WebClient.conversations_archive.call_args[1]
+ context_args = demisto.setIntegrationContext.call_args[0][0]
+ context_args_mirrors = json.loads(context_args['mirrors'])
+
+ # Assert
+ assert archive_args['channel'] == 'GKB19PA3V'
+ assert context_args_mirrors == mirrors
+
+
+def test_get_conversation_by_name_paging(mocker):
+ from Slack import get_conversation_by_name
+ # Set
+
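+ # The first call carries two kwargs; the paged follow-up adds a cursor, which is how the stub tells the pages apart.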
+ def conversations_list(**kwargs):
+ if len(kwargs) == 2:
+ return {'channels': json.loads(CONVERSATIONS), 'response_metadata': {
+ 'next_cursor': 'dGVhbTpDQ0M3UENUTks='
+ }}
+ else:
+ return {'channels': [{
+ 'id': 'C248918AB',
+ 'name': 'lulz'
+ }], 'response_metadata': {
+ 'next_cursor': ''
+ }}
+
+ mocker.patch.object(slack.WebClient, 'conversations_list', side_effect=conversations_list)
+
+ # Arrange
+ channel = get_conversation_by_name('lulz')
+ args = slack.WebClient.conversations_list.call_args_list
+ first_args = args[0][1]
+ second_args = args[1][1]
+
+ # Assert
+ assert len(first_args) == 2
+ assert first_args['limit'] == 200
+ assert len(second_args) == 3
+ assert second_args['cursor'] == 'dGVhbTpDQ0M3UENUTks='
+ assert channel['id'] == 'C248918AB'
+ assert slack.WebClient.conversations_list.call_count == 2
+
+
+def test_send_file_no_args_investigation(mocker):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'getFilePath', return_value={'path': 'path', 'name': 'name'})
+ mocker.patch('builtins.open', mock_open(read_data="data"))
+ mocker.patch.object(demisto, 'results')
+ mocker.patch.object(Slack, 'slack_send_request', return_value='cool')
+
+ # Arrange
+ Slack.slack_send_file()
+
+ send_args = Slack.slack_send_request.call_args
+ success_results = demisto.results.call_args[0]
+
+ # Assert
+ assert Slack.slack_send_request.call_count == 1
+ assert success_results[0] == 'File sent to Slack successfully.'
+
+ assert send_args[0][1] == 'incident-681'
+ assert send_args[1]['file'] == {
+ 'data': 'data',
+ 'name': 'name',
+ 'comment': ''
+ }
+
+
+def test_send_file_no_args_no_investigation(mocker):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '999'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'slack_send_request', return_value='cool')
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET, side_effect=InterruptedError())
+
+ # Arrange
+ with pytest.raises(InterruptedError):
+ Slack.slack_send_file()
+
+ err_msg = return_error_mock.call_args[0][0]
+
+ # Assert
+ assert Slack.slack_send_request.call_count == 0
+ assert err_msg == 'Either a user, group or channel must be provided.'
+
+
+def test_set_topic(mocker):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={'channel': 'general', 'topic': 'ey'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'get_conversation_by_name', return_value={'id': 'C012AB3CD'})
+ mocker.patch.object(slack.WebClient, 'conversations_setTopic')
+ mocker.patch.object(demisto, 'results')
+
+ # Arrange
+ Slack.set_channel_topic()
+
+ send_args = slack.WebClient.conversations_setTopic.call_args
+ success_results = demisto.results.call_args[0]
+
+ # Assert
+ assert Slack.get_conversation_by_name.call_count == 1
+ assert slack.WebClient.conversations_setTopic.call_count == 1
+ assert success_results[0] == 'Topic successfully set.'
+ assert send_args[1]['channel'] == 'C012AB3CD'
+ assert send_args[1]['topic'] == 'ey'
+
+
+def test_set_topic_no_args_investigation(mocker):
+ import Slack
+
+ # Set
+
+ new_mirror = {
+ 'channel_id': 'GKQ86DVPH',
+ 'channel_name': 'incident-681',
+ 'channel_topic': 'ey',
+ 'investigation_id': '681',
+ 'mirror_type': 'all',
+ 'mirror_direction': 'both',
+ 'mirror_to': 'group',
+ 'auto_close': True,
+ 'mirrored': True
+ }
+
+ mocker.patch.object(demisto, 'args', return_value={'topic': 'ey'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'get_conversation_by_name', return_value={'id': 'C012AB3CD'})
+ mocker.patch.object(slack.WebClient, 'conversations_setTopic')
+ mocker.patch.object(demisto, 'results')
+
+ # Arrange
+ Slack.set_channel_topic()
+
+ send_args = slack.WebClient.conversations_setTopic.call_args
+ success_results = demisto.results.call_args[0]
+
+ new_context = demisto.setIntegrationContext.call_args[0][0]
+ new_mirrors = json.loads(new_context['mirrors'])
+ our_mirror_filter = list(filter(lambda m: '681' == m['investigation_id'], new_mirrors))
+ our_mirror = our_mirror_filter[0]
+
+ # Assert
+ assert Slack.get_conversation_by_name.call_count == 0
+ assert slack.WebClient.conversations_setTopic.call_count == 1
+ assert success_results[0] == 'Topic successfully set.'
+ assert send_args[1]['channel'] == 'GKQ86DVPH'
+ assert send_args[1]['topic'] == 'ey'
+ assert new_mirror == our_mirror
+
+
+def test_set_topic_no_args_no_investigation(mocker):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={'topic': 'ey'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '9999'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'get_conversation_by_name', return_value={'id': 'C012AB3CD'})
+ mocker.patch.object(slack.WebClient, 'conversations_setTopic')
+ mocker.patch.object(demisto, 'results')
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET, side_effect=InterruptedError())
+
+ # Arrange
+ with pytest.raises(InterruptedError):
+ Slack.set_channel_topic()
+
+ err_msg = return_error_mock.call_args[0][0]
+
+ # Assert
+ assert Slack.get_conversation_by_name.call_count == 0
+ assert err_msg == 'Channel not found - the Demisto app needs to be a member of the channel in order to look it up.'
+
+
+def test_invite_users(mocker):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={'channel': 'general', 'users': 'spengler, glinda'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'get_conversation_by_name', return_value={'id': 'C012AB3CD'})
+ mocker.patch.object(Slack, 'invite_users_to_conversation')
+ mocker.patch.object(demisto, 'results')
+
+ # Arrange
+ Slack.invite_to_channel()
+
+ send_args = Slack.invite_users_to_conversation.call_args[0]
+ success_results = demisto.results.call_args[0]
+
+ # Assert
+ assert Slack.get_conversation_by_name.call_count == 1
+ assert Slack.invite_users_to_conversation.call_count == 1
+ assert success_results[0] == 'Successfully invited users to the channel.'
+ assert send_args[0] == 'C012AB3CD'
+ assert send_args[1] == ['U012A3CDE', 'U07QCRPA4']
+
+
+def test_invite_users_no_channel(mocker):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={'users': 'spengler, glinda'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'get_conversation_by_name', return_value={'id': 'GKQ86DVPH'})
+ mocker.patch.object(Slack, 'invite_users_to_conversation')
+ mocker.patch.object(demisto, 'results')
+
+ # Arrange
+ Slack.invite_to_channel()
+
+ send_args = Slack.invite_users_to_conversation.call_args[0]
+ success_results = demisto.results.call_args[0]
+
+ # Assert
+ assert Slack.get_conversation_by_name.call_count == 0
+ assert Slack.invite_users_to_conversation.call_count == 1
+ assert success_results[0] == 'Successfully invited users to the channel.'
+ assert send_args[0] == 'GKQ86DVPH'
+ assert send_args[1] == ['U012A3CDE', 'U07QCRPA4']
+
+
+def test_invite_users_no_channel_doesnt_exist(mocker):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={'users': 'spengler, glinda'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '777'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'get_conversation_by_name', return_value={'id': 'GKQ86DVPH'})
+ mocker.patch.object(Slack, 'invite_users_to_conversation')
+ mocker.patch.object(demisto, 'results')
+
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET, side_effect=InterruptedError())
+
+ # Arrange
+ with pytest.raises(InterruptedError):
+ Slack.invite_to_channel()
+
+ err_msg = return_error_mock.call_args[0][0]
+
+ # Assert
+ assert Slack.get_conversation_by_name.call_count == 0
+ assert Slack.invite_users_to_conversation.call_count == 0
+ assert err_msg == 'Channel not found - the Demisto app needs to be a member of the channel in order to look it up.'
+
+
+def test_kick_users(mocker):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={'channel': 'general', 'users': 'spengler, glinda'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'get_conversation_by_name', return_value={'id': 'C012AB3CD'})
+ mocker.patch.object(Slack, 'kick_users_from_conversation')
+ mocker.patch.object(demisto, 'results')
+
+ # Arrange
+ Slack.kick_from_channel()
+
+ send_args = Slack.kick_users_from_conversation.call_args[0]
+ success_results = demisto.results.call_args[0]
+
+ # Assert
+ assert Slack.get_conversation_by_name.call_count == 1
+ assert Slack.kick_users_from_conversation.call_count == 1
+ assert success_results[0] == 'Successfully kicked users from the channel.'
+ assert send_args[0] == 'C012AB3CD'
+ assert send_args[1] == ['U012A3CDE', 'U07QCRPA4']
+
+
+def test_kick_users_no_channel(mocker):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={'users': 'spengler, glinda'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'get_conversation_by_name', return_value={'id': 'GKQ86DVPH'})
+ mocker.patch.object(Slack, 'kick_users_from_conversation')
+ mocker.patch.object(demisto, 'results')
+
+ # Arrange
+ Slack.kick_from_channel()
+
+ send_args = Slack.kick_users_from_conversation.call_args[0]
+ success_results = demisto.results.call_args[0]
+
+ # Assert
+ assert Slack.get_conversation_by_name.call_count == 0
+ assert Slack.kick_users_from_conversation.call_count == 1
+ assert success_results[0] == 'Successfully kicked users from the channel.'
+ assert send_args[0] == 'GKQ86DVPH'
+ assert send_args[1] == ['U012A3CDE', 'U07QCRPA4']
+
+
+def test_kick_users_no_channel_doesnt_exist(mocker):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={'users': 'spengler, glinda'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '777'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'get_conversation_by_name', return_value={'id': 'GKQ86DVPH'})
+    mocker.patch.object(Slack, 'kick_users_from_conversation')
+ mocker.patch.object(demisto, 'results')
+
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET, side_effect=InterruptedError())
+
+ # Arrange
+ with pytest.raises(InterruptedError):
+ Slack.kick_from_channel()
+
+ err_msg = return_error_mock.call_args[0][0]
+
+ # Assert
+ assert Slack.get_conversation_by_name.call_count == 0
+    assert Slack.kick_users_from_conversation.call_count == 0
+ assert err_msg == 'Channel not found - the Demisto app needs to be a member of the channel in order to look it up.'
+
+
+def test_rename_channel(mocker):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={'channel': 'general', 'name': 'ey'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'get_conversation_by_name', return_value={'id': 'C012AB3CD'})
+ mocker.patch.object(slack.WebClient, 'conversations_rename')
+ mocker.patch.object(demisto, 'results')
+
+ # Arrange
+ Slack.rename_channel()
+
+ send_args = slack.WebClient.conversations_rename.call_args
+ success_results = demisto.results.call_args[0]
+
+ # Assert
+ assert Slack.get_conversation_by_name.call_count == 1
+ assert slack.WebClient.conversations_rename.call_count == 1
+ assert success_results[0] == 'Channel renamed successfully.'
+ assert send_args[1]['channel'] == 'C012AB3CD'
+ assert send_args[1]['name'] == 'ey'
+
+
+def test_rename_no_args_investigation(mocker):
+ import Slack
+
+ # Set
+
+ new_mirror = {
+ 'channel_id': 'GKQ86DVPH',
+ 'channel_name': 'ey',
+ 'channel_topic': 'incident-681',
+ 'investigation_id': '681',
+ 'mirror_type': 'all',
+ 'mirror_direction': 'both',
+ 'mirror_to': 'group',
+ 'auto_close': True,
+ 'mirrored': True
+ }
+
+ mocker.patch.object(demisto, 'args', return_value={'name': 'ey'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '681'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'get_conversation_by_name', return_value={'id': 'C012AB3CD'})
+ mocker.patch.object(slack.WebClient, 'conversations_rename')
+ mocker.patch.object(demisto, 'results')
+
+ # Arrange
+ Slack.rename_channel()
+
+ send_args = slack.WebClient.conversations_rename.call_args
+ success_results = demisto.results.call_args[0]
+
+ new_context = demisto.setIntegrationContext.call_args[0][0]
+ new_mirrors = json.loads(new_context['mirrors'])
+ our_mirror_filter = list(filter(lambda m: '681' == m['investigation_id'], new_mirrors))
+ our_mirror = our_mirror_filter[0]
+
+ # Assert
+ assert Slack.get_conversation_by_name.call_count == 0
+ assert slack.WebClient.conversations_rename.call_count == 1
+ assert success_results[0] == 'Channel renamed successfully.'
+ assert send_args[1]['channel'] == 'GKQ86DVPH'
+ assert send_args[1]['name'] == 'ey'
+ assert new_mirror == our_mirror
+
+
+def test_rename_no_args_no_investigation(mocker):
+ import Slack
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={'name': 'ey'})
+ mocker.patch.object(demisto, 'investigation', return_value={'id': '9999'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(Slack, 'get_conversation_by_name', return_value={'id': 'C012AB3CD'})
+ mocker.patch.object(slack.WebClient, 'conversations_rename')
+ mocker.patch.object(demisto, 'results')
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET, side_effect=InterruptedError())
+
+ # Arrange
+ with pytest.raises(InterruptedError):
+ Slack.rename_channel()
+
+ err_msg = return_error_mock.call_args[0][0]
+
+ # Assert
+ assert Slack.get_conversation_by_name.call_count == 0
+ assert err_msg == 'Channel not found - the Demisto app needs to be a member of the channel in order to look it up.'
+
+
+def test_get_user(mocker):
+ from Slack import get_user
+
+ # Set
+
+ mocker.patch.object(demisto, 'args', return_value={'user': 'spengler'})
+ mocker.patch.object(demisto, 'getIntegrationContext', side_effect=get_integration_context)
+ mocker.patch.object(demisto, 'setIntegrationContext', side_effect=set_integration_context)
+ mocker.patch.object(demisto, 'results')
+
+ # Arrange
+
+ get_user()
+ user_results = demisto.results.call_args[0]
+
+ assert user_results[0]['EntryContext'] == {'Slack.User(val.ID === obj.ID)': {
+ 'ID': 'U012A3CDE',
+ 'Username': 'spengler',
+ 'Name': 'Egon Spengler',
+ 'DisplayName': 'spengler',
+ 'Email': 'spengler@ghostbusters.example.com',
+ }}
diff --git a/Integrations/SlashNextPhishingIncidentResponse/Pipfile b/Integrations/SlashNextPhishingIncidentResponse/Pipfile
new file mode 100644
index 000000000000..bad8bc3d952c
--- /dev/null
+++ b/Integrations/SlashNextPhishingIncidentResponse/Pipfile
@@ -0,0 +1,16 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "==5.0.1"
+pytest-mock = "*"
+requests-mock = "*"
+pytest-asyncio = "*"
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/SlashNextPhishingIncidentResponse/Pipfile.lock b/Integrations/SlashNextPhishingIncidentResponse/Pipfile.lock
new file mode 100644
index 000000000000..5f20ccb2504e
--- /dev/null
+++ b/Integrations/SlashNextPhishingIncidentResponse/Pipfile.lock
@@ -0,0 +1,244 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "af46d4f6dfef85012879217ba21602edd980f9e21ee096dd3e399d20c32ccc7d"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:9ff1b1c5a354142de080b8a4e9803e5d0d59283c93aed808617c787d16768375",
+ "sha256:b7143592e374e50584564794fcb8aaf00a23025f9db866627f89a21491847a8d"
+ ],
+ "version": "==0.20"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:02b260c8deb80db09325b99edf62ae344ce9bc64d68b7a634410b8e9a568edbf",
+ "sha256:18f9c401083a4ba6e162355873f906315332ea7035803d0fd8166051e3d402e3",
+ "sha256:1f2c6209a8917c525c1e2b55a716135ca4658a3042b5122d4e3413a4030c26ce",
+ "sha256:2f06d97f0ca0f414f6b707c974aaf8829c2292c1c497642f63824119d770226f",
+ "sha256:616c94f8176808f4018b39f9638080ed86f96b55370b5a9463b2ee5c926f6c5f",
+ "sha256:63b91e30ef47ef68a30f0c3c278fbfe9822319c15f34b7538a829515b84ca2a0",
+ "sha256:77b454f03860b844f758c5d5c6e5f18d27de899a3db367f4af06bec2e6013a8e",
+ "sha256:83fe27ba321e4cfac466178606147d3c0aa18e8087507caec78ed5a966a64905",
+ "sha256:84742532d39f72df959d237912344d8a1764c2d03fe58beba96a87bfa11a76d8",
+ "sha256:874ebf3caaf55a020aeb08acead813baf5a305927a71ce88c9377970fe7ad3c2",
+ "sha256:9f5caf2c7436d44f3cec97c2fa7791f8a675170badbfa86e1992ca1b84c37009",
+ "sha256:a0c8758d01fcdfe7ae8e4b4017b13552efa7f1197dd7358dc9da0576f9d0328a",
+ "sha256:a4def978d9d28cda2d960c279318d46b327632686d82b4917516c36d4c274512",
+ "sha256:ad4f4be843dace866af5fc142509e9b9817ca0c59342fdb176ab6ad552c927f5",
+ "sha256:ae33dd198f772f714420c5ab698ff05ff900150486c648d29951e9c70694338e",
+ "sha256:b4a2b782b8a8c5522ad35c93e04d60e2ba7f7dcb9271ec8e8c3e08239be6c7b4",
+ "sha256:c462eb33f6abca3b34cdedbe84d761f31a60b814e173b98ede3c81bb48967c4f",
+ "sha256:fd135b8d35dfdcdb984828c84d695937e58cc5f49e1c854eb311c4d6aa03f4f1"
+ ],
+ "version": "==1.4.2"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832",
+ "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"
+ ],
+ "version": "==7.2.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
+ "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
+ ],
+ "version": "==19.1"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6ef6d06de77ce2961156013e9dff62f1b2688aa04d0dc244299fe7d67e09370d",
+ "sha256:a736fed91c12681a7b34617c8fcefe39ea04599ca72c608751c31d89579a3f77"
+ ],
+ "index": "pypi",
+ "version": "==5.0.1"
+ },
+ "pytest-asyncio": {
+ "hashes": [
+ "sha256:9fac5100fd716cbecf6ef89233e8590a4ad61d729d1732e0a96b84182df1daaf",
+ "sha256:d734718e25cfc32d2bf78d346e99d33724deeba774cc4afdf491530c6184b63b"
+ ],
+ "index": "pypi",
+ "version": "==0.10.0"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "version": "==2.22.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:510df890afe08d36eca5bb16b4aa6308a6f85e3159ad3013bac8b9de7bd5a010",
+ "sha256:88d3402dd8b3c69a9e4f9d3a73ad11b15920c6efd36bc27bf1f701cf4a8e4646"
+ ],
+ "index": "pypi",
+ "version": "==1.7.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
+ "sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e",
+ "sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0",
+ "sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c",
+ "sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631",
+ "sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4",
+ "sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34",
+ "sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b",
+ "sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a",
+ "sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233",
+ "sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1",
+ "sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36",
+ "sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d",
+ "sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a",
+ "sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.4.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
+ "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
+ ],
+ "version": "==0.6.0"
+ }
+ }
+}
diff --git a/Integrations/SlashNextPhishingIncidentResponse/README.md b/Integrations/SlashNextPhishingIncidentResponse/README.md
new file mode 100644
index 000000000000..dd9d02cdd1ab
--- /dev/null
+++ b/Integrations/SlashNextPhishingIncidentResponse/README.md
@@ -0,0 +1,2755 @@
+
+
+ The SlashNext Phishing Incident Response integration enables Demisto users to fully automate the analysis of
+ suspicious URLs. For example, IR teams responsible for abuse inbox management can extract links or domains from
+ suspicious emails and automatically analyze them with the SlashNext SEER™ threat detection cloud to get definitive,
+ binary verdicts (malicious or benign) along with IOCs, screenshots, and more. Automating URL analysis can save IR
+ teams hundreds of hours compared with manually triaging these emails or checking URLs and domains against less
+ accurate phishing databases and domain reputation services.
+
+
+
+ This integration was integrated and tested with version v1.0 of SlashNext Phishing Incident Response.
+
+
+SlashNext Phishing Incident Response Playbooks
+SlashNext has developed two sample playbooks to demonstrate two of the major use cases:
+
+ - SlashNext - Host Reputation Default v1
+ - SlashNext - URL Scan Default v1
+
+Use Cases
+
+
+ - Abuse inbox management
+ - Playbooks that mine and analyze network logs
+
+Detailed Description
+
+ The SlashNext Phishing Incident Response integration uses an API key to authenticate with the SlashNext Cloud. If
+ you don't have a valid API key, contact the SlashNext team at support@slashnext.com.
+
+Fetch Incidents
+
+ Fetches any phishing incidents/events that contain suspicious URLs, domains, or IP addresses, collected through an
+ abuse inbox or reported manually.
+
+Configure SlashNext Phishing Incident Response on Demisto
+
+ 1. Navigate to Settings > Integrations > Servers & Services.
+ 2. Search for SlashNext Phishing Incident Response using the search box at the top of the page.
+ 3. Click Add instance to create and configure a new integration instance.
+    - Name: A textual name for the integration instance.
+    - SlashNext API Base URL: Use the default value unless specifically provided by SlashNext.
+    - API Key: If you don't have a valid API key, please contact us at support@slashnext.com.
+ 4. Click Test to validate the new instance.
+
+
+Commands
+
+ You can execute these commands from the Demisto CLI, as part of an automation, or in a playbook; a minimal
+ automation sketch follows the list below. After you successfully execute a command, a DBot message appears in the
+ War Room with the command details.
+
+
+ 1. ip
+ 2. domain
+ 3. slashnext-host-reputation
+ 4. slashnext-host-report
+ 5. slashnext-host-urls
+ 6. slashnext-url-scan
+ 7. slashnext-url-scan-sync
+ 8. slashnext-scan-report
+ 9. slashnext-download-screenshot
+ 10. slashnext-download-html
+ 11. slashnext-download-text
+
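+ A minimal automation sketch for invoking these commands from a script (the host value is hypothetical; the
+ `demisto` object is provided by the platform runtime):
+
+```python
+# Minimal Demisto automation sketch: query host reputation through a configured
+# SlashNext instance and return the resulting entries to the War Room.
+res = demisto.executeCommand('slashnext-host-reputation', {'host': 'www.example.com'})
+demisto.results(res)
+```
+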
+1. ip
+
+Looks up an IP address indicator in the SlashNext Threat Intelligence database.
+Base Command
+
+ ip
+
+
+Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| ip | The IPv4 address to look up in the SlashNext Threat Intelligence database. | Required |
+
+
+Context Output
+
+| Path | Type | Description |
+| --- | --- | --- |
+| DBotScore.Indicator | string | The indicator that was tested |
+| DBotScore.Type | string | Indicator type |
+| DBotScore.Vendor | string | Vendor used to calculate the score |
+| DBotScore.Score | number | The actual score |
+| IP.Address | string | IP address |
+| IP.Malicious.Vendor | string | For malicious IP addresses, the vendor that made the decision |
+| IP.Malicious.Description | string | For malicious IP addresses, the reason that the vendor made the decision |
+| SlashNext.IP.Value | string | Value of the Indicator of Compromise (IoC) |
+| SlashNext.IP.Type | string | Type of the Indicator of Compromise (IoC) |
+| SlashNext.IP.Verdict | string | SlashNext Phishing Incident Response verdict on the IoC |
+| SlashNext.IP.ThreatStatus | string | Threat status of the IoC |
+| SlashNext.IP.ThreatName | string | Name of the threat posed by the IoC |
+| SlashNext.IP.ThreatType | string | Type of the threat posed by the IoC |
+| SlashNext.IP.FirstSeen | date | Time when the IoC was first observed |
+| SlashNext.IP.LastSeen | date | Time when the IoC was last observed |
+
+
+Command Example
+
+ !ip ip=8.8.8.8
+
+Context Example
+
+{
+ "DBotScore": {
+ "Indicator": "8.8.8.8",
+ "Score": 1,
+ "Type": "ip",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ "IP": {
+ "Address": "8.8.8.8"
+ },
+ "SlashNext.IP": {
+ "FirstSeen": "09-26-2019 07:46:25 UTC",
+ "LastSeen": "09-26-2019 07:46:36 UTC",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "IP",
+ "Value": "8.8.8.8",
+ "Verdict": "Benign"
+ }
+}
+
+Human Readable Output
+
+
+SlashNext Phishing Incident Response - IP Lookup
+ip = 8.8.8.8
+
+| Value | Type | Verdict | ThreatStatus | ThreatName | ThreatType | FirstSeen | LastSeen |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| 8.8.8.8 | IP | Benign | N/A | N/A | N/A | 09-26-2019 07:46:25 UTC | 09-26-2019 07:46:36 UTC |
+
+
+
+
+2. domain
+
+Looks up an FQDN indicator in the SlashNext Threat Intelligence database.
+Base Command
+
+ domain
+
+
+Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| domain | The FQDN to look up in the SlashNext Threat Intelligence database. | Required |
+
+
+Context Output
+
+| Path | Type | Description |
+| --- | --- | --- |
+| DBotScore.Indicator | string | The indicator that was tested |
+| DBotScore.Type | string | Indicator type |
+| DBotScore.Vendor | string | Vendor used to calculate the score |
+| DBotScore.Score | number | The actual score |
+| Domain.Name | string | Domain name |
+| Domain.Malicious.Vendor | string | For malicious domain names, the vendor that made the decision |
+| Domain.Malicious.Description | string | For malicious domain names, the reason that the vendor made the decision |
+| SlashNext.Domain.Value | string | Value of the Indicator of Compromise (IoC) |
+| SlashNext.Domain.Type | string | Type of the Indicator of Compromise (IoC) |
+| SlashNext.Domain.Verdict | string | SlashNext Phishing Incident Response verdict on the IoC |
+| SlashNext.Domain.ThreatStatus | string | Threat status of the IoC |
+| SlashNext.Domain.ThreatName | string | Name of the threat posed by the IoC |
+| SlashNext.Domain.ThreatType | string | Type of the threat posed by the IoC |
+| SlashNext.Domain.FirstSeen | date | Time when the IoC was first observed |
+| SlashNext.Domain.LastSeen | date | Time when the IoC was last observed |
+
+
+Command Example
+
+ !domain domain=www.google.com
+
+Context Example
+
+{
+ "DBotScore": {
+ "Indicator": "www.google.com",
+ "Score": 1,
+ "Type": "domain",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ "Domain": {
+ "Name": "www.google.com"
+ },
+ "SlashNext.Domain": {
+ "FirstSeen": "12-10-2018 13:04:17 UTC",
+ "LastSeen": "10-10-2019 11:26:43 UTC",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Domain",
+ "Value": "www.google.com",
+ "Verdict": "Benign"
+ }
+}
+
+Human Readable Output
+
+
+SlashNext Phishing Incident Response - Domain Lookup
+domain = www.google.com
+
+| Value | Type | Verdict | ThreatStatus | ThreatName | ThreatType | FirstSeen | LastSeen |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| www.google.com | Domain | Benign | N/A | N/A | N/A | 12-10-2018 13:04:17 UTC | 10-10-2019 11:26:43 UTC |
+
+
+
+
+3. slashnext-host-reputation
+
+Searches the SlashNext Cloud database and retrieves the reputation of a host.
+Base Command
+
+ slashnext-host-reputation
+
+
+Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| host | The host to look up; either a domain name or an IPv4 address. | Required |
+
+
+Context Output
+
+| Path | Type | Description |
+| --- | --- | --- |
+| DBotScore.Indicator | string | The indicator that was tested |
+| DBotScore.Type | string | Indicator type |
+| DBotScore.Vendor | string | Vendor used to calculate the score |
+| DBotScore.Score | number | The actual score |
+| IP.Address | string | IP address |
+| IP.Malicious.Vendor | string | For malicious IP addresses, the vendor that made the decision |
+| IP.Malicious.Description | string | For malicious IP addresses, the reason that the vendor made the decision |
+| SlashNext.IP.Value | string | Value of the Indicator of Compromise (IoC) |
+| SlashNext.IP.Type | string | Type of the Indicator of Compromise (IoC) |
+| SlashNext.IP.Verdict | string | SlashNext Phishing Incident Response verdict on the IoC |
+| SlashNext.IP.ThreatStatus | string | Threat status of the IoC |
+| SlashNext.IP.ThreatName | string | Name of the threat posed by the IoC |
+| SlashNext.IP.ThreatType | string | Type of the threat posed by the IoC |
+| SlashNext.IP.FirstSeen | date | Time when the IoC was first observed |
+| SlashNext.IP.LastSeen | date | Time when the IoC was last observed |
+| Domain.Name | string | Domain name |
+| Domain.Malicious.Vendor | string | For malicious domain names, the vendor that made the decision |
+| Domain.Malicious.Description | string | For malicious domain names, the reason that the vendor made the decision |
+| SlashNext.Domain.Value | string | Value of the Indicator of Compromise (IoC) |
+| SlashNext.Domain.Type | string | Type of the Indicator of Compromise (IoC) |
+| SlashNext.Domain.Verdict | string | SlashNext Phishing Incident Response verdict on the IoC |
+| SlashNext.Domain.ThreatStatus | string | Threat status of the IoC |
+| SlashNext.Domain.ThreatName | string | Name of the threat posed by the IoC |
+| SlashNext.Domain.ThreatType | string | Type of the threat posed by the IoC |
+| SlashNext.Domain.FirstSeen | date | Time when the IoC was first observed |
+| SlashNext.Domain.LastSeen | date | Time when the IoC was last observed |
+
+
+Command Example
+
+ !slashnext-host-reputation host=www.google.com
+
+Context Example
+
+{
+ "DBotScore": {
+ "Indicator": "www.google.com",
+ "Score": 1,
+ "Type": "domain",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ "Domain": {
+ "Name": "www.google.com"
+ },
+ "SlashNext.Domain": {
+ "FirstSeen": "12-10-2018 13:04:17 UTC",
+ "LastSeen": "10-10-2019 11:26:43 UTC",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Domain",
+ "Value": "www.google.com",
+ "Verdict": "Benign"
+ }
+}
+
+Human Readable Output
+
+
+SlashNext Phishing Incident Response - Host Reputation
+host = www.google.com
+
+| Value | Type | Verdict | ThreatStatus | ThreatName | ThreatType | FirstSeen | LastSeen |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| www.google.com | Domain | Benign | N/A | N/A | N/A | 12-10-2018 13:04:17 UTC | 10-10-2019 11:26:43 UTC |
+
+
+
+
+4. slashnext-host-report
+
+Searches the SlashNext Cloud database and retrieves a detailed report for a host and an associated URL.
+Base Command
+
+ slashnext-host-report
+
+
+Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| host | The host to report on; either a domain name or an IPv4 address. | Required |
+
+
+Context Output
+
+| Path | Type | Description |
+| --- | --- | --- |
+| DBotScore.Indicator | string | The indicator that was tested |
+| DBotScore.Type | string | Indicator type |
+| DBotScore.Vendor | string | Vendor used to calculate the score |
+| DBotScore.Score | number | The actual score |
+| IP.Address | string | IP address |
+| IP.Malicious.Vendor | string | For malicious IP addresses, the vendor that made the decision |
+| IP.Malicious.Description | string | For malicious IP addresses, the reason that the vendor made the decision |
+| SlashNext.IP.Value | string | Value of the Indicator of Compromise (IoC) |
+| SlashNext.IP.Type | string | Type of the Indicator of Compromise (IoC) |
+| SlashNext.IP.Verdict | string | SlashNext Phishing Incident Response verdict on the IoC |
+| SlashNext.IP.ThreatStatus | string | Threat status of the IoC |
+| SlashNext.IP.ThreatName | string | Name of the threat posed by the IoC |
+| SlashNext.IP.ThreatType | string | Type of the threat posed by the IoC |
+| SlashNext.IP.FirstSeen | date | Time when the IoC was first observed |
+| SlashNext.IP.LastSeen | date | Time when the IoC was last observed |
+| Domain.Name | string | Domain name |
+| Domain.Malicious.Vendor | string | For malicious domain names, the vendor that made the decision |
+| Domain.Malicious.Description | string | For malicious domain names, the reason that the vendor made the decision |
+| SlashNext.Domain.Value | string | Value of the Indicator of Compromise (IoC) |
+| SlashNext.Domain.Type | string | Type of the Indicator of Compromise (IoC) |
+| SlashNext.Domain.Verdict | string | SlashNext Phishing Incident Response verdict on the IoC |
+| SlashNext.Domain.ThreatStatus | string | Threat status of the IoC |
+| SlashNext.Domain.ThreatName | string | Name of the threat posed by the IoC |
+| SlashNext.Domain.ThreatType | string | Type of the threat posed by the IoC |
+| SlashNext.Domain.FirstSeen | date | Time when the IoC was first observed |
+| SlashNext.Domain.LastSeen | date | Time when the IoC was last observed |
+
+
+Command Example
+
+ !slashnext-host-report host=www.google.com
+
+Context Example
+
+{
+ "DBotScore": {
+ "Indicator": "www.google.com",
+ "Score": 1,
+ "Type": "domain",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ "Domain": {
+ "Name": "www.google.com"
+ },
+ "SlashNext.Domain": {
+ "FirstSeen": "12-10-2018 13:04:17 UTC",
+ "LastSeen": "10-10-2019 11:26:43 UTC",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Domain",
+ "Value": "www.google.com",
+ "Verdict": "Benign"
+ }
+}
+{
+ "DBotScore": [
+ {
+ "Indicator": "http://www.google.com/wasif",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ }
+ ],
+ "SlashNext.URL": {
+ "FirstSeen": "10-03-2019 08:24:04 UTC",
+ "LastSeen": "10-03-2019 08:24:14 UTC",
+ "ScanID": "61fe7c96-88e3-440e-a56f-75834b734b06",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "http://www.google.com/wasif",
+ "Verdict": "Benign"
+ },
+ "URL": [
+ {
+ "Data": "http://www.google.com/wasif"
+ }
+ ]
+}
+
+Human Readable Output
+
+
+SlashNext Phishing Incident Response - Host Report
+host = www.google.com
+
+| Value | Type | Verdict | ThreatStatus | ThreatName | ThreatType | FirstSeen | LastSeen |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| www.google.com | Domain | Benign | N/A | N/A | N/A | 12-10-2018 13:04:17 UTC | 10-10-2019 11:26:43 UTC |
+
+SlashNext Phishing Incident Response - Latest Scanned URL
+host = www.google.com
+
+| Value | Type | Verdict | ScanID | ThreatStatus | ThreatName | ThreatType | FirstSeen | LastSeen |
+| --- | --- | --- | --- | --- | --- | --- | --- | --- |
+| http://www.google.com/wasif | Scanned URL | Benign | 61fe7c96-88e3-440e-a56f-75834b734b06 | N/A | N/A | N/A | 10-03-2019 08:24:04 UTC | 10-03-2019 08:24:14 UTC |
+
+Forensics: Webpage Screenshot for the Scanned URL = http://www.google.com/wasif
+Forensics: Webpage HTML for the Scanned URL = http://www.google.com/wasif
+Forensics: Webpage Rendered Text for the Scanned URL = http://www.google.com/wasif
+
+
+
+
+
+5. slashnext-host-urls
+
+Searches the SlashNext Cloud database and retrieves a list of all URLs associated with the specified host.
+Base Command
+
+ slashnext-host-urls
+
+
+Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| host | The host whose URLs to retrieve; either a domain name or an IPv4 address. | Required |
+| limit | The maximum number of URL records to fetch. Optional; defaults to 10. | Optional |
+
+
+
+Context Output
+
+| Path | Type | Description |
+| --- | --- | --- |
+| DBotScore.Indicator | string | The indicator that was tested |
+| DBotScore.Type | string | Indicator type |
+| DBotScore.Vendor | string | Vendor used to calculate the score |
+| DBotScore.Score | number | The actual score |
+| URL.Data | string | URL reported |
+| URL.Malicious.Vendor | string | For malicious URLs, the vendor that made the decision |
+| URL.Malicious.Description | string | For malicious URLs, the reason that the vendor made the decision |
+| SlashNext.URL.Value | string | Value of the Indicator of Compromise (IoC) |
+| SlashNext.URL.Type | string | Type of the Indicator of Compromise (IoC) |
+| SlashNext.URL.ScanID | string | Scan ID to be used to get the IoC forensics data for further investigation |
+| SlashNext.URL.Verdict | string | SlashNext Phishing Incident Response verdict on the IoC |
+| SlashNext.URL.ThreatStatus | string | Threat status of the IoC |
+| SlashNext.URL.ThreatName | string | Name of the threat posed by the IoC |
+| SlashNext.URL.ThreatType | string | Type of the threat posed by the IoC |
+| SlashNext.URL.FirstSeen | date | Time when the IoC was first observed |
+| SlashNext.URL.LastSeen | date | Time when the IoC was last observed |
+| SlashNext.URL.Final.Value | string | Final IoC value in case the original IoC is a redirector to the same domain |
+| SlashNext.URL.Final.Type | string | Type of the final IoC |
+| SlashNext.URL.Final.Verdict | string | SlashNext Phishing Incident Response verdict on the final IoC |
+| SlashNext.URL.Landing.Value | string | Landing IoC value in case the original IoC is a redirector to a different domain |
+| SlashNext.URL.Landing.Type | string | Type of the landing IoC |
+| SlashNext.URL.Landing.ScanID | string | Scan ID to be used to get the landing IoC forensics data for further investigation |
+| SlashNext.URL.Landing.Verdict | string | SlashNext Phishing Incident Response verdict on the landing IoC |
+| SlashNext.URL.Landing.ThreatStatus | string | Threat status of the landing IoC |
+| SlashNext.URL.Landing.ThreatName | string | Name of the threat posed by the landing IoC |
+| SlashNext.URL.Landing.ThreatType | string | Type of the threat posed by the landing IoC |
+| SlashNext.URL.Landing.FirstSeen | date | Time when the landing IoC was first observed |
+| SlashNext.URL.Landing.LastSeen | date | Time when the landing IoC was last observed |
+
+
+Command Example
+
+ !slashnext-host-urls host=www.google.com
+
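+ To cap the number of returned records, pass the optional limit argument (the value shown is hypothetical):
+
+ !slashnext-host-urls host=www.google.com limit=5
+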
+Context Example
+
+{
+ "DBotScore": [
+ {
+ "Indicator": "http://www.google.com/wasif",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "http://www.google.com/abrar",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "http://www.google.com/saadat",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "https://www.google.com/",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "https://www.google.com/url?q=replacedlink/&source=gmail&...",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "http://www.google.com/",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "https://www.google.com/?gws_rd=ssl",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "http://www.google.com/maps/place/2307",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "https://www.google.com/maps/place/2307",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "http://www.google.com/maps/place/2307+Watterson+Trail",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "https://www.google.com/maps/place/2307+Watterson+Trail",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "http://www.google.com/maps/place/2307+Watterson+Trail",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "https://www.google.com/maps/place/2307+Watterson+Trail",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "http://www.google.com/maps/place/2307+Watterson+Trail,+Jeffersontown,+KY+40299/@38.2107207,-85.5607165,17z/data=!3m1!4b1!4m5!3m4!1s0x8869a1b57420f6d9:0xccc95b8f32dcfd4b!8m2!3d38.2107165!4d-85.5585225",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "https://www.google.com/maps/place/2307+Watterson+Trail,+Jeffersontown,+KY+40299/@38.2107207,-85.5607165,17z/data=!3m1!4b1!4m5!3m4!1s0x8869a1b57420f6d9:0xccc95b8f32dcfd4b!8m2!3d38.2107165!4d-85.5585225",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ }
+ ],
+ "SlashNext.URL": [
+ {
+ "FirstSeen": "10-03-2019 08:24:04 UTC",
+ "LastSeen": "10-03-2019 08:24:14 UTC",
+ "ScanID": "61fe7c96-88e3-440e-a56f-75834b734b06",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "http://www.google.com/wasif",
+ "Verdict": "Benign"
+ },
+ {
+ "FirstSeen": "10-03-2019 08:22:36 UTC",
+ "LastSeen": "10-03-2019 08:22:46 UTC",
+ "ScanID": "820275cd-c6de-46e9-b3a3-7cb072179bb4",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "http://www.google.com/abrar",
+ "Verdict": "Benign"
+ },
+ {
+ "FirstSeen": "10-03-2019 08:17:49 UTC",
+ "LastSeen": "10-03-2019 08:18:00 UTC",
+ "ScanID": "905cf63e-7761-4681-b314-4b8820f04c41",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "http://www.google.com/saadat",
+ "Verdict": "Benign"
+ },
+ {
+ "FirstSeen": "08-27-2019 10:32:19 UTC",
+ "LastSeen": "08-27-2019 12:34:52 UTC",
+ "ScanID": "4f1540b9-3517-4e6c-bca8-923acc3eed43",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "https://www.google.com/",
+ "Verdict": "Benign"
+ },
+ {
+ "FirstSeen": "08-30-2019 06:06:10 UTC",
+ "LastSeen": "08-30-2019 06:06:21 UTC",
+ "ScanID": "7277ea43-df3d-4692-8615-8c15485249c5",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "https://www.google.com/url?q=replacedlink/&source=gmail&...",
+ "Verdict": "Benign"
+ },
+ {
+ "Final": {
+ "Type": "Final URL",
+ "Value": "https://www.google.com/?gws_rd=ssl",
+ "Verdict": "Benign"
+ },
+ "FirstSeen": "08-26-2019 17:29:38 UTC",
+ "LastSeen": "08-26-2019 19:41:19 UTC",
+ "ScanID": "48ae7b06-5915-4633-bc51-2cfaa0036742",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "http://www.google.com/",
+ "Verdict": "Benign"
+ },
+ {
+ "Final": {
+ "Type": "Final URL",
+ "Value": "https://www.google.com/maps/place/2307",
+ "Verdict": "Benign"
+ },
+ "FirstSeen": "10-01-2019 12:50:34 UTC",
+ "LastSeen": "10-01-2019 12:50:47 UTC",
+ "ScanID": "N/A",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "http://www.google.com/maps/place/2307",
+ "Verdict": "Benign"
+ },
+ {
+ "Final": {
+ "Type": "Final URL",
+ "Value": "https://www.google.com/maps/place/2307+Watterson+Trail",
+ "Verdict": "Benign"
+ },
+ "FirstSeen": "10-01-2019 12:50:12 UTC",
+ "LastSeen": "10-01-2019 12:50:26 UTC",
+ "ScanID": "N/A",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "http://www.google.com/maps/place/2307+Watterson+Trail",
+ "Verdict": "Benign"
+ },
+ {
+ "Final": {
+ "Type": "Final URL",
+ "Value": "https://www.google.com/maps/place/2307+Watterson+Trail",
+ "Verdict": "Benign"
+ },
+ "FirstSeen": "10-01-2019 12:50:11 UTC",
+ "LastSeen": "10-01-2019 12:50:24 UTC",
+ "ScanID": "N/A",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "http://www.google.com/maps/place/2307+Watterson+Trail",
+ "Verdict": "Benign"
+ },
+ {
+ "Final": {
+ "Type": "Final URL",
+ "Value": "https://www.google.com/maps/place/2307+Watterson+Trail,+Jeffersontown,+KY+40299/@38.2107207,-85.5607165,17z/data=!3m1!4b1!4m5!3m4!1s0x8869a1b57420f6d9:0xccc95b8f32dcfd4b!8m2!3d38.2107165!4d-85.5585225",
+ "Verdict": "Benign"
+ },
+ "FirstSeen": "10-01-2019 12:49:44 UTC",
+ "LastSeen": "10-01-2019 12:49:58 UTC",
+ "ScanID": "N/A",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "http://www.google.com/maps/place/2307+Watterson+Trail,+Jeffersontown,+KY+40299/@38.2107207,-85.5607165,17z/data=!3m1!4b1!4m5!3m4!1s0x8869a1b57420f6d9:0xccc95b8f32dcfd4b!8m2!3d38.2107165!4d-85.5585225",
+ "Verdict": "Benign"
+ }
+ ],
+ "URL": [
+ {
+ "Data": "http://www.google.com/wasif"
+ },
+ {
+ "Data": "http://www.google.com/abrar"
+ },
+ {
+ "Data": "http://www.google.com/saadat"
+ },
+ {
+ "Data": "https://www.google.com/"
+ },
+ {
+ "Data": "https://www.google.com/url?q=replacedlink/&source=gmail&..."
+ },
+ {
+ "Data": "http://www.google.com/"
+ },
+ {
+ "Data": "https://www.google.com/?gws_rd=ssl"
+ },
+ {
+ "Data": "http://www.google.com/maps/place/2307"
+ },
+ {
+ "Data": "https://www.google.com/maps/place/2307"
+ },
+ {
+ "Data": "http://www.google.com/maps/place/2307+Watterson+Trail"
+ },
+ {
+ "Data": "https://www.google.com/maps/place/2307+Watterson+Trail"
+ },
+ {
+ "Data": "http://www.google.com/maps/place/2307+Watterson+Trail"
+ },
+ {
+ "Data": "https://www.google.com/maps/place/2307+Watterson+Trail"
+ },
+ {
+ "Data": "http://www.google.com/maps/place/2307+Watterson+Trail,+Jeffersontown,+KY+40299/@38.2107207,-85.5607165,17z/data=!3m1!4b1!4m5!3m4!1s0x8869a1b57420f6d9:0xccc95b8f32dcfd4b!8m2!3d38.2107165!4d-85.5585225"
+ },
+ {
+ "Data": "https://www.google.com/maps/place/2307+Watterson+Trail,+Jeffersontown,+KY+40299/@38.2107207,-85.5607165,17z/data=!3m1!4b1!4m5!3m4!1s0x8869a1b57420f6d9:0xccc95b8f32dcfd4b!8m2!3d38.2107165!4d-85.5585225"
+ }
+ ]
+}
+
+Human Readable Output
+
+
+SlashNext Phishing Incident Response - Host URLs
+host = www.google.com
+
+| Value | Type | Verdict | ScanID | ThreatStatus | ThreatName | ThreatType | FirstSeen | LastSeen |
+| --- | --- | --- | --- | --- | --- | --- | --- | --- |
+| http://www.google.com/wasif | Scanned URL | Benign | 61fe7c96-88e3-440e-a56f-75834b734b06 | N/A | N/A | N/A | 10-03-2019 08:24:04 UTC | 10-03-2019 08:24:14 UTC |
+| http://www.google.com/abrar | Scanned URL | Benign | 820275cd-c6de-46e9-b3a3-7cb072179bb4 | N/A | N/A | N/A | 10-03-2019 08:22:36 UTC | 10-03-2019 08:22:46 UTC |
+| http://www.google.com/saadat | Scanned URL | Benign | 905cf63e-7761-4681-b314-4b8820f04c41 | N/A | N/A | N/A | 10-03-2019 08:17:49 UTC | 10-03-2019 08:18:00 UTC |
+| https://www.google.com/ | Scanned URL | Benign | 4f1540b9-3517-4e6c-bca8-923acc3eed43 | N/A | N/A | N/A | 08-27-2019 10:32:19 UTC | 08-27-2019 12:34:52 UTC |
+| https://www.google.com/url?q=replacedlink/&source=gmail&... | Scanned URL | Benign | 7277ea43-df3d-4692-8615-8c15485249c5 | N/A | N/A | N/A | 08-30-2019 06:06:10 UTC | 08-30-2019 06:06:21 UTC |
+| http://www.google.com/ | Scanned URL | Benign | 48ae7b06-5915-4633-bc51-2cfaa0036742 | N/A | N/A | N/A | 08-26-2019 17:29:38 UTC | 08-26-2019 19:41:19 UTC |
+| --------> https://www.google.com/?gws_rd=ssl | Final URL | Benign | | | | | | |
+| http://www.google.com/maps/place/2307 | Scanned URL | Benign | N/A | N/A | N/A | N/A | 10-01-2019 12:50:34 UTC | 10-01-2019 12:50:47 UTC |
+| --------> https://www.google.com/maps/place/2307 | Final URL | Benign | | | | | | |
+| http://www.google.com/maps/place/2307+Watterson+Trail | Scanned URL | Benign | N/A | N/A | N/A | N/A | 10-01-2019 12:50:12 UTC | 10-01-2019 12:50:26 UTC |
+| --------> https://www.google.com/maps/place/2307+Watterson+Trail | Final URL | Benign | | | | | | |
+| http://www.google.com/maps/place/2307+Watterson+Trail | Scanned URL | Benign | N/A | N/A | N/A | N/A | 10-01-2019 12:50:11 UTC | 10-01-2019 12:50:24 UTC |
+| --------> https://www.google.com/maps/place/2307+Watterson+Trail | Final URL | Benign | | | | | | |
+| http://www.google.com/maps/place/2307+Watterson+Trail,+Jeffersontown,+KY+40299/@38.2107207,-85.5607165,17z/data=!3m1!4b1!4m5!3m4!1s0x8869a1b57420f6d9:0xccc95b8f32dcfd4b!8m2!3d38.2107165!4d-85.5585225 | Scanned URL | Benign | N/A | N/A | N/A | N/A | 10-01-2019 12:49:44 UTC | 10-01-2019 12:49:58 UTC |
+| --------> https://www.google.com/maps/place/2307+Watterson+Trail,+Jeffersontown,+KY+40299/@38.2107207,-85.5607165,17z/data=!3m1!4b1!4m5!3m4!1s0x8869a1b57420f6d9:0xccc95b8f32dcfd4b!8m2!3d38.2107165!4d-85.5585225 | Final URL | Benign | | | | | | |
+
+
+
+
+
+6. slashnext-url-scan
+
+Performs a real-time URL reputation scan with the SlashNext cloud-based SEER Engine. If the specified URL already exists in the cloud database, scan results are returned immediately. If not, this command submits a URL scan request and returns a 'check back later' message along with a unique Scan ID. You can check the results of this scan with the 'slashnext-scan-report' command after 60 seconds or later, using the returned Scan ID.
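+
+A typical asynchronous workflow looks like the following (the URL is hypothetical, and the scanid argument name for
+slashnext-scan-report is an assumption, since that command is documented separately):
+
+ !slashnext-url-scan url=http://example.com/suspicious
+ ... wait at least 60 seconds ...
+ !slashnext-scan-report scanid=<Scan ID returned by the first command>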
+Base Command
+
+ slashnext-url-scan
+
+
+Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| url | The URL to be scanned. | Required |
+| extended_info | If extended_info is set to 'true', the system also downloads forensics data (screenshot, HTML, and rendered text) along with the URL reputation. If this parameter is not provided, it defaults to 'false'. | Optional |
+
+
+
+Context Output
+
+| Path | Type | Description |
+| --- | --- | --- |
+| DBotScore.Indicator | string | The indicator that was tested |
+| DBotScore.Type | string | Indicator type |
+| DBotScore.Vendor | string | Vendor used to calculate the score |
+| DBotScore.Score | number | The actual score |
+| URL.Data | string | URL reported |
+| URL.Malicious.Vendor | string | For malicious URLs, the vendor that made the decision |
+| URL.Malicious.Description | string | For malicious URLs, the reason that the vendor made the decision |
+| SlashNext.URL.Value | string | Value of the Indicator of Compromise (IoC) |
+| SlashNext.URL.Type | string | Type of the Indicator of Compromise (IoC) |
+| SlashNext.URL.ScanID | string | Scan ID to be used to get the IoC forensics data for further investigation |
+| SlashNext.URL.Verdict | string | SlashNext Phishing Incident Response verdict on the IoC |
+| SlashNext.URL.ThreatStatus | string | Threat status of the IoC |
+| SlashNext.URL.ThreatName | string | Name of the threat posed by the IoC |
+| SlashNext.URL.ThreatType | string | Type of the threat posed by the IoC |
+| SlashNext.URL.FirstSeen | date | Time when the IoC was first observed |
+| SlashNext.URL.LastSeen | date | Time when the IoC was last observed |
+| SlashNext.URL.Final.Value | string | Final IoC value in case the original IoC is a redirector to the same domain |
+| SlashNext.URL.Final.Type | string | Type of the final IoC |
+| SlashNext.URL.Final.Verdict | string | SlashNext Phishing Incident Response verdict on the final IoC |
+| SlashNext.URL.Landing.Value | string | Landing IoC value in case the original IoC is a redirector to a different domain |
+| SlashNext.URL.Landing.Type | string | Type of the landing IoC |
+| SlashNext.URL.Landing.ScanID | string | Scan ID to be used to get the landing IoC forensics data for further investigation |
+| SlashNext.URL.Landing.Verdict | string | SlashNext Phishing Incident Response verdict on the landing IoC |
+| SlashNext.URL.Landing.ThreatStatus | string | Threat status of the landing IoC |
+| SlashNext.URL.Landing.ThreatName | string | Name of the threat posed by the landing IoC |
+| SlashNext.URL.Landing.ThreatType | string | Type of the threat posed by the landing IoC |
+| SlashNext.URL.Landing.FirstSeen | date | Time when the landing IoC was first observed |
+| SlashNext.URL.Landing.LastSeen | date | Time when the landing IoC was last observed |
+
+
+Command Example
+
+ !slashnext-url-scan url=www.google.com extended_info=true
+
+Context Example
+
+{
+ "DBotScore": [
+ {
+ "Indicator": "http://www.google.com/",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "https://www.google.com/?gws_rd=ssl",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ }
+ ],
+ "SlashNext.URL": {
+ "Final": {
+ "Type": "Final URL",
+ "Value": "https://www.google.com/?gws_rd=ssl",
+ "Verdict": "Benign"
+ },
+ "FirstSeen": "08-26-2019 17:29:38 UTC",
+ "LastSeen": "08-26-2019 19:41:19 UTC",
+ "ScanID": "48ae7b06-5915-4633-bc51-2cfaa0036742",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "http://www.google.com/",
+ "Verdict": "Benign"
+ },
+ "URL": [
+ {
+ "Data": "http://www.google.com/"
+ },
+ {
+ "Data": "https://www.google.com/?gws_rd=ssl"
+ }
+ ]
+}
+
+Human Readable Output
+
+
+SlashNext Phishing Incident Response - URL Scan
+url = http://www.google.com/
+
+| Value | Type | Verdict | ScanID | ThreatStatus | ThreatName | ThreatType | FirstSeen | LastSeen |
+| --- | --- | --- | --- | --- | --- | --- | --- | --- |
+| http://www.google.com/ | Scanned URL | Benign | 48ae7b06-5915-4633-bc51-2cfaa0036742 | N/A | N/A | N/A | 08-26-2019 17:29:38 UTC | 08-26-2019 19:41:19 UTC |
+| --------> https://www.google.com/?gws_rd=ssl | Final URL | Benign | | | | | | |
+
+
+
+
+
+
+
+7. slashnext-url-scan-sync
+
+Perform a real-time URL scan with the SlashNext cloud-based SEER Engine in blocking mode. If the specified URL already exists in the cloud database, the scan result will be returned immediately. If not, this command will submit a URL scan request and wait for the scan to finish. The scan may take up to 60 seconds to finish.
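+
+For reference, this command wraps SlashNext's url/scansync API endpoint. The following minimal sketch (not part of the integration; it assumes the default API base URL and uses a placeholder API key) shows the equivalent raw request and the response fields the integration reads:
+
+    # Minimal sketch of the url/scansync call; 'your-api-key' is a placeholder.
+    import requests
+
+    resp = requests.post(
+        'https://oti.slashnext.cloud/api/oti/v1/url/scansync',
+        data={'url': 'www.google.com', 'timeout': 60, 'authkey': 'your-api-key'},
+    )
+    data = resp.json()
+    if data.get('errorNo') == 0:  # 0 indicates a completed scan
+        print(data['urlData']['threatData']['verdict'])  # e.g. 'Benign'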
+Base Command
+
+ slashnext-url-scan-sync
+
+
+Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| url | The URL that needs to be scanned. | Required |
+| timeout | A timeout value in seconds. If the system is unable to complete a scan within the specified timeout, a timeout error will be returned. The user may try again with a different timeout. If no timeout value is specified, a default value of 60 seconds will be used. | Optional |
+| extended_info | If extended_info is set to 'true', the system downloads forensics data (screenshot, HTML, and rendered text) along with the URL reputation. If this parameter is not specified, it defaults to 'false'. | Optional |
+
+Context Output
+
+
+
+
+ Path
+
+
+ Type
+
+
+ Description
+
+
+
+
+
+ DBotScore.Indicator
+ string
+ The indicator that was tested
+
+
+ DBotScore.Type
+ string
+ Indicator type
+
+
+ DBotScore.Vendor
+ string
+ Vendor used to calculate the score
+
+
+ DBotScore.Score
+ number
+ The actual score
+
+
+ URL.Data
+ string
+ URL reported
+
+
+ URL.Malicious.Vendor
+ string
+ For malicious URLs, the vendor that made the decision
+
+
+ URL.Malicious.Description
+ string
+ For malicious URLs, the reason that the vendor made the decision
+
+
+ SlashNext.URL.Value
+ string
+ Value of the Indicator of Compromise (IoC)
+
+
+ SlashNext.URL.Type
+ string
+ Type of the Indicator of Compromise (IoC)
+
+
+ SlashNext.URL.ScanID
+ string
+ Scan ID to be used to get the IoC forensics data for further investigation
+
+
+ SlashNext.URL.Verdict
+ string
+ SlashNext Phishing Incident Response verdict on the IoC
+
+
+ SlashNext.URL.ThreatStatus
+ string
+ Threat status of the IoC
+
+
+ SlashNext.URL.ThreatName
+ string
+ Name of the threat posed by the IoC
+
+
+ SlashNext.URL.ThreatType
+ string
+ Type of the threat posed by the IoC
+
+
+ SlashNext.URL.FirstSeen
+ date
+ Time when the IoC was first observed
+
+
+ SlashNext.URL.LastSeen
+ date
+ Time when the IoC was last observed
+
+
+ SlashNext.URL.Final.Value
+ string
+    Final IoC value, in case the original IoC is a redirector to the same domain
+
+
+ SlashNext.URL.Final.Type
+ string
+ Type of the final IoC
+
+
+ SlashNext.URL.Final.Verdict
+ string
+ SlashNext Phishing Incident Response verdict on the final IoC
+
+
+ SlashNext.URL.Landing.Value
+ string
+    Landing IoC value, in case the original IoC is a redirector to a different domain
+
+
+ SlashNext.URL.Landing.Type
+ string
+ Type of the landing IoC
+
+
+ SlashNext.URL.Landing.ScanID
+ string
+ Scan ID to be used to get the landing IoC forensics data for further investigation
+
+
+ SlashNext.URL.Landing.Verdict
+ string
+ SlashNext Phishing Incident Response verdict on the landing IoC
+
+
+ SlashNext.URL.Landing.ThreatStatus
+ string
+ Threat status of the landing IoC
+
+
+ SlashNext.URL.Landing.ThreatName
+ string
+ Name of the threat posed by the landing IoC
+
+
+ SlashNext.URL.Landing.ThreatType
+ string
+ Type of the threat posed by the landing IoC
+
+
+ SlashNext.URL.Landing.FirstSeen
+ date
+ Time when the landing IoC was first observed
+
+
+ SlashNext.URL.Landing.LastSeen
+ date
+ Time when the landing IoC was last observed
+
+Command Example
+
+    !slashnext-url-scan-sync url=www.google.com extended_info=true
+
+Context Example
+
+{
+ "DBotScore": [
+ {
+ "Indicator": "http://www.google.com/",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "https://www.google.com/?gws_rd=ssl",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ }
+ ],
+ "SlashNext.URL": {
+ "Final": {
+ "Type": "Final URL",
+ "Value": "https://www.google.com/?gws_rd=ssl",
+ "Verdict": "Benign"
+ },
+ "FirstSeen": "08-26-2019 17:29:38 UTC",
+ "LastSeen": "08-26-2019 19:41:19 UTC",
+ "ScanID": "48ae7b06-5915-4633-bc51-2cfaa0036742",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "http://www.google.com/",
+ "Verdict": "Benign"
+ },
+ "URL": [
+ {
+ "Data": "http://www.google.com/"
+ },
+ {
+ "Data": "https://www.google.com/?gws_rd=ssl"
+ }
+ ]
+}
+
+Human Readable Output
+
+
SlashNext Phishing Incident Response - URL Scan Sync
+url = http://www.google.com/
+
+| Value | Type | Verdict | ScanID | ThreatStatus | ThreatName | ThreatType | FirstSeen | LastSeen |
+| --- | --- | --- | --- | --- | --- | --- | --- | --- |
+| http://www.google.com/ | Scanned URL | Benign | 48ae7b06-5915-4633-bc51-2cfaa0036742 | N/A | N/A | N/A | 08-26-2019 17:29:38 UTC | 08-26-2019 19:41:19 UTC |
+| --------> https://www.google.com/?gws_rd=ssl | Final URL | Benign | | | | | | |
+
+8. slashnext-scan-report
+
+Retrieve URL scan results from a previous scan request. If the scan is finished, the result will be returned immediately; otherwise, a 'check back later' message will be returned.
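+
+Because 'slashnext-url-scan' can return before a scan completes, a common pattern is to poll this command with the returned Scan ID until the report is ready. A minimal sketch of such a loop against the underlying url/scan API (a hypothetical helper, not part of the integration; per the integration code, error code 1 means the scan is still in progress):
+
+    import time
+    import requests
+
+    def wait_for_report(api_base, authkey, scanid, retries=6, delay=10):
+        # Poll the url/scan endpoint until the scan finishes or retries run out.
+        for _ in range(retries):
+            data = requests.post(api_base + '/oti/v1/url/scan',
+                                 data={'scanid': scanid, 'authkey': authkey}).json()
+            if data.get('errorNo') != 1:  # 1 = scan still in progress
+                return data
+            time.sleep(delay)
+        return None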
+Base Command
+
+ slashnext-scan-report
+
+
+Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| scanid | Scan ID returned by an earlier call to the 'slashnext-url-scan' or 'slashnext-url-scan-sync' command. | Required |
+| extended_info | If extended_info is set to 'true', the system downloads forensics data (screenshot, HTML, and rendered text) along with the URL reputation. If this parameter is not specified, it defaults to 'false'. | Optional |
+
+Context Output
+
+
+
+
+ Path
+
+
+ Type
+
+
+ Description
+
+
+
+
+
+ DBotScore.Indicator
+ string
+ The indicator that was tested
+
+
+ DBotScore.Type
+ string
+ Indicator type
+
+
+ DBotScore.Vendor
+ string
+ Vendor used to calculate the score
+
+
+ DBotScore.Score
+ number
+ The actual score
+
+
+ URL.Data
+ string
+ URL reported
+
+
+ URL.Malicious.Vendor
+ string
+ For malicious URLs, the vendor that made the decision
+
+
+ URL.Malicious.Description
+ string
+ For malicious URLs, the reason that the vendor made the decision
+
+
+ SlashNext.URL.Value
+ string
+ Value of the Indicator of Compromise (IoC)
+
+
+ SlashNext.URL.Type
+ string
+ Type of the Indicator of Compromise (IoC)
+
+
+ SlashNext.URL.ScanID
+ string
+ Scan ID to be used to get the IoC forensics data for further investigation
+
+
+ SlashNext.URL.Verdict
+ string
+ SlashNext Phishing Incident Response verdict on the IoC
+
+
+ SlashNext.URL.ThreatStatus
+ string
+ Threat status of the IoC
+
+
+ SlashNext.URL.ThreatName
+ string
+ Name of the threat posed by the IoC
+
+
+ SlashNext.URL.ThreatType
+ string
+ Type of the threat posed by the IoC
+
+
+ SlashNext.URL.FirstSeen
+ date
+ Time when the IoC was first observed
+
+
+ SlashNext.URL.LastSeen
+ date
+ Time when the IoC was last observed
+
+
+ SlashNext.URL.Final.Value
+ string
+    Final IoC value, in case the original IoC is a redirector to the same domain
+
+
+ SlashNext.URL.Final.Type
+ string
+ Type of the final IoC
+
+
+ SlashNext.URL.Final.Verdict
+ string
+ SlashNext Phishing Incident Response verdict on the final IoC
+
+
+ SlashNext.URL.Landing.Value
+ string
+    Landing IoC value, in case the original IoC is a redirector to a different domain
+
+
+ SlashNext.URL.Landing.Type
+ string
+ Type of the landing IoC
+
+
+ SlashNext.URL.Landing.ScanID
+ string
+ Scan ID to be used to get the landing IoC forensics data for further investigation
+
+
+ SlashNext.URL.Landing.Verdict
+ string
+ SlashNext Phishing Incident Response verdict on the landing IoC
+
+
+ SlashNext.URL.Landing.ThreatStatus
+ string
+ Threat status of the landing IoC
+
+
+ SlashNext.URL.Landing.ThreatName
+ string
+ Name of the threat posed by the landing IoC
+
+
+ SlashNext.URL.Landing.ThreatType
+ string
+ Type of the threat posed by the landing IoC
+
+
+ SlashNext.URL.Landing.FirstSeen
+ date
+ Time when the landing IoC was first observed
+
+
+ SlashNext.URL.Landing.LastSeen
+ date
+ Time when the landing IoC was last observed
+
+Command Example
+
+    !slashnext-scan-report scanid=48ae7b06-5915-4633-bc51-2cfaa0036742 extended_info=true
+
+Context Example
+
+{
+ "DBotScore": [
+ {
+ "Indicator": "http://www.google.com/",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ },
+ {
+ "Indicator": "https://www.google.com/?gws_rd=ssl",
+ "Score": 1,
+ "Type": "url",
+ "Vendor": "SlashNext Phishing Incident Response"
+ }
+ ],
+ "SlashNext.URL": {
+ "Final": {
+ "Type": "Final URL",
+ "Value": "https://www.google.com/?gws_rd=ssl",
+ "Verdict": "Benign"
+ },
+ "FirstSeen": "08-26-2019 17:29:38 UTC",
+ "LastSeen": "08-26-2019 19:41:19 UTC",
+ "ScanID": "48ae7b06-5915-4633-bc51-2cfaa0036742",
+ "ThreatName": "N/A",
+ "ThreatStatus": "N/A",
+ "ThreatType": "N/A",
+ "Type": "Scanned URL",
+ "Value": "http://www.google.com/",
+ "Verdict": "Benign"
+ },
+ "URL": [
+ {
+ "Data": "http://www.google.com/"
+ },
+ {
+ "Data": "https://www.google.com/?gws_rd=ssl"
+ }
+ ]
+}
+
+Human Readable Output
+
+
SlashNext Phishing Incident Response - Scan Report
+url = http://www.google.com/
+
+| Value | Type | Verdict | ScanID | ThreatStatus | ThreatName | ThreatType | FirstSeen | LastSeen |
+| --- | --- | --- | --- | --- | --- | --- | --- | --- |
+| http://www.google.com/ | Scanned URL | Benign | 48ae7b06-5915-4633-bc51-2cfaa0036742 | N/A | N/A | N/A | 08-26-2019 17:29:38 UTC | 08-26-2019 19:41:19 UTC |
+| --------> https://www.google.com/?gws_rd=ssl | Final URL | Benign | | | | | | |
+
+9. slashnext-download-screenshot
+
+Download the webpage screenshot captured during a previous URL scan request.
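+
+The screenshot arrives base64-encoded in the 'scData.scBase64' field of the JSON response; the integration decodes it into a JPEG file entry. A standalone sketch of that decoding step (assuming 'data' holds a successful download/screenshot response; the output file name is illustrative):
+
+    import base64
+
+    sc_bytes = base64.b64decode(data['scData']['scBase64'])
+    with open('slashnext_screenshot.jpg', 'wb') as f:
+        f.write(sc_bytes)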
+Base Command
+
+ slashnext-download-screenshot
+
+
+Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| scanid | Scan ID returned by an earlier call to the 'slashnext-url-scan' or 'slashnext-url-scan-sync' command. | Required |
+| resolution | Resolution of the webpage screenshot. Currently only 'high' and 'medium' resolutions are supported. | Optional |
+
+Context Output
+There is no context output for this command.
+
+Command Example
+
+ !slashnext-download-screenshot scanid=48ae7b06-5915-4633-bc51-2cfaa0036742
+
+
+Human Readable Output
+
+
+Forensics: Webpage Screenshot for URL Scan ID = 48ae7b06-5915-4633-bc51-2cfaa0036742
+
+10. slashnext-download-html
+
+Download the webpage HTML captured during a previous URL scan request.
+Base Command
+
+ slashnext-download-html
+
+
+Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| scanid | Scan ID returned by an earlier call to the 'slashnext-url-scan' or 'slashnext-url-scan-sync' command. | Required |
+
+Context Output
+There is no context output for this command.
+
+Command Example
+
+ !slashnext-download-html scanid=48ae7b06-5915-4633-bc51-2cfaa0036742
+
+
+Human Readable Output
+
+
+Forensics: Webpage HTML for URL Scan ID = 48ae7b06-5915-4633-bc51-2cfaa0036742
+
+11. slashnext-download-text
+
+Download the webpage rendered text captured during a previous URL scan request.
+Base Command
+
+ slashnext-download-text
+
+
+Input
+
+| Argument Name | Description | Required |
+| --- | --- | --- |
+| scanid | Scan ID returned by an earlier call to the 'slashnext-url-scan' or 'slashnext-url-scan-sync' command. | Required |
+
+Context Output
+There is no context output for this command.
+
+Command Example
+
+ !slashnext-download-text scanid=48ae7b06-5915-4633-bc51-2cfaa0036742
+
+
+Human Readable Output
+
+
+Forensics: Webpage Rendered Text for URL Scan ID = 48ae7b06-5915-4633-bc51-2cfaa0036742
+
+Additional Information
+
+Known Limitations
+
+Troubleshooting
diff --git a/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse.py b/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse.py
new file mode 100644
index 000000000000..a2baaebe79cc
--- /dev/null
+++ b/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse.py
@@ -0,0 +1,1197 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+from typing import List, Dict
+import requests
+import base64
+
+requests.packages.urllib3.disable_warnings()
+
+"""
+Created on August 1, 2019
+
+@author: Saadat Abid
+"""
+
+
+''' GLOBAL VARS '''
+AUTH_KEY = demisto.params().get('apikey')
+BASE_API = demisto.params().get('apiurl', 'https://oti.slashnext.cloud/api')
+if BASE_API.endswith('/'):
+    BASE_API = BASE_API.rstrip('/')
+VERIFY = not demisto.params().get('unsecure', False)
+
+HOST_REPUTE_API = '/oti/v1/host/reputation'
+URL_SCAN_API = '/oti/v1/url/scan'
+URL_SCANSYNC_API = '/oti/v1/url/scansync'
+HOST_REPORT_API = '/oti/v1/host/report'
+DL_SC_API = '/oti/v1/download/screenshot'
+DL_HTML_API = '/oti/v1/download/html'
+DL_TEXT_API = '/oti/v1/download/text'
+
+
+''' HELPERS FUNCTIONS '''
+
+
+@logger
+def http_request(endpoint, data, method='POST'):
+ """
+    Make the HTTP request to the SlashNext cloud API endpoint with the given API args
+ :param endpoint: Corresponds to SlashNext cloud API to be invoked
+ :param data: Parameter dictionary as part of data
+ :param method: HTTP method to be used for API i.e. GET or POST
+ :return: Response of the SlashNext web API in json format
+ """
+ url = BASE_API + endpoint
+ data['authkey'] = AUTH_KEY
+
+ response = requests.request(method, url=url, data=data, timeout=300, verify=VERIFY)
+ if response.status_code == 200:
+ try:
+ return response.json()
+ except Exception as e:
+ return_error('Response JSON decoding failed due to {}'.format(str(e)))
+
+ else:
+ return_error('API Returned, {}:{}'.format(response.status_code, response.reason))
+
+
+def get_dbot_score(verdict):
+ """
+ Evaluate the dbot (Demisto) score as per verdict from SlashNext cloud API
+ :param verdict: SlashNext verdict on a certain IoC
+ :return: Dbot score
+ """
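+    # Demisto reputation scale: 0 = Unknown, 1 = Good, 2 = Suspicious, 3 = Bad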
+ if verdict == 'Malicious':
+ return 3
+ elif verdict == 'Suspicious':
+ return 2
+ elif verdict == 'Benign' or verdict == 'Redirector':
+ return 1
+ else:
+ return 0
+
+
+def get_dbot_std_context(indicator, ioc_type, verdict, threat_type):
+ """
+ Makes the dictionary for dbot score and standard Demisto contexts
+ :param indicator: IoC value
+ :param ioc_type: IoC type, ip, domain or url
+ :param verdict: Verdict by SlashNext OTI cloud
+ :param threat_type: Threat type reported by SlashNext OTI cloud
+ :return: Dbot score context dictionary, dbot standard context dictionary
+ """
+ dbot_score = get_dbot_score(verdict)
+
+ dbot_score_cont = {
+ 'Indicator': indicator,
+ 'Type': ioc_type.lower(),
+ 'Vendor': 'SlashNext Phishing Incident Response',
+ 'Score': dbot_score
+ }
+
+ if ioc_type.lower() == 'ip':
+ standard_cont = {
+ 'Address': indicator
+ }
+ elif ioc_type.lower() == 'domain':
+ standard_cont = {
+ 'Name': indicator
+ }
+ else:
+ standard_cont = {
+ 'Data': indicator
+ }
+
+ if dbot_score == 3:
+ standard_cont['Malicious'] = {
+ 'Vendor': 'SlashNext Phishing Incident Response',
+ 'Description': 'Detected "{}" Activity'.format(threat_type)
+ }
+
+ return dbot_score_cont, standard_cont
+
+
+def get_snx_host_ioc_context(indicator, ioc_type, threat_data):
+ """
+ Make the dictionary for SlashNext IoC contexts for hosts
+ :param indicator: IoC value
+ :param ioc_type: IoC type
+ :param threat_data: Threat data by SlashNext OTI cloud
+ :return: SlashNext IoC context dictionary
+ """
+ snx_ioc_cont = {
+ 'Value': indicator,
+ 'Type': ioc_type,
+ 'Verdict': threat_data.get('verdict'),
+ 'ThreatStatus': threat_data.get('threatStatus'),
+ 'ThreatType': threat_data.get('threatType'),
+ 'ThreatName': threat_data.get('threatName'),
+ 'FirstSeen': threat_data.get('firstSeen'),
+ 'LastSeen': threat_data.get('lastSeen')
+ }
+
+ return snx_ioc_cont
+
+
+def get_snx_url_ioc_context(url_data, is_scan=False):
+ """
+ Make the dictionary for SlashNext URL IoC contexts for URLs
+ :param url_data: URL data received in json format
+    :param is_scan: Whether the Scan ID should be included in the context
+    :return: List of SlashNext IoC context dictionaries, list of dbot score dictionaries, list of URL context dictionaries
+ """
+ snx_ioc_cont_list = []
+ dbot_score_cont_list = []
+ url_cont_list = []
+
+ url_threat_data = url_data.get('threatData')
+ snx_ioc_cont = {
+ 'Value': url_data.get('url'),
+ 'Type': 'Scanned URL',
+ 'Verdict': url_threat_data.get('verdict'),
+ 'ThreatStatus': url_threat_data.get('threatStatus'),
+ 'ThreatType': url_threat_data.get('threatType'),
+ 'ThreatName': url_threat_data.get('threatName'),
+ 'FirstSeen': url_threat_data.get('firstSeen'),
+ 'LastSeen': url_threat_data.get('lastSeen')
+ }
+ if is_scan is True:
+ snx_ioc_cont['ScanID'] = url_data.get('scanId')
+
+ dbot_score_cont, url_cont = get_dbot_std_context(
+ url_data.get('url'), 'url',
+ url_threat_data.get('verdict'),
+ url_threat_data.get('threatType'))
+ dbot_score_cont_list.append(dbot_score_cont)
+ if url_cont is not None:
+ url_cont_list.append(url_cont)
+
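+    # Per the SlashNext API, 'finalUrl' is populated when the scanned URL redirects within
+    # the same domain, while 'landingUrl' is populated when it redirects to a different domain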
+ if url_data.get('landingUrl') is None:
+ if url_data.get('finalUrl') is not None and url_data.get('finalUrl') != 'N/A':
+ dbot_final_score_cont, final_url_cont = get_dbot_std_context(
+ url_data.get('finalUrl'), 'url',
+ url_threat_data.get('verdict'),
+ url_threat_data.get('threatType'))
+ dbot_score_cont_list.append(dbot_final_score_cont)
+ if final_url_cont is not None:
+ url_cont_list.append(final_url_cont)
+
+ snx_final_ioc_cont = {
+ 'Value': url_data.get('finalUrl'),
+ 'Type': 'Final URL',
+ 'Verdict': url_threat_data.get('verdict')
+ }
+
+ snx_ioc_cont['Final'] = snx_final_ioc_cont.copy()
+ snx_ioc_cont_list.append(snx_ioc_cont)
+
+ snx_final_ioc_cont['Value'] = '--------> {}'.format(url_data.get('finalUrl'))
+ snx_ioc_cont_list.append(snx_final_ioc_cont)
+
+ else:
+ snx_ioc_cont_list.append(snx_ioc_cont)
+
+ else:
+ landing = url_data.get('landingUrl')
+ landing_threat_data = landing.get('threatData')
+
+ dbot_landing_score_cont, landing_url_cont = get_dbot_std_context(
+ landing.get('url'), 'url',
+ landing_threat_data.get('verdict'),
+ landing_threat_data.get('threatType'))
+ dbot_score_cont_list.append(dbot_landing_score_cont)
+ if landing_url_cont is not None:
+ url_cont_list.append(landing_url_cont)
+
+ snx_landing_ioc_cont = {
+ 'Value': landing.get('url'),
+ 'Type': 'Redirected URL',
+ 'Verdict': landing_threat_data.get('verdict'),
+ 'ThreatStatus': landing_threat_data.get('threatStatus'),
+ 'ThreatType': landing_threat_data.get('threatType'),
+ 'ThreatName': landing_threat_data.get('threatName'),
+ 'FirstSeen': landing_threat_data.get('firstSeen'),
+ 'LastSeen': landing_threat_data.get('lastSeen')
+ }
+ if is_scan is True:
+ snx_landing_ioc_cont['ScanID'] = landing.get('scanId')
+
+ snx_ioc_cont['Landing'] = snx_landing_ioc_cont.copy()
+ snx_ioc_cont_list.append(snx_ioc_cont)
+
+ snx_landing_ioc_cont['Value'] = '--------> {}'.format(landing.get('url'))
+ snx_ioc_cont_list.append(snx_landing_ioc_cont)
+
+ return snx_ioc_cont_list, dbot_score_cont_list, url_cont_list
+
+
+def download_forensics_data(scanid, tag, screenshot=False, html=False, txt=False):
+ """
+ Download the selected forensics data from SlashNext cloud
+    :param scanid: Scan ID for which forensics data is to be downloaded
+ :param tag: String to tag the corresponding forensics data file
+ :param screenshot: Holds true if screenshot is to be downloaded
+ :param html: Holds true if the HTML is to be downloaded
+ :param txt: Holds true if the text is to be downloaded
+ :return: None
+ """
+ error_no = 0
+ error_msg = 'Success'
+ show_error_msg = True
+ if screenshot is True:
+ # Host Screenshot Section
+ api_data = {
+ 'scanid': scanid,
+ 'resolution': 'medium'
+ }
+ response = http_request(endpoint=DL_SC_API, data=api_data)
+
+ if response.get('errorNo') != 0:
+ error_no = response.get('errorNo')
+ error_msg = response.get('errorMsg')
+ else:
+ show_error_msg = False
+
+ sc_base64 = response.get('scData').get('scBase64')
+ sc_data = base64.b64decode(sc_base64)
+
+ sc_file = fileResult('slashnext_{}.jpg'.format(scanid), sc_data, entryTypes['image'])
+
+ demisto.results({
+ 'Type': entryTypes['image'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Forensics: Webpage Screenshot for the ' + tag,
+ 'File': sc_file.get('File'),
+ 'FileID': sc_file.get('FileID')
+ })
+
+ if html is True:
+ # Host HTML Section
+ api_data = {
+ 'scanid': scanid
+ }
+ response = http_request(endpoint=DL_HTML_API, data=api_data)
+
+ if response.get('errorNo') == 0:
+ show_error_msg = False
+
+ html_base64 = response.get('htmlData').get('htmlBase64')
+ html_data = base64.b64decode(html_base64)
+
+ html_file = fileResult('slashnext_{}.html'.format(scanid), html_data, entryTypes['file'])
+
+ demisto.results({
+ 'Type': entryTypes['file'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Forensics: Webpage HTML for the ' + tag,
+ 'File': html_file.get('File'),
+ 'FileID': html_file.get('FileID')
+ })
+
+ if txt is True:
+ # Host Text Section
+ api_data = {
+ 'scanid': scanid
+ }
+ response = http_request(endpoint=DL_TEXT_API, data=api_data)
+
+ if response.get('errorNo') == 0:
+ show_error_msg = False
+
+ text_base64 = response.get('textData').get('textBase64')
+ text_data = base64.b64decode(text_base64)
+
+ text_file = fileResult('slashnext_{}.txt'.format(scanid), text_data, entryTypes['file'])
+
+ demisto.results({
+ 'Type': entryTypes['file'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Forensics: Webpage Rendered Text for the ' + tag,
+ 'File': text_file.get('File'),
+ 'FileID': text_file.get('FileID')
+ })
+
+ # Show Error Message
+ if show_error_msg is True and (screenshot is True or html is True or txt is True):
+ demisto.results('API Returned, {}:{}'.format(error_no, error_msg))
+
+
+''' COMMAND FUNCTIONS '''
+
+
+def validate_snx_api_key():
+ """
+    Validate the provided SlashNext cloud API key and test the connection; exit the program in case of any error
+    :return: 'ok' if the API key is valid
+ """
+ api_data = {
+ 'host': 'www.google.com'
+ }
+ response = http_request(endpoint=HOST_REPUTE_API, data=api_data)
+
+ if response.get('errorNo') != 0:
+ return_error('API Returned, {}:{}'.format(response.get('errorNo'), response.get('errorMsg')))
+
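+    # Demisto's test-module command expects the literal string 'ok' on success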
+ return 'ok'
+
+
+def ip_lookup(ip):
+ """
+ Execute SlashNext's host/reputation API against the requested IP address with the given parameters
+ :param ip: IP address whose reputation needs to be fetched
+ :return: Response of the SlashNext host/reputation API
+ """
+ # Create the required data dictionary for Host/Reputation
+ api_data = {
+ 'host': ip
+ }
+ response = http_request(endpoint=HOST_REPUTE_API, data=api_data)
+
+ if response.get('errorNo') != 0:
+ return_error('API Returned, {}:{}'.format(response.get('errorNo'), response.get('errorMsg')))
+
+ return response
+
+
+def ip_command():
+ """
+    Execute SlashNext's host/reputation API for the 'ip' reputation command with the given arguments
+    :return: None
+ """
+ # 1. Get input host from Demisto
+ ip = demisto.args().get('ip')
+ if not is_ip_valid(ip):
+        return_error('Invalid IP address. Please retry with a valid IP address')
+ # 2. Get the host reputation from SlashNext API
+ response = ip_lookup(ip=ip)
+ if response.get('errorNo') != 0:
+ return
+ # 3. Parse and format the response
+ dbot_score_cont, ip_cont = get_dbot_std_context(
+ ip, 'IP', response.get('threatData').get('verdict'), response.get('threatData').get('threatType'))
+
+ snx_ioc_cont = get_snx_host_ioc_context(ip, 'IP', response.get('threatData'))
+
+ ec = {
+ 'SlashNext.IP(val.Value === obj.Value)': snx_ioc_cont,
+ 'DBotScore': dbot_score_cont,
+ 'IP': ip_cont
+ }
+
+ title = 'SlashNext Phishing Incident Response - IP Lookup\n' \
+ '##### ip = {}'.format(ip)
+
+ md = tableToMarkdown(
+ title,
+ snx_ioc_cont,
+ ['Value',
+ 'Type',
+ 'Verdict',
+ 'ThreatStatus',
+ 'ThreatName',
+ 'ThreatType',
+ 'FirstSeen',
+ 'LastSeen']
+ )
+
+ return_outputs(md, ec, snx_ioc_cont)
+
+
+def domain_lookup(domain):
+ """
+ Execute SlashNext's host/reputation API against the requested domain with the given parameters
+ :param domain: Domain whose reputation needs to be fetched
+ :return: Response of the SlashNext host/reputation API
+ """
+ # Create the required data dictionary for Host/Reputation
+ api_data = {
+ 'host': domain
+ }
+ response = http_request(endpoint=HOST_REPUTE_API, data=api_data)
+
+ if response.get('errorNo') != 0:
+ return_error('API Returned, {}:{}'.format(response.get('errorNo'), response.get('errorMsg')))
+
+ return response
+
+
+def domain_command():
+ """
+    Execute SlashNext's host/reputation API for the 'domain' reputation command with the given arguments
+    :return: None
+ """
+ # 1. Get input host from Demisto
+ domain = demisto.args().get('domain')
+ # 2. Get the host reputation from SlashNext API
+ response = domain_lookup(domain=domain)
+ if response.get('errorNo') != 0:
+ return
+ # 3. Parse and format the response
+ dbot_score_cont, domain_cont = get_dbot_std_context(
+ domain, 'Domain', response.get('threatData').get('verdict'), response.get('threatData').get('threatType'))
+
+ snx_ioc_cont = get_snx_host_ioc_context(domain, 'Domain', response.get('threatData'))
+
+ ec = {
+ 'SlashNext.Domain(val.Value === obj.Value)': snx_ioc_cont,
+ 'DBotScore': dbot_score_cont,
+ 'Domain': domain_cont
+ }
+
+ domain = domain.encode('idna')
+
+ title = 'SlashNext Phishing Incident Response - Domain Lookup\n' \
+ '##### domain = {}'.format(domain.decode())
+
+ md = tableToMarkdown(
+ title,
+ snx_ioc_cont,
+ ['Value',
+ 'Type',
+ 'Verdict',
+ 'ThreatStatus',
+ 'ThreatName',
+ 'ThreatType',
+ 'FirstSeen',
+ 'LastSeen']
+ )
+
+ return_outputs(md, ec, snx_ioc_cont)
+
+
+def host_reputation(host):
+ """
+ Execute SlashNext's host/reputation API against the requested host with the given parameters
+ :param host: Host whose reputation needs to be fetched
+ :return: Response of the SlashNext host/reputation API
+ """
+ # Create the required data dictionary for Host/Reputation
+ api_data = {
+ 'host': host
+ }
+ response = http_request(endpoint=HOST_REPUTE_API, data=api_data)
+
+ if response.get('errorNo') != 0:
+ return_error('API Returned, {}:{}'.format(response.get('errorNo'), response.get('errorMsg')))
+
+ return response
+
+
+def host_reputation_command():
+ """
+    Execute SlashNext's host/reputation API for the 'slashnext-host-reputation' command with the given arguments
+    :return: None
+ """
+ # 1. Get input host from Demisto
+ host = demisto.args().get('host')
+ # 2. Get the host reputation from SlashNext API
+ response = host_reputation(host=host)
+ if response.get('errorNo') != 0:
+ return
+ # 3. Parse and format the response
+ ioc_type = 'IP' if is_ip_valid(host) else 'Domain'
+
+ dbot_score_cont, host_cont = get_dbot_std_context(
+ host, ioc_type, response.get('threatData').get('verdict'), response.get('threatData').get('threatType'))
+
+ snx_ioc_cont = get_snx_host_ioc_context(host, ioc_type, response.get('threatData'))
+
+ ec = {
+ 'SlashNext.{}(val.Value === obj.Value)'.format(ioc_type): snx_ioc_cont,
+ 'DBotScore': dbot_score_cont,
+ ioc_type: host_cont
+ }
+
+ host = host.encode('idna')
+
+ title = 'SlashNext Phishing Incident Response - Host Reputation\n' \
+ '##### host = {}'.format(host.decode())
+
+ md = tableToMarkdown(
+ title,
+ snx_ioc_cont,
+ ['Value',
+ 'Type',
+ 'Verdict',
+ 'ThreatStatus',
+ 'ThreatName',
+ 'ThreatType',
+ 'FirstSeen',
+ 'LastSeen']
+ )
+
+ return_outputs(md, ec, snx_ioc_cont)
+
+
+def host_report_command():
+ """
+    Execute SlashNext's host/reputation, host/report, url/scansync, download/screenshot, download/html and
+    download/text APIs for the 'slashnext-host-report' command with the given arguments
+    :return: None
+ """
+ # 1. Get input host from Demisto
+ host = demisto.args().get('host')
+ # 2(i). Get the host reputation from SlashNext API
+ response = host_reputation(host=host)
+ if response.get('errorNo') != 0:
+ return
+ # 3(i). Parse and format the response
+ ioc_type = 'IP' if is_ip_valid(host) else 'Domain'
+
+ dbot_score_cont, host_cont = get_dbot_std_context(
+ host, ioc_type, response.get('threatData').get('verdict'), response.get('threatData').get('threatType'))
+
+ snx_ioc_cont = get_snx_host_ioc_context(host, ioc_type, response.get('threatData'))
+
+ ec = {
+ 'SlashNext.{}(val.Value === obj.Value)'.format(ioc_type): snx_ioc_cont,
+ 'DBotScore': dbot_score_cont,
+ ioc_type: host_cont
+ }
+
+ enc_host = host.encode('idna')
+
+ title = 'SlashNext Phishing Incident Response - Host Report\n'\
+ '##### host = {}'.format(enc_host.decode())
+
+ md = tableToMarkdown(
+ title,
+ snx_ioc_cont,
+ ['Value',
+ 'Type',
+ 'Verdict',
+ 'ThreatStatus',
+ 'ThreatName',
+ 'ThreatType',
+ 'FirstSeen',
+ 'LastSeen']
+ )
+
+ return_outputs(md, ec, snx_ioc_cont)
+
+ # In case host is Unrated, the command execution is completed else continue with host report
+    # If the host is Unrated, command execution is complete; otherwise continue with the host report
+ return
+
+ # 2(ii). Get the host report from SlashNext API
+ response = host_urls(host=host, limit=1)
+ if response.get('errorNo') != 0:
+ return
+ # 3(ii). Parse and format the response
+ url_data = response.get('urlDataList')[0]
+ scanid = url_data.get('scanId')
+
+ if scanid == 'N/A':
+ # 2(iii). Get the url scan sync from SlashNext API
+ response = url_scan_sync(url=url_data.get('url'), timeout=60)
+ if response.get('errorNo') != 0:
+ return
+ # 3(iii). Parse and format the response
+ url_data = response.get('urlData')
+ scanid = url_data.get('scanId')
+
+ snx_ioc_cont, dbot_score_cont, url_cont = get_snx_url_ioc_context(url_data, is_scan=True)
+ else:
+ snx_ioc_cont, dbot_score_cont, url_cont = get_snx_url_ioc_context(url_data, is_scan=True)
+
+ ec = {
+ 'SlashNext.URL(val.Value === obj.Value)': snx_ioc_cont[0],
+ 'DBotScore': dbot_score_cont,
+ 'URL': url_cont
+ }
+
+ enc_host = host.encode('idna')
+
+ title = 'SlashNext Phishing Incident Response - Latest Scanned URL\n' \
+ '##### host = {}'.format(enc_host.decode())
+
+ if response.get('normalizeData').get('normalizeStatus') == 1:
+ title += ' *\n*' + response.get('normalizeData').get('normalizeMessage')
+
+ md = tableToMarkdown(
+ title,
+ snx_ioc_cont,
+ ['Value',
+ 'Type',
+ 'Verdict',
+ 'ScanID',
+ 'ThreatStatus',
+ 'ThreatName',
+ 'ThreatType',
+ 'FirstSeen',
+ 'LastSeen']
+ )
+
+ return_outputs(md, ec, snx_ioc_cont)
+
+ # Download Screenshot, HTML and Text Section
+ if url_data.get('landingUrl') is None:
+ if url_data.get('finalUrl') is not None and url_data.get('finalUrl') != 'N/A':
+ tag = 'Final URL = {}'.format(url_data.get('finalUrl'))
+ else:
+ tag = 'Scanned URL = {}'.format(url_data.get('url'))
+ else:
+ tag = 'Redirected URL = {}'.format(url_data.get('landingUrl').get('url'))
+
+ if response.get('swlData') is None:
+ download_forensics_data(scanid=scanid, tag=tag, screenshot=True, html=True, txt=True)
+
+
+def host_urls(host, limit):
+ """
+ Execute SlashNext's host/report API against the requested host urls with the given parameters
+ :param host: Host whose related/associated URLs to be fetched
+ :param limit: Number of related URLs to be fetched
+ :return: Response of the SlashNext host/report API
+ """
+ # Create the required data dictionary for Host/Report
+ api_data = {
+ 'host': host,
+ 'page': 1,
+ 'rpp': limit
+ }
+ response = http_request(endpoint=HOST_REPORT_API, data=api_data)
+
+ if response.get('errorNo') != 0:
+ return_error('API Returned, {}:{}'.format(response.get('errorNo'), response.get('errorMsg')))
+
+ return response
+
+
+def host_urls_command():
+ """
+    Execute SlashNext's host/report API for the 'slashnext-host-urls' command with the given arguments
+    :return: None
+ """
+ # 1. Get input host and limit from Demisto
+ host = demisto.args().get('host')
+ limit = demisto.args().get('limit')
+ # 2. Get the host report from SlashNext API
+ response = host_urls(host=host, limit=limit)
+ if response.get('errorNo') != 0:
+ return
+ # 3. Parse and format the response
+ snx_ioc_cont_list = [] # type: List[Dict[str, str]]
+ dbot_score_cont_list = [] # type: List[Dict[str, str]]
+ url_cont_list = [] # type: List[Dict[str, str]]
+ snx_ec_cont_list = [] # type: List[Dict[str, str]]
+ for url_data in response.get('urlDataList'):
+ if url_data.get('threatData').get('verdict').startswith('Unrated') is False:
+ snx_ioc_cont, dbot_score_cont, url_cont = get_snx_url_ioc_context(url_data, is_scan=True)
+ snx_ioc_cont_list.extend(snx_ioc_cont)
+ dbot_score_cont_list.extend(dbot_score_cont)
+ url_cont_list.extend(url_cont)
+ snx_ec_cont_list.append(snx_ioc_cont[0])
+
+ ec = {} # type: Dict[str, List[Dict[str, str]]]
+ if response.get('urlDataList')[0].get('threatData').get('verdict').startswith('Unrated') is False:
+ ec = {
+ 'SlashNext.URL(val.Value === obj.Value)': snx_ec_cont_list,
+ 'DBotScore': dbot_score_cont_list,
+ 'URL': url_cont_list
+ }
+
+ host = host.encode('idna')
+
+ title = 'SlashNext Phishing Incident Response - Host URLs\n' \
+ '##### host = {}'.format(host.decode())
+
+ if response.get('normalizeData').get('normalizeStatus') == 1:
+ title += ' *\n*' + response.get('normalizeData').get('normalizeMessage')
+
+ md = tableToMarkdown(
+ title,
+ snx_ioc_cont_list,
+ ['Value',
+ 'Type',
+ 'Verdict',
+ 'ScanID',
+ 'ThreatStatus',
+ 'ThreatName',
+ 'ThreatType',
+ 'FirstSeen',
+ 'LastSeen']
+ )
+
+ return_outputs(md, ec, snx_ioc_cont_list)
+
+
+def url_scan(url):
+ """
+ Execute SlashNext's url/scan API against the requested URL scan with the given parameters
+ :param url: URL to be scanned
+ :return: Response of the SlashNext url/scan API
+ """
+ # Create the required data dictionary for URL/Scan
+ api_data = {
+ 'url': url
+ }
+ response = http_request(endpoint=URL_SCAN_API, data=api_data)
+
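+    # errorNo == 1 means the request was accepted but the scan has not finished yet;
+    # the caller is told to check back later with the returned scan ID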
+ if response.get('errorNo') == 1:
+ url_threat_data = response.get('urlData').get('threatData')
+ snx_ioc_cont = {
+ 'Value': url,
+ 'Type': 'Scanned URL',
+ 'Verdict': url_threat_data.get('verdict'),
+ 'ThreatStatus': url_threat_data.get('threatStatus'),
+ 'ThreatType': url_threat_data.get('threatType'),
+ 'ThreatName': url_threat_data.get('threatName'),
+ 'FirstSeen': url_threat_data.get('firstSeen'),
+ 'LastSeen': url_threat_data.get('lastSeen'),
+ 'ScanID': response.get('urlData').get('scanId')
+ }
+ ec = {
+ 'SlashNext.URL(val.Value === obj.Value)': snx_ioc_cont
+ }
+ md = '### SlashNext Phishing Incident Response - URL Scan\n' \
+ '##### url = {}\n' \
+             'Your URL scan request is submitted to the cloud and may take up to 60 seconds to complete.\n'\
+ 'Please check back later using "slashnext-scan-report" command with Scan ID = {} or running the same ' \
+ '"slashnext-url-scan" command one more time.'.format(url, response.get('urlData').get('scanId'))
+ return_outputs(md, ec, response)
+ elif response.get('errorNo') != 0:
+ return_error('API Returned, {}:{}'.format(response.get('errorNo'), response.get('errorMsg')))
+
+ return response
+
+
+def url_scan_command():
+ """
+    Execute SlashNext's url/scan API for the 'slashnext-url-scan' command with the given arguments
+    :return: None
+ """
+ # 1. Get input url and extended_info from Demisto
+ url = demisto.args().get('url')
+ extended_info = demisto.args().get('extended_info')
+ # 2. Get the url scan from SlashNext API
+ response = url_scan(url=url)
+ if response.get('errorNo') != 0:
+ return
+ # 3. Parse and format the response
+ url_data = response.get('urlData')
+ scanid = url_data.get('scanId')
+
+ snx_ioc_cont, dbot_score_cont, url_cont = get_snx_url_ioc_context(url_data, is_scan=True)
+
+ ec = {
+ 'SlashNext.URL(val.Value === obj.Value)': snx_ioc_cont[0],
+ 'DBotScore': dbot_score_cont,
+ 'URL': url_cont
+ }
+
+ title = 'SlashNext Phishing Incident Response - URL Scan\n'\
+ '##### url = {}'.format(url_data.get('url'))
+
+ if response.get('normalizeData').get('normalizeStatus') == 1:
+ title += ' *\n*' + response.get('normalizeData').get('normalizeMessage')
+
+ md = tableToMarkdown(
+ title,
+ snx_ioc_cont,
+ ['Value',
+ 'Type',
+ 'Verdict',
+ 'ScanID',
+ 'ThreatStatus',
+ 'ThreatName',
+ 'ThreatType',
+ 'FirstSeen',
+ 'LastSeen']
+ )
+
+ return_outputs(md, ec, snx_ioc_cont)
+
+ if extended_info == 'true' and response.get('swlData') is None:
+ # Download Screenshot, HTML and Text Section
+ if url_data.get('landingUrl') is None:
+ if url_data.get('finalUrl') is not None and url_data.get('finalUrl') != 'N/A':
+ tag = 'Final URL = {}'.format(url_data.get('finalUrl'))
+ else:
+ tag = 'Scanned URL = {}'.format(url_data.get('url'))
+ else:
+ tag = 'Redirected URL = {}'.format(url_data.get('landingUrl').get('url'))
+
+ download_forensics_data(scanid=scanid, tag=tag, screenshot=True, html=True, txt=True)
+
+
+def url_scan_sync(url, timeout):
+ """
+ Execute SlashNext's url/scansync API against the requested URL scan sync with the given parameters
+ :param url: URL to be scanned
+ :param timeout: Timeout value in seconds
+ :return: Response of the SlashNext url/scansync API
+ """
+ # Create the required data dictionary for URL/ScanSync
+ api_data = {
+ 'url': url,
+ 'timeout': timeout
+ }
+ response = http_request(endpoint=URL_SCANSYNC_API, data=api_data)
+
+ if response.get('errorNo') == 1:
+ url_threat_data = response.get('urlData').get('threatData')
+ snx_ioc_cont = {
+ 'Value': url,
+ 'Type': 'Scanned URL',
+ 'Verdict': url_threat_data.get('verdict'),
+ 'ThreatStatus': url_threat_data.get('threatStatus'),
+ 'ThreatType': url_threat_data.get('threatType'),
+ 'ThreatName': url_threat_data.get('threatName'),
+ 'FirstSeen': url_threat_data.get('firstSeen'),
+ 'LastSeen': url_threat_data.get('lastSeen'),
+ 'ScanID': response.get('urlData').get('scanId')
+ }
+ ec = {
+ 'SlashNext.URL(val.Value === obj.Value)': snx_ioc_cont
+ }
+ md = '### SlashNext Phishing Incident Response - URL Scan Sync\n' \
+ '##### url = {}\n' \
+             'Your URL scan request is submitted to the cloud and is taking longer than expected to complete.\n' \
+ 'Please check back later using "slashnext-scan-report" command with Scan ID = {} or running the same ' \
+ '"slashnext-url-scan-sync" command one more time.'.format(url, response.get('urlData').get('scanId'))
+ return_outputs(md, ec, response)
+ elif response.get('errorNo') != 0:
+ return_error('API Returned, {}:{}'.format(response.get('errorNo'), response.get('errorMsg')))
+
+ return response
+
+
+def url_scan_sync_command():
+ """
+    Execute SlashNext's url/scansync API for the 'slashnext-url-scan-sync' command with the given arguments
+    :return: None
+ """
+ # 1. Get input url, extended_info and timeout from Demisto
+ url = demisto.args().get('url')
+ timeout = demisto.args().get('timeout')
+ extended_info = demisto.args().get('extended_info')
+ # 2. Get the url scan sync from SlashNext API
+ response = url_scan_sync(url=url, timeout=timeout)
+ if response.get('errorNo') != 0:
+ return
+ # 3. Parse and format the response
+ url_data = response.get('urlData')
+ scanid = url_data.get('scanId')
+
+ snx_ioc_cont, dbot_score_cont, url_cont = get_snx_url_ioc_context(url_data, is_scan=True)
+
+ ec = {
+ 'SlashNext.URL(val.Value === obj.Value)': snx_ioc_cont[0],
+ 'DBotScore': dbot_score_cont,
+ 'URL': url_cont
+ }
+
+ title = 'SlashNext Phishing Incident Response - URL Scan Sync\n'\
+ '##### url = {}'.format(url_data.get('url'))
+
+ if response.get('normalizeData').get('normalizeStatus') == 1:
+ title += ' *\n*' + response.get('normalizeData').get('normalizeMessage')
+
+ md = tableToMarkdown(
+ title,
+ snx_ioc_cont,
+ ['Value',
+ 'Type',
+ 'Verdict',
+ 'ScanID',
+ 'ThreatStatus',
+ 'ThreatName',
+ 'ThreatType',
+ 'FirstSeen',
+ 'LastSeen']
+ )
+
+ return_outputs(md, ec, snx_ioc_cont)
+
+ if extended_info == 'true' and response.get('swlData') is None:
+ # Download Screenshot, HTML and Text Section
+ if url_data.get('landingUrl') is None:
+ if url_data.get('finalUrl') is not None and url_data.get('finalUrl') != 'N/A':
+ tag = 'Final URL = {}'.format(url_data.get('finalUrl'))
+ else:
+ tag = 'Scanned URL = {}'.format(url_data.get('url'))
+ else:
+ tag = 'Redirected URL = {}'.format(url_data.get('landingUrl').get('url'))
+
+ download_forensics_data(scanid=scanid, tag=tag, screenshot=True, html=True, txt=True)
+
+
+def scan_report(scanid):
+ """
+    Execute SlashNext's url/scan API to retrieve the report of a previously requested URL scan with the given parameters
+ :param scanid: Scan ID returned by a SlashNext API earlier as a result of a scan request
+ :return: Response of the SlashNext url/scan API
+ """
+ # Create the required data dictionary for URL/Scan
+ api_data = {
+ 'scanid': scanid
+ }
+ response = http_request(endpoint=URL_SCAN_API, data=api_data)
+
+ if response.get('errorNo') == 1:
+ md = '### SlashNext Phishing Incident Response - Scan Report\n' \
+ '##### scanid = {}\n' \
+             'Your URL scan request is submitted to the cloud and may take up to 60 seconds to complete.\n' \
+ 'Please check back later using "slashnext-scan-report" command with Scan ID = {}'.format(scanid, scanid)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': response,
+ 'HumanReadable': md,
+ 'ReadableContentsFormat': formats['markdown']
+ })
+ elif response.get('errorNo') != 0:
+ return_error('API Returned, {}:{}'.format(response.get('errorNo'), response.get('errorMsg')))
+
+ return response
+
+
+def scan_report_command():
+ """
+    Execute SlashNext's url/scan API for the 'slashnext-scan-report' command with the given arguments
+    :return: None
+ """
+ # 1. Get input scan id and extended_info flag from Demisto
+ scanid = demisto.args().get('scanid')
+ extended_info = demisto.args().get('extended_info')
+ # 2. Get the scan report from SlashNext API
+ response = scan_report(scanid=scanid)
+ if response.get('errorNo') != 0:
+ return
+ # 3. Parse and format the response
+ url_data = response.get('urlData')
+ scanid = url_data.get('scanId')
+
+ snx_ioc_cont, dbot_score_cont, url_cont = get_snx_url_ioc_context(url_data, is_scan=True)
+
+ ec = {
+ 'SlashNext.URL(val.Value === obj.Value)': snx_ioc_cont[0],
+ 'DBotScore': dbot_score_cont,
+ 'URL': url_cont
+ }
+
+ title = 'SlashNext Phishing Incident Response - Scan Report\n'\
+ '##### url = {}'.format(url_data.get('url'))
+
+ if response.get('normalizeData').get('normalizeStatus') == 1:
+ title += ' *\n*' + response.get('normalizeData').get('normalizeMessage')
+
+ md = tableToMarkdown(
+ title,
+ snx_ioc_cont,
+ ['Value',
+ 'Type',
+ 'Verdict',
+ 'ScanID',
+ 'ThreatStatus',
+ 'ThreatName',
+ 'ThreatType',
+ 'FirstSeen',
+ 'LastSeen']
+ )
+
+ return_outputs(md, ec, snx_ioc_cont)
+
+ if extended_info == 'true' and response.get('swlData') is None:
+ # Download Screenshot, HTML and Text Section
+ if url_data.get('landingUrl') is None:
+ if url_data.get('finalUrl') is not None and url_data.get('finalUrl') != 'N/A':
+ tag = 'Final URL = {}'.format(url_data.get('finalUrl'))
+ else:
+ tag = 'Scanned URL = {}'.format(url_data.get('url'))
+ else:
+ tag = 'Redirected URL = {}'.format(url_data.get('landingUrl').get('url'))
+
+ download_forensics_data(scanid=scanid, tag=tag, screenshot=True, html=True, txt=True)
+
+
+def download_screenshot(scanid, resolution='high'):
+ """
+    Execute SlashNext's download/screenshot API for a previously requested URL scan with the given parameters
+ :param scanid: Scan ID returned by a SlashNext API earlier as a result of a scan request
+ :param resolution: Desired resolution of the screenshot. Currently supported values are 'high' and 'medium'
+ :return: Response of the SlashNext download/screenshot API
+ """
+ # Create the required data dictionary for Download/Screenshot
+ api_data = {
+ 'scanid': scanid,
+ 'resolution': resolution
+ }
+ response = http_request(endpoint=DL_SC_API, data=api_data)
+
+ if response.get('errorNo') == 1:
+ demisto.results(
+            'Your URL scan request is submitted to the cloud and may take up to 60 seconds to complete.\n'
+ 'Please check back later using "slashnext-download-screenshot" command with Scan ID = {}'.format(scanid))
+ elif response.get('errorNo') != 0:
+ return_error('API Returned, {}:{}'.format(response.get('errorNo'), response.get('errorMsg')))
+
+ return response
+
+
+def download_screenshot_command():
+ """
+    Execute SlashNext's download/screenshot API for the 'slashnext-download-screenshot' command with the given arguments
+    :return: None
+ """
+ # 1. Get input scan id and resolution from Demisto
+ scanid = demisto.args().get('scanid')
+ resolution = demisto.args().get('resolution')
+ # 2. Get the forensic webpage screenshot from SlashNext API
+ response = download_screenshot(scanid=scanid, resolution=resolution)
+ if response.get('errorNo') != 0:
+ return
+ # 3. Parse and format the response
+ sc_base64 = response.get('scData').get('scBase64')
+ sc_data = base64.b64decode(sc_base64)
+
+ sc_file = fileResult('slashnext_{}.jpg'.format(scanid), sc_data, entryTypes['image'])
+
+ demisto.results({
+ 'Type': entryTypes['image'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Forensics: Webpage Screenshot for URL Scan ID = {}'.format(scanid),
+ 'File': sc_file.get('File'),
+ 'FileID': sc_file.get('FileID')
+ })
+
+
+def download_html(scanid):
+ """
+    Execute SlashNext's download/html API for a previously requested URL scan with the given parameters
+ :param scanid: Scan ID returned by a SlashNext API earlier as a result of a scan request
+ :return: Response of the SlashNext download/html API
+ """
+ # Create the required data dictionary for Download/HTML
+ api_data = {
+ 'scanid': scanid
+ }
+ response = http_request(endpoint=DL_HTML_API, data=api_data)
+
+ if response.get('errorNo') == 1:
+ demisto.results(
+            'Your URL scan request is submitted to the cloud and may take up to 60 seconds to complete.\n'
+ 'Please check back later using "slashnext-download-html" command with Scan ID = {}'.format(scanid))
+ elif response.get('errorNo') != 0:
+ return_error('API Returned, {}:{}'.format(response.get('errorNo'), response.get('errorMsg')))
+
+ return response
+
+
+def download_html_command():
+ """
+    Execute SlashNext's download/html API for the 'slashnext-download-html' command with the given arguments
+    :return: None
+ """
+ # 1. Get input scan id from Demisto
+ scanid = demisto.args().get('scanid')
+ # 2. Get the forensic webpage HTML from SlashNext API
+ response = download_html(scanid=scanid)
+ if response.get('errorNo') != 0:
+ return
+ # 3. Parse and format the response
+ html_base64 = response.get('htmlData').get('htmlBase64')
+ html_data = base64.b64decode(html_base64)
+
+ html_file = fileResult('slashnext_{}.html'.format(scanid), html_data, entryTypes['file'])
+
+ demisto.results({
+ 'Type': entryTypes['file'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Forensics: Webpage HTML for URL Scan ID = {}'.format(scanid),
+ 'File': html_file.get('File'),
+ 'FileID': html_file.get('FileID')
+ })
+
+
+def download_text(scanid):
+ """
+    Execute SlashNext's download/text API for a previously requested URL scan with the given parameters
+ :param scanid: Scan ID returned by a SlashNext API earlier as a result of a scan request
+ :return: Response of the SlashNext download/text API
+ """
+ # Create the required data dictionary for Download/Text
+ api_data = {
+ 'scanid': scanid
+ }
+ response = http_request(endpoint=DL_TEXT_API, data=api_data)
+
+ if response.get('errorNo') == 1:
+ demisto.results(
+            'Your URL scan request is submitted to the cloud and may take up to 60 seconds to complete.\n'
+ 'Please check back later using "slashnext-download-text" command with Scan ID = {}'.format(scanid))
+ elif response.get('errorNo') != 0:
+ return_error('API Returned, {}:{}'.format(response.get('errorNo'), response.get('errorMsg')))
+
+ return response
+
+
+def download_text_command():
+ """
+    Execute SlashNext's download/text API for the 'slashnext-download-text' command with the given arguments
+    :return: None
+ """
+ # 1. Get input scan id from Demisto
+ scanid = demisto.args().get('scanid')
+ # 2. Get the forensic webpage text from SlashNext API
+ response = download_text(scanid=scanid)
+ if response.get('errorNo') != 0:
+ return
+ # 3. Parse and format the response
+ text_base64 = response.get('textData').get('textBase64')
+ text_data = base64.b64decode(text_base64)
+
+ text_file = fileResult('slashnext_{}.txt'.format(scanid), text_data, entryTypes['file'])
+
+ demisto.results({
+ 'Type': entryTypes['file'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Forensics: Webpage Rendered Text for URL Scan ID = {}'.format(scanid),
+ 'File': text_file.get('File'),
+ 'FileID': text_file.get('FileID')
+ })
+
+
+''' EXECUTION '''
+
+
+def main():
+ LOG('Command to be executed is {}.'.format(demisto.command()))
+ handle_proxy()
+ try:
+ if demisto.command() == 'test-module':
+ demisto.results(validate_snx_api_key())
+
+ if demisto.command() == 'ip':
+ ip_command()
+ elif demisto.command() == 'domain':
+ domain_command()
+ elif demisto.command() == 'slashnext-host-reputation':
+ host_reputation_command()
+ elif demisto.command() == 'slashnext-host-report':
+ host_report_command()
+ elif demisto.command() == 'slashnext-host-urls':
+ host_urls_command()
+ elif demisto.command() == 'slashnext-url-scan':
+ url_scan_command()
+ elif demisto.command() == 'slashnext-url-scan-sync':
+ url_scan_sync_command()
+ elif demisto.command() == 'slashnext-scan-report':
+ scan_report_command()
+ elif demisto.command() == 'slashnext-download-screenshot':
+ download_screenshot_command()
+ elif demisto.command() == 'slashnext-download-html':
+ download_html_command()
+ elif demisto.command() == 'slashnext-download-text':
+ download_text_command()
+
+ except Exception as e:
+ return_error(str(e))
+
+
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse.yml b/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse.yml
new file mode 100644
index 000000000000..376c2ad8e423
--- /dev/null
+++ b/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse.yml
@@ -0,0 +1,729 @@
+commonfields:
+ id: SlashNext Phishing Incident Response
+ version: -1
+name: SlashNext Phishing Incident Response
+display: SlashNext Phishing Incident Response
+category: Data Enrichment & Threat Intelligence
+description: "SlashNext Phishing Incident Response integration allows Demisto users
+ to fully automate analysis of suspicious URLs. For example, IR teams responsible
+ for abuse inbox management can extract links or domains out of suspicious emails
+ and automatically analyze them with the SlashNext SEER threat detection cloud to
+  get definitive, binary verdicts (malicious or benign) along with IOCs, screenshots,
+ and more. Automating URL analysis can save IR teams hundreds of hours versus manually
+ triaging these emails or checking URLs and domains against less accurate phishing
+ databases and domain reputation services."
+configuration:
+- display: SlashNext API Base URL
+ name: apiurl
+ defaultvalue: https://oti.slashnext.cloud/api
+ type: 0
+ required: true
+- display: API Key
+ name: apikey
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Trust any certificate (not secure)
+ name: unsecure
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ type: 8
+ required: false
+script:
+ dockerimage: demisto/python3:3.7.3.260
+ type: python
+ subtype: python3
+ script: '-'
+ commands:
+ - name: ip
+ arguments:
+ - name: ip
+ required: true
+ description: IPv4 address to look up in the SlashNext Threat Intelligence database.
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: IP.Address
+ description: The IP address.
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision.
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the reason that the vendor made the decision.
+ type: string
+ - contextPath: SlashNext.IP.Value
+ description: Value of the IoC.
+ type: string
+ - contextPath: SlashNext.IP.Type
+ description: Type of the IoC.
+ type: string
+ - contextPath: SlashNext.IP.Verdict
+ description: SlashNext Phishing Incident Response verdict on the IoC.
+ type: string
+ - contextPath: SlashNext.IP.ThreatStatus
+ description: Threat status of the IoC.
+ type: string
+ - contextPath: SlashNext.IP.ThreatName
+ description: Name of the threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.IP.ThreatType
+ description: Type of threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.IP.FirstSeen
+ description: Time when the IoC was first observed.
+ type: date
+ - contextPath: SlashNext.IP.LastSeen
+ description: Time when the IoC was last observed.
+ type: date
+ description: Looks up an IP address indicator in the SlashNext Threat Intelligence database.
+ - name: domain
+ arguments:
+ - name: domain
+ required: true
+ description: The FQDN to look up in the SlashNext Threat Intelligence database.
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: Domain.Name
+ description: Domain name.
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domain names, the vendor that made the decision.
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domain names, the reason that the vendor made the decision.
+ type: string
+ - contextPath: SlashNext.Domain.Value
+ description: Value of the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.Type
+ description: Type of the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.Verdict
+ description: SlashNext Phishing Incident Response verdict on the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.ThreatStatus
+ description: Threat status of the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.ThreatName
+ description: Name of the threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.ThreatType
+ description: Type of threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.FirstSeen
+ description: Time when the IoC was first observed.
+ type: date
+ - contextPath: SlashNext.Domain.LastSeen
+ description: Time when the IoC was last observed.
+ type: date
+ description: Looks up a Fully Qualified Domain Name (FQDN) indicator in the SlashNext Threat Intelligence database.
+ - name: slashnext-host-reputation
+ arguments:
+ - name: host
+ required: true
+ description: The host to look up in the SlashNext Threat Intelligence database. Can be either a domain name or an IPv4 address.
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: IP.Address
+ description: IP address.
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision.
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the reason that the vendor made the decision.
+ type: string
+ - contextPath: SlashNext.IP.Value
+ description: Value of the IoC.
+ type: string
+ - contextPath: SlashNext.IP.Type
+ description: Type of the IoC.
+ type: string
+ - contextPath: SlashNext.IP.Verdict
+ description: SlashNext Phishing Incident Response verdict on the IoC.
+ type: string
+ - contextPath: SlashNext.IP.ThreatStatus
+ description: Threat status of the IoC.
+ type: string
+ - contextPath: SlashNext.IP.ThreatName
+ description: Name of the threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.IP.ThreatType
+ description: Type of threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.IP.FirstSeen
+ description: Time when the IoC was first observed.
+ type: date
+ - contextPath: SlashNext.IP.LastSeen
+ description: Time when the IoC was last observed.
+ type: date
+ - contextPath: Domain.Name
+ description: Domain name.
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domain names, the vendor that made the decision.
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domain names, the reason that the vendor made the decision.
+ type: string
+ - contextPath: SlashNext.Domain.Value
+ description: Value of the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.Type
+ description: Type of IoC.
+ type: string
+ - contextPath: SlashNext.Domain.Verdict
+ description: SlashNext Phishing Incident Response verdict on the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.ThreatStatus
+ description: Threat status of the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.ThreatName
+ description: Name of the threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.ThreatType
+ description: Type of threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.FirstSeen
+ description: Time when the IoC was first observed.
+ type: date
+ - contextPath: SlashNext.Domain.LastSeen
+ description: Time when the IoC was last observed.
+ type: date
+ description: Queries the SlashNext Cloud database and retrieves the reputation of a host.
+ - name: slashnext-host-report
+ arguments:
+ - name: host
+ required: true
+ description: The host to look up in the SlashNext Threat Intelligence database. Can be either a domain name or an IPv4 address.
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: IP.Address
+ description: IP address.
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision.
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the reason that the vendor made the decision.
+ type: string
+ - contextPath: SlashNext.IP.Value
+ description: Value of the IoC.
+ type: string
+ - contextPath: SlashNext.IP.Type
+ description: Type of the IoC.
+ type: string
+ - contextPath: SlashNext.IP.Verdict
+ description: SlashNext Phishing Incident Response verdict on the IoC.
+ type: string
+ - contextPath: SlashNext.IP.ThreatStatus
+ description: Threat status of the IoC.
+ type: string
+ - contextPath: SlashNext.IP.ThreatName
+ description: Name of the threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.IP.ThreatType
+ description: Type of threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.IP.FirstSeen
+ description: Time when the IoC was first observed.
+ type: date
+ - contextPath: SlashNext.IP.LastSeen
+ description: Time when the IoC was last observed.
+ type: date
+ - contextPath: Domain.Name
+ description: Domain name.
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domain names, the vendor that made the decision.
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domain names, the reason that the vendor made the decision.
+ type: string
+ - contextPath: SlashNext.Domain.Value
+ description: Value of the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.Type
+ description: Type of the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.Verdict
+ description: SlashNext Phishing Incident Response verdict on the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.ThreatStatus
+ description: Threat status of the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.ThreatName
+ description: Name of the threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.ThreatType
+ description: Type of threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.Domain.FirstSeen
+ description: Time when the IoC was first observed.
+ type: date
+ - contextPath: SlashNext.Domain.LastSeen
+ description: Time when the IoC was last observed.
+ type: date
+ description: Queries the SlashNext Cloud database and retrieves a detailed report
+ for a host and associated URL.
+ - name: slashnext-host-urls
+ arguments:
+ - name: host
+ required: true
+ description: The host to look up in the SlashNext Threat Intelligence database, for which to return a list of associated URLs. Can be either a domain name or an IPv4 address.
+ - name: limit
+ description: The maximum number of URL records to fetch. Default is "10".
+ defaultValue: "10"
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: URL.Data
+ description: URL reported.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the reason that the vendor made the decision.
+ type: string
+ - contextPath: SlashNext.URL.Value
+ description: Value of the IoC.
+ type: string
+ - contextPath: SlashNext.URL.Type
+ description: Type of IoC.
+ type: string
+ - contextPath: SlashNext.URL.ScanID
+ description: Scan ID to be used to get the IoC forensics data for further investigation.
+ type: string
+ - contextPath: SlashNext.URL.Verdict
+ description: SlashNext Phishing Incident Response verdict on the IoC.
+ type: string
+ - contextPath: SlashNext.URL.ThreatStatus
+ description: Threat status of the IoC.
+ type: string
+ - contextPath: SlashNext.URL.ThreatName
+ description: Name of the threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.URL.ThreatType
+ description: Type of threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.URL.FirstSeen
+ description: Time when the IoC was first observed.
+ type: date
+ - contextPath: SlashNext.URL.LastSeen
+ description: Time when the IoC was last observed.
+ type: date
+ - contextPath: SlashNext.URL.Final.Value
+ description: Final IoC value (in case original IoC is a redirector to the same domain).
+ type: string
+ - contextPath: SlashNext.URL.Final.Type
+ description: Type of the final IoC.
+ type: string
+ - contextPath: SlashNext.URL.Final.Verdict
+ description: SlashNext Phishing Incident Response verdict on the final IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.Value
+ description: Landing IoC value (in case original IoC is a redirector to a different domain).
+ type: string
+ - contextPath: SlashNext.URL.Landing.Type
+ description: Type of landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ScanID
+ description: Scan ID to be used to get the landing IoC forensics data for further investigation.
+ type: string
+ - contextPath: SlashNext.URL.Landing.Verdict
+ description: SlashNext Phishing Incident Response verdict on the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ThreatStatus
+ description: Threat status of the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ThreatName
+ description: Name of the threat posed by the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ThreatType
+ description: Type of threat posed by the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.FirstSeen
+ description: Time when the landing IoC was first observed.
+ type: date
+ - contextPath: SlashNext.URL.Landing.LastSeen
+ description: Time when the landing IoC was last observed.
+ type: date
+ description: Queries the SlashNext Cloud database and retrieves a list of all URLs
+ associated with the specified host.
+ - name: slashnext-url-scan
+ arguments:
+ - name: url
+ required: true
+ description: The URL to scan.
+ - name: extended_info
+ description: Whether to download forensics data, such as screenshot, HTML, and rendered text. If "true", forensics data will be returned. If "false" (or empty), forensics data will not be returned. Default is "false".
+ defaultValue: "false"
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: URL.Data
+ description: URL reported.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the reason that the vendor made the decision.
+ type: string
+ - contextPath: SlashNext.URL.Value
+ description: Value of the IoC.
+ type: string
+ - contextPath: SlashNext.URL.Type
+ description: Type of IoC.
+ type: string
+ - contextPath: SlashNext.URL.ScanID
+ description: Scan ID to be used to get the IoC forensics data for further investigation.
+ type: string
+ - contextPath: SlashNext.URL.Verdict
+ description: SlashNext Phishing Incident Response verdict on the IoC.
+ type: string
+ - contextPath: SlashNext.URL.ThreatStatus
+ description: Threat status of the IoC.
+ type: string
+ - contextPath: SlashNext.URL.ThreatName
+ description: Name of the threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.URL.ThreatType
+ description: Type of threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.URL.FirstSeen
+ description: Time when the IoC was first observed.
+ type: date
+ - contextPath: SlashNext.URL.LastSeen
+ description: Time when the IoC was last observed.
+ type: date
+ - contextPath: SlashNext.URL.Final.Value
+ description: Final IoC value (in case original IoC is a redirector to the same domain).
+ type: string
+ - contextPath: SlashNext.URL.Final.Type
+ description: Type of the final IoC.
+ type: string
+ - contextPath: SlashNext.URL.Final.Verdict
+ description: SlashNext Phishing Incident Response verdict on the final IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.Value
+ description: Landing IoC value (in case original IoC is a redirector to a different domain).
+ type: string
+ - contextPath: SlashNext.URL.Landing.Type
+ description: Type of landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ScanID
+ description: Scan ID to be used to get the landing IoC forensics data for further investigation.
+ type: string
+ - contextPath: SlashNext.URL.Landing.Verdict
+ description: SlashNext Phishing Incident Response verdict on the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ThreatStatus
+ description: Threat status of the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ThreatName
+ description: Name of the threat posed by the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ThreatType
+ description: Type of the threat posed by the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.FirstSeen
+ description: Time when the landing IoC was first observed.
+ type: date
+ - contextPath: SlashNext.URL.Landing.LastSeen
+ description: Time when the landing IoC was last observed.
+ type: date
+ description: Performs a real-time URL reputation scan with the SlashNext cloud-based
+ SEER Engine. If the specified URL already exists in the cloud database, scan
+ results are returned immediately. If not, this command submits a URL scan
+ request and returns the message "check back later" together with a unique Scan
+ ID. You can retrieve the results of this scan with the "slashnext-scan-report"
+ command any time after 60 seconds, using the returned Scan ID (see the polling
+ sketch after this file).
+ - name: slashnext-url-scan-sync
+ arguments:
+ - name: url
+ required: true
+ description: The URL to scan.
+ - name: timeout
+ description: A timeout value in seconds. If the system is unable to complete
+ a scan within the specified timeout, a timeout error is returned, and you can
+ run the command again with a larger timeout. If no timeout value is specified,
+ a default timeout value of 60 seconds is used.
+ defaultValue: "60"
+ - name: extended_info
+ description: Whether to download forensics data, such as screenshot, HTML, and rendered text. If "true", forensics data will be returned. If "false" (or empty), forensics data will not be returned. Default is "false".
+ defaultValue: "false"
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: URL.Data
+ description: URL reported.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the reason that the vendor made the decision.
+ type: string
+ - contextPath: SlashNext.URL.Value
+ description: Value of the IoC.
+ type: string
+ - contextPath: SlashNext.URL.Type
+ description: Type of IoC.
+ type: string
+ - contextPath: SlashNext.URL.ScanID
+ description: Scan ID to be used to get the IoC forensics data for further investigation.
+ type: string
+ - contextPath: SlashNext.URL.Verdict
+ description: SlashNext Phishing Incident Response verdict on the IoC.
+ type: string
+ - contextPath: SlashNext.URL.ThreatStatus
+ description: Threat status of the IoC.
+ type: string
+ - contextPath: SlashNext.URL.ThreatName
+ description: Name of the threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.URL.ThreatType
+ description: Type of threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.URL.FirstSeen
+ description: Time when the IoC was first observed.
+ type: date
+ - contextPath: SlashNext.URL.LastSeen
+ description: Time when the IoC was last observed.
+ type: date
+ - contextPath: SlashNext.URL.Final.Value
+ description: Final IoC value (in case original IoC is a redirector to the same domain).
+ type: string
+ - contextPath: SlashNext.URL.Final.Type
+ description: Type of the final IoC.
+ type: string
+ - contextPath: SlashNext.URL.Final.Verdict
+ description: SlashNext Phishing Incident Response verdict on the final IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.Value
+ description: Landing IoC value (in case original IoC is a redirector to a different domain).
+ type: string
+ - contextPath: SlashNext.URL.Landing.Type
+ description: Type of landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ScanID
+ description: Scan ID to be used to get the landing IoC forensics data for further investigation.
+ type: string
+ - contextPath: SlashNext.URL.Landing.Verdict
+ description: SlashNext Phishing Incident Response verdict on the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ThreatStatus
+ description: Threat status of the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ThreatName
+ description: Name of the threat posed by the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ThreatType
+ description: Type of threat posed by the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.FirstSeen
+ description: Time when the landing IoC was first observed.
+ type: date
+ - contextPath: SlashNext.URL.Landing.LastSeen
+ description: Time when the landing IoC was last observed.
+ type: date
+ description: Performs a real-time URL scan with the SlashNext cloud-based SEER
+ Engine in blocking mode. If the specified URL already exists in the cloud database,
+ the scan result is returned immediately. If not, this command submits a URL
+ scan request and waits for the scan to finish, which may take up to 60 seconds.
+ - name: slashnext-scan-report
+ arguments:
+ - name: scanid
+ required: true
+ description: Scan ID of the scan for which to get the report. Can be retrieved from the "slashnext-url-scan" command or "slashnext-url-scan-sync" command.
+ - name: extended_info
+ description: Whether to download forensics data, such as screenshot, HTML, and rendered text. If "true", forensics data will be returned. If "false" (or empty), forensics data will not be returned. Default is "false".
+ defaultValue: "false"
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: DBotScore.Type
+ description: Indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: URL.Data
+ description: URL reported.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the reason that the vendor made the decision.
+ type: string
+ - contextPath: SlashNext.URL.Value
+ description: Value of the IoC.
+ type: string
+ - contextPath: SlashNext.URL.Type
+ description: Type of IoC.
+ type: string
+ - contextPath: SlashNext.URL.ScanID
+ description: Scan ID to be used to get the IoC forensics data for further investigation.
+ type: string
+ - contextPath: SlashNext.URL.Verdict
+ description: SlashNext Phishing Incident Response verdict on the IoC.
+ type: string
+ - contextPath: SlashNext.URL.ThreatStatus
+ description: Threat status of the IoC.
+ type: string
+ - contextPath: SlashNext.URL.ThreatName
+ description: Name of the threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.URL.ThreatType
+ description: Type of threat posed by the IoC.
+ type: string
+ - contextPath: SlashNext.URL.FirstSeen
+ description: Time when the IoC was first observed.
+ type: date
+ - contextPath: SlashNext.URL.LastSeen
+ description: Time when the IoC was last observed.
+ type: date
+ - contextPath: SlashNext.URL.Final.Value
+ description: Final IoC value (in case the original IoC is a redirector to the same domain).
+ type: string
+ - contextPath: SlashNext.URL.Final.Type
+ description: Type of the final IoC.
+ type: string
+ - contextPath: SlashNext.URL.Final.Verdict
+ description: SlashNext Phishing Incident Response verdict on the final IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.Value
+ description: Landing IoC value (in case original IoC is a redirector to a different domain).
+ type: string
+ - contextPath: SlashNext.URL.Landing.Type
+ description: Type of landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ScanID
+ description: Scan ID to be used to get the landing IoC forensics data for further investigation.
+ type: string
+ - contextPath: SlashNext.URL.Landing.Verdict
+ description: SlashNext Phishing Incident Response verdict on the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ThreatStatus
+ description: Threat status of the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ThreatName
+ description: Name of the threat posed by the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.ThreatType
+ description: Type of threat posed by the landing IoC.
+ type: string
+ - contextPath: SlashNext.URL.Landing.FirstSeen
+ description: Time when the landing IoC was first observed.
+ type: date
+ - contextPath: SlashNext.URL.Landing.LastSeen
+ description: Time when the landing IoC was last observed.
+ type: date
+ description: Retrieves the results of a URL scan for a previous scan request. If the
+ scan is finished, results are returned immediately; otherwise, the message "check back
+ later" is returned.
+ - name: slashnext-download-screenshot
+ arguments:
+ - name: scanid
+ required: true
+ description: Scan ID. Can be retrieved from the "slashnext-url-scan" command or the "slashnext-url-scan-sync" command.
+ - name: resolution
+ description: Resolution of the web page screenshot. Can be "high" or "medium". Default is "high".
+ defaultValue: "high"
+ description: Downloads a screenshot of a web page for a previous URL scan request.
+ - name: slashnext-download-html
+ arguments:
+ - name: scanid
+ required: true
+ description: Scan ID. Can be retrieved from the "slashnext-url-scan" command or the "slashnext-url-scan-sync" command.
+ description: Downloads the HTML of a web page for a previous URL scan request.
+ - name: slashnext-download-text
+ arguments:
+ - name: scanid
+ required: true
+ description: Scan ID. Can be retrieved from the "slashnext-url-scan" command or the "slashnext-url-scan-sync" command.
+ description: Downloads the text of a web page for a previous URL scan request.
+ runonce: false
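The asynchronous scan flow described above (slashnext-url-scan returning a Scan ID, then slashnext-scan-report retrieving the verdict) lends itself to a simple polling loop. Below is a minimal sketch of what that could look like inside a Demisto automation script; `poll_url_verdict` is a hypothetical helper, and the context extraction is simplified (the real entry context key carries a DT filter such as `SlashNext.URL(val.Value === obj.Value)`):

```python
import time

import demistomock as demisto  # provided to automation scripts by the platform


def poll_url_verdict(url, attempts=5, delay=60):
    """Hypothetical helper: submit a URL scan, then poll for the finished report."""
    res = demisto.executeCommand('slashnext-url-scan', {'url': url})
    # Context extraction is simplified here; real code must handle the
    # DT-filtered key 'SlashNext.URL(val.Value === obj.Value)'.
    scan_id = demisto.get(res[0], 'EntryContext.SlashNext.URL.ScanID')
    for _ in range(attempts):
        time.sleep(delay)  # results are available any time after ~60 seconds
        report = demisto.executeCommand('slashnext-scan-report', {'scanid': scan_id})
        contents = report[0].get('Contents')
        if 'check back later' not in str(contents):
            return contents  # scan finished; verdict and threat data included
    return None  # scan did not finish within attempts * delay seconds
```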
diff --git a/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse_description.md b/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse_description.md
new file mode 100644
index 000000000000..c0d5009667be
--- /dev/null
+++ b/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse_description.md
@@ -0,0 +1,9 @@
+The **SlashNext Phishing Incident Response** integration app enables **Demisto** users to fully automate the analysis of suspicious URLs in phishing emails, network logs, and more. Playbooks that require URL or domain analysis can automatically submit those indicators to the SlashNext SEER threat detection cloud to get definitive, binary verdicts (malicious or benign), along with IoCs, screenshots, and more.
+
+SlashNext threat detection uses browsers in a purpose-built cloud to dynamically inspect page contents and site behavior in real time. This method enables SlashNext to follow URL redirects and multi-stage attacks, analyze the final page(s) more thoroughly, and make a much more accurate, binary determination with near-zero false positives. It also detects all six major categories of phishing and social engineering sites, including credential stealing, rogue software/malware sites, scareware, phishing exploits (sites hosting weaponized documents, etc.), and social engineering scams (fake deals, giveaways, etc.).
+
+Use cases include abuse inbox management, where SOC teams can automate URL analysis in phishing emails to save hundreds of hours versus more manual methods. Playbooks that mine and analyze network logs can also leverage SlashNext URL analysis on demand.
+
+SlashNext not only provides accurate, binary verdicts (rather than threat scores), it also provides IoC metadata and screenshots of detected phishing pages. These enable easier classification and reporting. Screenshots can also be used as an aid in ongoing employee phishing awareness training and testing.
+
+The SlashNext Phishing Incident Response integration app uses an API key to authenticate with the SlashNext cloud. If you don't have a valid API key, contact the SlashNext team at support@slashnext.com.
diff --git a/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse_image.png b/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse_image.png
new file mode 100644
index 000000000000..d19424858b00
Binary files /dev/null and b/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse_image.png differ
diff --git a/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse_test.py b/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse_test.py
new file mode 100644
index 000000000000..d6534f995dc3
--- /dev/null
+++ b/Integrations/SlashNextPhishingIncidentResponse/SlashNextPhishingIncidentResponse_test.py
@@ -0,0 +1,18 @@
+"""
+Created on September 26, 2019
+
+@author: Saadat Abid
+"""
+
+
+def test_get_dbot_score():
+ from SlashNextPhishingIncidentResponse import get_dbot_score
+
+ assert 1 == get_dbot_score(verdict='Benign')
+ assert 1 == get_dbot_score(verdict='Redirector')
+
+ assert 2 == get_dbot_score(verdict='Suspicious')
+
+ assert 3 == get_dbot_score(verdict='Malicious')
+
+ assert 0 == get_dbot_score(verdict='Unrated')
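The test above fully pins down the verdict-to-score mapping, so a `get_dbot_score` consistent with it could be as small as the sketch below (the actual implementation lives in SlashNextPhishingIncidentResponse.py, which is not part of this diff):

```python
VERDICT_TO_DBOT_SCORE = {
    'Benign': 1,      # good
    'Redirector': 1,  # treated as good
    'Suspicious': 2,  # suspicious
    'Malicious': 3,   # bad
}


def get_dbot_score(verdict):
    # Anything unrecognized (e.g. 'Unrated') maps to 0 -- unknown.
    return VERDICT_TO_DBOT_SCORE.get(verdict, 0)
```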
diff --git a/Integrations/Snowflake/CHANGELOG.md b/Integrations/Snowflake/CHANGELOG.md
new file mode 100644
index 000000000000..29a5d57f0f39
--- /dev/null
+++ b/Integrations/Snowflake/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.0] - 2019-10-03
+Fixed an issue in the ***fetch incidents*** functionality.
diff --git a/Integrations/Snowflake/Snowflake.py b/Integrations/Snowflake/Snowflake.py
new file mode 100644
index 000000000000..d8c1e4b012db
--- /dev/null
+++ b/Integrations/Snowflake/Snowflake.py
@@ -0,0 +1,417 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+'''IMPORTS'''
+
+import snowflake.connector
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import serialization
+from datetime import date, timedelta, datetime
+from datetime import time as dttime
+from decimal import Decimal
+
+'''GLOBAL VARS'''
+
+PARAMS = demisto.params()
+CREDENTIALS = PARAMS.get('credentials')
+USER = CREDENTIALS.get('identifier')
+PASSWORD = CREDENTIALS.get('password')
+CERTIFICATE = (CREDENTIALS.get('credentials', {}).get('sshkey') or '').encode()
+CERT_PASSWORD = CREDENTIALS.get('credentials', {}).get('password')
+CERT_PASSWORD = CERT_PASSWORD.encode() if CERT_PASSWORD else None
+ACCOUNT = PARAMS.get('account')
+AUTHENTICATOR = PARAMS.get('authenticator')
+REGION = PARAMS.get('region')
+WAREHOUSE = PARAMS.get('warehouse')
+DATABASE = PARAMS.get('database')
+SCHEMA = PARAMS.get('schema')
+ROLE = PARAMS.get('role')
+INSECURE = PARAMS.get('insecure', False)
+IS_FETCH = PARAMS.get('isFetch')
+# How far back in time to go on the first fetch
+FETCH_TIME = PARAMS.get('fetch_time')
+FETCH_QUERY = PARAMS.get('fetch_query')
+DATETIME_COLUMN = PARAMS.get('datetime_column')
+INCIDENT_NAME_COLUMN = PARAMS.get('incident_name_column')
+MAX_ROWS = int(PARAMS.get('limit')) if PARAMS.get('limit') else 10000
+
+TYPE_CODE_TO_DATATYPE = {
+ 0: 'number/int',
+ 1: 'real',
+ 2: 'varchar/string',
+ 3: 'date',
+ 4: 'timestamp',
+ 5: 'variant',
+ 6: 'timestamp_ltz',
+ 7: 'timestamp_tz',
+ 8: 'timestamp_tz',
+ 9: 'object',
+ 10: 'array',
+ 11: 'binary',
+ 12: 'time',
+ 13: 'boolean'
+}
+DT_NEEDS_CHECKING = {'date', 'timestamp', 'timestamp_ltz', 'timestamp_tz', 'time'}
+
+
+'''SETUP'''
+
+if IS_FETCH and not (FETCH_QUERY and DATETIME_COLUMN):
+ err_msg = 'When fetching is enabled, two additional parameters are required:'
+ err_msg += ' the fetch query that determines what data to fetch, and the name of the column'
+ err_msg += ' in the fetched data that contains a datetime object or timestamp.'
+ raise Exception(err_msg)
+
+
+'''HELPER FUNCTIONS'''
+
+
+def convert_datetime_to_string(v):
+ """
+ Parses date, time, timedelta, or datetime object into string
+
+ parameter: (datetime/date/time/timedelta) v
+ The datetime/date/time/timedelta object to convert
+
+ returns:
+ Formatted string of the object
+ """
+ if isinstance(v, datetime):
+ return v.strftime('%Y-%m-%d %H:%M:%S.%f %z').strip()
+ elif isinstance(v, date):
+ return v.strftime('%Y-%m-%d').strip()
+ elif isinstance(v, dttime):
+ return v.strftime('%H:%M:%S.%f').strip()
+ return v
+
+
+def error_message_from_snowflake_error(e):
+ """
+ Return formatted error message from contents of a Snowflake error
+
+ parameter: (snowflake.connector.errors.Error) e
+ The Snowflake error object
+
+ returns:
+ Formatted error message
+ """
+ err_msg = 'Snowflake DB error code: {}\n'.format(e.errno)
+ err_msg += 'ANSI-compliant SQL State code: {}\n'.format(e.sqlstate)
+ err_msg += 'Snowflake query ID: {}\n'.format(e.sfqid)
+ err_msg += 'Error message: {}'
+ if e.errno == 606:
+ first_sentence = e.raw_msg[:e.raw_msg.find('.') + 1]
+ err_msg = err_msg.format(first_sentence)
+ err_msg += ' Specify an active warehouse in the command '
+ err_msg += 'arguments or in the integration parameters.'
+ elif e.errno == 2003:
+ err_msg = err_msg.format(e.raw_msg)
+ err_msg += ' A possible explanation is that the values you entered'
+ err_msg += ' for the \'warehouse\' and \'database\' were incorrect.'
+ else:
+ err_msg = err_msg.format(e.raw_msg)
+ return err_msg
+
+
+def set_provided(params, key, val1, val2=None):
+ """
+ If value is provided, set it in the dict
+ """
+ if val1:
+ params[key] = val1
+ elif val2:
+ params[key] = val2
+
+
+def process_table_row(row, checks):
+ """
+ Check row data and reformat if necessary
+
+ The 'checks' parameter contains the names of fields that have the potential to cause
+ issues when they will be json decoded. This function checks the values of the fields
+ flagged in the 'checks' parameter and formats the contents to a json friendly type if
+ necessary.
+
+ parameter: (dict) row
+ The data (table row) that needs to be processed
+
+ parameter: (dict[str, list]) checks
+ Dictionary where the key is a string indicative of the type (or bucket of types) that needs
+ reformatting and the values are a list of column names whose data is of that type
+
+ returns:
+ Reformatted Row
+ """
+ for column_name, val in row.items():
+ if column_name in checks.get('isDecimal', []):
+ # Then check the value and reformat it if necessary
+ if isinstance(val, Decimal):
+ row[column_name] = str(val)
+ elif column_name in checks.get('isDT', []):
+ # Then reformat it if necessary
+ row[column_name] = convert_datetime_to_string(val)
+ return row
+
+
+def format_to_json_serializable(column_descriptions, results):
+ """
+ Screen and reformat any data in 'results' argument that is
+ not json serializable, and return 'results'. 'results' can
+ be a table of data (a list of rows) or a single row.
+
+ parameter: (list) column_descriptions
+ The metadata that describes data for each column in the 'results' parameter
+
+ parameter: (list/dict) results
+ What was returned by the cursor object's execute or fetch operation
+
+ returns:
+ Reformatted 'results'
+ """
+ name = 0
+ type_code = 1
+
+ checks: dict = {}
+ # Screen by type_code
+ for col in column_descriptions:
+ if TYPE_CODE_TO_DATATYPE.get(col[type_code]) == 'number/int':
+ # Then need to check that column's data to see if its data type is Decimal
+ checks.setdefault('isDecimal', []).append(col[name])
+ elif TYPE_CODE_TO_DATATYPE.get(col[type_code]) in DT_NEEDS_CHECKING:
+ # Then need to check that column's data to see if its data type is date, time, timedelta or datetime
+ checks.setdefault('isDT', []).append(col[name])
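+ # For illustration (hypothetical column names): a result set with columns
+ # AMOUNT (number) and CREATED_AT (timestamp) yields
+ # checks == {'isDecimal': ['AMOUNT'], 'isDT': ['CREATED_AT']}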
+
+ # If 'results' is a list then it is a data table (list of rows) and each row needs to
+ # be processed; if 'results' is a dict then it is a single table row.
+ # Check candidates and reformat if necessary
+ if isinstance(results, dict):
+ results = process_table_row(results, checks)
+ else:
+ # if 'results' isn't a dict, assume it's a list
+ for i, row in enumerate(results):
+ results[i] = process_table_row(row, checks)
+ return results
+
+
+def get_connection_params(args):
+ """
+ Construct and return the connection parameters
+
+ parameter: (dict) args
+ The command arguments of the command function calling this helper function
+
+ returns:
+ Snowflake connection params
+ """
+ params: dict = {}
+ set_provided(params, 'user', USER)
+ set_provided(params, 'password', PASSWORD)
+ set_provided(params, 'account', ACCOUNT)
+ set_provided(params, 'authenticator', AUTHENTICATOR)
+ set_provided(params, 'region', REGION)
+ set_provided(params, 'insecure_mode', INSECURE)
+ set_provided(params, 'warehouse', args.get('warehouse'), WAREHOUSE)
+ set_provided(params, 'database', args.get('database'), DATABASE)
+ set_provided(params, 'schema', args.get('schema'), SCHEMA)
+ set_provided(params, 'role', args.get('role'), ROLE)
+ if CERTIFICATE:
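+ # snowflake.connector expects the raw private key as DER-encoded PKCS#8 bytes,
+ # so the PEM key from the credentials store is decrypted here and re-serialized
+ # without encryption before being passed to connect()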
+ p_key = serialization.load_pem_private_key(CERTIFICATE, password=CERT_PASSWORD, backend=default_backend())
+ pkb = p_key.private_bytes(
+ encoding=serialization.Encoding.DER,
+ format=serialization.PrivateFormat.PKCS8,
+ encryption_algorithm=serialization.NoEncryption()
+ )
+ params['private_key'] = pkb
+ return params
+
+
+def row_to_incident(column_descriptions, row):
+ """
+ Create incident from data returned by queried database in fetch_incidents
+
+ parameter: (list) column_descriptions
+ The metadata that describes the values for each column in the 'data' parameter
+
+ parameter: (dict) row
+ The row of data where each cell's key in the row is the name of the column
+ to which it belongs
+
+ returns:
+ Incident Object
+ """
+ incident = {}
+ occurred = row.get(DATETIME_COLUMN)
+ timestamp = None
+ if occurred:
+ if isinstance(occurred, (dttime, timedelta)):
+ err_msg = 'The datetime field specified in the integration parameters must '
+ err_msg += 'contain values of type "datetime" or "date".'
+ raise Exception(err_msg)
+ if not isinstance(occurred, datetime):
+ # A plain 'date' has no timestamp() method, so promote it to a datetime at midnight
+ occurred = datetime.combine(occurred, dttime.min)
+ timestamp = occurred.timestamp() * 1000
+ else:
+ err_msg = 'Nothing found when trying to fetch the datetime field specified in'
+ err_msg += ' the integration parameters. Please check that the name was correct.'
+ err_msg += ' If the field name was correct, verify that the returned value for'
+ err_msg += ' the specified field is not NULL for ALL of the rows to be fetched.'
+ raise Exception(err_msg)
+ # Incident Title
+ if INCIDENT_NAME_COLUMN:
+ name = row.get(INCIDENT_NAME_COLUMN)
+ else:
+ name = 'Snowflake Incident -- '
+ name += convert_datetime_to_string(occurred) + '- ' + str(datetime.now().timestamp())
+ incident['name'] = name
+ incident['occurred'] = occurred.isoformat()
+ # Incident occurrence time as timestamp - the datetime field specified in the integration parameters
+ incident['timestamp'] = timestamp
+ # The raw response for the row (reformatted to be json serializable) returned by the db query
+ reformatted_row = format_to_json_serializable(column_descriptions, row)
+ incident['rawJSON'] = json.dumps(reformatted_row)
+ return incident
+
+
+'''MAIN FUNCTIONS / API CALLS'''
+
+
+def test_module():
+ """
+ Test the validity of the integration instance parameters by trying to create a connection
+
+ returns:
+ An 'ok' message if valid, otherwise an error message
+ """
+ params = get_connection_params({})
+ with snowflake.connector.connect(**params):
+ demisto.results('ok')
+
+
+def fetch_incidents():
+ """
+ Fetch events from this integration and return them as Demisto incidents
+
+ returns:
+ Demisto incidents
+ """
+ # demisto.getLastRun() returns an object with the previous run's state in it.
+ last_run = demisto.getLastRun()
+ # Get the last fetch time and data if it exists
+ last_fetch = last_run.get('last_fetched_data_timestamp')
+ last_fetched_data = last_run.get('last_fetched_data')
+
+ # Handle first time fetch, fetch incidents retroactively
+ if not last_fetch:
+ last_fetch, _ = parse_date_range(FETCH_TIME, to_timestamp=True)
+ args = {'limit': MAX_ROWS, 'query': FETCH_QUERY}  # snowflake_query reads the 'limit' key
+ column_descriptions, data = snowflake_query(args)
+ data.sort(key=lambda k: k[DATETIME_COLUMN])
+ # convert the data/events to demisto incidents
+ incidents = []
+ for row in data:
+ incident = row_to_incident(column_descriptions, row)
+ incident_timestamp = incident.get('timestamp')
+
+ # Update last run and add incident if the incident is newer than last fetch
+ if incident_timestamp and incident_timestamp >= last_fetch:
+ last_fetch = incident_timestamp
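+ # last_fetched_data holds the rawJSON of the newest row from the previous run;
+ # skipping an identical row avoids re-creating that incident when rows at
+ # exactly last_fetch are returned again by the next query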
+ if incident.get('rawJSON') != last_fetched_data:
+ last_fetched_data = incident.get('rawJSON')
+ del incident['timestamp']
+ incidents.append(incident)
+
+ this_run = {
+ 'last_fetched_data': last_fetched_data,
+ 'last_fetched_data_timestamp': last_fetch
+ }
+ demisto.setLastRun(this_run)
+ demisto.incidents(incidents)
+
+
+def snowflake_query(args):
+ params = get_connection_params(args)
+ query = args.get('query')
+ limit = args.get('limit', '100')
+ try:
+ limit = int(limit)
+ except ValueError:
+ raise ValueError('The value for limit must be an integer.')
+ if limit > MAX_ROWS:
+ limit = MAX_ROWS
+ with snowflake.connector.connect(**params) as connection:
+ with connection.cursor(snowflake.connector.DictCursor) as cur:
+ cur.execute(query)
+ results = cur.fetchmany(limit)
+ if results:
+ return cur.description, results
+ else:
+ return [], []
+
+
+def snowflake_query_command():
+ args = demisto.args()
+ query = args.get('query')
+ db = args.get('database') if args.get('database') else DATABASE
+ schema = args.get('schema') if args.get('schema') else SCHEMA
+ col_descriptions, results = snowflake_query(args)
+ if not results:
+ demisto.results('No data found matching the query')
+ else:
+ results = format_to_json_serializable(col_descriptions, results)
+
+ entry_context = {
+ 'Database': db,
+ 'Schema': schema,
+ 'Query': query,
+ 'Result': results
+ }
+ columns = argToList(args.get('columns'))
+ human_readable = tableToMarkdown(query, results, columns, removeNull=True)
+ demisto_transform = 'Snowflake(val.Query && val.Query === obj.Query'
+ demisto_transform += ' && val.Database && val.Database === obj.Database'
+ demisto_transform += ' && val.Schema && val.Schema === obj.Schema)'
+ outputs = {demisto_transform: entry_context}
+
+ return_outputs(
+ outputs=outputs,
+ readable_output=human_readable,
+ raw_response=results
+ )
+
+
+def snowflake_update_command():
+ args = demisto.args()
+ db_operation = args.get('db_operation')
+ params = get_connection_params(args)
+ with snowflake.connector.connect(**params) as connection:
+ with connection.cursor() as cursor:
+ cursor.execute(db_operation)
+ demisto.results('Operation executed successfully.')
+
+
+'''COMMAND SWITCHBOARD'''
+
+commands = {
+ 'test-module': test_module,
+ 'fetch-incidents': fetch_incidents,
+ 'snowflake-query': snowflake_query_command,
+ 'snowflake-update': snowflake_update_command
+}
+
+
+'''EXECUTION'''
+
+try:
+ handle_proxy()
+ if demisto.command() in commands:
+ commands[demisto.command()]()
+except snowflake.connector.errors.Error as e:
+ return_error(error_message_from_snowflake_error(e))
+except Exception as e:
+ if IS_FETCH:
+ raise e
+ else:
+ if isinstance(e, snowflake.connector.errors.Error):
+ return_error(error_message_from_snowflake_error(e))
+ else:
+ return_error(str(e))
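For reference, the query path above boils down to the standard Snowflake connector flow. A minimal standalone sketch, assuming placeholder connection values (DEMO_USER, DEMO_WH, and demo_table are illustrative, not values from this integration):

```python
import snowflake.connector

# Placeholder connection parameters -- substitute real account details.
params = {
    'user': 'DEMO_USER',
    'password': '********',
    'account': 'mycompany',   # account name without snowflakecomputing.com
    'warehouse': 'DEMO_WH',
    'database': 'DEMO_DB',
}

with snowflake.connector.connect(**params) as connection:
    # DictCursor yields each row as {column_name: value}, the shape that
    # process_table_row and format_to_json_serializable operate on.
    with connection.cursor(snowflake.connector.DictCursor) as cur:
        cur.execute('SELECT * FROM demo_table')
        rows = cur.fetchmany(100)    # the integration caps this at MAX_ROWS
        columns = cur.description    # (name, type_code, ...) per column
```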
diff --git a/Integrations/Snowflake/Snowflake.yml b/Integrations/Snowflake/Snowflake.yml
new file mode 100644
index 000000000000..01f94190170d
--- /dev/null
+++ b/Integrations/Snowflake/Snowflake.yml
@@ -0,0 +1,189 @@
+category: Database
+commonfields:
+ id: Snowflake
+ version: -1
+configuration:
+- display: Account - See Detailed Description section.
+ name: account
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- display: Region (only if you are not US West)
+ name: region
+ required: false
+ type: 0
+- display: Authenticator - See Detailed Description section.
+ name: authenticator
+ required: false
+ type: 0
+- display: Default warehouse to use
+ name: warehouse
+ required: true
+ type: 0
+- display: Default database to use
+ name: database
+ required: true
+ type: 0
+- display: Default schema to use
+ name: schema
+ required: false
+ type: 0
+- display: Default role to use
+ name: role
+ required: false
+ type: 0
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: ┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉┉‎ Fetch
+ query to retrieve new incidents. This field is mandatory when 'Fetch incidents'
+ is set to true.
+ name: fetch_query
+ required: false
+ type: 0
+- defaultvalue: 24 hours
+ display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days)
+ name: fetch_time
+ required: false
+ type: 0
+- display: The name of the field/column that contains the datetime object or timestamp
+ for the data being fetched (case sensitive). This field is mandatory when 'Fetch
+ incidents' is set to true.
+ name: datetime_column
+ required: false
+ type: 0
+- display: The name of the field/column in the fetched data from which the name of
+ the Demisto incident is taken (case sensitive)
+ name: incident_name_column
+ required: false
+ type: 0
+- defaultvalue: '10000'
+ display: The maximum number of rows to be returned by a fetch
+ name: limit
+ required: false
+ type: 0
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+description: Analytic data warehouse provided as Software-as-a-Service.
+display: Snowflake
+name: Snowflake
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: The query to execute.
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ - default: false
+ description: The warehouse to use for the query. If not specified, the default will be used.
+ isArray: false
+ name: warehouse
+ required: false
+ secret: false
+ - default: false
+ description: The database to use for the query. If not specified, the default will be used.
+ isArray: false
+ name: database
+ required: false
+ secret: false
+ - default: false
+ description: The schema to use for the query. If not specified, the default will be used.
+ isArray: false
+ name: schema
+ required: false
+ secret: false
+ - default: false
+ description: The role to use for the query. If not specified, the default will be used.
+ isArray: false
+ name: role
+ required: false
+ secret: false
+ - default: true
+ defaultValue: '100'
+ description: The number of rows to retrieve.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: 'A CSV list of columns to display in the specified order, for example: "Name, ID, Timestamp"'
+ isArray: true
+ name: columns
+ required: false
+ secret: false
+ deprecated: false
+ description: Executes a SELECT query and retrieves the data.
+ execution: true
+ name: snowflake-query
+ outputs:
+ - contextPath: Snowflake.Query
+ description: The query used to fetch results from the database.
+ type: String
+ - contextPath: Snowflake.Result
+ description: Results from querying the database.
+ type: Unknown
+ - contextPath: Snowflake.Database
+ description: The name of the database object.
+ type: String
+ - contextPath: Snowflake.Schema
+ description: The name of the schema object.
+ type: String
+ - arguments:
+ - default: true
+ description: The command to execute.
+ isArray: false
+ name: db_operation
+ required: true
+ secret: false
+ - default: false
+ description: The warehouse to use for the query. If not specified, the default will be used.
+ isArray: false
+ name: warehouse
+ required: false
+ secret: false
+ - default: false
+ description: The database to use for the query. If not specified, the default will be used.
+ isArray: false
+ name: database
+ required: false
+ secret: false
+ - default: false
+ description: The schema to use for the query. If not specified, the default will be used.
+ isArray: false
+ name: schema
+ required: false
+ secret: false
+ - default: false
+ description: The role to use for the query. If not specified, the default will be used.
+ isArray: false
+ name: role
+ required: false
+ secret: false
+ deprecated: false
+ description: Makes a DML change in the database.
+ execution: true
+ name: snowflake-update
+ dockerimage: demisto/snowflake:1.0.0.211
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- Snowflake-Test
diff --git a/Integrations/Snowflake/Snowflake_description.md b/Integrations/Snowflake/Snowflake_description.md
new file mode 100644
index 000000000000..485c6fc70059
--- /dev/null
+++ b/Integrations/Snowflake/Snowflake_description.md
@@ -0,0 +1,13 @@
+## Integration Parameters
+
+### Account
+The name of the Snowflake account to connect to, without the domain name snowflakecomputing.com. For example, for the account mycompany.snowflakecomputing.com, enter "mycompany". For more information, see the [Snowflake Computing documentation](https://docs.snowflake.net/manuals/user-guide/python-connector-api.html#label-account-format-info).
+
+### Authenticator
+(Optional) Use this parameter to log in to your Snowflake account using Okta. For the 'Username' parameter, enter your '<okta username>'. For the 'Password' parameter, enter your '<okta password>'. The value entered here should be 'https://<okta account name>.okta.com/', where all the values between the less than and greater than symbols are replaced with the actual information specific to your Okta account.
+
+### Credentials
+To use Key Pair authentication, follow these instructions:
+1. Follow steps 1-4 in the instructions detailed in the [Snowflake Computing documentation](https://docs.snowflake.net/manuals/user-guide/python-connector-example.html#using-key-pair-authentication).
+2. Follow the instructions under the section titled **Configure Demisto Credentials** at this [link](https://support.demisto.com/hc/en-us/articles/115002567894).
+3. Use the credentials you configured. Refer to the two images at the bottom of the section titled **Configure an External Credentials Vault**.
\ No newline at end of file
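For step 1, a minimal sketch of generating a compatible RSA key pair with Python's cryptography package (key size and formats follow Snowflake's key pair authentication guide; the passphrase is a placeholder):

```python
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

# Generate a 2048-bit RSA key, per Snowflake's key pair authentication guide.
key = rsa.generate_private_key(public_exponent=65537, key_size=2048,
                               backend=default_backend())

# Encrypted PKCS#8 PEM private key: this is the value that goes into the
# Demisto credentials object ('my-passphrase' is a placeholder).
private_pem = key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.BestAvailableEncryption(b'my-passphrase'),
)

# Matching public key (SubjectPublicKeyInfo PEM), to be registered on the
# Snowflake user, e.g. ALTER USER <user> SET RSA_PUBLIC_KEY='...'.
public_pem = key.public_key().public_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PublicFormat.SubjectPublicKeyInfo,
)
```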
diff --git a/Integrations/Snowflake/Snowflake_image.png b/Integrations/Snowflake/Snowflake_image.png
new file mode 100644
index 000000000000..84e1cc00945c
Binary files /dev/null and b/Integrations/Snowflake/Snowflake_image.png differ
diff --git a/Integrations/SplunkPy/CHANGELOG.md b/Integrations/SplunkPy/CHANGELOG.md
new file mode 100644
index 000000000000..7f72fb2a03cf
--- /dev/null
+++ b/Integrations/SplunkPy/CHANGELOG.md
@@ -0,0 +1,18 @@
+## [Unreleased]
+ - Improved handling of the *app context* parameter.
+ - Fixed handling of arrays when converting notable events to incidents.
+
+## [19.10.1] - 2019-10-15
+- Added the *app* parameter, which is the app context of the namespace.
+- Prettified the human readable of the search command.
+
+
+## [19.10.0] - 2019-10-03
+Added the *Earliest time to fetch* and *Latest time to fetch* parameters, which are the names of the Splunk fields whose values define the query's earliest and latest time to fetch.
+
+
+## [19.9.1] - 2019-09-18
+-
+
+## [19.9.0] - 2019-09-04
+- Added the *Fetch limit* parameter to the instance configuration, which specifies the maximum number of results to fetch.
diff --git a/Integrations/SplunkPy/Pipfile b/Integrations/SplunkPy/Pipfile
new file mode 100644
index 000000000000..7da7c2702612
--- /dev/null
+++ b/Integrations/SplunkPy/Pipfile
@@ -0,0 +1,22 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+
+[packages]
+certifi = "==2017.7.27.1"
+chardet = "==3.0.4"
+idna = "==2.6"
+requests = "==2.18.4"
+splunk-sdk = "==1.6.2"
+urllib3 = "==1.22"
+virtualenv = "==15.0.3"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/SplunkPy/Pipfile.lock b/Integrations/SplunkPy/Pipfile.lock
new file mode 100644
index 000000000000..28748573c3bc
--- /dev/null
+++ b/Integrations/SplunkPy/Pipfile.lock
@@ -0,0 +1,369 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "1cd2e04e5e042aea8c0d5597d2c708a04b9c11a9bf2f00d013b7932c1d2154fa"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:40523d2efb60523e113b44602298f0960e900388cf3bb6043f645cf57ea9e3f5",
+ "sha256:54a07c09c586b0e4c619f02a5e94e36619da8e2b053e20f594348c0611803704"
+ ],
+ "index": "pypi",
+ "version": "==2017.7.27.1"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "splunk-sdk": {
+ "hashes": [
+ "sha256:17c6a5be24e784fa5083a2a8714d38f0c95fba5cf995a1dea8f9d12efd81c754"
+ ],
+ "index": "pypi",
+ "version": "==1.6.2"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ },
+ "virtualenv": {
+ "hashes": [
+ "sha256:6d9c760d3fc5fa0894b0f99b9de82a4647e1164f0b700a7f99055034bf548b1d",
+ "sha256:cc8164362fc9611d478f784bbc066f3ee74526c50336ec61a6e75d5af97926c8"
+ ],
+ "index": "pypi",
+ "version": "==15.0.3"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:40523d2efb60523e113b44602298f0960e900388cf3bb6043f645cf57ea9e3f5",
+ "sha256:54a07c09c586b0e4c619f02a5e94e36619da8e2b053e20f594348c0611803704"
+ ],
+ "index": "pypi",
+ "version": "==2017.7.27.1"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==3.7.4"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16",
+ "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.3.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8",
+ "sha256:80d2de76188eabfbfcf27e6a37342c2827801e59c4cc14b0371c56fed43820e3"
+ ],
+ "version": "==0.19"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
+ "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
+ ],
+ "version": "==19.1"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42",
+ "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300"
+ ],
+ "index": "pypi",
+ "version": "==1.9.5"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae",
+ "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6"
+ ],
+ "index": "pypi",
+ "version": "==4.6.4"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/SplunkPy/SplunkPy.py b/Integrations/SplunkPy/SplunkPy.py
new file mode 100644
index 000000000000..9a081a5ee342
--- /dev/null
+++ b/Integrations/SplunkPy/SplunkPy.py
@@ -0,0 +1,437 @@
+import demistomock as demisto
+from CommonServerPython import *
+import splunklib.client as client
+import splunklib.results as results
+import json
+from datetime import timedelta, datetime
+import urllib2
+import ssl
+from StringIO import StringIO
+import requests
+import urllib3
+
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+# Define utf8 as default encoding
+reload(sys)
+sys.setdefaultencoding('utf8') # pylint: disable=maybe-no-member
+
+SPLUNK_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
+VERIFY_CERTIFICATE = not bool(demisto.params().get('unsecure'))
+FETCH_LIMIT = int(demisto.params().get('fetch_limit', 50))
+FETCH_LIMIT = max(min(50, FETCH_LIMIT), 1)
+
+
+def get_current_splunk_time(splunk_service):
+ t = datetime.utcnow() - timedelta(days=3)
+ time = t.strftime(SPLUNK_TIME_FORMAT)
+ kwargs_oneshot = {'count': 1, 'earliest_time': time}
+ searchquery_oneshot = '| gentimes start=-1 | eval clock = strftime(time(), "%Y-%m-%dT%H:%M:%S")' \
+ ' | sort 1 -_time | table clock'
+
+ oneshotsearch_results = splunk_service.jobs.oneshot(searchquery_oneshot, **kwargs_oneshot)
+
+ reader = results.ResultsReader(oneshotsearch_results)
+ for item in reader:
+ if isinstance(item, results.Message):
+ return item.message["clock"]
+ if isinstance(item, dict):
+ return item["clock"]
+ raise ValueError('Error: Could not fetch Splunk time')
+
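+# The oneshot search above leans on `gentimes` so that strftime(time(), ...) is
+# evaluated on the Splunk server itself; the returned clock therefore reflects
+# Splunk's time rather than the Demisto server's.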
+
+def rawToDict(raw):
+ result = {} # type: Dict[str, str]
+ raw = raw.strip("}")
+ raw = raw.strip("{")
+ key_val_arr = raw.split(",")
+
+ for key_val in key_val_arr:
+ single_key_val = key_val.split("=")
+ if len(single_key_val) > 1:
+ val = single_key_val[1]
+ val = val.strip("\\")
+ val = val.strip("\"")
+ val = val.strip("\\")
+ key = single_key_val[0].strip()
+
+ alreadyThere = False
+ for dictkey, dictvalue in result.items():
+ if dictkey == key:
+ alreadyThere = True
+ result[dictkey] = dictvalue + "," + val
+
+ if not alreadyThere:
+ result[key] = val
+
+ return result
+
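+# A usage sketch (values are illustrative): for a notable event whose _raw
+# field looks like '{src=10.0.0.1, action=blocked, action=allowed}', rawToDict
+# returns {'src': '10.0.0.1', 'action': 'blocked,allowed'} -- values of
+# repeated keys are merged into a single comma-separated string.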
+
+
+# Converts an object to str, encoding unicode as UTF-8.
+def convert_to_str(obj):
+ if isinstance(obj, unicode):
+ return obj.encode('utf-8')
+ return str(obj)
+
+
+def updateNotableEvents(sessionKey, baseurl, comment, status=None, urgency=None, owner=None, eventIDs=None,
+ searchID=None):
+ """
+ Update some notable events.
+
+ Arguments:
+ sessionKey -- The session key to use
+ comment -- A description of the change or some information about the notable events
+ status -- A status (only required if you are changing the status of the event)
+ urgency -- An urgency (only required if you are changing the urgency of the event)
+    owner -- An owner (only required if reassigning the event)
+ eventIDs -- A list of notable event IDs (must be provided if a search ID is not provided)
+ searchID -- An ID of a search. All of the events associated with this search will be modified
+ unless a list of eventIDs are provided that limit the scope to a sub-set of the results.
+ """
+
+ # Make sure that the session ID was provided
+ if sessionKey is None:
+ raise Exception("A session key was not provided")
+
+ # Make sure that rule IDs and/or a search ID is provided
+ if eventIDs is None and searchID is None:
+ raise Exception("Either eventIDs of a searchID must be provided (or both)")
+ return False
+
+    # These are the arguments to the REST handler
+ args = {}
+ args['comment'] = comment
+
+ if status is not None:
+ args['status'] = status
+
+ if urgency is not None:
+ args['urgency'] = urgency
+
+ if owner is not None:
+ args['newOwner'] = owner
+
+ # Provide the list of event IDs that you want to change:
+ if eventIDs is not None:
+ args['ruleUIDs'] = eventIDs
+
+ # If you want to manipulate the notable events returned by a search then include the search ID
+ if searchID is not None:
+ args['searchID'] = searchID
+
+ auth_header = {'Authorization': 'Splunk %s' % sessionKey}
+
+ args['output_mode'] = 'json'
+
+ mod_notables = requests.post(baseurl + 'services/notable_update', data=args, headers=auth_header,
+ verify=VERIFY_CERTIFICATE)
+
+ return mod_notables.json()
+
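+# An illustrative call (all values are examples; the session key comes from
+# services/auth/login, as in the splunk-notable-event-edit command below):
+#     updateNotableEvents(sessionKey=key, baseurl='https://1.2.3.4:8089/',
+#                         comment='Resolved by playbook', status=5,
+#                         eventIDs=['ev1', 'ev2'])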
+
+def severity_to_level(severity):
+ if severity == 'informational':
+ return 0.5
+ elif severity == 'critical':
+ return 4
+ elif severity == 'high':
+ return 3
+ elif severity == 'medium':
+ return 2
+ else:
+ return 1
+
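+# Maps Splunk ES urgency strings to Demisto severity levels: informational=0.5,
+# medium=2, high=3, critical=4, and 1 (low) for anything else.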
+
+def notable_to_incident(event):
+ incident = {} # type: Dict[str,Any]
+ rule_title = ''
+ rule_name = ''
+ if demisto.get(event, 'rule_title'):
+ rule_title = event['rule_title']
+ if demisto.get(event, 'rule_name'):
+ rule_name = event['rule_name']
+ incident["name"] = "{} : {}".format(rule_title, rule_name)
+ if demisto.get(event, 'urgency'):
+ incident["severity"] = severity_to_level(event['urgency'])
+ if demisto.get(event, 'rule_description'):
+ incident["details"] = event["rule_description"]
+ incident["occurred"] = event["_time"]
+ incident["rawJSON"] = json.dumps(event)
+ labels = []
+ if demisto.get(demisto.params(), 'parseNotableEventsRaw'):
+ isParseNotableEventsRaw = demisto.params()['parseNotableEventsRaw']
+ if isParseNotableEventsRaw:
+ rawDict = rawToDict(event['_raw'])
+ for rawKey in rawDict:
+ labels.append({'type': rawKey, 'value': rawDict[rawKey]})
+ if demisto.get(event, 'security_domain'):
+ labels.append({'type': 'security_domain', 'value': event["security_domain"]})
+ incident['labels'] = labels
+ return incident
+
+
+def handler(proxy):
+ proxy_handler = urllib2.ProxyHandler({'http': proxy, 'https': proxy})
+ opener = urllib2.build_opener(proxy_handler)
+ urllib2.install_opener(opener)
+ return request
+
+
+def request(url, message, **kwargs):
+ method = message['method'].lower()
+ data = message.get('body', "") if method == 'post' else None
+ headers = dict(message.get('headers', []))
+ req = urllib2.Request(url, data, headers) # guardrails-disable-line
+ context = ssl.create_default_context()
+
+ if VERIFY_CERTIFICATE:
+ context.verify_mode = ssl.CERT_REQUIRED
+ else:
+ context.check_hostname = False
+ context.verify_mode = ssl.CERT_NONE
+
+ try:
+ response = urllib2.urlopen(req, context=context) # guardrails-disable-line
+ except urllib2.HTTPError as response:
+ pass # Propagate HTTP errors via the returned response message
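+    # urllib2.HTTPError is itself a file-like response object, so the except
+    # clause above rebinds `response` and the return below still yields the
+    # error's status code, headers, and body.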
+ return {
+ 'status': response.code, # type: ignore
+ 'reason': response.msg, # type: ignore
+ 'headers': response.info().dict, # type: ignore
+ 'body': StringIO(response.read()) # type: ignore
+ }
+
+
+service = None
+proxy = demisto.params()['proxy']
+if proxy:
+ try:
+ service = client.connect(
+ handler=handler(proxy),
+ host=demisto.params()['host'],
+ port=demisto.params()['port'],
+ app=demisto.params().get('app'),
+ username=demisto.params()['authentication']['identifier'],
+ password=demisto.params()['authentication']['password'],
+ verify=VERIFY_CERTIFICATE)
+ except urllib2.URLError as e:
+ if e.reason.errno == 1 and sys.version_info < (2, 6, 3): # type: ignore
+ pass
+ else:
+ raise
+else:
+ service = client.connect(
+ host=demisto.params()['host'],
+ port=demisto.params()['port'],
+ app=demisto.params().get('app'),
+ username=demisto.params()['authentication']['identifier'],
+ password=demisto.params()['authentication']['password'],
+ verify=VERIFY_CERTIFICATE)
+
+if service is None:
+ demisto.error("Could not connect to SplunkPy")
+ sys.exit(0)
+
+# The command demisto.command() holds the command sent from the user.
+if demisto.command() == 'test-module':
+ # for app in service.apps:
+ # print app.name
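+    # len(service.jobs) is always >= 0; enumerating the jobs collection simply
+    # forces an authenticated API call, which raises if the connection is bad.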
+ if len(service.jobs) >= 0:
+ demisto.results('ok')
+ sys.exit(0)
+if demisto.command() == 'splunk-search':
+ t = datetime.utcnow() - timedelta(days=7)
+ time_str = t.strftime(SPLUNK_TIME_FORMAT)
+ kwargs_oneshot = {"earliest_time": time_str} # type: Dict[str,Any]
+ if demisto.get(demisto.args(), 'earliest_time'):
+ kwargs_oneshot['earliest_time'] = demisto.args()['earliest_time']
+ if demisto.get(demisto.args(), 'latest_time'):
+ kwargs_oneshot['latest_time'] = demisto.args()['latest_time']
+ if demisto.get(demisto.args(), 'event_limit'):
+ kwargs_oneshot['count'] = int(demisto.args()['event_limit'])
+ searchquery_oneshot = demisto.args()['query']
+ searchquery_oneshot = searchquery_oneshot.encode('utf-8')
+ if not searchquery_oneshot.startswith('search') and not searchquery_oneshot.startswith('Search')\
+ and not searchquery_oneshot.startswith('|'):
+ searchquery_oneshot = 'search ' + searchquery_oneshot
+ oneshotsearch_results = service.jobs.oneshot(searchquery_oneshot, **kwargs_oneshot)
+
+ reader = results.ResultsReader(oneshotsearch_results)
+ res = []
+ dbot_scores = [] # type: List[Dict[str,Any]]
+ for item in reader:
+ if isinstance(item, results.Message):
+ if "Error in" in item.message:
+ raise ValueError(item.message)
+ res.append(convert_to_str(item.message))
+
+ elif isinstance(item, dict):
+ if demisto.get(item, 'host'):
+ dbot_scores.append({'Indicator': item['host'], 'Type': 'hostname',
+ 'Vendor': 'Splunk', 'Score': 0, 'isTypedIndicator': True})
+ # Normal events are returned as dicts
+ res.append(item)
+ ec = {}
+ ec['Splunk.Result'] = res
+ if len(dbot_scores) > 0:
+ ec['DBotScore'] = dbot_scores
+
+ headers = ""
+    if res and not isinstance(res[0], dict):
+        headers = "results"
+
+ human_readable = tableToMarkdown("Splunk Search results \n\n Results for query: {}".format(demisto.args()['query']),
+ res, headers)
+
+ demisto.results({
+ "Type": 1,
+ "Contents": res,
+ "ContentsFormat": "json",
+ "EntryContext": ec,
+ "HumanReadable": human_readable
+ })
+
+ sys.exit(0)
+if demisto.command() == 'splunk-job-create':
+ searchquery_normal = demisto.args()['query']
+ if not searchquery_normal.startswith('search'):
+ searchquery_normal = 'search ' + searchquery_normal
+ kwargs_normalsearch = {"exec_mode": "normal"}
+ job = service.jobs.create(searchquery_normal, **kwargs_normalsearch)
+
+ ec = {}
+ ec['Splunk.Job'] = job.sid
+ demisto.results({"Type": 1, "ContentsFormat": formats['text'],
+ "Contents": "Splunk Job created with SID: " + job.sid, "EntryContext": ec})
+ sys.exit(0)
+if demisto.command() == 'splunk-results':
+ jobs = service.jobs
+ found = False
+ res = []
+ for job in jobs:
+ if job.sid == demisto.args()['sid']:
+ rr = results.ResultsReader(job.results())
+ for result in rr:
+ if isinstance(result, results.Message):
+ demisto.results({"Type": 1, "ContentsFormat": "json", "Contents": json.dumps(result.message)})
+ elif isinstance(result, dict):
+ # Normal events are returned as dicts
+ res.append(result)
+ found = True
+ if not found:
+ demisto.results("Found no job for sid: " + demisto.args()['sid'])
+ if found:
+ demisto.results({"Type": 1, "ContentsFormat": "json", "Contents": json.dumps(res)})
+ sys.exit(0)
+if demisto.command() == 'fetch-incidents':
+ lastRun = demisto.getLastRun() and demisto.getLastRun()['time']
+ search_offset = demisto.getLastRun().get('offset', 0)
+
+ incidents = []
+ t = datetime.utcnow()
+ if demisto.get(demisto.params(), 'timezone'):
+ timezone = demisto.params()['timezone']
+ t = t + timedelta(minutes=int(timezone))
+
+ now = t.strftime(SPLUNK_TIME_FORMAT)
+ if demisto.get(demisto.params(), 'useSplunkTime'):
+ now = get_current_splunk_time(service)
+ t = datetime.strptime(now, SPLUNK_TIME_FORMAT)
+ if len(lastRun) == 0:
+ t = t - timedelta(minutes=10)
+ lastRun = t.strftime(SPLUNK_TIME_FORMAT)
+
+ earliest_fetch_time_fieldname = demisto.params().get("earliest_fetch_time_fieldname", "index_earliest")
+ latest_fetch_time_fieldname = demisto.params().get("latest_fetch_time_fieldname", "index_latest")
+
+ kwargs_oneshot = {earliest_fetch_time_fieldname: lastRun,
+ latest_fetch_time_fieldname: now, "count": FETCH_LIMIT, 'offset': search_offset}
+
+ searchquery_oneshot = demisto.params()['fetchQuery']
+
+ if demisto.get(demisto.params(), 'extractFields'):
+ extractFields = demisto.params()['extractFields']
+ extra_raw_arr = extractFields.split(',')
+ for field in extra_raw_arr:
+ field_trimmed = field.strip()
+ searchquery_oneshot = searchquery_oneshot + ' | eval ' + field_trimmed + '=' + field_trimmed
+
+ oneshotsearch_results = service.jobs.oneshot(searchquery_oneshot, **kwargs_oneshot)
+ reader = results.ResultsReader(oneshotsearch_results)
+ for item in reader:
+ inc = notable_to_incident(item)
+ incidents.append(inc)
+
+ demisto.incidents(incidents)
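+    # Pagination note (a sketch of the logic below): a full page of FETCH_LIMIT
+    # results keeps the current time window and advances the offset for the next
+    # run; a partial page moves the window forward to `now` and resets the offset.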
+ if len(incidents) < FETCH_LIMIT:
+ demisto.setLastRun({'time': now, 'offset': 0})
+ else:
+ demisto.setLastRun({'time': lastRun, 'offset': search_offset + FETCH_LIMIT})
+ sys.exit(0)
+
+if demisto.command() == 'splunk-get-indexes':
+ indexes = service.indexes
+ indexesNames = []
+ for index in indexes:
+ index_json = {'name': index.name, 'count': index["totalEventCount"]}
+ indexesNames.append(index_json)
+ demisto.results({"Type": 1, "ContentsFormat": "json", "Contents": json.dumps(indexesNames),
+ 'HumanReadable': tableToMarkdown("Splunk Indexes names", indexesNames, '')})
+ sys.exit(0)
+
+if demisto.command() == 'splunk-submit-event':
+ try:
+ index = service.indexes[demisto.args()['index']]
+ except KeyError:
+ demisto.results({'ContentsFormat': formats['text'], 'Type': entryTypes['error'],
+ 'Contents': "Found no Splunk index: " + demisto.args()['index']})
+ sys.exit(0)
+ else:
+ data = demisto.args()['data']
+ data_formatted = data.encode('utf8')
+ r = index.submit(data_formatted, sourcetype=demisto.args()['sourcetype'], host=demisto.args()['host'])
+ demisto.results('Event was created in Splunk index: ' + r.name)
+ sys.exit(0)
+
+if demisto.command() == 'splunk-notable-event-edit':
+ if not proxy:
+ os.environ["HTTPS_PROXY"] = ""
+ os.environ["HTTP_PROXY"] = ""
+ os.environ["https_proxy"] = ""
+ os.environ["http_proxy"] = ""
+ baseurl = 'https://' + demisto.params()['host'] + ':' + demisto.params()['port'] + '/'
+ username = demisto.params()['authentication']['identifier']
+ password = demisto.params()['authentication']['password']
+ auth_req = requests.post(baseurl + 'services/auth/login',
+ data={'username': username, 'password': password, 'output_mode': 'json'}, verify=VERIFY_CERTIFICATE)
+
+ sessionKey = auth_req.json()['sessionKey']
+ eventIDs = None
+ if demisto.get(demisto.args(), 'eventIDs'):
+ eventIDsStr = demisto.args()['eventIDs']
+ eventIDs = eventIDsStr.split(",")
+ status = None
+ if demisto.get(demisto.args(), 'status'):
+ status = int(demisto.args()['status'])
+ response_info = updateNotableEvents(sessionKey=sessionKey, baseurl=baseurl,
+ comment=demisto.get(demisto.args(), 'comment'), status=status,
+ urgency=demisto.get(demisto.args(), 'urgency'),
+ owner=demisto.get(demisto.args(), 'owner'), eventIDs=eventIDs)
+ if 'success' not in response_info or not response_info['success']:
+ demisto.results({'ContentsFormat': formats['text'], 'Type': entryTypes['error'],
+ 'Contents': "Could not update notable "
+ "events: " + demisto.args()['eventIDs'] + ' : ' + str(response_info)})
+ sys.exit(0)
+ demisto.results('Splunk ES Notable events: ' + response_info['message'])
+ sys.exit(0)
+if demisto.command() == 'splunk-parse-raw':
+ raw = demisto.args()['raw']
+ rawDict = rawToDict(raw)
+ ec = {}
+ ec['Splunk.Raw.Parsed'] = rawDict
+ demisto.results({"Type": 1, "ContentsFormat": "json", "Contents": json.dumps(rawDict), "EntryContext": ec})
+ sys.exit(0)
diff --git a/Integrations/SplunkPy/SplunkPy.yml b/Integrations/SplunkPy/SplunkPy.yml
new file mode 100644
index 000000000000..6f0afcfe1374
--- /dev/null
+++ b/Integrations/SplunkPy/SplunkPy.yml
@@ -0,0 +1,257 @@
+category: Analytics & SIEM
+commonfields:
+ id: SplunkPy
+ version: -1
+configuration:
+- display: Host - IP (x.x.x.x)
+ name: host
+ required: true
+ type: 0
+- display: Username
+ name: authentication
+ required: true
+ type: 9
+- defaultvalue: '8089'
+ display: Port
+ name: port
+ required: true
+ type: 0
+- defaultvalue: search index=notable | eval rule_name=if(isnull(rule_name),source,rule_name)
+ | eval rule_title=if(isnull(rule_title),rule_name,rule_title) | `get_urgency`
+ | `risk_correlation` | eval rule_description=if(isnull(rule_description),source,rule_description)
+ | eval security_domain=if(isnull(security_domain),source,security_domain)
+ display: Fetch notable events ES query
+ name: fetchQuery
+ required: false
+ type: 0
+- defaultvalue: '50'
+ display: Fetch Limit (No more than 50)
+ name: fetch_limit
+ required: false
+ type: 0
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Timezone of the Splunk server, in minutes. For example, for GMT+3
+    set +180 (set only if it differs from the Demisto server's timezone). Relevant
+    only for fetching notable events.
+ name: timezone
+ required: false
+ type: 0
+- defaultvalue: 'false'
+ display: Parse Raw part of notable events
+ name: parseNotableEventsRaw
+ required: false
+ type: 8
+- display: Extract Fields - CSV fields that will be parsed out of _raw
+ notable events
+ name: extractFields
+ required: false
+ type: 12
+- defaultvalue: 'false'
+ display: Use Splunk Clock Time For Fetch
+ name: useSplunkTime
+ required: false
+ type: 8
+- defaultvalue: ''
+ display: Trust any certificate (not secure)
+ name: unsecure
+ required: false
+ type: 8
+- display: Earliest time to fetch (the name of the Splunk field whose value defines the query's
+ earliest time to fetch)
+ name: earliest_fetch_time_fieldname
+ defaultvalue: index_earliest
+ type: 0
+ required: false
+- display: Latest time to fetch (the name of the Splunk field whose value defines the query's
+ latest time to fetch)
+ name: latest_fetch_time_fieldname
+ defaultvalue: index_latest
+ type: 0
+ required: false
+- display: The app context of the namespace
+ name: app
+ defaultvalue: ""
+ type: 0
+ required: false
+description: Run queries on Splunk servers.
+display: SplunkPy
+name: SplunkPy
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: ID of the search for which to return results.
+ isArray: false
+ name: sid
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the results of a previous Splunk search. You can use this command in conjunction with the splunk-job-create command.
+ execution: false
+ name: splunk-results
+ - arguments:
+ - default: true
+ description: 'The Splunk search language string to execute. For example: "index=*
+ | head 3". '
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ - default: false
+ description: 'Specifies the earliest time in the time range to search. The time
+ string can be a UTC time (with fractional seconds), a relative time specifier
+ (to now), or a formatted time string. Default is 1 week ago, in the format
+ "-7d". You can also specify time in the format: 2014-06-19T12:00:00.000-07:00"'
+ isArray: false
+ name: earliest_time
+ required: false
+ secret: false
+ - default: false
+      description: 'Specifies the latest time in the time range to search. The time
+ string can be a UTC time (with fractional seconds), a relative time specifier
+ (to now), or a formatted time string. For example: "2014-06-19T12:00:00.000-07:00"
+ or "-3d" (for time 3 days before now)'
+ isArray: false
+ name: latest_time
+ required: false
+ secret: false
+ - default: false
+ description: Maximum number of events to return. Default is 100. If "0", all results are returned.
+ isArray: false
+ name: event_limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches Splunk for events.
+ execution: false
+ name: splunk-search
+ outputs:
+ - contextPath: Splunk.Result
+ description: The results of the Splunk search. The results are a JSON array, in which each item is a Splunk
+ event.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: Splunk index to which to push data. Run the splunk-get-indexes command to get all
+ indexes.
+ isArray: false
+ name: index
+ required: true
+ secret: false
+ - default: true
+ description: The new event data to push, can be any string.
+ isArray: false
+ name: data
+ required: true
+ secret: false
+ - default: false
+ description: Event source type.
+ isArray: false
+ name: sourcetype
+ required: true
+ secret: false
+ - default: false
+ description: Event host. Can be "Local" or "120.0.0.1".
+ isArray: false
+ name: host
+ required: true
+ secret: false
+ deprecated: false
+ description: Creates a new event in Splunk.
+ execution: false
+ name: splunk-submit-event
+ - deprecated: false
+ description: Prints all Splunk index names.
+ execution: false
+ name: splunk-get-indexes
+ - arguments:
+ - default: false
+      description: 'A CSV list of notable event IDs.'
+ isArray: false
+ name: eventIDs
+ required: true
+ secret: false
+ - default: false
+ description: A Splunk user to assign to the notable event.
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ - default: false
+ description: Comment to add to the notable event.
+ isArray: false
+ name: comment
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Notable event urgency.
+ isArray: false
+ name: urgency
+ predefined:
+ - critical
+ - high
+ - medium
+ - low
+ - informational
+ required: false
+ secret: false
+ - default: false
+ description: Notable event status. 0 - Unassigned, 1 - Assigned, 2 - In
+ Progress, 3 - Pending, 4 - Resolved, 5 - Closed.
+ isArray: false
+ name: status
+ required: false
+ secret: false
+ deprecated: false
+    description: Updates an existing notable event in Splunk ES.
+ execution: true
+ name: splunk-notable-event-edit
+ - arguments:
+ - default: false
+      description: 'The Splunk search language string to execute. For example: "index=*
+        | head 3".'
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ deprecated: false
+ description: Creates a new search job in Splunk.
+ execution: false
+ name: splunk-job-create
+ outputs:
+ - contextPath: Splunk.Job
+ description: The SID of the created job.
+ type: Unknown
+ - arguments:
+ - default: true
+ defaultValue: ${Splunk.Result._raw}
+ description: The raw data of the Splunk event (string).
+ isArray: false
+ name: raw
+ required: false
+ secret: false
+ deprecated: false
+ description: Parses the raw part of the event.
+ execution: false
+ name: splunk-parse-raw
+ outputs:
+ - contextPath: Splunk.Raw.Parsed
+ description: The raw event data (parsed).
+ type: unknown
+ dockerimage: demisto/splunksdk:1.0
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
diff --git a/Integrations/SplunkPy/SplunkPy_description.md b/Integrations/SplunkPy/SplunkPy_description.md
new file mode 100644
index 000000000000..3c60feb02efd
--- /dev/null
+++ b/Integrations/SplunkPy/SplunkPy_description.md
@@ -0,0 +1 @@
+Use the SplunkPy integration to fetch incidents from Splunk ES, and query results by SID.
diff --git a/Integrations/SplunkPy/SplunkPy_image.png b/Integrations/SplunkPy/SplunkPy_image.png
new file mode 100644
index 000000000000..ac9ee3280752
Binary files /dev/null and b/Integrations/SplunkPy/SplunkPy_image.png differ
diff --git a/Integrations/Stealthwatch_Cloud/CHANGELOG.md b/Integrations/Stealthwatch_Cloud/CHANGELOG.md
new file mode 100644
index 000000000000..27b646496ae7
--- /dev/null
+++ b/Integrations/Stealthwatch_Cloud/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.8.2] - 2019-08-22
+-
\ No newline at end of file
diff --git a/Integrations/Stealthwatch_Cloud/Pipfile b/Integrations/Stealthwatch_Cloud/Pipfile
new file mode 100644
index 000000000000..66ad1243db8b
--- /dev/null
+++ b/Integrations/Stealthwatch_Cloud/Pipfile
@@ -0,0 +1,22 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+
+[packages]
+certifi = "==2017.11.5"
+chardet = "==3.0.4"
+idna = "==2.6"
+olefile = "==0.44"
+requests = "==2.18.4"
+urllib3 = "==1.22"
+PyYAML = "==3.12"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/Stealthwatch_Cloud/Pipfile.lock b/Integrations/Stealthwatch_Cloud/Pipfile.lock
new file mode 100644
index 000000000000..f74c63030bb8
--- /dev/null
+++ b/Integrations/Stealthwatch_Cloud/Pipfile.lock
@@ -0,0 +1,375 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "dbc7e9dc0a5be3767de3b107d1afe7a3e3b6c57f7cb8a820195e76b8ee681d40"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694",
+ "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
+ ],
+ "index": "pypi",
+ "version": "==2017.11.5"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "olefile": {
+ "hashes": [
+ "sha256:61f2ca0cd0aa77279eb943c07f607438edf374096b66332fae1ee64a6f0f73ad"
+ ],
+ "index": "pypi",
+ "version": "==0.44"
+ },
+ "pyyaml": {
+ "hashes": [
+ "sha256:16b20e970597e051997d90dc2cddc713a2876c47e3d92d59ee198700c5427736",
+ "sha256:3262c96a1ca437e7e4763e2843746588a965426550f3797a79fca9c6199c431f",
+ "sha256:592766c6303207a20efc445587778322d7f73b161bd994f227adaa341ba212ab",
+ "sha256:5ac82e411044fb129bae5cfbeb3ba626acb2af31a8d17d175004b70862a741a7",
+ "sha256:827dc04b8fa7d07c44de11fabbc888e627fa8293b695e0f99cb544fdfa1bf0d1",
+ "sha256:bc6bced57f826ca7cb5125a10b23fd0f2fff3b7c4701d64c439a300ce665fff8",
+ "sha256:c01b880ec30b5a6e6aa67b09a2fe3fb30473008c85cd6a67359a1b15ed6d83a4",
+ "sha256:e863072cdf4c72eebf179342c94e6989c67185842d9997960b3e69290b2fa269"
+ ],
+ "index": "pypi",
+ "version": "==3.12"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694",
+ "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
+ ],
+ "index": "pypi",
+ "version": "==2017.11.5"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==3.7.4"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.3'",
+ "version": "==1.0.2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16",
+ "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.3.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "index": "pypi",
+ "version": "==2.6"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8",
+ "sha256:80d2de76188eabfbfcf27e6a37342c2827801e59c4cc14b0371c56fed43820e3"
+ ],
+ "version": "==0.19"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
+ "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
+ ],
+ "version": "==19.1"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42",
+ "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300"
+ ],
+ "index": "pypi",
+ "version": "==1.9.5"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae",
+ "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6"
+ ],
+ "index": "pypi",
+ "version": "==4.6.4"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "index": "pypi",
+ "version": "==1.22"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/Stealthwatch_Cloud/Stealthwatch_Cloud.py b/Integrations/Stealthwatch_Cloud/Stealthwatch_Cloud.py
new file mode 100644
index 000000000000..fd3e059e8208
--- /dev/null
+++ b/Integrations/Stealthwatch_Cloud/Stealthwatch_Cloud.py
@@ -0,0 +1,551 @@
+import demistomock as demisto
+from CommonServerPython import *
+''' IMPORTS '''
+import requests
+import json
+import os
+from datetime import datetime, timedelta
+import collections
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+SERVER = demisto.params().get('serverURL')[
+ :-1] if demisto.params().get('serverURL').endswith('/') else demisto.params().get('serverURL')
+SERVER_URL = SERVER + '/api/v3'
+API_KEY = demisto.params()['APIKey']
+
+USE_SSL = not demisto.params().get('insecure')
+
+DEFAULT_HEADERS = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json',
+ 'Authorization': API_KEY
+}
+
+''' HELPER FUNCTIONS '''
+
+if not demisto.params()['proxy']:
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+
+def http_request(method, url_suffix, params_dict, headers, data=None):
+ req_params = {} # type: Dict[Any,Any]
+ if params_dict is not None:
+ req_params.update(params_dict)
+
+ url = SERVER_URL + url_suffix
+
+ LOG('running %s request with url=%s\theaders=%s\nparams=%s' % (method, url, headers, json.dumps(req_params)))
+
+ try:
+ res = requests.request(method,
+ url,
+ verify=USE_SSL,
+ params=req_params,
+ headers=headers,
+ data=data
+ )
+ res.raise_for_status()
+ try:
+ return res.json()
+ except ValueError:
+ # in case the response doesn't have JSON
+ return "Request completed"
+    except Exception as e:
+        LOG(e)
+        raise
+
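+# A usage sketch (endpoint and params are illustrative): every command below
+# funnels through this helper, e.g.
+#     http_request('GET', '/alerts/alert/', {'limit': 5}, DEFAULT_HEADERS)
+# returns the parsed JSON body, or the string "Request completed" when the
+# response carries no JSON.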
+
+def underscore_to_camelcase(word):
+ return ' '.join(x.capitalize() or '_' for x in word.split('_'))
+
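+# For example, underscore_to_camelcase('last_modified') returns 'Last Modified';
+# it is used below to build readable headers for tableToMarkdown.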
+
+def create_incident_data_from_alert(alert):
+ alert.pop('comments')
+ alert.pop('observations')
+ return {
+ 'name': 'Stealthwatch alert ' + str(alert.get('id', '')),
+ 'rawJSON': json.dumps(alert),
+ 'occurred': alert.get('created', '')
+ }
+
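+# A sketch of the mapping (field values are illustrative): an alert such as
+# {'id': 123, 'created': '2019-08-01T07:54:39Z', ...} becomes
+# {'name': 'Stealthwatch alert 123', 'occurred': '2019-08-01T07:54:39Z',
+#  'rawJSON': '<the full alert serialized as JSON>'}.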
+
+def get_latest_id(alerts_data):
+ latest_id = 0
+ for alert in alerts_data:
+ current_id = alert.get('id', None)
+ if current_id is not None and current_id > latest_id:
+ latest_id = current_id
+
+    return latest_id
+
+
+''' COMMANDS FUNCTIONS '''
+
+def show_alert(alert_id):
+ """
+ Returns alert by specific id
+ """
+
+ api_endpoint = "/alerts/alert/{}/".format(alert_id)
+ return http_request('GET', api_endpoint, {}, DEFAULT_HEADERS)
+
+
+def show_alert_command():
+ """
+ corresponds to 'sw-show-alert' command. Returns information about a specific alert
+ """
+ alert_id = demisto.args().get('alertID')
+
+ alert_data = show_alert(alert_id)
+
+    if demisto.args().get('addComments') != 'true':
+ alert_data.pop('comments')
+ alert_data.pop('new_comment')
+
+ alert_data.pop('observations')
+
+ list_for_md = ['resolved', 'id', 'last_modified', 'obj_created', 'assigned_to']
+
+ dict_for_md = {underscore_to_camelcase(k): v for k, v in alert_data.iteritems() if k in list_for_md}
+ md = tableToMarkdown(alert_data.get('text', ''), dict_for_md)
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': alert_data,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': {
+ "Stealthwatch.Alert(val.id==obj.id)": alert_data
+ }
+ }
+
+
+def update_alert(alert_id, params):
+ """
+ Updates alert by specific id
+ """
+
+ api_endpoint = "/alerts/alert/{}/".format(alert_id)
+ return http_request('PUT', api_endpoint, params, DEFAULT_HEADERS)
+
+
+def update_alert_command():
+ """
+ corresponds to 'sw-update-alert' command. Returns information about a specific alert
+ """
+ args = demisto.args()
+ alert_id = args.get('alertID')
+ update_params = {}
+ # adding the possible params for update
+ possible_params = ['new_comment', 'tags', 'publish_time', 'resolved', 'snooze_settings', 'merit', 'assigned_to']
+ for param in possible_params:
+ current_param = args.get(param, False)
+ if current_param:
+ update_params[param] = current_param
+ username = args.get('resolved_user', None)
+ if username is not None:
+ update_params['resolved_user'] = {
+ 'username': username
+ }
+
+ alert_data = update_alert(alert_id, update_params)
+
+ alert_data.pop('comments')
+ alert_data.pop('new_comment')
+ alert_data.pop('observations')
+
+ list_for_md = ['resolved', 'id', 'last_modified', 'obj_created', 'assigned_to']
+
+ dict_for_md = {k: v for k, v in alert_data.iteritems() if k in list_for_md}
+ md = tableToMarkdown(alert_data.get('text', ''), dict_for_md)
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': alert_data,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': {
+ "Stealthwatch.Alert(val.id==obj.id)": alert_data
+ }
+ }
+
+
+def list_alerts(params):
+ """
+ Retrieves alerts
+ """
+
+ api_endpoint = "/alerts/alert/"
+ return http_request('GET', api_endpoint, params, DEFAULT_HEADERS)
+
+
+def build_alert_dic(alert):
+ dic = collections.OrderedDict() # type: Dict[str,str]
+ list_for_md = ['id', 'last_modified', 'resolved', 'text', 'obj_created', 'assigned_to', 'description']
+ for item in list_for_md:
+ dic[underscore_to_camelcase(item)] = alert[item]
+
+ return dic
+
+
+def list_alerts_command():
+ """
+ corresponds to 'sw-list-alerts' command. Returns a list of Stealthwatch alerts
+ """
+ args = demisto.args()
+ list_params = {}
+ # adding the possible params for update
+ possible_params = ['status', 'tags', 'search', 'assignee', 'limit']
+ for param in possible_params:
+ current_param = args.get(param, False)
+ if current_param:
+ list_params[param] = current_param
+
+ alerts_data = list_alerts(list_params).get('objects')
+ md_dicts_list = []
+
+ for alert in alerts_data:
+        if demisto.args().get('addComments') != 'true':
+ alert.pop('comments')
+ alert.pop('new_comment')
+ alert.pop('observations')
+ md_dicts_list.append(build_alert_dic(alert))
+
+ md = tableToMarkdown("The following alerts were retrieved", md_dicts_list)
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': alerts_data,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': {
+ "Stealthwatch.Alert(val.id==obj.id)": alerts_data
+ }
+ }
+
+
+def domain_block(params):
+ """
+ Updates domain blacklist status
+ """
+
+ api_endpoint = "/blacklist/domains/"
+ return http_request('POST', api_endpoint, {}, DEFAULT_HEADERS, params)
+
+
+def block_domain_command():
+ """
+    corresponds to 'sw-block-domain-or-ip' command. Adds a domain or IP address to the blacklist
+ """
+ domain = demisto.args().get('domain')
+ ip = demisto.args().get('ip')
+
+ if not (domain or ip):
+ return {
+ "Type": entryTypes["error"],
+ "ContentsFormat": formats["text"],
+ "Contents": 'Please enter either domain or ip'
+ }
+
+ if domain and ip:
+ return {
+ "Type": entryTypes["error"],
+ "ContentsFormat": formats["text"],
+ "Contents": 'Please enter only domain or ip, not both'
+ }
+
+    identifier = domain or ip
+
+ domain_params = {
+ "identifier": identifier,
+ "category": "domain",
+ "list_on": "blacklist"
+ }
+
+ domain_result = domain_block(json.dumps(domain_params))
+
+ ec = None
+
+ if domain:
+ ec = {
+ "Stealthwatch.Domain(val.identifier==obj.identifier)": domain_result
+ }
+ else:
+ ec = {
+ "Stealthwatch.IP(val.identifier==obj.identifier)": domain_result
+ }
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': domain_result,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+        'HumanReadable': tableToMarkdown('Blacklist ' + identifier + ' result', domain_result),
+ 'EntryContext': ec
+ }
+
+
+def domain_unblock(domain_id):
+ """
+ Removes domain from the blacklist
+ """
+
+ api_endpoint = "/blacklist/domains/{}/".format(domain_id)
+ return http_request('DELETE', api_endpoint, None, DEFAULT_HEADERS, None)
+
+
+def unblock_domain_command():
+ """
+    corresponds to 'sw-unblock-domain' command. Removes a domain from the blacklist
+ """
+ domain_id = demisto.args().get('id')
+
+ domain_result = domain_unblock(domain_id)
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': domain_result,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'Unblocked domain with id: ' + domain_id,
+ }
+
+
+def list_domains(list_params):
+ """
+ Lists blacklisted domains
+ """
+
+ api_endpoint = "/blacklist/domains/"
+ return http_request('GET', api_endpoint, list_params, DEFAULT_HEADERS, {})
+
+
+def list_blocked_domains_command():
+ """
+ corresponds to 'sw-list-blocked-domains' command. Returns a list of the blocked domains
+ """
+ args = demisto.args()
+ list_params = {}
+ # adding the possible params for update
+ possible_params = ['search', 'limit']
+ for param in possible_params:
+ current_param = args.get(param, False)
+ if current_param:
+ list_params[param] = current_param
+
+ specific_domain = args.get('domain', None)
+ if specific_domain is not None:
+ list_params['identifier'] = specific_domain
+
+ domains_data = list_domains(list_params)
+
+ domains_result = domains_data.get('objects', {})
+
+ data_output = []
+ for obs in domains_result:
+ data_output.append({underscore_to_camelcase(k): v for k, v in obs.items()})
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': domains_data,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Current blacklisted domains are', data_output),
+ 'EntryContext': {
+ "Stealthwatch.Domain(val.identifier==obj.identifier)": domains_result
+ }
+ }
+
+
+def list_observations(params):
+ """
+ Lists observations
+ """
+
+ api_endpoint = "/observations/all/"
+ return http_request('GET', api_endpoint, params, DEFAULT_HEADERS)
+
+
+def list_observations_command():
+ """
+ corresponds to 'sw-list-observations' command. Returns a list of Stealthwatch observations
+ """
+ args = demisto.args()
+ list_params = {
+ "order_by": 'creation_time'
+ }
+ # adding the possible params for update
+ possible_params = ['alert', 'id', 'search', 'limit']
+ for param in possible_params:
+ current_param = args.get(param, False)
+ if current_param:
+ list_params[param] = current_param
+
+ observations_data = list_observations(list_params).get('objects')
+
+ data_output = []
+ for obs in observations_data:
+ data_output.append({underscore_to_camelcase(k): v for k, v in obs.items()})
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': data_output,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Found the following observations', data_output),
+ 'EntryContext': {
+ "Stealthwatch.Observation(val.id==obj.id)": observations_data
+ }
+ }
+
+
+def list_sessions(params):
+ """
+    Lists session data
+ """
+
+ api_endpoint = "/snapshots/session-data/"
+ return http_request('GET', api_endpoint, params, DEFAULT_HEADERS)
+
+
+def list_sessions_command():
+ """
+ corresponds to 'sw-list-sessions' command. Returns a list of Stealthwatch
+ sessions
+ """
+ date_format = "%Y-%m-%dT%H:%M:%SZ"
+ list_params = {}
+
+ ip = demisto.args().get('ip')
+ connected_ip = demisto.args().get('connectedIP')
+ connected_device_id = demisto.args().get('connectedDeviceId')
+ limit = demisto.args().get('limit')
+ start_time = demisto.args().get('startTime', None)
+ end_time = demisto.args().get('endTime', None)
+ session_type = demisto.args().get('sessionType', 'all')
+
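+    # Note on the window handling below: when only startTime is supplied, the
+    # search window is shifted to the five minutes preceding it (endTime resolves
+    # to the original startTime value); both values pass through unchanged when
+    # endTime is supplied.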
+ if start_time and end_time:
+ list_params['start_datetime'] = start_time
+ list_params['end_datetime'] = end_time
+ elif end_time is None:
+ start_time_object = datetime.strptime(start_time, date_format)
+ start_time_object = start_time_object - timedelta(minutes=5)
+ end_time_object = start_time_object + timedelta(minutes=5)
+ start_time = start_time_object.strftime(date_format)
+ end_time = end_time_object.strftime(date_format)
+
+ list_params['ip'] = ip
+ list_params['connected_ip'] = connected_ip
+ list_params['limit'] = limit
+ list_params['start_datetime'] = start_time
+ list_params['end_datetime'] = end_time
+ list_params['connected_device_id'] = connected_device_id
+
+ unique_session_ids = [] # type: List[str]
+ final_sessions_data = []
+ sessions_data = list_sessions(list_params).get('objects')
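+    # The loop below tags each session as internal (it carries a
+    # connected_device_id) or external, and keeps only the sessions matching the
+    # requested sessionType ('internal', 'external', or 'all').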
+ for sess in sessions_data:
+ if sess['connected_ip'] not in unique_session_ids:
+ unique_session_ids.append(sess['connected_ip'])
+ if demisto.get(sess, 'connected_device_id'):
+ sess['connected_device_is_external'] = False
+ if session_type == 'internal':
+ final_sessions_data.append(sess)
+ else:
+ sess['connected_device_is_external'] = True
+ if session_type == 'external':
+ final_sessions_data.append(sess)
+ if session_type == 'all':
+ final_sessions_data.append(sess)
+
+ data_output = []
+ for sess in final_sessions_data:
+ data_output.append({underscore_to_camelcase(k): v for k, v in sess.items()})
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': data_output,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Found the following session data', data_output),
+ 'EntryContext': {
+ "Stealthwatch.Session(val.id==obj.id)": final_sessions_data
+ }
+ }
+
+
+def fetch_incidents():
+
+ list_params = {
+ "order_by": 'newest',
+ "limit": 100
+ }
+ final_alerts = []
+
+ known_ids = demisto.getLastRun().get('ids', None)
+ if known_ids is None or not known_ids:
+ known_ids = []
+
+ alerts_response = list_alerts(list_params)
+
+ alerts_data = alerts_response.get('objects', None)
+
+ for alert in alerts_data:
+ current_alert_id = alert.get('id')
+ if current_alert_id not in known_ids:
+ incident_from_alert = create_incident_data_from_alert(alert)
+ final_alerts.append(incident_from_alert)
+
+ # maintaining queue of 100 last seen alert ids
+ if len(known_ids) >= 100:
+ known_ids.pop(0)
+ known_ids.append(current_alert_id)
+
+ demisto.setLastRun({
+ 'ids': known_ids
+ })
+ demisto.incidents(final_alerts)
+
+
+''' EXECUTION CODE '''
+try:
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ if list_alerts_command():
+ demisto.results('ok')
+ else:
+ demisto.results('test failed')
+ elif demisto.command() == 'sw-show-alert':
+ demisto.results(show_alert_command())
+ elif demisto.command() == 'sw-update-alert':
+ demisto.results(update_alert_command())
+ elif demisto.command() == 'sw-list-alerts':
+ demisto.results(list_alerts_command())
+ elif demisto.command() == 'sw-block-domain-or-ip':
+ demisto.results(block_domain_command())
+ elif demisto.command() == 'sw-unblock-domain':
+ demisto.results(unblock_domain_command())
+ elif demisto.command() == 'sw-list-blocked-domains':
+ demisto.results(list_blocked_domains_command())
+ elif demisto.command() == 'sw-list-observations':
+ demisto.results(list_observations_command())
+ elif demisto.command() == 'sw-list-sessions':
+ demisto.results(list_sessions_command())
+ elif demisto.command() == 'fetch-incidents':
+        fetch_incidents()
+except Exception as e:
+ LOG(e.message)
+ LOG.print_log()
+ raise
diff --git a/Integrations/Stealthwatch_Cloud/Stealthwatch_Cloud.yml b/Integrations/Stealthwatch_Cloud/Stealthwatch_Cloud.yml
new file mode 100644
index 000000000000..889d90728e74
--- /dev/null
+++ b/Integrations/Stealthwatch_Cloud/Stealthwatch_Cloud.yml
@@ -0,0 +1,334 @@
+commonfields:
+ id: Stealthwatch Cloud
+ version: -1
+name: Stealthwatch Cloud
+display: Stealthwatch Cloud
+category: Network Security
+description: Protect your cloud assets and private network.
+configuration:
+- display: Stealthwatch's server URL
+ name: serverURL
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Stealthwatch Cloud API key. Should be in the form of "ApiKey <username>:<api_key>"
+ name: APIKey
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: "true"
+ type: 8
+ required: false
+- display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: "false"
+ type: 8
+ required: false
+- display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+script:
+ script: ''
+ type: python
+ commands:
+ - name: sw-show-alert
+ arguments:
+ - name: alertID
+ required: true
+      description: The ID of the alert to retrieve
+ - name: addComments
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+      description: Add comments information (output can be long)
+ defaultValue: "false"
+ outputs:
+ - contextPath: Stealthwatch.Alert.id
+ description: Alert's id
+ type: number
+ - contextPath: Stealthwatch.Alert.assigned_to
+ description: Alert's assignee
+ type: string
+ - contextPath: Stealthwatch.Alert.obj_created
+ description: Alert's creation date
+ type: date
+ - contextPath: Stealthwatch.Alert.last_modified
+ description: Alert's last modification
+ type: date
+ - contextPath: Stealthwatch.Alert.resolved
+ description: 'Alert''s state '
+ type: boolean
+ - contextPath: Stealthwatch.Alert.source_info.ips
+ description: IP of the alert's source
+ type: string
+ - contextPath: Stealthwatch.Alert.source_info.hostnames
+ description: Hostname of the alert's source
+ type: string
+ description: Get info about a specific alert by its ID
+ - name: sw-update-alert
+ arguments:
+ - name: alertID
+ required: true
+      description: The ID of the alert to update
+ - name: resolved
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+      description: Set to "true", together with the merit argument, to close an alert.
+ - name: merit
+ auto: PREDEFINED
+ predefined:
+ - "0"
+ - "1"
+ - "2"
+ - "3"
+ - "4"
+ - "5"
+ - "6"
+ - "8"
+ - "9"
+      description: The merit value to set when closing an alert. Can be 8 ("helpful") or 9 ("not helpful").
+ - name: tags
+ description: Tags (string)
+ - name: new_comment
+ description: Set the new_comment field to add a comment to the alert.
+ - name: publish_time
+      description: Publish time (string). For example, publish_time=2018-08-01T07:54:39Z
+ - name: snooze_settings
+ description: Snooze settings (string)
+ - name: resolved_user
+      description: Username (string)
+ - name: assigned_to
+ description: Assigned to (integer)
+ outputs:
+ - contextPath: Stealthwatch.Alert.id
+ description: Alert's id
+ type: number
+ - contextPath: Stealthwatch.Alert.assigned_to
+ description: Alert's assignee
+ type: string
+ - contextPath: Stealthwatch.Alert.obj_created
+ description: Alert's creation date
+ type: date
+ - contextPath: Stealthwatch.Alert.last_modified
+ description: Alert's last modification
+ type: date
+ - contextPath: Stealthwatch.Alert.resolved
+      description: 'Alert''s state'
+ type: boolean
+ - contextPath: Stealthwatch.Alert.source_info.ips
+ description: IP of the alert's source
+ type: string
+ - contextPath: Stealthwatch.Alert.source_info.hostname
+ description: Hostname of the alert's source
+ type: string
+ description: Update an alert
+ - name: sw-list-alerts
+ arguments:
+ - name: status
+ auto: PREDEFINED
+ predefined:
+ - open
+ - closed
+ - all
+ description: 'status filters alerts by status: open, closed, or all. Default
+ is open. all allows you to see an individual alert regardless of whether it''s
+ open or closed.'
+ - name: search
+ description: search finds a particular string amongst the alerts. For example,
+ a particular IP address, hostname, or alert type.
+ - name: assignee
+ description: assignee filters shows only alerts assigned to a particular user.
+ - name: tags
+ description: tags shows alerts that are assigned a particular incident tag.
+ - name: limit
+      description: Number of alerts to list. Default is 5
+ defaultValue: "5"
+ - name: addComments
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+      description: Whether to include comment information (can be long)
+ defaultValue: "false"
+ outputs:
+ - contextPath: Stealthwatch.Alert.id
+ description: Alert's id
+ type: number
+ - contextPath: Stealthwatch.Alert.assigned_to
+ description: Alert's assignee
+ type: string
+ - contextPath: Stealthwatch.Alert.obj_created
+ description: Alert's creation date
+ type: date
+ - contextPath: Stealthwatch.Alert.last_modified
+ description: Alert's last modification
+ type: date
+ - contextPath: Stealthwatch.Alert.resolved
+      description: 'Alert''s state'
+ type: boolean
+ - contextPath: Stealthwatch.Alert.source_info.ips
+ description: IP of the alert's source
+ type: string
+ - contextPath: Stealthwatch.Alert.source_info.hostname
+ description: Hostname of the alert's source
+ type: string
+ description: Get the list of Stealthwatch alerts
+ - name: sw-block-domain-or-ip
+ arguments:
+ - name: domain
+ description: Domain to add to the blacklist
+ - name: ip
+ description: IP to add to the blacklist
+ outputs:
+ - contextPath: Stealthwatch.Domain.identifier
+ description: Domain name
+ type: string
+ - contextPath: Stealthwatch.Domain.title
+ description: Domain title
+ type: string
+ - contextPath: Stealthwatch.Domain.id
+ description: Domain id
+ type: number
+ - contextPath: Stealthwatch.IP.identifier
+ description: IP address
+ type: string
+ - contextPath: Stealthwatch.IP.title
+ description: IP title
+ type: string
+ - contextPath: Stealthwatch.IP.id
+ description: IP id
+ type: string
+ description: Add a domain or IP to the blacklist
+ - name: sw-unblock-domain
+ arguments:
+ - name: id
+ required: true
+ description: ID of the domain to remove from the blacklist. You can find the
+ id using 'sw-list-blocked-domains'
+ description: Remove a domain from the blacklist
+ - name: sw-list-blocked-domains
+ arguments:
+ - name: search
+ description: search finds a particular string amongst the alerts. For example,
+ a particular IP address, hostname, or alert type.
+ - name: domain
+ description: Search for specific domain
+ - name: limit
+      description: Number of domains to list. Default is 5
+ defaultValue: "5"
+ outputs:
+ - contextPath: Stealthwatch.Domain.identifier
+ description: Domain name
+ type: string
+ - contextPath: Stealthwatch.Domain.title
+ description: Domain title
+ type: string
+ - contextPath: Stealthwatch.Domain.id
+ description: Domain id
+ type: number
+    description: Returns the list of blocked domains
+ - name: sw-list-observations
+ arguments:
+ - name: search
+ description: search finds a particular string amongst the alerts. For example,
+ a particular IP address, hostname, or alert type.
+ - name: alert
+ description: Use the alert query parameter with an alert id to only show observations
+ referenced by the alert.
+ - name: id
+ description: Get a specific observation by its id
+ - name: limit
+      description: Number of observations to list. Default is 5
+ defaultValue: "5"
+ outputs:
+ - contextPath: Stealthwatch.Observation.id
+ description: Observation ID
+ type: number
+ - contextPath: Stealthwatch.Observation.port_count
+ description: Observation port count
+ type: number
+ - contextPath: Stealthwatch.Observation.creation_time
+ description: Observation creation time
+ type: string
+ - contextPath: Stealthwatch.Observation.end_time
+ description: Observation end time
+ type: string
+ - contextPath: Stealthwatch.Observation.scanned_ip
+ description: Observation scanned ip
+ type: string
+ - contextPath: Stealthwatch.Observation.scanner_ip
+ description: Observation scanner ip
+ type: string
+ - contextPath: Stealthwatch.Observation.source
+ description: Observation source
+ type: unknown
+ description: Get observations by alert ID, observation ID, or free search
+ - name: sw-list-sessions
+ arguments:
+ - name: startTime
+ required: true
+ description: Session's start time (UTC). For example, startTime="2018-09-30T12:00:00Z"
+ - name: endTime
+ description: Session's end time (UTC). For example, endTime="2018-07-31T15:00:00Z"
+ - name: limit
+      description: Number of sessions to list. Default is 400
+ defaultValue: "400"
+ - name: ip
+ description: Source IP address to filter by
+ - name: connectedIP
+ description: Connected IP to filter by
+ - name: connectedDeviceId
+ description: Connected device ID
+ - name: sessionType
+ auto: PREDEFINED
+ predefined:
+ - all
+ - external
+ - internal
+      description: The type of session to return - all, external, or internal
+ defaultValue: all
+ outputs:
+ - contextPath: Stealthwatch.Session.id
+ description: Session ID
+ type: number
+ - contextPath: Stealthwatch.Session.port
+ description: Session port
+ type: number
+ - contextPath: Stealthwatch.Session.start_timestamp_utc
+ description: Session start time
+ type: string
+ - contextPath: Stealthwatch.Session.ip
+ description: Session IP
+ type: string
+ - contextPath: Stealthwatch.Session.connected_ip
+ description: Session connected IP
+ type: string
+ - contextPath: Stealthwatch.Session.device_id
+ description: Source device ID
+ type: number
+ - contextPath: Stealthwatch.Session.connected_device_id
+ description: Connected device ID
+ type: number
+ - contextPath: Stealthwatch.Session.connected_device_is_external
+ description: Is the connected device external
+ type: boolean
+    description: 'Get sessions by the session''s occurrence time (time format: YYYY-MM-DDTHH:MM:SSZ)'
+ isfetch: true
+ runonce: false
+tests:
+- No test
diff --git a/Integrations/Stealthwatch_Cloud/Stealthwatch_Cloud_image.png b/Integrations/Stealthwatch_Cloud/Stealthwatch_Cloud_image.png
new file mode 100644
index 000000000000..94689dd002af
Binary files /dev/null and b/Integrations/Stealthwatch_Cloud/Stealthwatch_Cloud_image.png differ
diff --git a/Integrations/SymantecEndpointProtection_V2/CHANGELOG.md b/Integrations/SymantecEndpointProtection_V2/CHANGELOG.md
new file mode 100644
index 000000000000..8f6cd205df71
--- /dev/null
+++ b/Integrations/SymantecEndpointProtection_V2/CHANGELOG.md
@@ -0,0 +1,14 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+ - New command `!sep-identify-old-clients`, which identifies endpoints whose running
+ version is inconsistent with the target version or the desired version (an optional argument).
+ - New argument added to `!sep-endpoints-info`. Now it's possible to specify a group to search.
+ - New context outputs for `!sep-endpoints-info`:
+ * Group
+ * RunningVersion
+ * TargetVersion
+    * PatternIdx
+ * OnlineStatus
+ * UpdateTime
\ No newline at end of file
diff --git a/Integrations/SymantecEndpointProtection_V2/SEPM_test.py b/Integrations/SymantecEndpointProtection_V2/SEPM_test.py
new file mode 100644
index 000000000000..34865478e35d
--- /dev/null
+++ b/Integrations/SymantecEndpointProtection_V2/SEPM_test.py
@@ -0,0 +1,43 @@
+import demistomock as demisto
+import json
+
+
+def mock_demisto(mocker):
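+    # SymantecEndpointProtection_V2 reads demisto.params() at import time, so 'proxy' must be mocked before the import.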
+ mocker.patch.object(demisto, 'params', return_value={'proxy': True})
+
+
+def _get_api_response():
+ response = "test-data/SEPM-endpoint-api-response.json"
+ with open(response, 'r') as f:
+ api_response = json.loads(f.read())
+ return api_response
+
+
+def _get_expected_output():
+ response = "test-data/SEPM-expected-endpoint-extract.json"
+ with open(response, 'r') as f:
+ api_response = json.loads(f.read())
+ return api_response
+
+
+def test_endpoint_ip_extract(mocker):
+ mock_demisto(mocker)
+ from SymantecEndpointProtection_V2 import endpoint_ip_extract
+ raw_json = _get_api_response()
+ assert [{'Address': '192.168.1.12', 'Mac': 'demisto-PC'},
+ {'Address': '192.168.1.125', 'Mac': 'DESKTOP-TF35B9B'}] == endpoint_ip_extract(raw_json)
+
+
+def test_endpoint_endpoint_extract(mocker):
+ mock_demisto(mocker)
+ from SymantecEndpointProtection_V2 import endpoint_endpoint_extract
+ raw_json = _get_api_response()
+ assert endpoint_endpoint_extract(raw_json) == _get_expected_output()
+
+
+def test_filter_only_old_clients(mocker):
+ mock_demisto(mocker)
+ from SymantecEndpointProtection_V2 import filter_only_old_clients
+ raw_json = _get_api_response()
+ assert filter_only_old_clients(raw_json, None) == []
+    assert filter_only_old_clients(raw_json, 10) == raw_json
diff --git a/Integrations/SymantecEndpointProtection_V2/SymantecEndpointProtection_V2.py b/Integrations/SymantecEndpointProtection_V2/SymantecEndpointProtection_V2.py
new file mode 100644
index 000000000000..b6edb148cffa
--- /dev/null
+++ b/Integrations/SymantecEndpointProtection_V2/SymantecEndpointProtection_V2.py
@@ -0,0 +1,840 @@
+import demistomock as demisto
+from CommonServerPython import *
+import requests
+import json
+import re
+
+requests.packages.urllib3.disable_warnings()
+if not demisto.params().get('proxy'):
+    # pop() instead of del, so a missing variable does not raise KeyError
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+ENDPOINTS_INFO_DEFAULT_COLUMNS = [
+ 'computerName',
+ 'ipAddresses',
+ 'operatingSystem',
+ 'osBitness',
+ 'cidsDefsetVersion',
+ 'lastScanTime',
+ 'description',
+ 'quarantineDesc',
+ 'domainOrWorkgroup',
+ 'macAddresses',
+ 'group',
+ 'dhcpServer',
+ 'biosVersion',
+ 'virtualizationPlatform',
+ 'computerTimeStamp',
+ 'creationTime',
+ 'agentTimestamp',
+ 'hardwareKey'
+]
+GROUPS_INFO_DEFAULT_COLUMNS = [
+ 'fullPathName',
+ 'numberOfPhysicalComputers',
+ 'numberOfRegisteredUsers',
+ 'policySerialNumber',
+ 'policyDate',
+ 'description',
+ 'created',
+ 'id'
+]
+
+'''LITERALS'''
+
+EPOCH_MINUTE = 60 * 1000
+EPOCH_HOUR = 60 * EPOCH_MINUTE
+
+
+'''HELPER FUNCTIONS'''
+
+
+def fix_url(base):
+ return base if base.endswith('/') else (base + '/')
+
+
+def endpoint_ip_extract(raw_json):
+ ips_array = []
+ for content in raw_json:
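+        # Note: 'Mac' carries the computer name (not a MAC address); the unit tests in SEPM_test.py assert this mapping.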
+ ip = {'Address': content.get('ipAddresses', [''])[0],
+ 'Mac': content.get('computerName')
+ }
+ ip = createContext(ip, removeNull=True)
+ if ip:
+ ips_array.append(ip)
+ return ips_array
+
+
+def endpoint_endpoint_extract(raw_json):
+ endpoints_arr = []
+ for content in raw_json:
+ endpoint = {'Hostname': content.get('computerName'),
+ 'MACAddress': content.get('macAddresses', [''])[0],
+ 'Domain': content.get('domainOrWorkgroup'),
+ 'IPAddress': content.get('ipAddresses', [''])[0],
+ 'DHCPServer': content.get('dhcpServer'),
+ 'OS': content.get('operatingSystem'),
+ 'OSVersion': content.get('osVersion'),
+ 'BIOSVersion': content.get('biosVersion'),
+ 'Memory': content.get('memory'),
+ 'Processors': content.get('processorType')
+ }
+ endpoint = createContext(endpoint, removeNull=True)
+ if endpoint:
+ endpoints_arr.append(endpoint)
+ return endpoints_arr
+
+
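+# Serialize non-null params into a query string, e.g. {'pageSize': 20, 'os': 'Win10'} -> '?pageSize=20&os=Win10' (key order is not guaranteed in py2 dicts).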
+def build_query_params(params):
+ list_params = map(lambda key: key + '=' + str(params[key]), params.keys())
+ query_params = '&'.join(list_params)
+ return '?' + query_params if query_params else ''
+
+
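+# POST the credentials to the SEPM REST authenticate endpoint; the parsed response carries the bearer token used by every other call.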
+def do_auth(server, creds, insecure):
+    url = fix_url(str(server)) + 'sepm/api/v1/identity/authenticate'
+    body = {
+        'username': creds.get('identifier') or '',
+        'password': creds.get('password') or '',
+        'domain': creds.get('domain') or ''
+ }
+ res = requests.post(url, headers={"Content-Type": "application/json"}, data=json.dumps(body), verify=not insecure)
+ return parse_response(res)
+
+
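+# Thin HTTP wrappers: each request attaches the bearer token and honors the 'insecure' instance parameter.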
+def do_get(token, raw, suffix):
+ insecure = demisto.getParam('insecure')
+ server = demisto.getParam('server')
+ url = fix_url(server) + suffix
+ res = requests.get(url, headers={'Authorization': 'Bearer ' + token}, verify=not insecure)
+    if raw:
+ return res
+ else:
+ return parse_response(res)
+
+
+def do_post(token, is_xml, suffix, body):
+ insecure = demisto.getParam('insecure')
+ server = demisto.getParam('server')
+ url = fix_url(server) + suffix
+ res = requests.post(url, headers={'Authorization': 'Bearer ' + token}, data=body, verify=not insecure)
+ if is_xml:
+ if res.content:
+ parsed_response = xml2json(res.content)
+ else:
+ return_error('Unable to parse the following response: {}'.format(res))
+ else:
+ parsed_response = parse_response(res)
+ return parsed_response
+
+
+def do_put(token, suffix, body):
+ insecure = demisto.getParam('insecure')
+ server = demisto.getParam('server')
+ url = fix_url(server) + suffix
+ res = requests.put(url, headers={'Authorization': 'Bearer ' + token,
+ 'Content-Type': 'application/json'}, data=json.dumps(body), verify=not insecure)
+ parsed_response = parse_response(res)
+ return parsed_response
+
+
+def do_patch(token, suffix, body):
+ insecure = demisto.getParam('insecure')
+ server = demisto.getParam('server')
+ url = fix_url(server) + suffix
+ res = requests.patch(url, headers={'Authorization': 'Bearer ' + token,
+ 'Content-Type': 'application/json'}, data=json.dumps(body), verify=not insecure)
+ parsed_response = parse_response(res)
+ return parsed_response
+
+
+def parse_response(resp):
+ if resp.status_code == 200 or resp.status_code == 207:
+ if resp.text == '':
+ return resp
+ try:
+ return resp.json()
+        except Exception as ex:
+ return_error('Unable to parse response: {}'.format(ex))
+ else:
+ try:
+ message = resp.json().get('errorMessage')
+ return_error('Error: {}'.format(message))
+ except Exception:
+ return_error('Error: {}'.format(resp))
+
+
+def get_token_from_response(resp):
+ if resp.get('token'):
+ return resp.get('token')
+ else:
+ return_error('No token: {}'.format(resp))
+
+
+def choose_columns(column_arg, default_list):
+ if not column_arg:
+ columns_list = default_list
+ columns_list.sort()
+ elif column_arg == 'all' or column_arg == '*':
+ columns_list = []
+ else:
+ columns_list = argToList(column_arg)
+ return columns_list
+
+
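+# SOAP envelope builders for SEPM's legacy ClientService/CommandService endpoints.
+# Element and namespace names follow the SEPM web-services WSDL.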
+def build_command_xml(data):
+    return '<?xml version="1.0" encoding="utf-8"?><S:Envelope xmlns:S="http://schemas.xmlsoap.org/soap/envelope/"><S:Body> \
+    {0} </S:Body></S:Envelope>'.format(data)
+
+
+def build_client_xml(data):
+    return '<?xml version="1.0" encoding="utf-8"?><S:Envelope xmlns:S="http://schemas.xmlsoap.org/soap/envelope/"><S:Body> \
+    {0} </S:Body></S:Envelope>'.format(data)
+
+
+def get_command_status_details(token, command_id):
+ xml = build_command_xml(
+        '<ns2:getCommandStatusDetails xmlns:ns2="http://command.client.webservice.sepm.symantec.com/"><commandID>{0}</commandID> </ns2:getCommandStatusDetails>'.format(command_id))
+ res_json = do_post(token, True, 'sepm/ws/v1/CommandService', xml)
+ return res_json
+
+
+def build_command_response_output(title, command_id, message, response):
+ cmd_status_details = response.get('cmdStatusDetail')
+ cmd_status_details.pop('hardwareKey', None)
+ md = tableToMarkdown(title, cmd_status_details) + '\n'
+ md += '### Command ID: {0}\n'.format(command_id)
+ md += '### ' + message
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': {
+ 'cmdStatusDetail': cmd_status_details,
+ 'commandId': command_id
+ },
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'SEPM.LastCommand': createContext({'CommandDetails': cmd_status_details, 'CommandId': command_id},
+ removeNull=True)
+ }
+ })
+
+
+def get_computer_id_by_ip(token, ip):
+    xml = build_client_xml('<ns2:getComputersByIP xmlns:ns2="http://client.webservice.sepm.symantec.com/"><ipAddresses>{0}</ipAddresses> </ns2:getComputersByIP>'.format(ip))
+ res_json = do_post(token, True, 'sepm/ws/v1/ClientService', xml)
+ return demisto.get(json.loads(res_json),
+ 'Envelope.Body.getComputersByIPResponse.ComputerResult.computers.computerId')
+
+
+def get_computer_id_by_hostname(token, hostname):
+ xml = build_client_xml(
+        '<ns2:getComputersByHostName xmlns:ns2="http://client.webservice.sepm.symantec.com/"><computerHostNames>{0}</computerHostNames> '
+        ' </ns2:getComputersByHostName>'.format(hostname))
+ res_json = do_post(token, True, 'sepm/ws/v1/ClientService', xml)
+ return demisto.get(json.loads(res_json),
+ 'Envelope.Body.getComputersByHostNameResponse.ComputerResult.computers.computerId')
+
+
+def get_computer_id(token, endpoint_ip, endpoint_host_name):
+ if endpoint_ip:
+ try:
+ computer_id = get_computer_id_by_ip(token, endpoint_ip)
+ except Exception:
+ return_error('Failed to locate the endpoint by its IP address.')
+ elif endpoint_host_name:
+ try:
+ computer_id = get_computer_id_by_hostname(token, endpoint_host_name)
+ except Exception:
+            return_error('Failed to locate the endpoint by its hostname.')
+ else:
+ return_error('Please provide the IP address or the hostname of endpoint.')
+ return computer_id
+
+
+def update_content(token, computer_id):
+ xml = build_command_xml(
+        '<ns2:runClientCommandUpdateContent xmlns:ns2="http://command.client.webservice.sepm.symantec.com/"><computerGUIDs>{0}</computerGUIDs> '
+        ' </ns2:runClientCommandUpdateContent>'.format(computer_id))
+ res_json = do_post(token, True, 'sepm/ws/v1/CommandService', xml)
+ command_id = demisto.get(json.loads(
+ res_json), 'Envelope.Body.runClientCommandUpdateContentResponse.CommandClientResult.commandId')
+ if not command_id:
+ error_code = demisto.get(
+ res_json, 'Envelope.Body.runClientCommandUpdateContentResponse.CommandClientResult.inputErrors.errorCode')
+ error_message = demisto.get(
+ res_json, 'Envelope.Body.runClientCommandUpdateContentResponse.CommandClientResult.inputErrors.errorMessage')
+ if error_code or error_message:
+            return_error('An error response was returned from the server:'
+ ' {0} with code: {1}'.format(error_message, error_code))
+ else:
+ return_error('Could not retrieve command ID, no error was returned from server')
+ return command_id
+
+
+def scan(token, computer_id, scan_type):
+ xml = build_command_xml(
+        '<ns2:runClientCommandScan xmlns:ns2="http://command.client.webservice.sepm.symantec.com/"><computerGUIDList>{0}</computerGUIDList> '
+        '<scanType>{1}</scanType> </ns2:runClientCommandScan>'.format(computer_id, scan_type))
+ res_json = do_post(token, True, 'sepm/ws/v1/CommandService', xml)
+ command_id = demisto.get(json.loads(res_json), 'Envelope.Body.runClientCommandScanResponse.'
+ 'CommandClientResult.commandId')
+ if not command_id:
+ error_code = demisto.get(json.loads(
+ res_json), 'Envelope.Body.runClientCommandScanResponse.CommandClientResult.inputErrors.errorCode')
+ error_message = demisto.get(json.loads(
+ res_json), 'Envelope.Body.runClientCommandScanResponse.CommandClientResult.inputErrors.errorMessage')
+ if error_code or error_message:
+        return_error('An error response was returned from the server: {0} with code: {1}'.format(error_message,
+ error_code))
+ else:
+ return_error('Could not retrieve command ID, no error was returned from server')
+ return command_id
+
+
+def quarantine(token, computer_id, action_type):
+ xml = build_command_xml(
+        '<ns2:runClientCommandQuarantine xmlns:ns2="http://command.client.webservice.sepm.symantec.com/"><command>{0}</command> <targetObjectIds>{1}</targetObjectIds>'
+        ' <targetObjectType>COMPUTER</targetObjectType> '
+        ' </ns2:runClientCommandQuarantine>'.format(action_type, computer_id))
+ res_json = do_post(token, True, 'sepm/ws/v1/CommandService', xml)
+ command_id = demisto.get(json.loads(
+ res_json), 'Envelope.Body.runClientCommandQuarantineResponse.CommandClientResult.commandId')
+ if not command_id:
+ error_code = demisto.get(json.loads(
+ res_json), 'Envelope.Body.runClientCommandQuarantineResponse.CommandClientResult.inputErrors.errorCode')
+ error_message = demisto.get(json.loads(
+ res_json), 'Envelope.Body.runClientCommandQuarantineResponse.CommandClientResult.inputErrors.errorMessage')
+ if error_code or error_message:
+        return_error('An error response was returned from the server: {0} with code: {1}'.format(error_message,
+ error_code))
+ else:
+ return_error('Could not retrieve command ID, no error was returned from server')
+ return command_id
+
+
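+# Time-zone offsets are accepted as '+HH:MM' / '-HH:MM' strings, e.g. '+02:30'.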
+def validate_time_zone(time_zone):
+ pattern = re.compile("^[+-][0-9][0-9]:[0-9][0-9]")
+ return bool(pattern.match(time_zone))
+
+
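+# Shift an epoch-milliseconds timestamp by the configured offset, e.g. '+02:00' adds 2 * EPOCH_HOUR (7,200,000 ms).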
+def parse_epoch_to_local(epoch, time_zone):
+ if not validate_time_zone(time_zone):
+        return_error('timeZone param should be in the format of [+/-][h][h]:[m][m]. For example, +04:30')
+ operator = time_zone[0]
+ hour = int(time_zone[1:3])
+ minutes = int(time_zone[4:6])
+ time_zone_epoch = hour * EPOCH_HOUR + minutes * EPOCH_MINUTE
+ local = int(epoch) + time_zone_epoch if operator == '+' else int(epoch) - time_zone_epoch
+ return local
+
+
+def change_assigned(policy):
+    new_format = {
+        'Policy Name': policy.get('PolicyName'),
+        'Type': policy.get('Type'),
+        'ID': policy.get('ID'),
+        'Assigned': bool(policy.get('AssignedLocations') or policy.get('AssignedCloudGroups')),
+        'Description': policy.get('Description'),
+ 'Enabled': policy.get('Enabled')
+ }
+ return new_format
+
+
+def sanitize_policies_list_for_md(policies_list):
+    return map(change_assigned, policies_list)
+
+
+def sanitize_policies_list(policies_list):
+ return map(lambda policy: {
+ 'PolicyName': policy['name'],
+ 'Type': policy['policytype'],
+ 'ID': policy['id'],
+ 'Description': policy['desc'],
+ 'Enabled': policy['enabled'],
+ 'AssignedLocations': map(lambda location: {
+ 'GroupID': location.get('groupId'),
+ 'Locations': location.get('locationIds')
+ }, policy.get('assignedtolocations') if policy.get('assignedtolocations') else []),
+ 'AssignedCloudGroups': map(lambda location: {
+ 'GroupID': location.get('groupId'),
+ 'Locations': location.get('locationIds')
+ }, policy.get('assignedtocloudgroups') if policy.get('assignedtocloudgroups') else []),
+ }, policies_list)
+
+
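+# Simple IPv4 check used to decide whether an endpoint argument is an IP address or a hostname.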
+def validate_ip(ip):
+    pattern = re.compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$')
+ return bool(pattern.match(ip))
+
+
+def get_client_content(token, time_zone):
+ client_content_json = do_get(token, False, 'sepm/api/v1/stats/client/content')
+ epoch_time = client_content_json.get('lastUpdated')
+ if time_zone:
+ epoch_time = parse_epoch_to_local(epoch_time, time_zone)
+ last_update_date = timestamp_to_datestring(epoch_time, '%a %b %d %y %H:%M:%S %z')
+ client_version = client_content_json.get('clientDefStatusList')
+ return client_content_json, client_version, last_update_date
+
+
+def get_endpoints_info(token, computer_name, last_update, os, page_size, columns, group_name=None):
+ params = {
+ 'computerName': computer_name,
+ 'lastUpdate': last_update,
+ 'os': os,
+ 'pageSize': page_size,
+ 'columns': columns
+ }
+ params = createContext(params, removeNull=True)
+ json_response = do_get(token, False, 'sepm/api/v1/computers' + build_query_params(params))
+ filtered_json_response = json_response.get('content')
+ final_json = []
+ entry_context = []
+ for content in filtered_json_response:
+ group = content.get('group', {'name': ''})
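+        # A trailing '*' in group_name makes the filter a prefix match on the group name.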
+ bool_start = group.get('name').startswith(group_name[:-1]) if group_name and group_name[-1] == '*' else False
+ if (not group_name) or group.get('name') == group_name or bool_start: # No group name filter
+ # used `set` on the mac address list as it sometimes contained duplicated values
+ content['macAddresses'] = list(set(content.get('macAddresses')))
+ entry_context.append({
+ 'Hostname': content.get('computerName'),
+ 'Domain': content.get('domainOrWorkgroup'),
+ 'IPAddresses': content.get('ipAddresses'),
+ 'OS': content.get('operatingSystem', '') + ' | ' + content.get('osBitness', ''),
+                'Description': content.get('description'),
+ 'MACAddresses': content.get('macAddresses'),
+                'BIOSVersion': content.get('biosVersion'),
+ 'DHCPServer': content.get('dhcpServer'),
+ 'HardwareKey': content.get('hardwareKey'),
+ 'LastScanTime': epochToTimestamp(content.get('lastScanTime')),
+ 'RunningVersion': content.get('deploymentRunningVersion'),
+ 'TargetVersion': content.get('deploymentTargetVersion'),
+ 'Group': group.get('name'),
+ 'PatternIdx': content.get('patternIdx'),
+ 'OnlineStatus': content.get('onlineStatus'),
+ 'UpdateTime': epochToTimestamp(content.get('lastUpdateTime')),
+ })
+ final_json.append(content)
+
+ return final_json, entry_context
+
+
+def create_endpoints_filter_string(computer_name, last_update, os, page_size, group_name=None):
+ md = '## Endpoints Information'
+ if last_update != '0':
+ md += ', filtered for last updated status: {}'.format(last_update) if last_update else ''
+ md += ', filtered for hostname: {}'.format(computer_name) if computer_name else ''
+ md += ', filtered for os: {}'.format(os) if os else ''
+ md += ', filtered for group name: {}'.format(group_name) if group_name else ''
+ md += ', page size: {}'.format(page_size) if page_size else ''
+ md += '\n'
+ return md
+
+
+def get_groups_info(token, columns):
+ json_res = do_get(token, False, 'sepm/api/v1/groups' + build_query_params({'columns': columns}))
+ sepm_groups = []
+ filtered_json_response = json_res.get('content')
+ for entry in filtered_json_response:
+ group = {}
+ for header in GROUPS_INFO_DEFAULT_COLUMNS:
+ group[header] = entry[header]
+ sepm_groups.append(group)
+ return filtered_json_response, json_res, sepm_groups
+
+
+def get_command_status(token, command_id):
+ command_status_json = get_command_status_details(token, command_id)
+ cmd_status_detail = demisto.get(json.loads(command_status_json),
+ 'Envelope.Body.getCommandStatusDetailsResponse.'
+ 'CommandStatusDetailResult.cmdStatusDetail')
+ cmd_status_detail.pop('hardwareKey', None)
+ state_id = cmd_status_detail.get('stateId')
+    is_done = state_id in ('2', '3')
+ message = 'Command is done.' if is_done else 'Command is in progress. Run !sep-command-status to check again.'
+ return cmd_status_detail, message
+
+
+def get_list_of_policies(token):
+ policies_list = do_get(token, False, 'sepm/api/v1/policies/summary').get('content')
+ fixed_policy_list = sanitize_policies_list(policies_list)
+ md_list = sanitize_policies_list_for_md(fixed_policy_list)
+ return md_list, policies_list, fixed_policy_list
+
+
+def endpoint_quarantine(token, endpoint, action):
+ action_type = 'Quarantine' if action == 'Add' else 'Undo'
+ computer_id = get_id_by_endpoint(token, endpoint)
+ command_id = quarantine(token, computer_id, action_type)
+ return command_id
+
+
+def get_location_list(token, group_id):
+ url = 'sepm/api/v1/groups/{}/locations'.format(group_id)
+ url_resp = do_get(token, False, url)
+ location_ids = map(lambda location_string: {'ID': location_string.split('/')[-1]}, url_resp)
+ return url_resp, location_ids
+
+
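+# Resolve an endpoint argument (IPv4 address or hostname) to its SEPM computer ID.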
+def get_id_by_endpoint(token, endpoint):
+ if not endpoint:
+ return_error('Please provide the IP address or the hostname of endpoint.')
+ elif validate_ip(endpoint):
+ computer_id = get_computer_id(token, endpoint, None)
+ else:
+ computer_id = get_computer_id(token, None, endpoint)
+ return computer_id
+
+
+def scan_endpoint(token, endpoint, scan_type):
+ computer_id = get_id_by_endpoint(token, endpoint)
+ command_id = scan(token, computer_id, scan_type)
+ return command_id
+
+
+def update_endpoint_content(token, endpoint):
+ computer_id = get_id_by_endpoint(token, endpoint)
+ command_id = update_content(token, computer_id)
+ return command_id
+
+
+def filter_only_old_clients(filtered_json_response, desired_version):
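+    # Keep clients whose running version differs from the desired version (when provided),
+    # otherwise from their own deployment target version.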
+ filtered = []
+ for content in filtered_json_response:
+ RunningVersion = content.get('deploymentRunningVersion')
+ TargetVersion = content.get('deploymentTargetVersion')
+
+ if (desired_version and RunningVersion != desired_version) or \
+ (not desired_version and RunningVersion != TargetVersion):
+ filtered.append(content)
+ return filtered
+
+
+'''COMMANDS'''
+
+
+def system_info_command(token):
+ version_json = do_get(token, False, 'sepm/api/v1/version')
+ avdef_json = do_get(token, False, 'sepm/api/v1/content/avdef/latest')
+ system_info_json = {
+ 'version': version_json,
+ 'avdef': avdef_json
+ }
+ md = '## System Information\n'
+ md += tableToMarkdown('Version', version_json)
+ md += tableToMarkdown('AV Definitions', avdef_json)
+ context = avdef_json.get('publishedBySymantec')
+    if isinstance(context, dict):
+ context = createContext(context, removeNull=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': system_info_json,
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'SEPM.ServerAVDefVersion': context
+ }
+ })
+
+
+def old_clients_command(token):
+ computer_name = demisto.getArg('computerName')
+ last_update = demisto.getArg('lastUpdate')
+ os = demisto.getArg('os')
+ page_size = demisto.getArg('pageSize')
+ columns = demisto.getArg('columns')
+ group_name = demisto.getArg('groupName')
+ desired_version = demisto.getArg('desiredVersion')
+ filtered_json_response, entry_context = get_endpoints_info(token, computer_name, last_update, os, page_size,
+ columns, group_name)
+ columns_list = choose_columns(columns, ENDPOINTS_INFO_DEFAULT_COLUMNS)
+ filtered_json_response = filter_only_old_clients(filtered_json_response, desired_version)
+    md = create_endpoints_filter_string(computer_name, last_update, os, page_size, group_name)
+ md += tableToMarkdown('Old Endpoints', filtered_json_response, columns_list)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': filtered_json_response,
+ 'HumanReadable': md
+ })
+
+
+def client_content_command(token):
+ time_zone = demisto.getParam('timeZone')
+ client_content_json, client_version, last_update_date = get_client_content(token, time_zone)
+ md = '## Client Content, last updated on {0}\n'.format(last_update_date)
+ md += tableToMarkdown('Client Content Versions', client_version)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': client_content_json,
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'SEPM.ClientContentVersions': client_version,
+ 'SEPM.LastUpdated': last_update_date
+ }
+ })
+
+
+def endpoints_info_command(token):
+ computer_name = demisto.getArg('computerName')
+ last_update = demisto.getArg('lastUpdate')
+ os = demisto.getArg('os')
+ page_size = demisto.getArg('pageSize')
+ columns = demisto.getArg('columns')
+ group_name = demisto.getArg('groupName')
+ filtered_json_response, entry_context = get_endpoints_info(token, computer_name, last_update, os, page_size,
+ columns, group_name)
+ columns_list = choose_columns(columns, ENDPOINTS_INFO_DEFAULT_COLUMNS)
+    md = create_endpoints_filter_string(computer_name, last_update, os, page_size, group_name)
+ md += tableToMarkdown('Endpoints', filtered_json_response, columns_list)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': filtered_json_response,
+ 'HumanReadable': md,
+ 'IgnoreAutoExtract': True,
+ 'EntryContext': {
+ 'SEPM.Endpoint(val.Hostname == obj.Hostname)': createContext(entry_context, removeNull=True),
+ 'IP(val.Address === obj.Address)': endpoint_ip_extract(filtered_json_response),
+ 'Endpoint(val.Hostname == obj.Hostname)': endpoint_endpoint_extract(filtered_json_response)
+ }
+ })
+
+
+def groups_info_command(token):
+ columns = demisto.getArg('columns')
+ filtered_json_response, json_res, sepm_groups = get_groups_info(token, columns)
+ columns_list = choose_columns(columns, GROUPS_INFO_DEFAULT_COLUMNS)
+ md = tableToMarkdown('Groups Information', filtered_json_response, columns_list)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': json_res,
+ 'HumanReadable': md,
+ 'IgnoreAutoExtract': True,
+ 'EntryContext':
+ {
+ 'SEPM.Groups': sepm_groups
+ }
+ })
+
+
+def command_status(token):
+ command_id = demisto.getArg('commandId')
+ cmd_status_detail, message = get_command_status(token, command_id)
+ md = '### Command ID: {0}\n'.format(command_id)
+ md += '### State ID: {0}\n'.format(cmd_status_detail.get('stateId'))
+ md += '### ' + message
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': {
+ 'cmdStatusDetail': cmd_status_detail,
+ 'commandId': command_id
+ },
+ 'HumanReadable': md,
+ 'IgnoreAutoExtract': True,
+ 'EntryContext': {
+ 'SEPM.LastCommand': createContext({'CommandDetails': cmd_status_detail, 'CommandID': command_id},
+ removeNull=True)
+ }
+ })
+
+
+def list_policies_command(token):
+ md_list, policies_list, fixed_policy_list = get_list_of_policies(token)
+ md = tableToMarkdown('List of existing policies', md_list, [
+ 'Policy Name', 'Type', 'ID', 'Enabled', 'Assigned', 'Description'])
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': policies_list,
+ 'HumanReadable': md,
+ 'IgnoreAutoExtract': True,
+ 'EntryContext': {
+ 'SEPM.PoliciesList': createContext(fixed_policy_list, removeNull=True)
+ }
+ })
+
+
+def assign_policy_command(token):
+    group_id = demisto.getArg('groupID')
+    location_id = demisto.getArg('locationID')
+    policy_type = demisto.getArg('policyType').lower()
+    policy_id = demisto.getArg('policyID')
+    do_put(token, 'sepm/api/v1/groups/{0}/locations/{1}/policies/{2}'.format(group_id,
+                                                                             location_id, policy_type),
+           {'id': policy_id})
+    md = '### Policy: {0}, of type: {1}, was assigned to location: {2}, in group: {3}'.format(
+        policy_id, policy_type, location_id, group_id)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': '',
+ 'HumanReadable': md,
+ 'EntryContext': {}
+ })
+
+
+def list_locations_command(token):
+ group_id = demisto.getArg('groupID')
+ url_resp, location_ids = get_location_list(token, group_id)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': url_resp,
+ 'HumanReadable': tableToMarkdown('Locations', map(lambda location: {'Location ID': location.get('ID')},
+ location_ids)),
+ 'IgnoreAutoExtract': True,
+ 'EntryContext': {
+ 'SEPM.Locations': location_ids
+ }
+ })
+
+
+def endpoint_quarantine_command(token):
+ endpoint = demisto.getArg('endpoint')
+ action = demisto.getArg('actionType')
+ command_id = endpoint_quarantine(token, endpoint, action)
+ message = '### Initiated quarantine for endpoint {0}.' \
+ ' Command ID: {1}.'.format(endpoint, command_id) \
+ if action == 'Add' else '### Removing endpoint: {0} from quarantine. Command ID: {1}.'.format(endpoint,
+ command_id)
+ context = {
+ 'CommandID': command_id,
+ 'Action': action,
+ 'Endpoint': endpoint
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': command_id,
+ 'HumanReadable': message,
+ 'IgnoreAutoExtract': True,
+ 'EntryContext': {
+ 'SEPM.Quarantine': context
+ }
+ })
+
+
+def scan_endpoint_command(token):
+ endpoint = demisto.getArg('endpoint')
+ scan_type = demisto.getArg('scanType')
+ command_id = scan_endpoint(token, endpoint, scan_type)
+ message = '### Initiated scan on endpoint: {0} with type: {1}. Command ID: {2}.'.format(endpoint,
+ scan_type, command_id)
+ context = {
+ 'CommandID': command_id,
+ 'Type': scan_type,
+ 'Endpoint': endpoint
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': command_id,
+ 'HumanReadable': message,
+ 'IgnoreAutoExtract': True,
+ 'EntryContext': {
+ 'SEPM.Scan': context
+ }
+ })
+
+
+def update_endpoint_content_command(token):
+ endpoint = demisto.getArg('endpoint')
+ command_id = update_endpoint_content(token, endpoint)
+ message = '### Updating endpoint: {0}. Command ID: {1}.'.format(endpoint, command_id)
+ context = {
+ 'CommandID': command_id,
+ 'Endpoint': endpoint
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': command_id,
+ 'HumanReadable': message,
+ 'IgnoreAutoExtract': True,
+ 'EntryContext': {
+ 'SEPM.Update': context
+ }
+ })
+
+
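+# The computers PATCH endpoint expects a JSON array of {'group': {'id': ...}, 'hardwareKey': ...} objects.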
+def move_client_to_group(token, group_id, hardware_key):
+ body = [{
+ 'group': {
+ 'id': group_id
+ },
+ 'hardwareKey': hardware_key
+ }]
+ response = do_patch(token, 'sepm/api/v1/computers', body)
+ message = '### Moved client to requested group successfully' \
+ if response[0].get('responseCode') == '200' \
+ else '### Error moving client'
+ return response, message
+
+
+def move_client_to_group_command(token):
+ group_id = demisto.getArg('groupID')
+ hardware_key = demisto.getArg('hardwareKey')
+ response, message = move_client_to_group(token, group_id, hardware_key)
+ demisto.results(
+ {
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': response,
+ 'HumanReadable': message,
+ 'IgnoreAutoExtract': True,
+ })
+
+
+'''COMMANDS SWITCH'''
+
+current_command = demisto.command()
+try:
+ '''
+    Before EVERY command, the following two calls are performed (do_auth and get_token_from_response).
+ '''
+    resp = do_auth(server=demisto.getParam('server'), creds=demisto.getParam(
+        'authentication'), insecure=demisto.getParam('insecure'))
+ token = get_token_from_response(resp)
+ if current_command == 'test-module':
+ # This is the call made when pressing the integration test button.
+ if token:
+ demisto.results('ok')
+ if current_command == 'sep-system-info':
+ system_info_command(token)
+ if current_command == 'sep-client-content':
+ client_content_command(token)
+ if current_command == 'sep-endpoints-info':
+ endpoints_info_command(token)
+ if current_command == 'sep-groups-info':
+ groups_info_command(token)
+ if current_command == 'sep-command-status':
+ command_status(token)
+ if current_command == 'sep-list-policies':
+ list_policies_command(token)
+ if current_command == 'sep-assign-policy':
+        assign_policy_command(token)
+ if current_command == 'sep-list-locations':
+ list_locations_command(token)
+ if current_command == 'sep-endpoint-quarantine':
+ endpoint_quarantine_command(token)
+ if current_command == 'sep-scan-endpoint':
+ scan_endpoint_command(token)
+ if current_command == 'sep-update-endpoint-content':
+ update_endpoint_content_command(token)
+ if current_command == 'sep-move-client-to-group':
+ move_client_to_group_command(token)
+ if current_command == 'sep-identify-old-clients':
+ old_clients_command(token)
+except Exception as ex:
+ demisto.results('Cannot perform the command: {}. Error: {}'.format(current_command, ex))
diff --git a/Integrations/SymantecEndpointProtection_V2/SymantecEndpointProtection_V2.yml b/Integrations/SymantecEndpointProtection_V2/SymantecEndpointProtection_V2.yml
new file mode 100644
index 000000000000..a09e1948e3ff
--- /dev/null
+++ b/Integrations/SymantecEndpointProtection_V2/SymantecEndpointProtection_V2.yml
@@ -0,0 +1,495 @@
+category: Endpoint
+commonfields:
+ id: Symantec Endpoint Protection V2
+ version: -1
+configuration:
+- display: Server (e.g., https://1.2.3.4:8446)
+ name: server
+ required: true
+ type: 0
+- display: Authentication
+ name: authentication
+ required: true
+ type: 9
+- display: SEPM domain for the user
+ name: domain
+ required: false
+ type: 0
+- defaultvalue: 'false'
+ display: Trust any certificate (unsecure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: 'false'
+  display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Local time zone (e.g., +02:30,-06:00)
+ name: timeZone
+ required: false
+ type: 0
+description: Query the Symantec Endpoint Protection Manager using the official REST
+ API.
+display: Symantec Endpoint Protection V2
+name: Symantec Endpoint Protection V2
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: A CSV list of the displayed columns.
+ isArray: false
+ name: columns
+ required: false
+ secret: false
+ - default: false
+ description: Filters by the host name of the computer. A wild card search can be done using '*' at the end of the query.
+ isArray: false
+ name: computerName
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: Indicates when a computer's status was last updated. The default is "0", which returns all results.
+ isArray: false
+ name: lastUpdate
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The operating system by which to filter.
+ isArray: false
+ name: os
+ predefined:
+ - CentOs
+ - Debian
+ - Fedora
+ - MacOSX
+ - Oracle
+ - OSX
+ - RedHat
+ - SUSE
+ - Ubuntu
+ - Win10
+ - Win2K
+ - Win7
+ - Win8
+ - WinEmb7
+ - WinEmb8
+ - WinEmb81
+ - WinFundamental
+ - WinNT
+ - Win2K3
+ - Win2K8
+ - Win2K8R2
+ - WinVista
+ - WinXP
+ - WinXPEmb
+ - WinXPProf64
+ required: false
+ secret: false
+ - default: false
+ description: The number of results to include on each page. The default is 20.
+ isArray: false
+ name: pageSize
+ required: false
+ secret: false
+ - default: false
+ description: The name of the group to which the endpoint belongs. A wild card search can be done using '*' at the end of the query.
+ isArray: false
+ name: groupName
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns information about endpoints.
+ execution: false
+ name: sep-endpoints-info
+ outputs:
+ - contextPath: SEPM.Endpoint.Hostname
+ description: The hostname of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.Domain
+ description: The domain of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.IPAddresses
+ description: The IP addresses of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.OS
+ description: The OS information of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.Description
+ description: The description of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.MACAddresses
+ description: The MAC address of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.BIOSVersion
+ description: The BIOS version of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.DHCPServer
+ description: The DHCP server address of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.HardwareKey
+    description: The hardware key of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.LastScanTime
+ description: The last scan time of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.RunningVersion
+ description: The running version of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.TargetVersion
+ description: The target version of the endpoint.
+ type: String
+ - contextPath: IP.Address
+ description: The IP address of the endpoint.
+ type: String
+ - contextPath: IP.Host
+ description: The IP host of the endpoint.
+ type: String
+ - contextPath: Endpoint.Hostname
+ description: The hostname of the endpoint.
+ type: Unknown
+ - contextPath: Endpoint.MACAddress
+ description: The MAC address of the endpoint.
+ type: Unknown
+ - contextPath: Endpoint.Domain
+ description: The domain of the endpoint.
+ type: Unknown
+ - contextPath: Endpoint.IPAddress
+ description: The IP address of the endpoint.
+ type: Unknown
+ - contextPath: Endpoint.DHCPServer
+ description: The DHCP server of the endpoint.
+ type: Unknown
+ - contextPath: Endpoint.OS
+ description: The OS of the endpoint.
+ type: String
+ - contextPath: Endpoint.OSVersion
+ description: The OS version of the endpoint.
+ type: String
+ - contextPath: Endpoint.BIOSVersion
+ description: The BIOS version of the endpoint.
+ type: String
+ - contextPath: Endpoint.Memory
+ description: The memory of the endpoint.
+ type: String
+ - contextPath: Endpoint.Processors
+ description: The processors that the endpoint uses.
+ type: String
+ - contextPath: IP.Hostname
+ description: The hostname that is mapped to this IP address.
+ type: String
+ - contextPath: SEPM.Endpoint.Group
+ description: The group of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.PatternIdx
+ description: The PatternIdx of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.OnlineStatus
+ description: The online status of the endpoint.
+ type: String
+ - contextPath: SEPM.Endpoint.UpdateTime
+ description: The update time of the endpoint.
+ type: String
+ - arguments:
+ - default: false
+ description: The column by which the results are sorted.
+ isArray: false
+ name: columns
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns information about groups.
+ execution: false
+ name: sep-groups-info
+ outputs:
+ - contextPath: SEPM.Groups
+ description: The list of groups.
+ type: Unknown
+ - contextPath: SEPM.Groups.created
+    description: The creation time (in Epoch).
+ type: number
+ - contextPath: SEPM.Groups.fullPathName
+ description: The name of the group.
+ type: string
+ - contextPath: SEPM.Groups.id
+ description: The ID of the group.
+ type: string
+ - contextPath: SEPM.Groups.numberOfPhysicalComputers
+ description: The number of physical computers in the group.
+ type: number
+ - contextPath: SEPM.Groups.numberOfRegisteredUsers
+ description: The number of registered users in the group.
+ type: number
+ - contextPath: SEPM.Groups.policyDate
+ description: The date of the policy (in Epoch).
+ type: number
+ - contextPath: SEPM.Groups.policySerialNumber
+ description: The serial number of the policy.
+ type: number
+ - deprecated: false
+ description: Returns information about the system, such as version or AV definition.
+ execution: false
+ name: sep-system-info
+ outputs:
+ - contextPath: SEPM.ServerAVDefVersion
+ description: The version of the AV definition.
+ type: string
+ - arguments:
+ - default: false
+ description: The ID of the command.
+ isArray: false
+ name: commandId
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the status of a command.
+ execution: false
+ name: sep-command-status
+ outputs:
+ - contextPath: SEPM.LastCommand.CommandDetails
+ description: The details of the command.
+ type: string
+ - contextPath: SEPM.LastCommand.CommandId
+ description: The ID of the command.
+ type: string
+ - deprecated: false
+    description: Retrieves the client content versions.
+ execution: false
+ name: sep-client-content
+ outputs:
+ - contextPath: SEPM.ClientContentVersions
+ description: Displays the versions for each client.
+ type: string
+ - contextPath: SEPM.LastUpdated
+    description: The date of the last update.
+ type: string
+ - deprecated: false
+ description: Retrieves a list of existing policies.
+ execution: false
+ name: sep-list-policies
+ outputs:
+ - contextPath: SEPM.PoliciesList.PolicyName
+ description: The name of the policy.
+ type: string
+ - contextPath: SEPM.PoliciesList.Type
+ description: The type of the policy.
+ type: string
+ - contextPath: SEPM.PoliciesList.ID
+ description: The ID of the policy.
+ type: string
+ - contextPath: SEPM.PoliciesList.Description
+ description: The description of the policy.
+ type: string
+ - contextPath: SEPM.PoliciesList.Enabled
+    description: Whether the policy is enabled. Enabled if "True".
+ type: boolean
+ - contextPath: SEPM.PoliciesList.AssignedLocations.GroupID
+ description: The ID of the group of the locations assigned to this policy.
+ type: string
+ - contextPath: SEPM.PoliciesList.AssignedLocations.Locations
+ description: The list of location IDs assigned to this policy.
+ type: string
+ - contextPath: SEPM.PoliciesList.AssignedCloudGroups.GroupID
+ description: The ID of the cloud group of the locations assigned to this policy.
+ type: string
+ - contextPath: SEPM.PoliciesList.AssignedCloudGroups.Locations
+ description: The list of location IDs belonging to a cloud group assigned to this policy.
+ type: string
+ - arguments:
+ - default: false
+ description: The ID of the group to which the endpoint belongs.
+ isArray: false
+ name: groupID
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the location of the endpoint.
+ isArray: false
+ name: locationID
+ required: true
+ secret: false
+ - default: false
+ description: The type of policy to be assigned.
+ isArray: false
+ name: policyType
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the policy to be assigned.
+ isArray: false
+ name: policyID
+ required: true
+ secret: false
+ deprecated: false
+ description: Assigns an existing policy to a specified location.
+ execution: false
+ name: sep-assign-policy
+ - arguments:
+ - default: false
+ description: The group ID for which to list locations.
+ isArray: false
+ name: groupID
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves a list of location IDs for a specified group.
+ execution: false
+ name: sep-list-locations
+ outputs:
+ - contextPath: SEPM.Locations.ID
+ description: The ID of the location.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The IP or hostname of the endpoint.
+ isArray: false
+ name: endpoint
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Adds or removes an endpoint from quarantine.
+ isArray: false
+ name: actionType
+ predefined:
+ - Add
+ - Remove
+ required: true
+ secret: false
+ deprecated: false
+ description: Quarantines an endpoint according to its policy.
+ execution: false
+ name: sep-endpoint-quarantine
+ outputs:
+ - contextPath: SEPM.Quarantine.CommandID
+ description: The ID of the command that was run.
+ type: string
+ - contextPath: SEPM.Quarantine.Action
+    description: The action type. Can be "Add" or "Remove".
+ type: string
+ - contextPath: SEPM.Quarantine.Endpoint
+    description: The IP address or hostname identifying the endpoint.
+ type: string
+ - arguments:
+ - default: false
+ description: The IP address or hostname of the endpoint.
+ isArray: false
+ name: endpoint
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The scan type of the endpoint. Can be "ScanNow_Quick", "ScanNow_Full", or "ScanNow_Custom".
+ isArray: false
+ name: scanType
+ predefined:
+ - ScanNow_Quick
+ - ScanNow_Full
+ - ScanNow_Custom
+ required: true
+ secret: false
+ deprecated: false
+ description: Scans an endpoint.
+ execution: false
+ name: sep-scan-endpoint
+ outputs:
+ - contextPath: SEPM.Scan.CommandID
+ description: The ID of the command that was run.
+ type: string
+ - contextPath: SEPM.Scan.Type
+ description: The type of the scan. Can be "ScanNow_Quick", "ScanNow_Full", or "ScanNow_Custom".
+ type: string
+ - contextPath: SEPM.Scan.Endpoint
+    description: The IP address or hostname identifying the endpoint.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The IP address or hostname of the endpoint.
+ isArray: false
+ name: endpoint
+ required: true
+ secret: false
+ deprecated: false
+ description: Updates the content of a specified client.
+ execution: false
+ name: sep-update-endpoint-content
+ outputs:
+ - contextPath: SEPM.Update.Endpoint
+ description: The endpoint that is being updated.
+ type: String
+ - contextPath: SEPM.Update.CommandID
+ description: The ID of the command for which to check the status.
+ type: String
+ - arguments:
+ - default: false
+ description: The ID of the group to which to move the client.
+ isArray: false
+ name: groupID
+ required: true
+ secret: false
+ - default: false
+ description: The hardware key of the client to be moved.
+ isArray: false
+ name: hardwareKey
+ required: true
+ secret: false
+ deprecated: false
+ description: Moves a client to a group.
+ execution: false
+ name: sep-move-client-to-group
+ - arguments:
+ - default: false
+ description: Sets which columns will be displayed.
+ isArray: false
+ name: columns
+ required: false
+ secret: false
+ - default: false
+ description: Filters by the host name of the computer. A wild card search can be done using '*' at the end of the query.
+ isArray: false
+ name: computerName
+ required: false
+ secret: false
+ - default: false
+ description: Indicates when a computer's status was last updated. The default is "0", which returns all results.
+ isArray: false
+ name: lastUpdate
+ required: false
+ secret: false
+ - default: false
+ description: The operating system by which to filter.
+ isArray: false
+ name: os
+ required: false
+ secret: false
+ - default: false
+ description: The number of results to include on each page. The default is 20.
+ isArray: false
+ name: pageSize
+ required: false
+ secret: false
+ - default: false
+    description: The name of the group to which the endpoint belongs. A wild card search can be done using '*' at the end of the query.
+ isArray: false
+ name: groupName
+ required: false
+ secret: false
+ - default: false
+    description: The desired version to compare against each endpoint's running version.
+ isArray: false
+ name: desiredVersion
+ required: false
+ secret: false
+ deprecated: false
+    description: Get endpoints whose running version differs from the target version, or from the desired version (if specified).
+ execution: false
+ name: sep-identify-old-clients
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- SymantecEndpointProtection_Test
diff --git a/Integrations/SymantecEndpointProtection_V2/SymantecEndpointProtection_V2_description.md b/Integrations/SymantecEndpointProtection_V2/SymantecEndpointProtection_V2_description.md
new file mode 100644
index 000000000000..559c4ff03088
--- /dev/null
+++ b/Integrations/SymantecEndpointProtection_V2/SymantecEndpointProtection_V2_description.md
@@ -0,0 +1 @@
+Integration with Symantec Endpoint Protection Manager using the SEPM REST API.
\ No newline at end of file
diff --git a/Integrations/SymantecEndpointProtection_V2/SymantecEndpointProtection_V2_image.png b/Integrations/SymantecEndpointProtection_V2/SymantecEndpointProtection_V2_image.png
new file mode 100644
index 000000000000..832fd3871110
Binary files /dev/null and b/Integrations/SymantecEndpointProtection_V2/SymantecEndpointProtection_V2_image.png differ
diff --git a/Integrations/SymantecEndpointProtection_V2/test-data/SEPM-endpoint-api-response.json b/Integrations/SymantecEndpointProtection_V2/test-data/SEPM-endpoint-api-response.json
new file mode 100644
index 000000000000..ef8556a39af1
--- /dev/null
+++ b/Integrations/SymantecEndpointProtection_V2/test-data/SEPM-endpoint-api-response.json
@@ -0,0 +1 @@
+[{"subnetMasks": ["255.255.255.0", "64"], "profileVersion": "14.2.1015", "elamOnOff": 2, "minorVersion": 2, "avEngineOnOff": 1, "profileChecksum": null, "atpDeviceId": "51497511-5eb0-4e99-9eb5-3cf0aca4090d", "lastSiteName": "My Site", "cidsEngineVersion": "192.168.1.2", "lastDeploymentTime": 1549984114000, "processorType": "Intel64 Family 6 Model 85 Stepping 4", "oslanguage": "en-US", "licenseId": null, "isGrace": 0, "computerUsn": 9148520, "licenseStatus": -1, "agentId": "A6CB7C5FC0A8010B4AB57333E54BD9AF", "svaId": null, "cidsBrowserFfOnOff": 1, "group": {"domain": {"id": "543F1106C0A8010B6191BA7318DA525A", "name": "Default"}, "name": "My Company", "fullPathName": null, "externalReferenceId": null, "source": null, "id": "44BE96AFC0A8010B0CFACB30929326C2"}, "uuid": "9ED54D56-EA78-1D5C-7ED9-D9B70804F4B0", "groupUpdateProvider": false, "contentUpdate": 1, "loginDomain": "LocalComputer", "osBitness": "x64", "apOnOff": 1, "lastServerName": "WIN-L3E3H1DQNKA", "creationTime": 1547047523483, "edrStatus": 2, "freeDisk": 5720444928, "writeFiltersStatus": null, "infected": 0, "osservicePack": "Service Pack 1", "isNpvdiClient": 0, "diskDrive": "C:\\", "freeMem": 2734891008, "officePhone": "", "osFunction": "Workstation", "majorVersion": 14, "mobilePhone": "", "idsVersion": "", "jobTitle": "", "memory": 4294434816, "lastHeuristicThreatTime": 0, "osname": "Windows 7", "timeZone": -120, "winServers": ["0.0.0.0", "0.0.0.0"], "publicKey": "BgIAAACkAABSU0ExAAgAAAEAAQAFgOTyFDMyU8M0a5QFjrzAhV/rYsI9lUWBBN7kzMHJ8DsxrJnIebYy3Rc73j0kL3MdHYn8yk27qTuE4OAaX9cHzph06KHnWqkGsIFja8hcFaGr+00g0MufBUkeLc8BZj6T3mR8lkLSqvQ/L4R5Rq3FXO5uShWaBoIuQNcZsNG6/ghU/O0ZTaYxj94uNWyOBticnVHll/w2xIZavyV58rPQNdNnP9Pp5iNgMApm//J/8m/0cNmHc6RCu2CiBcvdBI2MDzRZYED+Zys/2QfYrIggSt0FKimWa0fGoHZbEhHGHgzZh6D0OTbtICgHvjbNU/S8+hiw7UWvGI0tq/HD2+G5", "deploymentMessage": "Cannot deploy. 
Client version is the same or later than the specified package 14.2.1015.0100.", "idsSerialNo": "", "employeeNumber": "", "snacLicenseId": null, "lastSiteId": "C8DEF65CC0A8010B35124BAB1EE78C25", "tamperOnOff": 1, "uwf": 2, "currentClientId": "F284AFF1C0A8010B4AB5733324B9A01B", "processorClock": 2194, "rebootReason": "", "telemetryHwid": "AA8C7C4D-F865-60D0-38EB-0A1B3DCB5F7E", "osbitness": "x64", "cidsSilentMode": 0, "lastScanTime": 1566437506000, "dnsServers": ["8.8.4.4", "8.8.8.8"], "macAddresses": ["00-50-56-B1-A9-3F"], "securityVirtualAppliance": null, "worstInfectionIdx": "0", "atpServer": "https://192.168.1.115:443", "encryptedDevicePassword": "VqvppKYtNJHyarJawyhJ8N8zuHbVHtQx3thu8YHHiYOu0wW7c4s8rUpr4EvRKz3lpAynqXP69swiD+qt+Fo6hwA8UNmd6aN9uY+LmbJGKiRbeI4UHhGwzP/LifuxaTxVp/ECXzY3wFS5vB/rFdWsIChxAm/tkTfLPv0vh+FsvKL/pzkRqbBEhntR0Sc06sfqyYh6zWUWdTzTcdE7YMGypTU6O2cwqwC0GStkzF+wvsBO9CdOABXcz1S38KemItMvqaGX0M4fbU2DGDI8yC9mpTwwxSevJpycBIpyQrKLV8pVoUC28fnOnW7OU9cQKFg8b95271djzmd4jsJIqOT5Vg==", "profileSerialNo": "44BE-07/07/2019 12:45:26 936", "idsChecksum": null, "osminor": 1, "lastServerId": "7ECF5F14C0A8010B0B7649E127857ADD", "kernel": null, "osmajor": 6, "serialNumber": "VMware-56 4d d5 9e 78 ea 5c 1d-7e d9 d9 b7 08 04 f4 b0", "virtualizationPlatform": "Unknown", "ipAddresses": ["192.168.1.12", "FE80:0000:0000:0000:0202:B3FF:FE1E:8329"], "lastUpdateTime": 1566465422724, "lastVirusTime": 1563266990000, "lastConnectedIpAddr": "FE80:0000:0000:0000:0202:B3FF:FE1E:8329", "biosVersion": "INTEL - 6040000 PhoenixBIOS 4.0 Release 6.0", "cidsDefsetVersion": "190109062", "agentVersion": "14.2.1015.0100", "deploymentRunningVersion": "14.2.1015.0100", "cidsDrvOnOff": 1, "agentTimeStamp": 1566465422724, "telemetryMid": "09AFFF7F-5729-4D26-A18B-3FCB95434B39", "osfunction": "Workstation", "osMajor": 6, "logonUserName": "demisto", "deploymentTargetVersion": "14.2.1015.0100", "osMinor": 1, "osFlavorNumber": 4, "logicalCpus": 0, "deploymentPreVersion": "", "osLanguage": "en-US", "hypervisorVendorId": "0", "gateways": ["192.168.1.150", "192.168.1.150", "0.0.0.0", "0.0.0.0"], "uniqueId": "7021067EC0A8010B4AB573338AEE1F99", "fbwf": 2, "department": "", "osversion": "6.1", "email": "", "dhcpServer": "192.168.1.2", "patternIdx": "D9A3D520EC8AD4CC0716C8676DC2CCBB", "vsicStatus": 3, "onlineStatus": 1, "description": "", "osflavorNumber": 4, "deleted": 0, "tpmDevice": "0", "deploymentStatus": "302449921", "physicalCpus": 2, "cidsBrowserIeOnOff": 1, "lastDownloadTime": 1547105451318, "computerTimeStamp": 1566405340383, "bwf": 2, "totalDiskSpace": 71576, "domainOrWorkgroup": "WORKGROUP", "fullName": "", "osVersion": "6.1", "cidsDrvMulfCode": 0, "attributeExtension": "", "homePhone": "", "licenseExpiry": 0, "osElamStatus": 0, "daOnOff": 1, "computerDescription": "", "pepOnOff": 1, "osServicePack": "Service Pack 1", "bashStatus": 1, "agentUsn": 9159272, "osName": "Windows 7", "operatingSystem": "Windows 7 Enterprise Edition", "agentType": "105", "installType": "0", "computerName": "demisto-PC", "ptpOnOff": 1, "hardwareKey": "065A82DBECE6B2B3184CBC9911AC13D1", "firewallOnOff": 1, "employeeStatus": "", "tmpDevice": null, "rebootRequired": 0, "quarantineDesc": ""}, {"subnetMasks": ["255.255.255.0"], "profileVersion": "14.2.1015", "elamOnOff": 1, "minorVersion": 2, "avEngineOnOff": 1, "profileChecksum": null, "atpDeviceId": "44f75348-6020-4686-9f9e-443406428155", "lastSiteName": "My Site", "cidsEngineVersion": "192.168.1.2", "lastDeploymentTime": 1543394705000, "processorType": "Intel64 Family 6 Model 85 Stepping 4", 
"oslanguage": "en-US", "licenseId": null, "isGrace": 0, "computerUsn": 9010164, "licenseStatus": -1, "agentId": "9434EA3DC0A8010B2E83CCAE42F0443A", "svaId": null, "cidsBrowserFfOnOff": 1, "group": {"domain": {"id": "543F1106C0A8010B6191BA7318DA525A", "name": "Default"}, "name": "My Company\\Default Group", "fullPathName": null, "externalReferenceId": null, "source": null, "id": "AA51516BC0A8010B3BFBBE37F7B71214"}, "uuid": "0E084D56-31F9-8D9F-8430-C3FD2C7CC5AE", "groupUpdateProvider": false, "contentUpdate": 1, "loginDomain": "LocalComputer", "osBitness": "x64", "apOnOff": 1, "lastServerName": "WIN-L3E3H1DQNKA", "creationTime": 1543394550136, "edrStatus": 2, "freeDisk": 12017520640, "writeFiltersStatus": null, "infected": 0, "osservicePack": "", "isNpvdiClient": 0, "diskDrive": "C:\\", "freeMem": 1397698560, "officePhone": "", "osFunction": "Workstation", "majorVersion": 14, "mobilePhone": "", "idsVersion": "", "jobTitle": "", "memory": 4294430720, "lastHeuristicThreatTime": 0, "osname": "Windows 10", "timeZone": 300, "winServers": ["0.0.0.0", "0.0.0.0"], "publicKey": "BgIAAACkAABSU0ExAAgAAAEAAQCdAE3ew1rgRgLXveWW7ldJAhYszI4xsQgv8OVvC6EZ0KNpFX1AMoD85n0ASLQW7sedtQNEbANdZchvME/jesEK0/kRsajeqdtgNjiq3rMwaENFaeqFbtxc5IfYfrws2HceWeM/CycQjn1lDhrrXlvKsnj1AyaOdKpkMimWHUhpItUKvnYK/NvsxrKKbagldHRVrOQdjdXFiQjjLnzXHD2dmUDtF83opUpahf7q8GRhyTtQDgz1WfW+DckqFlgBXqKYgBqDYCxhegGgGS32Abb/dZMi3V9JssYv/fTcjt0zCVTDpZFjYqHpI2etxsdHonlPmbT+kASOLgE0Vz4UoorS", "deploymentMessage": "", "idsSerialNo": "", "employeeNumber": "", "snacLicenseId": null, "lastSiteId": "C8DEF65CC0A8010B35124BAB1EE78C25", "tamperOnOff": 1, "uwf": 2, "currentClientId": "5F0F0AF4C0A8010B2E83CCAECDE51390", "processorClock": 2195, "rebootReason": "", "telemetryHwid": "09545F5D-4754-04EA-73DA-A1FEA7DFC17A", "osbitness": "x64", "cidsSilentMode": 0, "lastScanTime": 1564378381000, "dnsServers": ["8.8.8.8", "FE80:0000:0000:0000:0202:B3FF:FE1E:8329"], "macAddresses": ["00-0C-29-7C-C5-AE"], "securityVirtualAppliance": null, "worstInfectionIdx": "0", "atpServer": "https://192.168.1.115:443", "encryptedDevicePassword": "1EwsKCCJlNzNOO3tz39haMN1kekU3OzOeDfpcLptc+R70TtZbXEf6KDK9/THFXdN8DT24EeEKVDV1gYwlp1+UHfiY+yBTm7T1O6n7EO+zSIm0QtTIpnPjtMLyH2gib6v37IJ14LzUhlvqYDEWFF9B5EU6cbcdJ1/ZIcwP5+XZ6++ob3wLPTtPO+V/3U26g2IPn68vt4/AwpH1zFmVuRapFr7t27L5VmXAcADvBu+iXcqRAdTlHVuyXa7xQwiP6pq3dKLcmDjW2Vbo5D7mjeiJ2idIrdtwnM64a/QEmtXgEfLSgLProgG3etZu8HD9IkpGhqnz7NBPNhPbJWFJ5fdhw==", "profileSerialNo": "AA51-07/07/2019 12:45:26 936", "idsChecksum": null, "osminor": 0, "lastServerId": "7ECF5F14C0A8010B0B7649E127857ADD", "kernel": null, "osmajor": 10, "serialNumber": "VMware-56 4d 08 0e f9 31 9f 8d-84 30 c3 fd 2c 7c c5 ae", "virtualizationPlatform": "Unknown", "ipAddresses": ["192.168.1.125"], "lastUpdateTime": 1565755430846, "lastVirusTime": 1564410878000, "lastConnectedIpAddr": "192.168.1.125", "biosVersion": "INTEL - 6040000 PhoenixBIOS 4.0 Release 6.0", "cidsDefsetVersion": "181207061", "agentVersion": "14.2.1015.0100", "deploymentRunningVersion": "14.2.1015.0100", "cidsDrvOnOff": 1, "agentTimeStamp": 1565755963963, "telemetryMid": "71FC380B-E4E4-4477-84FF-FE00125893DD", "osfunction": "Workstation", "osMajor": 10, "logonUserName": "demisto", "deploymentTargetVersion": "14.2.1015.0100", "osMinor": 0, "osFlavorNumber": 48, "logicalCpus": 0, "deploymentPreVersion": "", "osLanguage": "en-US", "hypervisorVendorId": "0", "gateways": ["192.168.1.150", "0.0.0.0", "0.0.0.0", "0.0.0.0"], "uniqueId": "CBB2D51DC0A8010B2E83CCAE1C8C9F6F", "fbwf": 2, "department": "", "osversion": "10.0", 
"email": "", "dhcpServer": "192.168.1.2", "patternIdx": "2A940B80883CBEFEA47F071D7807DF9E", "vsicStatus": 3, "onlineStatus": 0, "description": "", "osflavorNumber": 48, "deleted": 0, "tpmDevice": "0", "deploymentStatus": "302456832", "physicalCpus": 2, "cidsBrowserIeOnOff": 1, "lastDownloadTime": 1564411734506, "computerTimeStamp": 1565681188739, "bwf": 2, "totalDiskSpace": 32216, "domainOrWorkgroup": "WORKGROUP", "fullName": "", "osVersion": "10.0", "cidsDrvMulfCode": 0, "attributeExtension": "", "homePhone": "", "licenseExpiry": 0, "osElamStatus": 0, "daOnOff": 0, "computerDescription": "", "pepOnOff": 1, "osServicePack": "", "bashStatus": 1, "agentUsn": 9029273, "osName": "Windows 10", "operatingSystem": "Windows 10 Professional Edition", "agentType": "105", "installType": "0", "computerName": "DESKTOP-TF35B9B", "ptpOnOff": 1, "hardwareKey": "269CE816FDB1BA25A2505D0A5A59294C", "firewallOnOff": 1, "employeeStatus": "", "tmpDevice": null, "rebootRequired": 0, "quarantineDesc": "Host Integrity check pending. No connection possible."}]
\ No newline at end of file
diff --git a/Integrations/SymantecEndpointProtection_V2/test-data/SEPM-expected-endpoint-extract.json b/Integrations/SymantecEndpointProtection_V2/test-data/SEPM-expected-endpoint-extract.json
new file mode 100644
index 000000000000..aa7c1a2a7c40
--- /dev/null
+++ b/Integrations/SymantecEndpointProtection_V2/test-data/SEPM-expected-endpoint-extract.json
@@ -0,0 +1 @@
+[{"BIOSVersion": "INTEL - 6040000 PhoenixBIOS 4.0 Release 6.0", "MACAddress": "00-50-56-B1-A9-3F", "Domain": "WORKGROUP", "OSVersion": "6.1", "Processors": "Intel64 Family 6 Model 85 Stepping 4", "Memory": 4294434816, "OS": "Windows 7 Enterprise Edition", "Hostname": "demisto-PC", "IPAddress": "192.168.1.12", "DHCPServer": "192.168.1.2"}, {"BIOSVersion": "INTEL - 6040000 PhoenixBIOS 4.0 Release 6.0", "MACAddress": "00-0C-29-7C-C5-AE", "Domain": "WORKGROUP", "OSVersion": "10.0", "Processors": "Intel64 Family 6 Model 85 Stepping 4", "Memory": 4294430720, "OS": "Windows 10 Professional Edition", "Hostname": "DESKTOP-TF35B9B", "IPAddress": "192.168.1.125", "DHCPServer": "192.168.1.2"}]
\ No newline at end of file
diff --git a/Integrations/SymantecManagementCenter/SymantecManagementCenter.py b/Integrations/SymantecManagementCenter/SymantecManagementCenter.py
new file mode 100644
index 000000000000..423d36e66db5
--- /dev/null
+++ b/Integrations/SymantecManagementCenter/SymantecManagementCenter.py
@@ -0,0 +1,989 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import json
+import requests
+from distutils.util import strtobool
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+USERNAME = demisto.params().get('credentials').get('identifier')
+PASSWORD = demisto.params().get('credentials').get('password')
+SERVER = (demisto.params()['url'][:-1]
+ if (demisto.params()['url'] and demisto.params()['url'].endswith('/')) else demisto.params()['url'])
+BASE_URL = SERVER + '/api/'
+USE_SSL = not demisto.params().get('insecure', False)
+HEADERS = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+}
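+# Policy content types supported by the Symantec MC policies API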
+URL_LIST_TYPE = 'URL_LIST'
+IP_LIST_TYPE = 'IP_LIST'
+CATEGORY_LIST_TYPE = 'CATEGORY_LIST'
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, path, params=None, data=None):
+ """
+ Sends an HTTP request using the provided arguments
+ :param method: HTTP method
+ :param path: URL path
+ :param params: URL query params
+ :param data: Request body
+ :return: JSON response (or the response itself if not serializable)
+ """
+ params = params if params is not None else {}
+ data = data if data is not None else {}
+ res = None
+
+ try:
+ res = requests.request(
+ method,
+ BASE_URL + path,
+ auth=(USERNAME, PASSWORD),
+ verify=USE_SSL,
+ params=params,
+ data=json.dumps(data, sort_keys=True),
+ headers=HEADERS)
+ except (requests.exceptions.ConnectionError, requests.exceptions.Timeout,
+ requests.exceptions.TooManyRedirects, requests.exceptions.RequestException) as e:
+ return return_error('Could not connect to Symantec MC: {}'.format(str(e)))
+
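+    # Treat any status outside the 2xx range as an error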
+    if res.status_code < 200 or res.status_code >= 300:
+ status = res.status_code
+ message = res.reason
+ details = ''
+ if res.status_code == 500:
+ details = details + '\nA possible cause is that multiple requests were made simultaneously.'
+ try:
+ error_json = res.json()
+ message = error_json.get('statusMessage')
+ details = error_json.get('message')
+ except Exception:
+ pass
+ return_error('Error in API call to Symantec MC, status code: {}, reason: {}, details: {}'
+ .format(status, message, details))
+
+ try:
+ return res.json()
+ except Exception:
+ if res.status_code == 204:
+ return res
+ return_error('Failed parsing the response from Symantec MC API: {}'.format(res.content))
+
+
+def verify_policy_content(content_type, ips, categories, urls):
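+    """
+    Verify that the provided content matches the policy content type
+    and that only a single content type was supplied
+    """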
+ if ((content_type == IP_LIST_TYPE and not ips)
+ or (content_type == URL_LIST_TYPE and not urls)
+ or (content_type == CATEGORY_LIST_TYPE and not categories)):
+ return_error('Incorrect content provided for the type {}'.format(content_type))
+ if ((content_type == IP_LIST_TYPE and (urls or categories))
+ or (content_type == URL_LIST_TYPE and (ips or categories))
+ or (content_type == CATEGORY_LIST_TYPE and (ips or urls))):
+ return_error('More than one content type was provided for the type {}'.format(content_type))
+
+
+def get_policy_uuid(uuid, name):
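+    """
+    Resolve a policy UUID from the provided UUID or policy name
+    :param uuid: Policy UUID, if already known
+    :param name: Policy name to look up when no UUID is provided
+    :return: The policy UUID
+    """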
+ if not uuid:
+ if not name:
+ return_error('Either a policy UUID or name must be provided')
+ name_query = 'EQ ' + name
+ policy = list_policies_request(name=name_query)
+        if not policy:
+ return_error('Policy not found')
+ if len(policy) > 1:
+ return_error('Found more than one policy for the provided name')
+ uuid = policy[0].get('uuid')
+ if not uuid:
+ return_error('Policy not found')
+
+ return uuid
+
+
+''' FUNCTIONS '''
+
+
+def test_module():
+ """
+ Performs basic get request to get system info
+ """
+ http_request('GET', 'system/info')
+ demisto.results('ok')
+
+
+def list_devices_command():
+ """
+ List devices in Symantec MC using provided query filters
+ """
+
+ contents = []
+ context = {}
+ build = demisto.args().get('build')
+ description = demisto.args().get('description')
+ model = demisto.args().get('model')
+ name = demisto.args().get('name')
+ os_version = demisto.args().get('os_version')
+ platform = demisto.args().get('platform')
+ device_type = demisto.args().get('type')
+ limit = int(demisto.args().get('limit', 10))
+
+ devices = list_devices_request(build, description, model, name, os_version, platform, device_type)
+
+ if devices:
+ if limit:
+ devices = devices[:limit]
+
+ for device in devices:
+ contents.append({
+ 'UUID': device.get('uuid'),
+ 'Name': device.get('name'),
+ 'LastChanged': device.get('lastChanged'),
+ 'Host': device.get('host'),
+ 'Type': device.get('type')
+ })
+ headers = ['UUID', 'Name', 'LastChanged', 'Host', 'Type']
+ human_readable = tableToMarkdown('Symantec Management Center Devices', contents,
+ removeNull=True, headers=headers, headerTransform=pascalToSpace)
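+        # The val.UUID === obj.UUID syntax deduplicates context entries by UUID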
+ context['SymantecMC.Device(val.UUID && val.UUID === obj.UUID)'] = createContext(contents, removeNull=True)
+ else:
+ human_readable = 'No devices found'
+
+ return_outputs(human_readable, context, devices)
+
+
+def list_devices_request(build, description, model, name, os_version, platform, device_type):
+ """
+ Get devices from Symantec MC
+ :param build: Device build number query
+ :param description: Device description query
+ :param model: Device model query
+ :param name: Device name query
+ :param os_version: Device OS version query
+ :param platform: Device platform query
+ :param device_type: Device type
+ :return: List of Symantec MC devices
+ """
+
+ path = 'devices'
+ params = {}
+
+ if build:
+ params['build'] = build
+ if description:
+ params['description'] = description
+ if model:
+ params['model'] = model
+ if name:
+ params['name'] = name
+ if os_version:
+ params['osVersion'] = os_version
+ if platform:
+ params['platform'] = platform
+ if device_type:
+ params['type'] = device_type
+
+ response = http_request('GET', path, params)
+ return response
+
+
+def get_device_command():
+ """
+ Command to get information for a specified device
+ :return: An entry with the device data
+ """
+ uuid = demisto.args()['uuid']
+ content: dict = {}
+ context: dict = {}
+
+ device = get_device_request(uuid)
+ if device:
+ content = {
+ 'UUID': device.get('uuid'),
+ 'Name': device.get('name'),
+ 'LastChanged': device.get('lastChanged'),
+ 'LastChangedBy': device.get('lastChangedBy'),
+ 'Description': device.get('description'),
+ 'Model': device.get('model'),
+ 'Platform': device.get('platform'),
+ 'Type': device.get('type'),
+ 'OSVersion': device.get('osVersion'),
+ 'Build': device.get('build'),
+ 'SerialNumber': device.get('serialNumber'),
+ 'Host': device.get('host'),
+ 'ManagementStatus': device.get('managementStatus'),
+ 'DeploymentStatus': device.get('deploymentStatus')
+ }
+
+ context['SymantecMC.Device(val.UUID && val.UUID === obj.UUID)'] = createContext(content, removeNull=True)
+
+ headers = ['UUID', 'Name', 'LastChanged', 'LastChangedBy', 'Description',
+ 'Model', 'Platform', 'Host', 'Type', 'OSVersion', 'Build', 'SerialNumber',
+ 'ManagementStatus', 'DeploymentStatus']
+
+ return_outputs(tableToMarkdown('Symantec Management Center Device', content,
+ removeNull=True, headers=headers, headerTransform=pascalToSpace), context, device)
+
+
+def get_device_request(uuid):
+ """
+ Return data for a specified device
+ :param uuid: The device UUID
+ :return: The device data
+ """
+ path = 'devices/' + uuid
+
+ response = http_request('GET', path)
+
+ return response
+
+
+def get_device_health_command():
+ """
+ Command to get health information for a specified device
+ :return: An entry with the device data and health
+ """
+ uuid = demisto.args()['uuid']
+ health_content = []
+ human_readable = ''
+ context = {}
+
+ device_health = get_device_health_request(uuid)
+ if device_health:
+ device_content = {
+ 'UUID': device_health.get('uuid'),
+ 'Name': device_health.get('name')
+ }
+
+ if device_health.get('health'):
+ if not isinstance(device_health['health'], list):
+ device_health['health'] = [device_health['health']]
+ for health in device_health['health']:
+ health_content.append({
+ 'Category': health.get('category'),
+ 'Name': health.get('name'),
+ 'State': health.get('state'),
+ 'Message': health.get('message'),
+ 'Status': health.get('status')
+ })
+
+ device_headers = ['UUID', 'Name']
+ content = device_content
+ human_readable = tableToMarkdown('Symantec Management Center Device', device_content,
+ removeNull=True, headers=device_headers, headerTransform=pascalToSpace)
+ if health_content:
+ health_headers = ['Category', 'Name', 'State', 'Message', 'Status']
+ human_readable += tableToMarkdown('Device Health', health_content,
+ removeNull=True, headers=health_headers, headerTransform=pascalToSpace)
+ content['Health'] = health_content
+ else:
+ human_readable += '\n\nNo health details found for this device\n'
+
+ context['SymantecMC.Device(val.UUID && val.UUID === obj.UUID)'] = createContext(content, removeNull=True)
+
+ return_outputs(human_readable, context, device_health)
+
+
+def get_device_health_request(uuid):
+ """
+ Return health for a specified device
+ :param uuid: The device UUID
+ :return: The device health data
+ """
+ path = 'devices/' + uuid + '/health'
+
+ response = http_request('GET', path)
+
+ return response
+
+
+def get_device_license_command():
+ """
+ Command to get license information for a specified device
+ :return: An entry with the device data and license information
+ """
+ uuid = demisto.args()['uuid']
+ license_content = []
+ human_readable = ''
+ context = {}
+
+ device_license = get_device_license_request(uuid)
+ if device_license:
+
+ if not isinstance(device_license['components'], list):
+ device_license['components'] = [device_license['components']]
+
+ device_content = {
+ 'UUID': device_license.get('uuid'),
+ 'Name': device_license.get('name'),
+ 'Type': device_license.get('deviceType'),
+ 'LicenseStatus': device_license.get('licenseStatus')
+ }
+
+ for component in device_license['components']:
+ license_content.append({
+ 'Name': component.get('componentName'),
+ 'ActivationDate': component.get('activationDate'),
+ 'ExpirationDate': component.get('expirationDate'),
+ 'Validity': component.get('validity')
+ })
+
+ device_headers = ['UUID', 'Name', 'Type', 'LicenseStatus']
+ content = device_content
+ human_readable = tableToMarkdown('Symantec Management Center Device', device_content,
+ removeNull=True, headers=device_headers, headerTransform=pascalToSpace)
+ if license_content:
+ license_headers = ['Name', 'ActivationDate', 'ExpirationDate', 'Validity']
+ human_readable += tableToMarkdown('License Components', license_content,
+ removeNull=True, headers=license_headers, headerTransform=pascalToSpace)
+ content['LicenseComponent'] = license_content
+
+ context['SymantecMC.Device(val.UUID && val.UUID === obj.UUID)'] = createContext(content, removeNull=True)
+
+ return_outputs(human_readable, context, device_license)
+
+
+def get_device_license_request(uuid):
+ """
+ Return the license for a specified device
+ :param uuid: The device UUID
+ :return: The device license data
+ """
+ path = 'devices/' + uuid + '/license'
+
+ response = http_request('GET', path)
+
+ return response
+
+
+def get_device_status_command():
+ """
+ Command to get the status for a specified device
+ :return: An entry with the device status data
+ """
+ uuid = demisto.args()['uuid']
+ content: dict = {}
+ context: dict = {}
+
+ device = get_device_status_request(uuid)
+ if device:
+ content = {
+ 'UUID': device.get('uuid'),
+ 'Name': device.get('name'),
+ 'CheckDate': device.get('checkDate'),
+ 'StartDate': device.get('startDate'),
+ 'MonitorState': device.get('monitorState'),
+ 'Warnings': len(device.get('warnings', [])),
+ 'Errors': len(device.get('errors', []))
+ }
+
+ context['SymantecMC.Device(val.UUID && val.UUID === obj.UUID)'] = createContext(content, removeNull=True)
+
+ headers = ['UUID', 'Name', 'CheckDate', 'StartDate', 'MonitorState', 'Warnings', 'Errors']
+
+ return_outputs(tableToMarkdown('Symantec Management Center Device Status', content,
+ removeNull=True, headers=headers, headerTransform=pascalToSpace), context, device)
+
+
+def get_device_status_request(uuid):
+ """
+ Return data for a specified device status
+ :param uuid: The device UUID
+ :return: The device status data
+ """
+ path = 'devices/' + uuid + '/status'
+
+ response = http_request('GET', path)
+
+ return response
+
+
+def list_policies_command():
+ """
+ List policies in Symantec MC using provided query filters
+ """
+
+ contents = []
+ context = {}
+ content_type = demisto.args().get('content_type')
+ description = demisto.args().get('description')
+ name = demisto.args().get('name')
+ reference_id = demisto.args().get('reference_id')
+ shared = demisto.args().get('shared')
+ tenant = demisto.args().get('tenant')
+ limit = int(demisto.args().get('limit', 10))
+
+ policies = list_policies_request(content_type, description, name, reference_id, shared, tenant)
+
+ if policies:
+ if limit:
+ policies = policies[:limit]
+
+ for policy in policies:
+ contents.append({
+ 'UUID': policy.get('uuid'),
+ 'Name': policy.get('name'),
+ 'ContentType': policy.get('contentType'),
+ 'Author': policy.get('author'),
+ 'Shared': policy.get('shared'),
+ 'ReferenceID': policy.get('referenceId'),
+ 'Tenant': policy.get('tenant'),
+ 'ReplaceVariables': policy.get('replaceVariables')
+ })
+ headers = ['UUID', 'Name', 'ContentType', 'Author', 'Shared', 'ReferenceID', 'Tenant', 'ReplaceVariables']
+ human_readable = tableToMarkdown('Symantec Management Center Policies', contents,
+ removeNull=True, headers=headers, headerTransform=pascalToSpace)
+ context['SymantecMC.Policy(val.UUID && val.UUID === obj.UUID)'] = createContext(contents, removeNull=True)
+ else:
+ human_readable = 'No policies found'
+
+ return_outputs(human_readable, context, policies)
+
+
+def list_policies_request(content_type=None, description=None, name=None, reference_id=None, shared=None, tenant=None):
+ """
+ Get policies in Symantec MC
+ :param content_type: Policy content type query
+ :param description: Policy description query
+ :param name: Policy name query
+ :param reference_id: Policy reference ID query
+ :param shared: Policy shared query
+ :param tenant: Policy tenant query
+ :return: List of policies in Symantec MC
+ """
+ path = 'policies'
+ params = {}
+
+ if content_type:
+ params['contentType'] = content_type
+ if description:
+ params['description'] = description
+ if name:
+ params['name'] = name
+ if reference_id:
+ params['referenceId'] = reference_id
+ if shared:
+ params['shared'] = shared
+ if tenant:
+ params['tenant'] = tenant
+
+ response = http_request('GET', path, params)
+ return response
+
+
+def get_policy_command():
+ """
+    Command to get information for a specified policy, including its contents
+ :return: An entry with the policy data
+ """
+ uuid = demisto.args().get('uuid')
+ name = demisto.args().get('name')
+ policy_content_data: dict = {}
+ revision_content: dict = {}
+ policy_content_content = []
+ content_title = ''
+ human_readable = ''
+ content_headers: list = []
+ content_key = ''
+ context: dict = {}
+
+ if not name and not uuid:
+ return_error('Either a UUID or a name must be provided')
+
+ uuid = get_policy_uuid(uuid, name)
+
+ policy = get_policy_request(uuid)
+ if policy:
+ policy_content = {
+ 'UUID': policy.get('uuid'),
+ 'Name': policy.get('name'),
+ 'Description': policy.get('description'),
+ 'ContentType': policy.get('contentType'),
+ 'ReferenceID': policy.get('referenceId')
+ }
+ policy_content_data = get_policy_content_request(uuid)
+
+ if policy_content_data and policy_content_data.get('revisionInfo'):
+ policy_content['SchemaVersion'] = policy_content_data.get('schemaVersion')
+ revision_content = {
+ 'Number': policy_content_data['revisionInfo'].get('revisionNumber'),
+ 'Description': policy_content_data['revisionInfo'].get('revisionDescription'),
+ 'Author': policy_content_data['revisionInfo'].get('author'),
+ 'Date': policy_content_data['revisionInfo'].get('revisionDate')
+ }
+
+ if policy.get('contentType') == URL_LIST_TYPE:
+ content_title = 'URL List'
+ content_headers = ['Address', 'Description', 'Enabled']
+ content_key = 'URL'
+ urls = policy_content_data.get('content', {}).get('urls', [])
+ for url in urls:
+ policy_content_content.append({
+ 'Address': url.get('url'),
+ 'Description': url.get('description'),
+ 'Enabled': url.get('enabled')
+ })
+ elif policy.get('contentType') == IP_LIST_TYPE:
+ content_title = 'IP List'
+ content_headers = ['Address', 'Description', 'Enabled']
+ content_key = 'IP'
+ ips = policy_content_data.get('content', {}).get('ipAddresses', [])
+ for ip in ips:
+ policy_content_content.append({
+ 'Address': ip.get('ipAddress'),
+ 'Description': ip.get('description'),
+ 'Enabled': ip.get('enabled')
+ })
+ elif policy.get('contentType') == CATEGORY_LIST_TYPE:
+ content_title = 'Category List'
+ content_headers = ['Name']
+ content_key = 'Category'
+ categories = policy_content_data.get('content', {}).get('categories', [])
+ for category in categories:
+ policy_content_content.append({
+ 'Name': category.get('categoryName')
+ })
+
+ policy_headers = ['UUID', 'Name', 'SchemaVersion', 'ReferenceID', 'Description', 'ContentType']
+ human_readable = tableToMarkdown('Symantec Management Center Policy', policy_content,
+ removeNull=True, headers=policy_headers, headerTransform=pascalToSpace)
+ content = policy_content
+ if revision_content:
+ revision_headers = ['Number', 'Description', 'Author', 'Date']
+ content['RevisionInfo'] = revision_content
+ human_readable += tableToMarkdown('Revision Information', revision_content,
+ removeNull=True, headers=revision_headers, headerTransform=pascalToSpace)
+
+ if policy_content_content:
+ content[content_key] = policy_content_content
+ human_readable += tableToMarkdown(content_title, policy_content_content,
+ removeNull=True, headers=content_headers, headerTransform=pascalToSpace)
+
+ context['SymantecMC.Policy(val.UUID && val.UUID === obj.UUID)'] = createContext(content, removeNull=True)
+
+ policy.update(policy_content_data)
+ return_outputs(human_readable, context, policy)
+
+
+def get_policy_request(uuid):
+ """
+ Return data for a specified policy
+ :param uuid: The policy UUID
+ :return: The policy data
+ """
+ path = 'policies/' + uuid
+
+ response = http_request('GET', path)
+
+ return response
+
+
+def get_policy_content_request(uuid):
+ """
+ Return content data for a specified policy
+ :param uuid: The policy UUID
+ :return: The policy content data
+ """
+ path = 'policies/' + uuid + '/content'
+
+ response = http_request('GET', path)
+
+ return response
+
+
+def create_policy_command():
+ """
+ Command to create a new policy in Symantec MC
+ :return: An entry with the new policy data
+ """
+ name = demisto.args()['name']
+ content_type = demisto.args()['content_type']
+ description = demisto.args().get('description')
+ reference_id = demisto.args().get('reference_id')
+ tenant = demisto.args().get('tenant')
+ shared = demisto.args().get('shared')
+ replace_variables = demisto.args().get('replace_variables')
+
+ content: dict = {}
+ context: dict = {}
+
+ policy = create_policy_request(name, content_type, description, reference_id, tenant, shared, replace_variables)
+ if policy:
+ content = {
+ 'UUID': policy.get('uuid'),
+ 'Name': policy.get('name'),
+ 'ContentType': policy.get('contentType'),
+ 'Author': policy.get('author')
+ }
+
+ context['SymantecMC.Policy(val.UUID && val.UUID === obj.UUID)'] = createContext(content, removeNull=True)
+
+ headers = ['UUID', 'Name', 'ContentType', 'Author']
+
+ return_outputs(tableToMarkdown('Policy created successfully', content,
+ removeNull=True, headers=headers, headerTransform=pascalToSpace), context, policy)
+
+
+def create_policy_request(name, content_type, description, reference_id, tenant, shared, replace_variables):
+ """
+ Creates a policy in Symantec MC using the provided arguments
+ :param name: Policy name
+ :param content_type: Policy content type
+ :param description: Policy description
+ :param reference_id: Policy reference ID
+ :param tenant: Policy tenant
+ :param shared: Policy shared
+ :param replace_variables: Policy replace variables
+ :return: The created policy data
+ """
+ path = 'policies'
+
+ body = {
+ 'name': name,
+ 'contentType': content_type
+ }
+
+ if description:
+ body['description'] = description
+ if reference_id:
+ body['referenceId'] = reference_id
+ if tenant:
+ body['tenant'] = tenant
+ if shared:
+ body['shared'] = shared
+ if replace_variables:
+ body['replaceVariables'] = replace_variables
+
+ response = http_request('POST', path, data=body)
+ return response
+
+
+def update_policy_command():
+ """
+ Command to update an existing policy in Symantec MC
+ :return: An entry with the policy data
+ """
+ uuid = demisto.args()['uuid']
+ name = demisto.args().get('name')
+ description = demisto.args().get('description')
+ reference_id = demisto.args().get('reference_id')
+ replace_variables = demisto.args().get('replace_variables')
+
+ content: dict = {}
+ context: dict = {}
+
+ policy = update_policy_request(uuid, name, description, reference_id, replace_variables)
+ if policy:
+ content = {
+ 'UUID': policy.get('uuid'),
+ 'Name': policy.get('name'),
+ 'ContentType': policy.get('contentType'),
+ 'Author': policy.get('author')
+ }
+
+ context['SymantecMC.Policy(val.UUID && val.UUID === obj.UUID)'] = createContext(content, removeNull=True)
+
+ headers = ['UUID', 'Name', 'ContentType', 'Author']
+
+ return_outputs(tableToMarkdown('Policy updated successfully', content,
+ removeNull=True, headers=headers, headerTransform=pascalToSpace), context, policy)
+
+
+def update_policy_request(uuid, name, description, reference_id, replace_variables):
+ """
+ Updates a policy in Symantec MC using the provided arguments
+ :param uuid: Policy UUID
+ :param name: New policy name
+ :param description: New policy description
+ :param reference_id: New policy reference ID
+ :param replace_variables: New policy replace variables
+ :return: The updated policy data
+ """
+ path = 'policies/' + uuid
+
+ body = {}
+
+ if name:
+ body['name'] = name
+ if description:
+ body['description'] = description
+ if reference_id:
+ body['referenceId'] = reference_id
+ if replace_variables:
+ body['replaceVariables'] = replace_variables
+
+ response = http_request('PUT', path, data=body)
+ return response
+
+
+def delete_policy_command():
+ """
+ Command to delete an existing policy in Symantec MC
+ :return: An entry indicating whether the deletion was successful
+ """
+ uuid = demisto.args()['uuid']
+ force = demisto.args().get('force')
+
+ delete_policy_request(uuid, force)
+ return_outputs('Policy deleted successfully', {}, {})
+
+
+def delete_policy_request(uuid, force):
+ """
+ Deletes a policy in Symantec MC using the provided arguments
+ :param uuid: Policy UUID
+ :param force: Force policy delete
+ :return: The deletion response
+ """
+ path = 'policies/' + uuid
+
+ response = http_request('DELETE', path, data=force)
+ return response
+
+
+def add_policy_content_command():
+ """
+ Command to add content to an existing policy in Symantec MC
+ :return: An entry indicating whether the addition was successful
+ """
+ uuid = demisto.args().get('uuid')
+ name = demisto.args().get('name')
+ content_type = demisto.args()['content_type']
+ change_description = demisto.args()['change_description']
+ schema_version = demisto.args().get('schema_version')
+ ips = argToList(demisto.args().get('ip', []))
+ urls = argToList(demisto.args().get('url', []))
+ categories = argToList(demisto.args().get('category', []))
+ enabled = demisto.args().get('enabled')
+ description = demisto.args().get('description')
+
+ verify_policy_content(content_type, ips, categories, urls)
+
+ uuid = get_policy_uuid(uuid, name)
+
+ if content_type == IP_LIST_TYPE:
+ add_policy_content_request(uuid, content_type, change_description, schema_version,
+ ips=ips, enabled=enabled, description=description)
+ elif content_type == URL_LIST_TYPE:
+ add_policy_content_request(uuid, content_type, change_description, schema_version,
+ urls=urls, enabled=enabled, description=description)
+ elif content_type == CATEGORY_LIST_TYPE:
+ add_policy_content_request(uuid, content_type, change_description, schema_version,
+ categories=categories)
+
+ return_outputs('Successfully added content to the policy', {}, {})
+
+
+def add_policy_content_request(uuid, content_type, change_description, schema_version,
+ ips=None, urls=None, categories=None, enabled=None, description=''):
+ """
+ Add content to a specified policy using the provided arguments
+ :param uuid: Policy UUID
+ :param content_type: Policy content type
+ :param change_description: Policy update change description
+ :param schema_version: Policy schema version
+ :param ips: IPs to add to the content
+ :param urls: URLs to add to the content
+ :param categories: Category names to add to the content
+ :param enabled: Policy content enabled
+ :param description: Policy content description
+ :return: Content update response
+ """
+ path = 'policies/' + uuid + '/content'
+
+ body = {
+ 'contentType': content_type,
+ 'changeDescription': change_description
+ }
+
+ if schema_version:
+ body['schemaVersion'] = schema_version
+
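+    # The content endpoint replaces the entire policy content, so fetch the
+    # existing content and append the new entries before posting it back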
+ content = get_policy_content_request(uuid)
+ if not content or 'content' not in content:
+ return_error('Could not update policy content - failed retrieving the current content')
+
+ if ips:
+ if 'ipAddresses' not in content['content']:
+ content['content']['ipAddresses'] = []
+ content['content']['ipAddresses'] += [{
+ 'ipAddress': ip,
+ 'description': description,
+ 'enabled': bool(strtobool(enabled))
+ } for ip in ips]
+ elif urls:
+ if 'urls' not in content['content']:
+ content['content']['urls'] = []
+ content['content']['urls'] += [{
+ 'url': url,
+ 'description': description,
+ 'enabled': bool(strtobool(enabled))
+ } for url in urls]
+ elif categories:
+ if 'categories' not in content['content']:
+ content['content']['categories'] = []
+ content['content']['categories'] += [{
+ 'categoryName': category,
+ } for category in categories]
+
+ body['content'] = content['content']
+ response = http_request('POST', path, data=body)
+
+ return response
+
+
+def delete_policy_content_command():
+ """
+ Command to delete content from an existing policy in Symantec MC
+ :return: An entry indicating whether the deletion was successful
+ """
+ uuid = demisto.args().get('uuid')
+ name = demisto.args().get('name')
+ content_type = demisto.args()['content_type']
+ change_description = demisto.args()['change_description']
+ schema_version = demisto.args().get('schema_version')
+ ips = argToList(demisto.args().get('ip', []))
+ urls = argToList(demisto.args().get('url', []))
+ categories = argToList(demisto.args().get('category', []))
+
+ verify_policy_content(content_type, ips, categories, urls)
+
+ uuid = get_policy_uuid(uuid, name)
+
+ if content_type == IP_LIST_TYPE:
+ delete_policy_content_request(uuid, content_type, change_description, schema_version, ips=ips)
+ elif content_type == URL_LIST_TYPE:
+ delete_policy_content_request(uuid, content_type, change_description, schema_version, urls=urls)
+ elif content_type == CATEGORY_LIST_TYPE:
+ delete_policy_content_request(uuid, content_type, change_description, schema_version, categories=categories)
+
+ return_outputs('Successfully deleted content from the policy', {}, {})
+
+
+def delete_policy_content_request(uuid, content_type, change_description, schema_version,
+ ips=None, urls=None, categories=None):
+ """
+    Delete content from a specified policy using the provided arguments
+ :param uuid: Policy UUID
+ :param content_type: Policy content type
+ :param change_description: Policy update change description
+ :param schema_version: Policy schema version
+ :param ips: IPs to delete from the content
+ :param urls: URLs to delete from the content
+ :param categories: Category names to delete from the content
+ :return: Content update response
+ """
+ path = 'policies/' + uuid + '/content'
+
+ body = {
+ 'contentType': content_type,
+ 'changeDescription': change_description
+ }
+
+ if schema_version:
+ body['schemaVersion'] = schema_version
+
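+    # The content endpoint replaces the entire policy content, so fetch the
+    # existing content and keep only the entries that are not being deleted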
+ content = get_policy_content_request(uuid)
+ if not content or 'content' not in content:
+ return_error('Could not update policy content - failed retrieving the current content')
+
+ if ips:
+ if 'ipAddresses' in content['content']:
+ ips_to_keep = [ip for ip in content['content']['ipAddresses'] if ip['ipAddress'] not in ips]
+ content['content']['ipAddresses'] = ips_to_keep
+ elif urls:
+ if 'urls' in content['content']:
+ urls_to_keep = [url for url in content['content']['urls'] if url['url'] not in urls]
+ content['content']['urls'] = urls_to_keep
+ elif categories:
+ if 'categories' in content['content']:
+ categories_to_keep = [category for category in content['content']['categories']
+ if category['categoryName'] not in categories]
+ content['content']['categories'] = categories_to_keep
+
+ body['content'] = content['content']
+ response = http_request('POST', path, data=body)
+
+ return response
+
+
+def list_tenants_command():
+ """
+ List tenants in Symantec MC
+ """
+
+ contents = []
+ context = {}
+ limit = int(demisto.args().get('limit', 10))
+
+ tenants = list_tenants_request()
+
+ if tenants:
+ if limit:
+ tenants = tenants[:limit]
+
+ for tenant in tenants:
+ contents.append({
+ 'UUID': tenant.get('uuid'),
+ 'Name': tenant.get('name'),
+ 'ExternalID': tenant.get('externalId'),
+ 'Description': tenant.get('description'),
+ 'System': tenant.get('system')
+ })
+ headers = ['UUID', 'Name', 'ExternalID', 'Description', 'System']
+ human_readable = tableToMarkdown('Symantec Management Center Tenants', contents,
+ removeNull=True, headers=headers, headerTransform=pascalToSpace)
+ context['SymantecMC.Tenant(val.UUID && val.UUID === obj.UUID)'] = createContext(contents, removeNull=True)
+ else:
+ human_readable = 'No tenants found'
+
+ return_outputs(human_readable, context, tenants)
+
+
+def list_tenants_request():
+ """
+    Get tenants from Symantec MC
+ :return: List of Symantec MC tenants
+ """
+
+ path = 'tenants'
+ params: dict = {}
+
+ response = http_request('GET', path, params)
+ return response
+
+
+''' COMMANDS '''
+LOG('Command being called is ' + demisto.command())
+handle_proxy()
+
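+# Map each integration command name to the function that implements it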
+COMMAND_DICTIONARY = {
+ 'test-module': test_module,
+ 'symantec-mc-list-devices': list_devices_command,
+ 'symantec-mc-get-device': get_device_command,
+ 'symantec-mc-get-device-health': get_device_health_command,
+ 'symantec-mc-get-device-license': get_device_license_command,
+ 'symantec-mc-get-device-status': get_device_status_command,
+ 'symantec-mc-list-policies': list_policies_command,
+ 'symantec-mc-get-policy': get_policy_command,
+ 'symantec-mc-create-policy': create_policy_command,
+ 'symantec-mc-update-policy': update_policy_command,
+ 'symantec-mc-delete-policy': delete_policy_command,
+ 'symantec-mc-add-policy-content': add_policy_content_command,
+ 'symantec-mc-delete-policy-content': delete_policy_content_command,
+ 'symantec-mc-list-tenants': list_tenants_command
+}
+
+try:
+ command_func = COMMAND_DICTIONARY[demisto.command()]
+ command_func()
+except Exception as e:
+ LOG(str(e))
+ LOG.print_log()
+ return_error(str(e))
diff --git a/Integrations/SymantecManagementCenter/SymantecManagementCenter.yml b/Integrations/SymantecManagementCenter/SymantecManagementCenter.yml
new file mode 100644
index 000000000000..194fbe54bec2
--- /dev/null
+++ b/Integrations/SymantecManagementCenter/SymantecManagementCenter.yml
@@ -0,0 +1,736 @@
+category: Network Security
+commonfields:
+ id: Symantec Management Center
+ version: -1
+configuration:
+- display: Server URL (e.g. https://192.168.0.1:8082)
+ name: url
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Symantec Management Center provides a unified management environment
+ for the Symantec Security Platform portfolio of products.
+display: Symantec Management Center
+name: Symantec Management Center
+script:
+ commands:
+ - arguments:
+ - default: false
+      description: 'Filter devices by OS build number, for example: "GT 227900".'
+ isArray: true
+ name: build
+ required: false
+ secret: false
+ - default: false
+      description: 'Filter devices by description, for example: "CONTAINS desc".'
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+      description: 'Filter devices by model, for example: "EQ VSWG-SE".'
+ isArray: false
+ name: model
+ required: false
+ secret: false
+ - default: false
+      description: 'Filter devices by name, for example: "STARTSWITH CAS".'
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+      description: 'Filter devices by OS version, for example: "LT 2.3".'
+ isArray: false
+ name: os_version
+ required: false
+ secret: false
+ - default: false
+      description: 'Filter devices by platform, for example: "CONTAINS CAS".'
+ isArray: false
+ name: platform
+ required: false
+ secret: false
+ - default: false
+      description: 'Filter devices by device type, for example: "cas".'
+ isArray: false
+ name: type
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Maximum number of results to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Lists all devices in Symantec MC.
+ execution: false
+ name: symantec-mc-list-devices
+ outputs:
+ - contextPath: SymantecMC.Device.UUID
+ description: Device UUID.
+ type: String
+ - contextPath: SymantecMC.Device.Name
+ description: Device name.
+ type: String
+ - contextPath: SymantecMC.Device.LastChanged
+ description: Device last changed date.
+ type: Date
+ - contextPath: SymantecMC.Device.Type
+ description: Device type.
+ type: String
+ - contextPath: SymantecMC.Device.Host
+ description: Device host address.
+ type: String
+ - arguments:
+ - default: true
+ description: Device UUID. Run the symantec-mc-list-devices command to get the
+ UUID.
+ isArray: false
+ name: uuid
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets device information from Symantec MC.
+ execution: false
+ name: symantec-mc-get-device
+ outputs:
+ - contextPath: SymantecMC.Device.UUID
+ description: Device UUID.
+ type: String
+ - contextPath: SymantecMC.Device.Name
+ description: Device name.
+ type: String
+ - contextPath: SymantecMC.Device.LastChanged
+ description: Device last changed date.
+ type: String
+ - contextPath: SymantecMC.Device.LastChangedBy
+ description: User that last changed the device.
+ type: String
+ - contextPath: SymantecMC.Device.Description
+ description: Device description.
+ type: String
+ - contextPath: SymantecMC.Device.Model
+ description: Device model.
+ type: String
+    - contextPath: SymantecMC.Device.Platform
+      description: Device platform.
+ type: String
+ - contextPath: SymantecMC.Device.Type
+ description: Device type.
+ type: String
+ - contextPath: SymantecMC.Device.OSVersion
+ description: Device OS version.
+ type: String
+ - contextPath: SymantecMC.Device.Build
+ description: Device build number.
+ type: Number
+ - contextPath: SymantecMC.Device.SerialNumber
+ description: Device serial number.
+ type: Number
+ - contextPath: SymantecMC.Device.Host
+ description: Device host address.
+ type: String
+ - contextPath: SymantecMC.Device.ManagementStatus
+ description: Device management status.
+ type: String
+ - contextPath: SymantecMC.Device.DeploymentStatus
+ description: Device deployment status.
+ type: String
+ - arguments:
+ - default: true
+ description: Device UUID. Run the symantec-mc-list-devices command to get the
+ UUID.
+ isArray: false
+ name: uuid
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets health information for a device.
+ execution: false
+ name: symantec-mc-get-device-health
+ outputs:
+ - contextPath: SymantecMC.Device.UUID
+ description: Device UUID.
+ type: String
+ - contextPath: SymantecMC.Device.Name
+ description: Device name.
+ type: String
+ - contextPath: SymantecMC.Device.Health.Category
+ description: Device health category.
+ type: String
+ - contextPath: SymantecMC.Device.Health.Name
+ description: Device health name.
+ type: String
+ - contextPath: SymantecMC.Device.Health.State
+ description: Device health state.
+ type: String
+ - contextPath: SymantecMC.Device.Health.Message
+ description: Device health message.
+ type: String
+ - contextPath: SymantecMC.Device.Health.Status
+ description: Device health status.
+ type: String
+ - arguments:
+ - default: true
+ description: Device UUID. Run the symantec-mc-list-devices command to get the
+ UUID.
+ isArray: false
+ name: uuid
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets license information for a device in Symantec MC.
+ execution: false
+ name: symantec-mc-get-device-license
+ outputs:
+ - contextPath: SymantecMC.Device.UUID
+ description: Device UUID.
+ type: String
+ - contextPath: SymantecMC.Device.Name
+ description: Device name.
+ type: String
+ - contextPath: SymantecMC.Device.Type
+ description: Device type.
+ type: String
+ - contextPath: SymantecMC.Device.LicenseStatus
+ description: Device license status.
+ type: String
+ - contextPath: SymantecMC.Device.LicenseComponent.Name
+ description: Device license component name.
+ type: String
+ - contextPath: SymantecMC.Device.LicenseComponent.ActivationDate
+ description: Device license component activation date.
+ type: Date
+ - contextPath: SymantecMC.Device.LicenseComponent.ExpirationDate
+      description: Device license component expiration date.
+ type: Date
+ - contextPath: SymantecMC.Device.LicenseComponent.Validity
+ description: Device license component validity.
+ type: String
+ - arguments:
+ - default: true
+ description: Device UUID. Run the symantec-mc-list-devices command to get the
+ UUID.
+ isArray: false
+ name: uuid
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets the status of a device.
+ execution: false
+ name: symantec-mc-get-device-status
+ outputs:
+ - contextPath: SymantecMC.Device.UUID
+ description: Device UUID.
+ type: String
+ - contextPath: SymantecMC.Device.Name
+ description: Device name.
+ type: String
+ - contextPath: SymantecMC.Device.CheckDate
+ description: Device check date.
+ type: Date
+ - contextPath: SymantecMC.Device.StartDate
+ description: Device start date.
+ type: Date
+ - contextPath: SymantecMC.Device.MonitorState
+ description: Device monitor state.
+ type: String
+ - contextPath: SymantecMC.Device.Warnings
+ description: Device warning count.
+ type: Number
+ - contextPath: SymantecMC.Device.Errors
+ description: Device error count.
+ type: Number
+ - arguments:
+ - default: false
+      description: 'Filter policies by content type, for example: "ENDSWITH URL".'
+ isArray: false
+ name: content_type
+ required: false
+ secret: false
+ - default: false
+      description: 'Filter policies by description, for example: "CONTAINS desc".'
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+      description: 'Filter policies by name, for example: "STARTSWITH my_list".'
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+      description: 'Filter policies by reference ID, for example: "EQ my_list".'
+ isArray: false
+ name: reference_id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Filter policies by whether or not they are shared.
+ isArray: false
+ name: shared
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+      description: 'Filter policies by tenant, for example: "EQ TENANT_EXTERNAL_ID".'
+ isArray: false
+ name: tenant
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+      description: Maximum number of results to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: List policies in Symantec MC.
+ execution: false
+ name: symantec-mc-list-policies
+ outputs:
+ - contextPath: SymantecMC.Policy.UUID
+ description: Policy UUID.
+ type: String
+ - contextPath: SymantecMC.Policy.Name
+ description: Policy name.
+ type: String
+ - contextPath: SymantecMC.Policy.ContentType
+ description: Policy content type.
+ type: String
+ - contextPath: SymantecMC.Policy.Author
+ description: Policy author.
+ type: String
+ - contextPath: SymantecMC.Policy.Shared
+ description: Policy shared.
+ type: Boolean
+ - contextPath: SymantecMC.Policy.ReferenceID
+      description: Policy reference ID.
+ type: String
+ - contextPath: SymantecMC.Policy.Tenant
+ description: Policy tenant.
+ type: String
+    - contextPath: SymantecMC.Policy.ReplaceVariables
+ description: Policy replace variables.
+ type: Boolean
+ - arguments:
+ - default: true
+      description: Policy UUID. Run the symantec-mc-list-policies command to get
+        the UUID.
+ isArray: false
+ name: uuid
+ required: false
+ secret: false
+ - default: false
+      description: The policy name.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ deprecated: false
+ description: Gets information for a policy.
+ execution: false
+ name: symantec-mc-get-policy
+ outputs:
+ - contextPath: SymantecMC.Policy.Name
+ description: Policy name.
+ type: String
+ - contextPath: SymantecMC.Policy.SchemaVersion
+ description: Policy content schema version.
+ type: Number
+ - contextPath: SymantecMC.Policy.RevisionInfo.Number
+ description: Policy content revision number.
+ type: Number
+ - contextPath: SymantecMC.Policy.RevisionInfo.Description
+ description: Policy content revision description.
+ type: String
+ - contextPath: SymantecMC.Policy.RevisionInfo.Author
+ description: Policy content revision author.
+ type: String
+ - contextPath: SymantecMC.Policy.RevisionInfo.Date
+ description: Policy content revision date.
+ type: Date
+ - contextPath: SymantecMC.Policy.IP.Address
+ description: Policy IP address.
+ type: String
+ - contextPath: SymantecMC.Policy.IP.Description
+ description: Policy IP description.
+ type: String
+ - contextPath: SymantecMC.Policy.IP.Enabled
+ description: Policy IP enabled.
+ type: Boolean
+ - contextPath: SymantecMC.Policy.URL.Address
+ description: Policy URL address.
+ type: String
+ - contextPath: SymantecMC.Policy.URL.Description
+ description: Policy URL description.
+ type: String
+ - contextPath: SymantecMC.Policy.URL.Enabled
+ description: Policy URL enabled.
+ type: Boolean
+ - contextPath: SymantecMC.Policy.Category.Name
+ description: Policy category name.
+ type: String
+ - contextPath: SymantecMC.Policy.UUID
+ description: Policy UUID.
+ type: String
+ - contextPath: SymantecMC.Policy.Description
+      description: Policy description.
+ type: String
+ - contextPath: SymantecMC.Policy.ReferenceID
+ description: Policy reference ID.
+ type: String
+ - arguments:
+ - default: false
+ description: Policy name.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Policy content type.
+ isArray: false
+ name: content_type
+ predefined:
+ - URL_LIST
+ - IP_LIST
+ - CATEGORY_LIST
+ required: true
+ secret: false
+ - default: false
+ description: Policy description.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: Policy reference ID.
+ isArray: false
+ name: reference_id
+ required: false
+ secret: false
+ - default: false
+ description: UUID of the tenant associated with this policy. Run the symantec-mc-list-tenants
+ command to get the tenant UUID.
+ isArray: false
+ name: tenant
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'true'
+      description: Whether to share the policy.
+ isArray: false
+ name: shared
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Whether replace variables are supported.
+ isArray: false
+ name: replace_variables
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a policy in Symantec MC.
+ execution: false
+ name: symantec-mc-create-policy
+ outputs:
+ - contextPath: SymantecMC.Policy.UUID
+ description: Policy UUID.
+ type: String
+ - contextPath: SymantecMC.Policy.Name
+ description: Policy name.
+ type: String
+ - contextPath: SymantecMC.Policy.ContentType
+ description: Policy content type.
+ type: String
+ - contextPath: SymantecMC.Policy.Author
+ description: Policy author.
+ type: String
+ - arguments:
+ - default: true
+ description: Policy UUID. Run the symantec-mc-list-policies command to get the
+ UUID.
+ isArray: false
+ name: uuid
+ required: true
+ secret: false
+ - default: false
+ description: New name of the policy.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ description: New description of the policy.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: New reference ID of the policy.
+ isArray: false
+ name: reference_id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Replace variables in the policy.
+ isArray: false
+ name: replace_variables
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Updates the metadata for a policy in Symantec MC.
+ execution: false
+ name: symantec-mc-update-policy
+ outputs:
+ - contextPath: SymantecMC.Policy.UUID
+ description: Policy UUID.
+ type: String
+ - contextPath: SymantecMC.Policy.Name
+ description: Policy name.
+ type: String
+ - arguments:
+ - default: true
+ description: Policy UUID. Run the symantec-mc-list-policies command to get the
+ UUID.
+ isArray: false
+ name: uuid
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Set to "true" to force the policy object to be removed even if
+ it is referenced by another policy.
+ isArray: false
+ name: force
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes a policy in Symantec MC.
+ execution: true
+ name: symantec-mc-delete-policy
+ - arguments:
+ - default: false
+ defaultValue: '10'
+ description: Maximum number of results to return.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: List tenants in Symantec MC.
+ execution: false
+ name: symantec-mc-list-tenants
+ outputs:
+ - contextPath: SymantecMC.Tenant.UUID
+ description: Tenant UUID.
+ type: String
+ - contextPath: SymantecMC.Tenant.Name
+ description: Tenant name.
+ type: String
+ - contextPath: SymantecMC.Tenant.ExternalID
+ description: Tenant external ID.
+ type: String
+ - contextPath: SymantecMC.Tenant.Description
+ description: Tenant description.
+ type: String
+ - contextPath: SymantecMC.Tenant.System
+      description: Whether the tenant is a system tenant.
+ type: Boolean
+ - arguments:
+ - default: false
+ description: Policy UUID. Run the symantec-mc-list-policies command to get the
+ UUID.
+ isArray: false
+ name: uuid
+ required: false
+ secret: false
+ - default: false
+ description: The policy name to add content to.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: Policy content type.
+ isArray: false
+ name: content_type
+ predefined:
+ - URL_LIST
+ - IP_LIST
+ - CATEGORY_LIST
+ required: true
+ secret: false
+ - default: false
+ description: Description of the policy change.
+ isArray: false
+ name: change_description
+ required: true
+ secret: false
+ - default: false
+ description: The version of the schema for this content. This value will correspond
+ to the format of the content. Run the symantec-mc-get-policy command to get
+        the schema version.
+ isArray: false
+ name: schema_version
+ required: false
+ secret: false
+ - default: false
+ description: 'CSV list of IP addresses to add, for example: "1.2.3.4, 8.8.8.8".'
+ isArray: true
+ name: ip
+ required: false
+ secret: false
+ - default: false
+ description: 'CSV list of URLs to add, for example: "www.google.com, www.github.com".'
+ isArray: true
+ name: url
+ required: false
+ secret: false
+ - default: false
+ description: 'CSV list of category names to add, for example: "Job Search/Careers,
+ Content Servers".'
+ isArray: true
+ name: category
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+      description: Whether the added entries are enabled. Relevant only for URL
+        and IP content types.
+ isArray: false
+ name: enabled
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Content description.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds content to a policy in Symantec MC. Can be IPs, URLs, or category
+ names.
+ execution: false
+ name: symantec-mc-add-policy-content
+ - arguments:
+ - default: false
+ description: Policy UUID. Run the symantec-mc-list-policies command to get the
+ UUID.
+ isArray: false
+ name: uuid
+ required: false
+ secret: false
+ - default: false
+      description: The policy name to delete content from.
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Policy content type.
+ isArray: false
+ name: content_type
+ predefined:
+ - URL_LIST
+ - IP_LIST
+ - CATEGORY_LIST
+ required: true
+ secret: false
+ - default: false
+ description: Description of the policy change.
+ isArray: false
+ name: change_description
+ required: true
+ secret: false
+ - default: false
+ description: The version of the schema for this content. This value will correspond
+ to the format of the content. Run the symantec-mc-get-policy command to get
+ the schema version.
+ isArray: false
+ name: schema_version
+ required: false
+ secret: false
+ - default: false
+ description: 'CSV list of IP addresses to delete, for example: "1.2.3.4, 8.8.8.8".'
+ isArray: true
+ name: ip
+ required: false
+ secret: false
+ - default: false
+ description: 'CSV list of URLs to delete, for example: "www.google.com, www.github.com".'
+ isArray: true
+ name: url
+ required: false
+ secret: false
+ - default: false
+ description: 'CSV list of category names to delete, for example: "Job Search/Careers,
+ Content Servers".'
+ isArray: true
+ name: category
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes content from a policy in Symantec MC.
+ execution: false
+ name: symantec-mc-delete-policy-content
+ dockerimage: demisto/python3:3.7.3.221
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
+tests:
+- SymantecMC_TestPlaybook
diff --git a/Integrations/SymantecManagementCenter/SymantecManagementCenter_description.md b/Integrations/SymantecManagementCenter/SymantecManagementCenter_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/SymantecManagementCenter/SymantecManagementCenter_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/SymantecManagementCenter/SymantecManagementCenter_image.png b/Integrations/SymantecManagementCenter/SymantecManagementCenter_image.png
new file mode 100644
index 000000000000..832fd3871110
Binary files /dev/null and b/Integrations/SymantecManagementCenter/SymantecManagementCenter_image.png differ
diff --git a/Integrations/Tanium/.pylintrc b/Integrations/Tanium/.pylintrc
new file mode 100644
index 000000000000..3effe9528b8c
--- /dev/null
+++ b/Integrations/Tanium/.pylintrc
@@ -0,0 +1,2 @@
+[MASTER]
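+# Let pylint resolve pytan imports; /pytan-2.2.2/lib matches the path Tanium.py
+# adds to sys.path (presumably where the demisto/pytan docker image ships pytan)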
+init-hook='import sys; sys.path.append("/pytan-2.2.2/lib")'
diff --git a/Integrations/Tanium/CHANGELOG.md b/Integrations/Tanium/CHANGELOG.md
new file mode 100644
index 000000000000..bbf5a5b2ce80
--- /dev/null
+++ b/Integrations/Tanium/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+Fixed an issue in the ***tn-deploy-package*** command.
+
+
+## [19.8.2] - 2019-08-22
+-
diff --git a/Integrations/Tanium/Tanium.py b/Integrations/Tanium/Tanium.py
new file mode 100644
index 000000000000..7fe029434a47
--- /dev/null
+++ b/Integrations/Tanium/Tanium.py
@@ -0,0 +1,620 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import json
+import os
+import sys
+
+from cStringIO import StringIO
+
+if not demisto.params()['proxy']:
+ os.environ.pop('HTTP_PROXY', None)
+ os.environ.pop('HTTPS_PROXY', None)
+ os.environ.pop('http_proxy', None)
+ os.environ.pop('https_proxy', None)
+
+# prevent Python from generating .pyc files
+sys.dont_write_bytecode = True
+
+# path where the pytan library is installed
+pytan_loc = "/pytan-2.2.2"
+pytan_static_path = os.path.join(os.path.expanduser(pytan_loc), 'lib')
+
+# Determine our script name, script dir
+my_file = os.path.abspath(sys.argv[0])
+my_dir = os.path.dirname(my_file)
+
+# try to automatically determine the pytan lib directory by assuming it is in '../../lib/'
+parent_dir = os.path.dirname(my_dir)
+pytan_root_dir = os.path.dirname(parent_dir)
+lib_dir = os.path.join(pytan_root_dir, 'lib')
+
+# add pytan_loc and lib_dir to the PYTHONPATH variable
+path_adds = [lib_dir, pytan_static_path]
+for aa in path_adds:
+ if aa not in sys.path:
+ sys.path.append(aa)
+
+import pytan
+import pytan.binsupport
+import taniumpy
+
+
+def csvstr_to_list(csv_str):
+ lines = csv_str.splitlines()
+ if len(lines) < 2:
+ return []
+ headers = lines[0].split(',')
+ return [dict(zip(headers, line.split(','))) for line in lines[1:]]
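+# Example: csvstr_to_list('Computer Name,IP Address\nhost-1,10.0.0.1')
+# returns [{'Computer Name': 'host-1', 'IP Address': '10.0.0.1'}]; output with
+# fewer than two lines (headers only, or empty) yields an empty list.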
+
+
+def parseToJson(handler, response):
+ LOG("exporting tanium response")
+ export_kwargs = {}
+ export_kwargs['obj'] = response
+ export_kwargs['export_format'] = 'json'
+ out = handler.export_obj(**export_kwargs)
+ return json.loads(out)
+
+
+def create_error_entry(contents):
+ return {
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['error'],
+ 'Contents': "Error - " + contents
+ }
+
+
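+# Builds a standard War Room entry: 'Contents' carries the raw result,
+# 'HumanReadable' a markdown table (or a "No results were found" header when
+# the table is empty), and 'EntryContext' the outputs for downstream playbooks.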
+def create_entry(header, table, context=None, headers=None):
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': table,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(header, table,
+ headers) if table else '### ' + header + '\nNo results were found',
+ 'EntryContext': context or {}
+ }
+
+
+def filter_list(lst, keys):
+ return [{key: item.get(key) for key in keys} for item in lst]
+
+
+def get_handler():
+ handler_args = {}
+
+ handler_args['username'] = demisto.params()['credentials']['identifier']
+ handler_args['password'] = demisto.params()['credentials']['password']
+ handler_args['host'] = demisto.params()['host']
+ handler_args['port'] = demisto.params()['port']
+
+ handler_args['loglevel'] = 1
+ handler_args['debugformat'] = False
+ handler_args['record_all_requests'] = True
+
+ return pytan.Handler(**handler_args)
+
+
+def get_all_objects(handler, objtype):
+ LOG("getting all tanium objects of type %s" % objtype)
+ kwargs = {}
+ kwargs["objtype"] = objtype
+ response = handler.get_all(**kwargs)
+ return parseToJson(handler, response)
+
+
+def get_all_objects_with_entry(handler, objtype):
+ response = get_all_objects(handler, unicode(objtype))
+ parsed = response[objtype]
+ return create_entry('Tanium ' + objtype + 's', parsed)
+
+
+def get_all_sensors(handler):
+ response = get_all_objects(handler, u'sensor')
+ parsed = response.get('sensor')
+ return create_entry('Tanium Sensors', parsed, {'Tanium.Sensors': parsed})
+
+
+def get_all_saved_actions(handler):
+ response = get_all_objects(handler, u'saved_action')
+ parsed = response.get('saved_action')
+ return create_entry('Tanium Saved Actions', parsed, {'Tanium.SavedActions': parsed},
+ ['id', 'name', 'creation_time', 'action_group_id', 'approved_flag'])
+
+
+def getAllPendingActions(handler):
+ response = get_all_objects(handler, u'saved_action')
+ parsed = response.get('saved_action')
+ pendingActions = filter(lambda x: x['approved_flag'] == 0, parsed)
+ return create_entry('Tanium Pending Actions', pendingActions, {'Tanium.PendingActions': pendingActions},
+ ['id', 'name', 'creation_time', 'action_group_id', 'approved_flag'])
+
+
+def getAllPackages(handler):
+ response = get_all_objects(handler, u'package')
+ parsed = response.get('package_spec')
+ return create_entry('Tanium Packages', parsed, {'Tanium.Packages': parsed},
+ ['id', 'name', 'creation_time', 'command', 'last_modified_by'])
+
+
+def get_all_saved_questions(handler):
+ response = get_all_objects(handler, u'saved_question')
+ parsed = response.get('saved_question')
+ return create_entry('Tanium Saved Questions', parsed, {'Tanium.SavedQuestions': parsed},
+ ['query_text', 'name', 'id'])
+
+
+def get_object(handler, objtype, name=None, id=None):
+ LOG("getting Tanium %s - %s" % (objtype, name if name is not None else id))
+ kwargs = {}
+ kwargs["objtype"] = objtype
+ kwargs["id"] = id
+ kwargs["name"] = name
+
+ response = handler.get(**kwargs)
+ return parseToJson(handler, response)
+
+
+def parameter_table_builder(data, header, object_type):
+ if not data.get('parameter_definition'):
+ return 'No arguments needed for this ' + object_type
+ param_data = json.loads(data['parameter_definition']).get('parameters', [])
+ parsed_params = []
+ parsed_param = {}
+ for param in param_data:
+ parsed_param['Description'] = param.get('helpString', 'No description')
+ parsed_param['Name'] = param.get('label', 'No argument name')
+ parsed_param['Values'] = ','.join(param['values']) if param.get('values') else 'Any value'
+ parsed_param['Key'] = param.get('key', 'No key')
+ parsed_param['Type'] = param['parameterType'].split('::')[-1] if param.get(
+ 'parameterType') else 'Type not specified'
+ parsed_params.append(parsed_param)
+ parsed_param = {}
+ data['parameters'] = parsed_params
+ del data['parameter_definition']
+ return tableToMarkdown(header, parsed_params, ['Key', 'Name', 'Values', 'Description', 'Type'])
+
+
+def get_sensor_variable(parsed):
+ if len(parsed) > 0 and parsed[0].get('command'):
+ command = parsed[0].get('command')
+ if command:
+ idx1 = command.find("||")
+ if idx1 > -1:
+ idx2 = command.find("||", idx1 + 2)
+ if idx2 > -1:
+ return command[idx1:idx2 + 2]
+ return None
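+# Example (hypothetical package command): for 'cmd /c run.bat ||Computer Name||'
+# this returns '||Computer Name||', the first double-pipe-wrapped token; it
+# returns None when the command contains no sensor variable.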
+
+
+def get_package(handler):
+ response = get_object(handler, u'package', demisto.args().get('name'), demisto.args().get('id'))
+ parsed = response.get('package_spec')
+ sensor_var = get_sensor_variable(parsed)
+ res = parsed[0]
+ res['sensor_variable'] = sensor_var
+ parameters = parameter_table_builder(res, 'Package Arguments Details', 'package')
+ final_result = create_entry(
+ 'Tanium Package',
+ res,
+ {'Tanium.Packages(val.id && val.id == obj.id)': filter_list([res], ['name', 'id', 'display_name', 'command',
+ 'command_timeout', 'deleted_flag', 'files',
+ 'parameters', 'sensor_variable'])},
+ ['id', 'name', 'creation_time', 'command', 'last_modified_by']
+ )
+ final_result['HumanReadable'] += parameters
+ if sensor_var is not None:
+ final_result['HumanReadable'] += '\n### Sensor Variables Type\n' + sensor_var
+ return final_result
+
+
+def get_saved_question(handler):
+ response = get_object(handler, u'saved_question', demisto.args().get('name'), demisto.args().get('id'))
+ parsed = response.get('saved_question')
+ return create_entry(
+ 'Tanium Saved Question',
+ parsed,
+ {'Tanium.SavedQuestions(val.id && val.id == obj.id)': filter_list(parsed,
+ ['query_text', 'mod_time', 'user', 'name',
+ 'expire_seconds', 'id', 'issue_seconds'])},
+ ['query_text', 'name', 'id']
+ )
+
+
+def get_sensor(handler):
+ response = get_object(handler, u'sensor', demisto.args().get('name'), demisto.args().get('id'))
+ parsed = response.get('sensor', None)
+ parameters = parameter_table_builder(parsed[0], 'Sensor Parameters Details', 'sensor')
+ final_result = create_entry(
+ 'Tanium Sensor - ' + demisto.args()['name'],
+ parsed,
+ {'Tanium.Sensors(val.id && val.id == obj.id)': filter_list(parsed,
+ ['id', 'name', 'max_age_seconds', 'description',
+ 'parameters'])},
+ ['id', 'name', 'category', 'description'])
+ final_result['HumanReadable'] += '\n' + parameters
+ return final_result
+
+
+def get_action(handler):
+ response = get_object(handler, u'action', demisto.args().get('name'), demisto.args().get('id'))
+ parsed = response.get('action', None)
+ if 'saved_action' in parsed[0]:
+ parsed[0]['saved_action_id'] = parsed[0]['saved_action']['id']
+ del parsed[0]['saved_action']
+
+ return create_entry(
+ 'Tanium Action - ' + parsed[0]['name'],
+ parsed,
+ {'Tanium.Actions(val.id && val.id == obj.id)': filter_list(parsed,
+ ['name', 'id', 'status', 'start_time', 'approver',
+ 'creation_time', 'package_spec'])},
+ ['id', 'name', 'status', 'saved_action_id', 'stopped_flag'])
+
+
+def handle_cgs(handler, obj, kwargs):
+ """Example PreAddAction callback that modifies the target_group of an Action if computer group names are supplied.
+ callbacks = {}
+ callbacks["PreAddAction"] = handle_cgs
+ deploy_action(package="blah", cg_names=["ip has 192.168", "has tanium app"], action_filters=["Computer Name, that
+ contains:a"], callbacks=callbacks)
+ """
+ cgs = kwargs.get("cg_names", [])
+ LOG("handling cgs %s" % cgs)
+ cg_objs = [handler.get("group", name=x)[0] for x in cgs]
+ cg_listobj = taniumpy.GroupList()
+ for x in cg_objs:
+ cg_listobj.append(x)
+
+ if cg_objs:
+ tg_obj = taniumpy.Group()
+ tg_obj.sub_groups = cg_listobj
+ tg_obj.and_flag = 0
+ if obj.target_group is not None:
+ tg_obj.sub_groups.append(obj.target_group)
+ obj.target_group = tg_obj
+ return obj
+
+
+def parse_deploy_action_raw_resp(handler, response):
+ saved_action_object = response.get('saved_action_object')
+ action_object = response.get('action_object')
+ package_object = response.get('package_object')
+
+ return {
+ 'saved_action_object': parseToJson(handler, saved_action_object),
+ 'action_object': parseToJson(handler, action_object),
+ 'package_object': parseToJson(handler, package_object)
+ }
+
+
+def deploy_action(handler):
+ kwargs = {}
+ kwargs["run"] = True
+
+ for key, value in demisto.args().items():
+ kwargs[key] = value
+
+ kwargs["get_results"] = True if str(kwargs.get('get_results', '')).lower() == 'true' else False
+
+ callbacks = {}
+ callbacks['PreAddAction'] = handle_cgs
+ kwargs['callbacks'] = callbacks # type: ignore
+ kwargs['action_options'] = ['or'] # type: ignore
+ if demisto.get(demisto.args(), 'action_options'):
+ kwargs['action_options'] = demisto.args()['action_options'].split(',')
+ if demisto.get(demisto.args(), 'action_filters'):
+ kwargs['action_filters'] = demisto.args()['action_filters'].split(';')
+ if demisto.get(demisto.args(), 'action_filters_groups'):
+ kwargs['cg_names'] = demisto.args()['action_filters_groups'].split(',')
+
+ # Building the package query
+ package = demisto.args()['package']
+ package_with_args = [package]
+
+ formatted_args = ''
+ if demisto.args().get('package_args'):
+ package_args = demisto.args().get('package_args', '').split(",")
+ formatted_args = ','.join('$%d=%s' % (i + 1, arg) for i, arg in enumerate(package_args))
+
+ replace_str = get_sensor_variable(get_object(handler, u'package', package).get('package_spec'))
+ sensor_var = demisto.args().get('sensor_variables')
+
+ if replace_str is None and sensor_var:
+ return create_error_entry("Package \"" + package + "\" does not have a sensor variable.")
+ if replace_str and sensor_var is None:
+ return create_error_entry("Package \"" + package + "\" requires a sensor variable.")
+
+ if sensor_var:
+ sensor_vars = demisto.args().get('sensor_variables', '').split(";")
+ package_with_args = []
+ if formatted_args != '':
+ formatted_args += ','
+ for var in sensor_vars:
+ package_with_args.append(package + '{' + formatted_args + replace_str + '=' + var + '}')
+
+ elif formatted_args != '':
+ package_with_args = [package + '{' + formatted_args + '}']
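+ # Example with hypothetical values: package='Deploy Tool', package_args='a,b',
+ # a package sensor variable of '||Computer Name||' and sensor_variables='X;Y'
+ # produce ['Deploy Tool{$1=a,$2=b,||Computer Name||=X}',
+ # 'Deploy Tool{$1=a,$2=b,||Computer Name||=Y}'].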
+
+ response = []
+ for pack in package_with_args:
+ kwargs['package'] = pack
+ LOG("deploying Tanium package %s" % pack)
+ response.append(handler.deploy_action(**kwargs))
+
+ ec = { # type: ignore
+ 'Tanium.SavedActions(val.Id && val.Id == obj.Id)': [],
+ 'Tanium.Actions(val.id && val.id == obj.id)': []
+ }
+ contents = []
+ tbl = []
+
+ for res in response:
+ ec['Tanium.SavedActions(val.Id && val.Id == obj.Id)'].append({
+ 'Name': res['saved_action_object'].name,
+ 'Id': res['saved_action_object'].id
+ })
+ parsed = parse_deploy_action_raw_resp(handler, res)
+ ec['Tanium.Actions(val.id && val.id == obj.id)'] += filter_list([parsed['action_object']],
+ ['name', 'id', 'status', 'start_time',
+ 'approver', 'creation_time', 'package_spec'])
+ contents.append(parsed)
+ tbl.append({
+ 'Action ID': parsed['action_object']['id'],
+ 'Saved Action ID': parsed['saved_action_object']['id'],
+ 'Name': parsed['action_object']['name'],
+ 'Package Name': parsed['package_object']['name'],
+ 'Command': parsed['package_object']['command']
+ })
+
+ return {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tanium Deployed Actions', tbl) if tbl else 'No results were found',
+ 'EntryContext': ec
+ }
+
+
+def approveSavedAction(handler, action_id, saved_action_id):
+ LOG("approving saved action with id %s %s" % (action_id, saved_action_id))
+ kwargs = {}
+ if not saved_action_id and not action_id:
+ raise Exception('Missing action ID')
+ if not saved_action_id:
+ action = get_object(handler, u'action', id=action_id)
+ parsed = action.get('action', None)
+ saved_action_id = parsed[0]['saved_action']['id']
+ kwargs['id'] = saved_action_id
+ response = handler.approve_saved_action(**kwargs)
+ parsed = {'Id': response.id, 'Name': response.name, 'ApprovedFlag': response.approved_flag}
+ final_result = create_entry('Action Approval', [parsed], {'Tanium.ActionApproval': parsed})
+ return final_result
+
+
+def askQuestion(handler, kwargs):
+ response = handler.ask(**kwargs)
+
+ if isinstance(response, str):
+ return response
+
+ query_text = response['question_object'].query_text
+ if response.get('question_results'):
+ export_kwargs = {}
+ export_kwargs['obj'] = response['question_results']
+ export_kwargs['export_format'] = 'csv'
+ LOG("exporting tanium question response")
+ out = handler.export_obj(**export_kwargs)
+
+ result = csvstr_to_list(out)
+
+ ec = {'Tanium.QuestionResults': result}
+ return create_entry(
+ 'Result for parsed query - %s' % (query_text,),
+ result,
+ ec)
+ else:
+ return 'Parsed query - %s\nNo results were found' % (query_text,)
+
+
+def get_parse_query_options(handler, question):
+ LOG("parsing query options")
+ parse_job_results = handler.parse_query(question)
+ jsonable = parse_job_results.to_jsonable()
+
+ ans = jsonable["parse_result_group"]
+ res = [{"index": i + 1, "question": q["question_text"]} for i, q in enumerate(ans)]
+ return create_entry('Tanium Questions', res, {}, ['index', 'question'])
+
+
+def get_ask_manual_help():
+ desc = '# Tanium Ask Manual Question - Help\n' + \
+ 'The _tn-ask-manual-question_ command corresponds directly with the Tanium Question Builder.\n' + \
+ 'Each command argument can be mapped to a field in the Tanium Question Builder.\n' + \
+ '## Sensors\n---\n' + \
+ 'The _sensors_ argument correlates with the **"Get all ____ from..."** part of the Tanium Question.\n' + \
+ 'All sensor types share the same filters and sensor options, but each sensor has its own parameters.\n' + \
+ '\n' + \
+ '**Example** (simple sensor list):\n' + \
+ '`!tn-ask-manual-question sensors="Computer Name;IP Address"`\n' + \
+ '### Sensor filters\n' + \
+ 'You can apply only a single filter to each sensor, and it should be passed using this format:\n' + \
+ '_<sensor name>,that <filter>:<filter value>;<sensor name>,that <filter>:<filter value>_\n' + \
+ '\n' + \
+ '**Example #1** (2 sensors, 2 filters):\n`!tn-ask-manual-question sensors="Computer Name,that starts with:' \
+ 'D;IP Address,that does not contain:192"`\n**Example #2** (2 sensors, 1 filter):\n' + \
+ '`!tn-ask-manual-question sensors="Computer Name,that starts with:D;IP Address"`\n' + \
+ '### Sensor parameters\n' + \
+ 'Each sensor has its own unique parameters. ' \
+ 'To get a complete list of sensor parameters, run the _!tn-get-sensor_ command.\n' + \
+ 'Parameters are passed in curly brackets, after the sensor name, and before the filter.\n' + \
+ '**Example** (1 sensor, 1 filter, 1 parameter):\n' + \
+ '`!tn-ask-manual-question sensors=' \
+ '"Index Query File Exists{fileMD5Hash=4F83C01E8F7507D23C67AB085BF79E97},that contains:yes"`\n' + \
+ '### Sensor options\n' + \
+ 'All sensors have the same options: _ignore_case_, _match_case_, _match_any_value_, _match_all_values_, ' \
+ '_max_data_age_, _value_type_\nOptions are passed directly after filters.\n\n' \
+ '**Example** (1 sensor, 1 parameter, 1 filter, 3 options):\n' + \
+ '`!tn-ask-manual-question sensors="Index Query File Exists{fileMD5Hash=4F83C01E8F7507D23C67AB085BF79E97},' \
+ 'that contains:yes,opt:match_all_values, opt:ignore_case, opt:max_data_age:3600"`\n' + \
+ '## Question Filters\n---\n' + \
+ 'The _question_filters_ argument is a semicolon-separated list of sensors, with filters and parameters, ' \
+ 'that correlates to the **"... from all computers with ___"** part of the Tanium Question.\n' + \
+ 'Question filters can have an ***and*** or an ***or*** relation. ' \
+ 'You set the relation by using the _question_options_ argument.\n\n' + \
+ '**Example** (2 question filters):\n`!tn-ask-manual-question sensors="Computer Name" question_filters=' \
+ '"Index Query File Exists{fileMD5Hash=4F83C01E8F7507D23C67AB085BF79E97},that contains:yes;' \
+ 'Index Query File Exists{fileMD5Hash=4F83C01E8F7507D23C67AB085BF79E98},that contains:yes"`\n\n' + \
+ '## Question Options\n---\n' + \
+ 'Question options are a semicolon-separated list of options that apply to the entire question. They are ' \
+ 'generally used to define the relation of the different filter statements.\n' + \
+ '**Example** (1 question option):\n`!tn-ask-manual-question sensors="Computer Name" question_options="or" ' \
+ 'question_filters="Index Query File Exists{' \
+ 'fileMD5Hash=4F83C01E8F7507D23C67AB085BF79E97},that contains:yes"`\n'
+ return {
+ 'ContentsFormat': formats['markdown'],
+ 'Type': entryTypes['note'],
+ 'Contents': desc,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': desc
+ }
+
+
+def ask_manual_question(handler, args):
+ if args.get('help') == 'True':
+ return get_ask_manual_help()
+
+ kwargs = {}
+ kwargs["qtype"] = u'manual'
+ kwargs["sensors_help"] = True if args.get('sensors_help') == 'True' else False # type: ignore
+ kwargs["filters_help"] = True if args.get('filters_help') == 'True' else False # type: ignore
+ kwargs["options_help"] = True if args.get('options_help') == 'True' else False # type: ignore
+
+ if kwargs["filters_help"] or kwargs["sensors_help"] or kwargs["options_help"]:
+ try:
+ response = handler.ask(**kwargs)
+ # should always throw an exception
+ return response
+ except Exception as ex:
+ return str(ex)
+
+ kwargs["question_options"] = args.get('question_options', '').split(';') if args.get('question_options',
+ '') != '' else None
+ kwargs["question_filters"] = args.get('question_filters', '').split(';') if args.get('question_filters',
+ '') != '' else None
+ kwargs["sensors"] = args.get('sensors', '').split(';') if args.get('sensors', '') != '' else None
+ kwargs["polling_secs"] = int(args.get('polling_secs', '5')) # type: ignore
+ kwargs["complete_pct"] = int(args.get('complete_pct', '99')) # type: ignore
+
+ LOG("asking Tanium question")
+ return askQuestion(handler, kwargs)
+
+
+def ask_parsed_question(handler, question, index):
+ kwargs = {
+ 'picker': int(index),
+ 'question_text': question,
+ 'qtype': u'parsed',
+ 'get_results': True
+ }
+
+ LOG("asking Tanium question %s" % (question))
+ return askQuestion(handler, kwargs)
+
+
+def create_package(handler):
+ kwargs = {}
+ dArgs = demisto.args()
+ for key, value in dArgs.items():
+ kwargs[key] = value
+ if demisto.get(dArgs, 'file_urls'):
+ kwargs['file_urls'] = dArgs['file_urls'].split(",")
+ LOG("creating Tanium package")
+ response = handler.create_package(**kwargs)
+ parsed = parseToJson(handler, response)
+ final_result = create_entry('Tanium Package', [parsed], {'Tanium.Packages': parsed},
+ ['id', 'name', 'creation_time', 'command'])
+ return final_result
+
+
+def restore_sout_and_exit(final_result):
+ sys.stdout = sout
+ LOG.print_log()
+ demisto.results(final_result)
+ sys.exit(0)
+
+
+# Dealing with Broken Pipe issues raised by some commands
+sout = sys.stdout
+sys.stdout = StringIO()
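+# stdout is swapped for a buffer here and restored in restore_sout_and_exit;
+# presumably some pytan commands write progress to stdout, which would
+# otherwise break the pipe the script returns results over.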
+
+try:
+ handler = get_handler()
+ LOG("successfully logged into Tanium")
+ response = ''
+ d_args = demisto.args()
+
+ if demisto.command() == 'test-module':
+ test_question = 'get Computer Name from all machines with Computer Name contains "this is a test"'
+ final_result = ask_parsed_question(handler, test_question, '1')
+ restore_sout_and_exit('ok')
+ if demisto.command() == 'tn-get-package':
+ final_result = get_package(handler)
+ if demisto.command() == 'tn-get-saved-question':
+ final_result = get_saved_question(handler)
+ if demisto.command() == 'tn-get-object':
+ final_result = get_object(handler, unicode(d_args['object_type']), d_args.get('name'), d_args.get('id'))
+ if demisto.command() == 'tn-get-all-objects':
+ final_result = get_all_objects(handler, unicode(d_args['object_type']))
+ if demisto.command() == 'tn-get-all-packages':
+ final_result = getAllPackages(handler)
+ if demisto.command() == 'tn-get-all-sensors':
+ final_result = get_all_sensors(handler)
+ if demisto.command() == 'tn-get-all-saved-questions':
+ final_result = get_all_saved_questions(handler)
+ if demisto.command() == 'tn-get-all-saved-actions':
+ final_result = get_all_saved_actions(handler)
+ if demisto.command() == 'tn-get-all-pending-actions':
+ final_result = getAllPendingActions(handler)
+ if demisto.command() == 'tn-deploy-package':
+ final_result = deploy_action(handler)
+ if demisto.command() == 'tn-ask-system':
+ final_result = ask_parsed_question(handler, 'Get Computer Name from all machines with Computer Name matching \"'
+ + demisto.args()['hostname'] + '\"', '1')
+ if demisto.command() == 'tn-ask-question':
+ final_result = ask_parsed_question(handler, d_args['question'], d_args.get('index', '1'))
+ if demisto.command() == 'tn-create-package':
+ final_result = create_package(handler)
+ if demisto.command() == 'tn-approve-pending-action':
+ final_result = approveSavedAction(handler, d_args.get('id'),
+ d_args.get('saved_action_id', d_args.get('action_id')))
+ if demisto.command() == 'tn-ask-manual-question':
+ final_result = ask_manual_question(handler, d_args)
+ if demisto.command() == 'tn-parse-query':
+ final_result = get_parse_query_options(handler, d_args['question'])
+ if demisto.command() == 'tn-get-sensor':
+ final_result = get_sensor(handler)
+ if demisto.command() == 'tn-get-action':
+ final_result = get_action(handler)
+
+except Exception:
+ sys.stdout = sout
+ LOG.print_log()
+ raise
+
+restore_sout_and_exit(final_result)
diff --git a/Integrations/Tanium/Tanium.yml b/Integrations/Tanium/Tanium.yml
new file mode 100644
index 000000000000..2702cc25bae5
--- /dev/null
+++ b/Integrations/Tanium/Tanium.yml
@@ -0,0 +1,769 @@
+category: Endpoint
+commonfields:
+ id: Tanium
+ version: -1
+configuration:
+- display: Host URL or IP address (e.g. 1.2.3.4)
+ name: host
+ required: true
+ type: 0
+- defaultvalue: '443'
+ display: Port
+ name: port
+ required: true
+ type: 0
+- display: Credentials
+ name: credentials
+ required: true
+ type: 9
+- defaultvalue: 'false'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Tanium endpoint security and systems management
+display: Tanium
+name: Tanium
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Name of package
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ description: Tanium id of package (to be used instead of name)
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ deprecated: false
+ description: Get a package object based on name or ID.
+ execution: false
+ name: tn-get-package
+ outputs:
+ - contextPath: Tanium.Packages.display_name
+ description: Display name of package
+ type: string
+ - contextPath: Tanium.Packages.name
+ description: Name of created package
+ type: string
+ - contextPath: Tanium.Packages.command
+ description: Command to execute
+ type: string
+ - contextPath: Tanium.Packages.command_timeout
+ description: Timeout for command execution in seconds
+ type: number
+ - contextPath: Tanium.Packages.deleted_flag
+ description: Is the package deleted
+ type: boolean
+ - contextPath: Tanium.Packages.id
+ description: Tanium unique package id
+ type: string
+ - contextPath: Tanium.Packages.files
+ description: Package files
+ type: Unknown
+ - contextPath: Tanium.Packages.parameters.Description
+ description: Description of the package parameter
+ type: string
+ - contextPath: Tanium.Packages.parameters.Key
+ description: Key of the package parameter as it appears in the command
+ type: string
+ - contextPath: Tanium.Packages.parameters.Name
+ description: Name of the package parameter
+ type: string
+ - contextPath: Tanium.Packages.parameters.Type
+ description: Type of package parameter
+ type: string
+ - contextPath: Tanium.Packages.parameters.Values
+ description: Possible values of the package parameter
+ type: string
+ - contextPath: Tanium.Packages.sensor_variable
+ description: The type of sensor that this package requires as a variable
+ type: string
+ - deprecated: false
+ description: Get all Tanium package objects
+ execution: false
+ name: tn-get-all-packages
+ outputs:
+ - contextPath: Tanium.Packages.verify_expire_seconds
+ description: Timeout for verify action expiry in seconds
+ type: number
+ - contextPath: Tanium.Packages.display_name
+ description: Display name of package
+ type: string
+ - contextPath: Tanium.Packages.name
+ description: Name of created package
+ type: string
+ - contextPath: Tanium.Packages.command
+ description: Command to execute
+ type: string
+ - contextPath: Tanium.Packages.creation_time
+ description: Package creation time
+ type: date
+ - contextPath: Tanium.Packages.command_timeout
+ description: Timeout for command execution in seconds
+ type: number
+ - contextPath: Tanium.Packages.modification_time
+ description: Package modification time
+ type: date
+ - contextPath: Tanium.Packages.last_modified_by
+ description: User who last modified package
+ type: string
+ - contextPath: Tanium.Packages.last_update
+ description: Time when package was last updated
+ type: date
+ - contextPath: Tanium.Packages.deleted_flag
+ description: Is the package deleted
+ type: boolean
+ - contextPath: Tanium.Packages.id
+ description: Tanium unique package id
+ type: string
+ - contextPath: Tanium.Packages.expire_seconds
+ description: Timeout for action expiry in seconds
+ type: number
+ - contextPath: Tanium.Packages.files
+ description: Package files
+ type: Unknown
+ - arguments:
+ - default: false
+ description: Name of object
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ description: Tanium id of object (to be used instead of name)
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Type of object to get
+ isArray: false
+ name: object_type
+ predefined:
+ - user
+ - package
+ - saved_question
+ - sensor
+ - saved_action
+ - action
+ required: true
+ secret: false
+ deprecated: false
+ description: Send a generic Get Object request
+ execution: false
+ name: tn-get-object
+ - deprecated: false
+ description: Gets all saved questions
+ execution: false
+ name: tn-get-all-saved-questions
+ outputs:
+ - contextPath: Tanium.SavedQuestions.query_text
+ description: Question query text
+ type: string
+ - contextPath: Tanium.SavedQuestions.mod_time
+ description: Question modification time
+ type: date
+ - contextPath: Tanium.SavedQuestions.user.id
+ description: Unique id of user who saved question
+ type: string
+ - contextPath: Tanium.SavedQuestions.user.name
+ description: Name of user who saved question
+ type: string
+ - contextPath: Tanium.SavedQuestions.name
+ description: Name of saved question
+ type: string
+ - contextPath: Tanium.SavedQuestions.expire_seconds
+ description: Question expire time in seconds
+ type: number
+ - contextPath: Tanium.SavedQuestions.id
+ description: Saved question unique id
+ type: string
+ - contextPath: Tanium.SavedQuestions.issue_seconds
+ description: Issue time in seconds
+ type: number
+ - arguments:
+ - default: true
+ description: Name of package to deploy with this action
+ isArray: false
+ name: package
+ required: true
+ secret: false
+ - default: false
+ description: Comma separated list of arguments needed to execute the package
+ command. Please run 'tn-get-package' for a detailed list of arguments
+ isArray: false
+ name: package_args
+ required: false
+ secret: false
+ - default: false
+ description: Semicolon separated list of strings. Each string must describe
+ a sensor and a filter that limit which computers the action will deploy the
+ package to (e.g. Operating System, that contains:Windows; Computer Name, that
+ contains:WIN)
+ isArray: false
+ name: action_filters
+ required: false
+ secret: false
+ - default: false
+ description: Comma separated list of options to apply to action_filters (e.g. "max_data_age:3600,and").
+ Default is "or".
+ isArray: false
+ name: action_options
+ required: false
+ secret: false
+ - default: false
+ description: Semicolon separated list of sensor results that apply to the package.
+ A separate action will be issued for each given variable.
+ isArray: false
+ name: sensor_variables
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Specifies whether to wait for result completion after deploying
+ the action
+ isArray: false
+ name: get_results
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: Comma separated list of computer group names to filter by
+ isArray: false
+ name: action_filters_groups
+ required: false
+ secret: false
+ - default: false
+ description: Name of action group
+ isArray: false
+ name: action_group
+ required: false
+ secret: false
+ deprecated: false
+ description: Deploy a package and get the results back
+ execution: false
+ name: tn-deploy-package
+ outputs:
+ - contextPath: Tanium.SavedActions.Id
+ description: Saved action id
+ type: string
+ - contextPath: Tanium.SavedActions.Name
+ description: Saved action name
+ type: string
+ - contextPath: Tanium.Actions.id
+ description: ID of deployed action
+ type: string
+ - contextPath: Tanium.Actions.name
+ description: Name of deployed action
+ type: string
+ - contextPath: Tanium.Actions.creation_time
+ description: Time when action was created
+ type: date
+ - contextPath: Tanium.Actions.status
+ description: Action status (Closed, Pending, etc.)
+ type: string
+ - contextPath: Tanium.Actions.package_spec.command
+ description: The command that will run once action is approved
+ type: string
+ - contextPath: Tanium.Actions.package_spec.name
+ description: The name of the package associated with the action
+ type: string
+ - contextPath: Tanium.Actions.user.name
+ description: Name of user that created the action
+ type: string
+ - arguments:
+ - default: true
+ description: The question text
+ isArray: false
+ name: question
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '1'
+ description: The index of the parsed question to be asked (as returned by tn-parse-query)
+ isArray: false
+ name: index
+ required: false
+ secret: false
+ deprecated: false
+ description: Ask the server to parse the question text and run the parsed result
+ at the given index (the first result, by default)
+ execution: false
+ name: tn-ask-question
+ outputs:
+ - contextPath: Tanium.QuestionResults
+ description: Results of requested question. May be a complex object
+ type: Unknown
+ - arguments:
+ - default: true
+ description: Name of host
+ isArray: false
+ name: hostname
+ required: true
+ secret: false
+ deprecated: false
+ description: Ask a question about a specific endpoint
+ execution: false
+ name: tn-ask-system
+ outputs:
+ - contextPath: Tanium.QuestionResults
+ description: Results of requested computer name. May be a complex object
+ type: Unknown
+ - arguments:
+ - default: false
+ description: Name of saved question
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ description: Tanium unique id of saved question to be used instead of name
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ deprecated: false
+ description: Get a saved question by name or id
+ execution: false
+ name: tn-get-saved-question
+ outputs:
+ - contextPath: Tanium.SavedQuestions.query_text
+ description: Question query text
+ type: string
+ - contextPath: Tanium.SavedQuestions.mod_time
+ description: Question modification time
+ type: date
+ - contextPath: Tanium.SavedQuestions.user.id
+ description: Unique id of user who saved question
+ type: string
+ - contextPath: Tanium.SavedQuestions.user.name
+ description: Name of user who saved question
+ type: string
+ - contextPath: Tanium.SavedQuestions.name
+ description: Name of saved question
+ type: string
+ - contextPath: Tanium.SavedQuestions.expire_seconds
+ description: Question expire time in seconds
+ type: number
+ - contextPath: Tanium.SavedQuestions.id
+ description: Saved question unique id
+ type: string
+ - contextPath: Tanium.SavedQuestions.issue_seconds
+ description: Issue time in seconds
+ type: number
+ - arguments:
+ - default: false
+ description: Name of package to create
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: Command to execute
+ isArray: false
+ name: command
+ required: true
+ secret: false
+ - default: false
+ description: Display name of package
+ isArray: false
+ name: display_name
+ required: false
+ secret: false
+ - default: false
+ description: Comma separated list of URLs of files to add to package
+ isArray: false
+ name: file_urls
+ required: false
+ secret: false
+ - default: false
+ description: Timeout for command execution in seconds
+ isArray: false
+ name: command_timeout_seconds
+ required: false
+ secret: false
+ - default: false
+ description: Timeout for action expiry in seconds
+ isArray: false
+ name: expire_seconds
+ required: false
+ secret: false
+ deprecated: false
+ description: Create a package object
+ execution: false
+ name: tn-create-package
+ outputs:
+ - contextPath: Tanium.Packages.verify_expire_seconds
+ description: Timeout for verify action expiry in seconds
+ type: number
+ - contextPath: Tanium.Packages.display_name
+ description: Display name of package
+ type: string
+ - contextPath: Tanium.Packages.name
+ description: Name of created package
+ type: string
+ - contextPath: Tanium.Packages.command
+ description: Command to execute
+ type: string
+ - contextPath: Tanium.Packages.creation_time
+ description: Package creation time
+ type: date
+ - contextPath: Tanium.Packages.command_timeout
+ description: Timeout for command execution in seconds
+ type: number
+ - contextPath: Tanium.Packages.modification_time
+ description: Package modification time
+ type: date
+ - contextPath: Tanium.Packages.last_update
+ description: Time when package was last updated
+ type: date
+ - contextPath: Tanium.Packages.deleted_flag
+ description: Is the package deleted
+ type: boolean
+ - contextPath: Tanium.Packages.id
+ description: Tanium unique package id
+ type: string
+ - contextPath: Tanium.Packages.expire_seconds
+ description: Timeout for action expiry in seconds
+ type: number
+ - arguments:
+ - default: false
+ description: Saved action ID to be approved
+ isArray: false
+ name: saved_action_id
+ required: false
+ secret: false
+ - default: true
+ description: Action ID to be approved
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ - default: false
+ description: (deprecated) Id of saved action to approve
+ isArray: false
+ name: action_id
+ required: false
+ secret: false
+ deprecated: false
+ description: Approve saved actions
+ execution: false
+ name: tn-approve-pending-action
+ outputs:
+ - contextPath: Tanium.ActionApproval.ApprovedFlag
+ description: Approval status
+ type: boolean
+ - contextPath: Tanium.ActionApproval.Id
+ description: Saved action id
+ type: string
+ - contextPath: Tanium.ActionApproval.Name
+ description: Saved action name
+ type: string
+ - arguments:
+ - auto: PREDEFINED
+ default: true
+ description: Type of object to get
+ isArray: false
+ name: object_type
+ predefined:
+ - user
+ - package
+ - saved_question
+ - sensor
+ - saved_action
+ - group
+ required: true
+ secret: false
+ deprecated: false
+ description: Gets all objects of the specified type
+ execution: false
+ name: tn-get-all-objects
+ - deprecated: false
+ description: Gets all saved actions
+ execution: false
+ name: tn-get-all-saved-actions
+ outputs:
+ - contextPath: Tanium.SavedActions.distribute_seconds
+ description: Distribute seconds of action
+ type: number
+ - contextPath: Tanium.SavedActions.public_flag
+ description: Whether action is public or not
+ type: boolean
+ - contextPath: Tanium.SavedActions.action_group_id
+ description: Group id of action
+ type: string
+ - contextPath: Tanium.SavedActions.approver.id
+ description: Id of user who approved the action
+ type: string
+ - contextPath: Tanium.SavedActions.start_time
+ description: Action start time
+ type: date
+ - contextPath: Tanium.SavedActions.name
+ description: Action name
+ type: string
+ - contextPath: Tanium.SavedActions.user.id
+ description: Id of user who created the action
+ type: string
+ - contextPath: Tanium.SavedActions.creation_time
+ description: Time when action was created
+ type: date
+ - contextPath: Tanium.SavedActions.end_time
+ description: Time when action ended
+ type: date
+ - contextPath: Tanium.SavedActions.status
+ description: Action status
+ type: string
+ - contextPath: Tanium.SavedActions.last_start_time
+ description: Last time action started
+ type: date
+ - contextPath: Tanium.SavedActions.id
+ description: Id of action
+ type: string
+ - contextPath: Tanium.SavedActions.package_spec.id
+ description: The package associated with the action
+ type: string
+ - contextPath: Tanium.SavedActions.approved_flag
+ description: Whether action was approved or not
+ type: boolean
+ - deprecated: false
+ description: Gets all pending actions
+ execution: false
+ name: tn-get-all-pending-actions
+ outputs:
+ - contextPath: Tanium.PendingActions.distribute_seconds
+ description: Distribute seconds of action
+ type: number
+ - contextPath: Tanium.PendingActions.public_flag
+ description: Whether action is public or not
+ type: boolean
+ - contextPath: Tanium.PendingActions.action_group_id
+ description: Group id of action
+ type: string
+ - contextPath: Tanium.PendingActions.approver.id
+ description: Id of user who approved the action
+ type: string
+ - contextPath: Tanium.PendingActions.start_time
+ description: Action start time
+ type: date
+ - contextPath: Tanium.PendingActions.name
+ description: Action name
+ type: string
+ - contextPath: Tanium.PendingActions.user.id
+ description: Id of user who created the action
+ type: string
+ - contextPath: Tanium.PendingActions.creation_time
+ description: Time when action was created
+ type: date
+ - contextPath: Tanium.PendingActions.end_time
+ description: Time when action ended
+ type: date
+ - contextPath: Tanium.PendingActions.status
+ description: Action status
+ type: string
+ - contextPath: Tanium.PendingActions.last_start_time
+ description: Last time action started
+ type: date
+ - contextPath: Tanium.PendingActions.id
+ description: Id of action
+ type: string
+ - contextPath: Tanium.PendingActions.package_spec.id
+ description: The package associated with the action
+ type: string
+ - contextPath: Tanium.PendingActions.approved_flag
+ description: Whether action was approved or not
+ type: boolean
+ - deprecated: false
+ description: Gets all sensors
+ execution: false
+ name: tn-get-all-sensors
+ - arguments:
+ - default: false
+ description: The question text to be parsed
+ isArray: false
+ name: question
+ required: false
+ secret: false
+ deprecated: false
+ description: Ask the server to parse the question text and return all parsing
+ options
+ execution: false
+ name: tn-parse-query
+ - arguments:
+ - default: false
+ description: A semicolon separated list of sensors (columns) to include in a
+ question. For detailed information, run !tn-get-sensor
+ isArray: false
+ name: sensors
+ required: false
+ secret: false
+ - default: false
+ description: A semicolon separated list of filters that apply to the whole question
+ isArray: false
+ name: question_filters
+ required: false
+ secret: false
+ - default: false
+ description: A semicolon separated list of options that apply to the whole question.
+ Options are ignore_case, match_case, match_any_value, match_all_values, max_data_age,
+ value_type, and, or
+ isArray: false
+ name: question_options
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: Print the help string for filters and exit
+ isArray: false
+ name: filters_help
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: Print the help string for sensors and exit
+ isArray: false
+ name: sensors_help
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'False'
+ description: Print the help string for options and exit
+ isArray: false
+ name: options_help
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '5'
+ description: Number of seconds to wait in between result fetching attempts
+ isArray: false
+ name: polling_secs
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '99'
+ description: Percentage of mr_tested out of estimated_total at which to consider
+ the question complete
+ isArray: false
+ name: complete_pct
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Print an extended command usage description to the War Room. Default
+ is False.
+ isArray: false
+ name: help
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ deprecated: false
+ description: Ask a manual question using human-readable strings and get the results
+ back
+ execution: false
+ name: tn-ask-manual-question
+ outputs:
+ - contextPath: Tanium.QuestionResults
+ description: Results of requested question. May be a complex object
+ type: Unknown
+ - arguments:
+ - default: true
+ description: Name of sensor
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Get detailed information about a given sensor.
+ execution: false
+ name: tn-get-sensor
+ outputs:
+ - contextPath: Tanium.Sensors.max_age_seconds
+ description: Sensor max age in seconds
+ type: number
+ - contextPath: Tanium.Sensors.description
+ description: Description of the sensor
+ type: string
+ - contextPath: Tanium.Sensors.name
+ description: Name of the sensor
+ type: string
+ - contextPath: Tanium.Sensors.id
+ description: Id of the sensor
+ type: string
+ - contextPath: Tanium.Sensors.parameters.Description
+ description: Description of the sensor parameter
+ type: string
+ - contextPath: Tanium.Sensors.parameters.Key
+ description: Key of the sensor parameter as it appears in the command string
+ type: string
+ - contextPath: Tanium.Sensors.parameters.Name
+ description: Name of the sensor parameter
+ type: string
+ - contextPath: Tanium.Sensors.parameters.Type
+ description: Type of the sensor parameter
+ type: string
+ - contextPath: Tanium.Sensors.parameters.Values
+ description: Possible values of the sensor parameter
+ type: string
+ - arguments:
+ - default: false
+ description: ID of the action to retrieve
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ deprecated: false
+ description: Get detailed information about a given action.
+ execution: false
+ name: tn-get-action
+ outputs:
+ - contextPath: Tanium.Actions.name
+ description: Name of the action
+ type: string
+ - contextPath: Tanium.Actions.id
+ description: ID of the action
+ type: number
+ - contextPath: Tanium.Actions.status
+ description: Status of the action - Closed, Pending, etc.
+ type: string
+ - contextPath: Tanium.Actions.start_time
+ description: Time when the action started running
+ type: date
+ - contextPath: Tanium.Actions.approver.name
+ description: Name of Tanium user who approved the action
+ type: string
+ - contextPath: Tanium.Actions.creation_time
+ description: Time when the action was created
+ type: date
+ - contextPath: Tanium.Actions.package_spec.command
+ description: The command that is issued by the action
+ type: string
+ - contextPath: Tanium.Actions.package_spec.name
+ description: Name of the package that was deployed
+ type: string
+ dockerimage: demisto/pytan
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+ - Tanium Test Playbook
diff --git a/Integrations/Tanium/Tanium_description.md b/Integrations/Tanium/Tanium_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/Tanium/Tanium_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/Tanium/Tanium_image.png b/Integrations/Tanium/Tanium_image.png
new file mode 100644
index 000000000000..c863f157d314
Binary files /dev/null and b/Integrations/Tanium/Tanium_image.png differ
diff --git a/Integrations/Tenable_sc/CHANGELOG.md b/Integrations/Tenable_sc/CHANGELOG.md
new file mode 100644
index 000000000000..17d28f21aa33
--- /dev/null
+++ b/Integrations/Tenable_sc/CHANGELOG.md
@@ -0,0 +1,10 @@
+## [Unreleased]
+
+
+## [19.9.0] - 2019-09-04
+ - Added the **tenable-sc-get-all-scan-results** command, which retrieves all the scan results in Tenable SC.
+ - Added the **Port** and **Protocol** fields to the **Hosts** output in the **get-vulnerability** command.
+
+## [19.8.2] - 2019-08-22
+ -
+
diff --git a/Integrations/Tenable_sc/Pipfile b/Integrations/Tenable_sc/Pipfile
new file mode 100644
index 000000000000..1f4881ed672c
--- /dev/null
+++ b/Integrations/Tenable_sc/Pipfile
@@ -0,0 +1,15 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+
+[packages]
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/Tenable_sc/Pipfile.lock b/Integrations/Tenable_sc/Pipfile.lock
new file mode 100644
index 000000000000..bd5a86d9d4c5
--- /dev/null
+++ b/Integrations/Tenable_sc/Pipfile.lock
@@ -0,0 +1,308 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "1fd564b978cf016eca093f3dfd295ed6ecae2fed0d591fcda830d512fa1fe4b8"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==3.7.4"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265",
+ "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7",
+ "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db"
+ ],
+ "version": "==0.18"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
+ "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
+ ],
+ "version": "==19.0"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:02c2b6d268695a8b64ad61847f92e611e6afcff33fd26c3a2125370c4662905d",
+ "sha256:ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93"
+ ],
+ "index": "pypi",
+ "version": "==1.9.4"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a",
+ "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03"
+ ],
+ "version": "==2.4.0"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae",
+ "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6"
+ ],
+ "index": "pypi",
+ "version": "==4.6.4"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "version": "==2.22.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/Tenable_sc/Tenable_sc.py b/Integrations/Tenable_sc/Tenable_sc.py
new file mode 100644
index 000000000000..40f84576b4cb
--- /dev/null
+++ b/Integrations/Tenable_sc/Tenable_sc.py
@@ -0,0 +1,1847 @@
+import demistomock as demisto
+from CommonServerPython import *
+import re
+from requests import Session
+import requests
+import functools
+import json
+from datetime import datetime
+from requests import cookies
+
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+if not demisto.params().get('proxy', False):
+ # Remove the proxy environment variables. Use pop() with a default instead of
+ # del so that a variable that is not set does not raise a KeyError.
+ for env_var in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
+ os.environ.pop(env_var, None)
+
+''' GLOBAL VARIABLES '''
+USERNAME = demisto.params()['credentials']['identifier']
+PASSWORD = demisto.params()['credentials']['password']
+VERIFY_SSL = not demisto.params().get('unsecure', False)
+
+MAX_REQUEST_RETRIES = 3
+
+FETCH_TIME_DEFAULT = '3 days'
+FETCH_TIME = demisto.params().get('fetch_time', FETCH_TIME_DEFAULT)
+FETCH_TIME = FETCH_TIME if FETCH_TIME and FETCH_TIME.strip() else FETCH_TIME_DEFAULT
+
+SESSION = Session()
+TOKEN = demisto.getIntegrationContext().get('token')
+COOKIE = demisto.getIntegrationContext().get('cookie')
+
+
+def get_server_url():
+ url = demisto.params()['server']
+ # Strip trailing slashes. The original pattern ('/[\/]+$/') is a JavaScript-style
+ # regex literal that never matches in Python, so use a single raw-string pattern.
+ url = re.sub(r'/+$', '', url)
+ return url
+
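+# Illustrative only (hypothetical value): a configured server of
+# 'https://sc.example.com//' is normalized to 'https://sc.example.com'.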
+
+BASE_URL = get_server_url()
+SERVER_URL = BASE_URL + '/rest'
+
+ACTION_TYPE_TO_VALUE = {
+ 'notification': 'users.username',
+ 'email': 'users.username',
+ 'syslog': 'host',
+ 'scan': 'scan.name',
+ 'report': 'report.name',
+ 'ticket': 'assignee.username'
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def send_request(path, method='get', body=None, params=None, headers=None, try_number=1):
+ body = body if body is not None else {}
+ params = params if params is not None else {}
+ headers = headers if headers is not None else get_headers()
+
+ headers['X-SecurityCenter'] = TOKEN
+ url = '{}/{}'.format(SERVER_URL, path)
+
+ session_cookie = cookies.create_cookie('TNS_SESSIONID', COOKIE)
+ SESSION.cookies.set_cookie(session_cookie) # type: ignore
+
+ res = SESSION.request(method, url, data=json.dumps(body), params=params, headers=headers, verify=VERIFY_SSL)
+
+ if res.status_code == 403 and try_number <= MAX_REQUEST_RETRIES:
+ login()
+ headers['X-SecurityCenter'] = TOKEN # login() has refreshed the module-level TOKEN
+ return send_request(path, method, body, params, headers, try_number + 1)
+
+ elif res.status_code < 200 or res.status_code >= 300:
+ try:
+ error = res.json()
+ except Exception:
+ return_error('Error: Got status code {} with url {} with body {} with headers {}'.format(
+ str(res.status_code), url, res.content, str(res.headers)))
+
+ return_error('Error: Got an error from TenableSC, code: {}, details: {}'.format(error['error_code'],
+ error['error_msg']))
+ return res.json()
+
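+# A minimal sketch of the retry flow above (illustrative, not executed):
+#   1. send_request('scan') gets a 403 because the cached TOKEN/COOKIE expired
+#   2. login() refreshes the module-level TOKEN (and COOKIE)
+#   3. the request is retried with the fresh X-SecurityCenter header, up to
+#      MAX_REQUEST_RETRIES (3) attempts before the error is surfaced.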
+
+def get_headers():
+ headers = {
+ 'Accept': 'application/json',
+ 'Content-Type': 'application/json'
+ }
+
+ return headers
+
+
+def send_login_request(login_body):
+ path = 'token'
+ url = '{}/{}'.format(SERVER_URL, path)
+
+ headers = get_headers()
+ res = SESSION.request('post', url, headers=headers, data=json.dumps(login_body), verify=VERIFY_SSL)
+
+ if res.status_code < 200 or res.status_code >= 300:
+ return_error('Error: Got status code {} with url {} with body {} with headers {}'.format(
+ str(res.status_code), url, res.content, str(res.headers)))
+
+ global COOKIE
+ COOKIE = res.cookies.get('TNS_SESSIONID', COOKIE)
+ # setIntegrationContext replaces the whole stored context, so persist both
+ # the cookie and the current token to avoid wiping one of them.
+ demisto.setIntegrationContext({'cookie': COOKIE, 'token': TOKEN})
+
+ return res.json()
+
+
+def login():
+ login_body = {
+ 'username': USERNAME,
+ 'password': PASSWORD
+ }
+ login_response = send_login_request(login_body)
+
+ if 'response' not in login_response:
+ return_error('Error: Could not retrieve login token')
+
+ token = login_response['response'].get('token')
+ # There might be a case where the API does not return a token because there are too many sessions with the same user
+ # In that case we need to add 'releaseSession = true'
+ if not token:
+ login_body['releaseSession'] = 'true'
+ login_response = send_login_request(login_body)
+ if 'response' not in login_response or 'token' not in login_response['response']:
+ return_error('Error: Could not retrieve login token')
+ token = login_response['response']['token']
+
+ global TOKEN
+ TOKEN = str(token)
+ demisto.setIntegrationContext({'token': TOKEN, 'cookie': COOKIE})
+
+
+def logout():
+ send_request(path='token', method='delete')
+
+
+def return_message(msg):
+ demisto.results(msg)
+ sys.exit(0)
+
+
+''' FUNCTIONS '''
+
+
+def list_scans_command():
+ res = get_scans('id,name,description,policy,ownerGroup,owner')
+ manageable = demisto.args().get('manageable', 'false').lower()
+
+ if not res or 'response' not in res or not res['response']:
+ return_message('No scans found')
+
+ scans_dicts = get_elements(res['response'], manageable)
+
+ if len(scans_dicts) == 0:
+ return_message('No scans found')
+
+ headers = ['ID', 'Name', 'Description', 'Policy', 'Group', 'Owner']
+
+ mapped_scans = [{
+ 'Name': s['name'],
+ 'ID': s['id'],
+ 'Description': s['description'],
+ 'Policy': s['policy'].get('name'),
+ 'Group': s['ownerGroup'].get('name'),
+ 'Owner': s['owner'].get('username')
+ } for s in scans_dicts]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Scans', mapped_scans, headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.Scan(val.ID===obj.ID)': createContext(mapped_scans, removeNull=True)
+ }
+ })
+
+
+def get_scans(fields):
+ path = 'scan'
+ params = None
+
+ if fields:
+ params = {
+ 'fields': fields
+ }
+
+ return send_request(path, params=params)
+
+
+def list_policies_command():
+ res = get_policies('id,name,description,tags,modifiedTime,owner,ownerGroup,policyTemplate')
+
+ manageable = demisto.args().get('manageable', 'false').lower()
+
+ if not res or 'response' not in res or not res['response']:
+ return_message('No policies found')
+
+ policies = get_elements(res['response'], manageable)
+
+ if len(policies) == 0:
+ return_message('No policies found')
+
+ headers = ['ID', 'Name', 'Description', 'Tag', 'Type', 'Group', 'Owner', 'LastModified']
+
+ mapped_policies = [{
+ 'ID': p['id'],
+ 'Name': p['name'],
+ 'Description': p['description'],
+ 'Tag': p['tags'],
+ 'Type': p['policyTemplate'].get('name'),
+ 'Group': p['ownerGroup'].get('name'),
+ 'Owner': p['owner'].get('username'),
+ 'LastModified': timestamp_to_utc(p['modifiedTime'])
+ } for p in policies]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Scan Policies', mapped_policies, headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.ScanPolicy(val.ID===obj.ID)': createContext(mapped_policies, removeNull=True)
+ }
+ })
+
+
+def get_policies(fields):
+ path = 'policy'
+ params = None
+
+ if fields:
+ params = {
+ 'fields': fields
+ }
+
+ return send_request(path, params=params)
+
+
+def list_repositories_command():
+ res = get_repositories()
+
+ if not res or 'response' not in res or not res['response']:
+ return_message('No repositories found')
+
+ repositories = res['response']
+
+ if len(repositories) == 0:
+ return_message('No repositories found')
+
+ headers = [
+ 'ID',
+ 'Name',
+ 'Description'
+ ]
+
+ mapped_repositories = [{'ID': r['id'], 'Name': r['name'], 'Description': r['description']} for r in repositories]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Scan Repositories', mapped_repositories, headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.ScanRepository(val.ID===obj.ID)': createContext(mapped_repositories, removeNull=True)
+ }
+ })
+
+
+def get_repositories():
+ path = 'repository'
+
+ return send_request(path)
+
+
+def list_credentials_command():
+ res = get_credentials('id,name,description,type,ownerGroup,owner,tags,modifiedTime')
+
+ manageable = demisto.args().get('manageable', 'false').lower()
+
+ if not res or 'response' not in res or not res['response']:
+ return_message('No credentials found')
+
+ credentials = get_elements(res['response'], manageable)
+
+ if len(credentials) == 0:
+ return_message('No credentials found')
+
+ headers = ['ID', 'Name', 'Description', 'Type', 'Tag', 'Group', 'Owner', 'LastModified']
+
+ mapped_credentials = [{
+ 'ID': c['id'],
+ 'Name': c['name'],
+ 'Description': c['description'],
+ 'Type': c['type'],
+ 'Tag': c['tags'],
+ 'Group': c.get('ownerGroup', {}).get('name'),
+ 'Owner': c.get('owner', {}).get('username'),
+ 'LastModified': timestamp_to_utc(c['modifiedTime'])
+ } for c in credentials]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Credentials', mapped_credentials, headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.Credential(val.ID===obj.ID)': createContext(mapped_credentials, removeNull=True)
+ }
+ })
+
+
+def get_credentials(fields):
+ path = 'credential'
+ params = None
+
+ if fields:
+ params = {
+ 'fields': fields
+ }
+
+ return send_request(path, params=params)
+
+
+def list_assets_command():
+ res = get_assets('id,name,description,ipCount,type,tags,modifiedTime,groups,owner')
+
+ manageable = demisto.args().get('manageable', 'false').lower()
+
+ if not res or 'response' not in res or not res['response']:
+ return_message('No assets found')
+
+ assets = get_elements(res['response'], manageable)
+
+ if len(assets) == 0:
+ return_message('No assets found')
+
+ headers = ['ID', 'Name', 'Tag', 'Owner', 'Group', 'Type', 'HostCount', 'LastModified']
+
+ mapped_assets = [{
+ 'ID': a['id'],
+ 'Name': a['name'],
+ 'Tag': a['tags'],
+ 'Owner': a.get('owner', {}).get('username'),
+ 'Type': a['type'],
+ 'Group': a.get('ownerGroup', {}).get('name'),
+ 'HostCount': a['ipCount'],
+ 'LastModified': timestamp_to_utc(a['modifiedTime'])
+ } for a in assets]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Assets', mapped_assets, headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.Asset(val.ID===obj.ID)': createContext(mapped_assets, removeNull=True)
+ }
+ })
+
+
+def get_assets(fields):
+ path = 'asset'
+ params = None
+
+ if fields:
+ params = {
+ 'fields': fields
+ }
+
+ return send_request(path, params=params)
+
+
+def get_asset_command():
+ asset_id = demisto.args()['asset_id']
+
+ res = get_asset(asset_id)
+
+ if not res or 'response' not in res:
+ return_message('Asset not found')
+
+ asset = res['response']
+
+ ips = [] # type: List[str]
+ ip_lists = [v['ipList'] for v in asset['viewableIPs']]
+
+ for ip_list in ip_lists:
+ # Extract IPs
+ ips += re.findall(r'[0-9]+(?:\.[0-9]+){3}', ip_list)
+
+ headers = ['ID', 'Name', 'Description', 'Tag', 'Created', 'Modified', 'Owner', 'Group', 'IPs']
+
+ mapped_asset = {
+ 'ID': asset['id'],
+ 'Name': asset['name'],
+ 'Description': asset['description'],
+ 'Tag': asset['tags'],
+ 'Created': timestamp_to_utc(asset['createdTime']),
+ 'Modified': timestamp_to_utc(asset['modifiedTime']),
+ 'Owner': asset.get('owner', {}).get('username'),
+ 'Group': asset.get('ownerGroup', {}).get('name'),
+ 'IPs': ips
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Asset', mapped_asset, headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.Asset(val.ID===obj.ID)': createContext(mapped_asset, removeNull=True)
+ }
+ })
+
+
+def get_asset(asset_id):
+ path = 'asset/' + asset_id
+
+ params = {
+ 'fields': 'id,name,description,status,createdTime,modifiedTime,viewableIPs,ownerGroup,tags,owner'
+ }
+
+ return send_request(path, params=params)
+
+
+def create_asset_command():
+ name = demisto.args()['name']
+ description = demisto.args().get('description')
+ owner_id = demisto.args().get('owner_id')
+ tags = demisto.args().get('tags')
+ ips = demisto.args().get('ip_list')
+
+ res = create_asset(name, description, owner_id, tags, ips)
+
+ if not res or 'response' not in res:
+ return_error('Error: Could not retrieve the asset')
+
+ asset = res['response']
+
+ mapped_asset = {
+ 'ID': asset['id'],
+ 'Name': asset['name'],
+ 'OwnerName': asset['owner'].get('username'),
+ 'Tags': asset['tags'],
+ }
+
+ headers = [
+ 'ID',
+ 'Name',
+ 'OwnerName',
+ 'Tags'
+ ]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Asset created successfully', mapped_asset, headers=headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.Asset(val.ID===obj.ID)': createContext(mapped_asset, removeNull=True)
+ }
+ })
+
+
+def create_asset(name, description, owner_id, tags, ips):
+ path = 'asset'
+
+ body = {
+ 'name': name,
+ 'definedIPs': ips,
+ 'type': 'static'
+ }
+
+ if description:
+ body['description'] = description
+
+ if owner_id:
+ body['ownerID'] = owner_id
+
+ if tags:
+ body['tags'] = tags
+
+ return send_request(path, method='post', body=body)
+
+
+def delete_asset_command():
+ asset_id = demisto.args()['asset_id']
+
+ res = delete_asset(asset_id)
+
+ if not res:
+ return_error('Error: Could not delete the asset')
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Asset successfully deleted'
+ })
+
+
+def delete_asset(asset_id):
+ path = 'asset/' + asset_id
+
+ return send_request(path, method='delete')
+
+
+def list_report_definitions_command():
+ res = get_report_definitions('id,name,description,modifiedTime,type,ownerGroup,owner')
+
+ manageable = demisto.args().get('manageable', 'false').lower()
+
+ if not res or 'response' not in res or not res['response']:
+ return_message('No report definitions found')
+
+ reports = get_elements(res['response'], manageable)
+ # De-duplicate by report name - for each name, keep only the definition with
+ # the latest modifiedTime
+ reports = [functools.reduce(lambda x, y: x if int(x['modifiedTime']) > int(y['modifiedTime']) else y,
+ filter(lambda e: e['name'] == n, reports)) for n in {r['name'] for r in reports}]
+
+ if len(reports) == 0:
+ return_message('No report definitions found')
+
+ headers = ['ID', 'Name', 'Description', 'Type', 'Group', 'Owner']
+
+ mapped_reports = [{
+ 'ID': r['id'],
+ 'Name': r['name'],
+ 'Description': r['description'],
+ 'Type': r['type'],
+ 'Group': r.get('ownerGroup', {}).get('name'),
+ 'Owner': r.get('owner', {}).get('username')
+ } for r in reports]
+
+ hr = tableToMarkdown('Tenable.sc Report Definitions', mapped_reports, headers, removeNull=True)
+ for r in mapped_reports:
+ del r['Description']
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': {
+ 'TenableSC.ReportDefinition(val.ID===obj.ID)': createContext(mapped_reports, removeNull=True)
+ }
+ })
+
+
+def get_report_definitions(fields):
+ path = 'reportDefinition'
+ params = None
+
+ if fields:
+ params = {
+ 'fields': fields
+ }
+
+ return send_request(path, params=params)
+
+
+def list_zones_command():
+ res = get_zones()
+
+ if not res or 'response' not in res:
+ return_message('No zones found')
+
+ zones = res['response']
+ if len(zones) == 0:
+ zones = [{
+ 'id': 0,
+ 'name': 'All Zones',
+ 'description': '',
+ 'ipList': '',
+ 'activeScanners': ''
+ }]
+
+ headers = ['ID', 'Name', 'Description', 'IPList', 'ActiveScanners']
+
+ mapped_zones = [{
+ 'ID': z['id'],
+ 'Name': z['name'],
+ 'Description': z['description'],
+ 'IPList': z['ipList'],
+ 'ActiveScanners': z['activeScanners']
+ } for z in zones]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Scan Zones', mapped_zones, headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.ScanZone(val.ID===obj.ID)': createContext(mapped_zones, removeNull=True)
+ }
+ })
+
+
+def get_zones():
+ path = 'zone'
+
+ return send_request(path)
+
+
+def get_elements(elements, manageable):
+ if manageable == 'false':
+ return elements.get('usable')
+
+ return elements.get('manageable')
+
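+# Illustrative: Tenable.sc list responses have the shape
+# {'usable': [...], 'manageable': [...]}; get_elements picks one of the two
+# lists based on the command's 'manageable' argument ('false' -> usable).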
+
+def create_scan_command():
+ name = demisto.args()['name']
+ repo_id = demisto.args()['repository_id']
+ policy_id = demisto.args()['policy_id']
+ plugin_id = demisto.args().get('plugin_id')
+ description = demisto.args().get('description')
+ zone_id = demisto.args().get('zone_id')
+ schedule = demisto.args().get('schedule')
+ asset_ids = demisto.args().get('asset_ids')
+ ips = demisto.args().get('ip_list')
+ scan_virtual_hosts = demisto.args().get('scan_virtual_hosts')
+ report_ids = demisto.args().get('report_ids')
+ credentials = demisto.args().get('credentials')
+ timeout_action = demisto.args().get('timeout_action')
+ max_scan_time = demisto.args().get('max_scan_time')
+ dhcp_track = demisto.args().get('dhcp_tracking')
+ rollover_type = demisto.args().get('rollover_type')
+ dependent = demisto.args().get('dependent_id')
+
+ if not asset_ids and not ips:
+ return_error('Error: Assets and/or IPs must be provided')
+
+ if schedule == 'dependent' and not dependent:
+ return_error('Error: Dependent schedule must include a dependent scan ID')
+
+ res = create_scan(name, repo_id, policy_id, plugin_id, description, zone_id, schedule, asset_ids,
+ ips, scan_virtual_hosts, report_ids, credentials, timeout_action, max_scan_time,
+ dhcp_track, rollover_type, dependent)
+
+ if not res or 'response' not in res:
+ return_error('Error: Could not retrieve the scan')
+
+ scan = res['response']
+
+ headers = [
+ 'ID',
+ 'CreatorID',
+ 'Name',
+ 'Type',
+ 'CreationTime',
+ 'OwnerName',
+ 'Reports'
+ ]
+
+ mapped_scan = {
+ 'ID': scan['id'],
+ 'CreatorID': scan['creator'].get('id'),
+ 'Name': scan['name'],
+ 'Type': scan['type'],
+ 'CreationTime': timestamp_to_utc(scan['createdTime']),
+ 'OwnerName': scan['owner'].get('username'),
+ 'Reports': demisto.dt(scan['reports'], 'id')
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Scan created successfully', mapped_scan, headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.Scan(val.ID===obj.ID)': createContext(mapped_scan, removeNull=True)
+ }
+ })
+
+
+def create_scan(name, repo_id, policy_id, plugin_id, description, zone_id, schedule, asset_ids,
+ ips, scan_virtual_hosts, report_ids, credentials, timeout_action, max_scan_time,
+ dhcp_track, rollover_type, dependent):
+ path = 'scan'
+
+ scan_type = 'policy' if policy_id else 'plugin'
+
+ body = {
+ 'name': name,
+ 'type': scan_type,
+ 'repository': {
+ 'id': repo_id
+ }
+ }
+
+ if policy_id:
+ body['policy'] = {
+ 'id': policy_id
+ }
+
+ if plugin_id:
+ body['pluginID'] = plugin_id
+
+ if description:
+ body['description'] = description
+
+ if zone_id:
+ body['zone'] = {
+ 'id': zone_id
+ }
+
+ if dhcp_track:
+ body['dhcpTracking'] = dhcp_track
+
+ if schedule:
+ body['schedule'] = {
+ 'type': schedule
+ }
+
+ if dependent:
+ body['schedule']['dependentID'] = dependent
+
+ if report_ids:
+ body['reports'] = [{'id': r_id, 'reportSource': 'individual'} for r_id in argToList(report_ids)]
+
+ if asset_ids:
+ if str(asset_ids).startswith('All'):
+ # get_elements() compares against the strings 'true'/'false', so pass a string
+ manageable = 'true' if asset_ids == 'AllManageable' else 'false'
+ res = get_assets(None)
+ assets = get_elements(res['response'], manageable)
+ asset_ids = list(map(lambda a: a['id'], assets))
+ body['assets'] = [{'id': a_id} for a_id in argToList(asset_ids)]
+
+ if credentials:
+ body['credentials'] = [{'id': c_id} for c_id in argToList(credentials)]
+
+ if timeout_action:
+ body['timeoutAction'] = timeout_action
+
+ if scan_virtual_hosts:
+ body['scanningVirtualHosts'] = scan_virtual_hosts
+
+ if rollover_type:
+ body['rolloverType'] = rollover_type
+
+ if ips:
+ body['ipList'] = ips
+
+ if max_scan_time:
+ # the argument arrives as a string of hours; convert to an int number of seconds
+ body['maxScanTime'] = int(max_scan_time) * 3600
+
+ return send_request(path, method='post', body=body)
+
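+# Illustrative request body (hypothetical IDs) as assembled by create_scan for a
+# policy-based scan with an ical schedule:
+# {'name': 'weekly-dmz', 'type': 'policy', 'repository': {'id': '1'},
+#  'policy': {'id': '1000002'}, 'schedule': {'type': 'ical'}, 'ipList': '10.0.0.1'}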
+
+def launch_scan_command():
+ scan_id = demisto.args()['scan_id']
+ target_address = demisto.args().get('diagnostic_target')
+ target_password = demisto.args().get('diagnostic_password')
+
+ if (target_address and not target_password) or (target_password and not target_address):
+ return_error('Error: If a target is provided, both IP/Hostname and the password must be provided')
+
+ res = launch_scan(scan_id, {'address': target_address, 'password': target_password})
+
+ if not res or 'response' not in res or not res['response'] or 'scanResult' not in res['response']:
+ return_error('Error: Could not retrieve the scan')
+
+ scan_result = res['response']['scanResult']
+
+ headers = [
+ 'Name',
+ 'ID',
+ 'OwnerID',
+ 'JobID',
+ 'Status'
+ ]
+
+ mapped_scan = {
+ 'Name': scan_result['name'],
+ 'ID': scan_result['id'],
+ 'OwnerID': scan_result['ownerID'],
+ 'JobID': scan_result['jobID'],
+ 'Status': scan_result['status']
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Scan', mapped_scan, headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.ScanResults(val.ID===obj.ID)': createContext(mapped_scan, removeNull=True)
+ }
+ })
+
+
+def launch_scan(scan_id, scan_target):
+ path = 'scan/' + scan_id + '/launch'
+ body = None
+ if scan_target:
+ body = {
+ 'diagnosticTarget': scan_target['address'],
+ 'diagnosticPassword': scan_target['password']
+ }
+
+ return send_request(path, 'post', body=body)
+
+
+def get_scan_status_command():
+ scan_results_ids = argToList(demisto.args()['scan_results_id'])
+
+ scans_results = []
+ for scan_results_id in scan_results_ids:
+ res = get_scan_results(scan_results_id)
+ if not res or 'response' not in res or not res['response']:
+ return_message('Scan results not found')
+
+ scans_results.append(res['response'])
+
+ headers = ['ID', 'Name', 'Status', 'Description']
+
+ mapped_scans_results = [{
+ 'ID': scan_result['id'],
+ 'Name': scan_result['name'],
+ 'Status': scan_result['status'],
+ 'Description': scan_result['description']
+ } for scan_result in scans_results]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Scan Status', mapped_scans_results, headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.ScanResults(val.ID===obj.ID)': createContext(mapped_scans_results, removeNull=True)
+ }
+ })
+
+
+def get_scan_results(scan_results_id):
+ path = 'scanResult/' + scan_results_id
+
+ return send_request(path)
+
+
+def get_scan_report_command():
+ scan_results_id = demisto.args()['scan_results_id']
+ vulnerabilities_to_get = argToList(demisto.args().get('vulnerability_severity', []))
+
+ res = get_scan_report(scan_results_id)
+
+ if not res or 'response' not in res or not res['response']:
+ return_message('Scan results not found')
+
+ scan_results = res['response']
+
+ headers = ['ID', 'Name', 'Description', 'Policy', 'Group', 'Owner', 'ScannedIPs',
+ 'StartTime', 'EndTime', 'Duration', 'Checks', 'ImportTime', 'RepositoryName', 'Status']
+ vuln_headers = ['ID', 'Name', 'Family', 'Severity', 'Total']
+
+ mapped_results = {
+ 'ID': scan_results['id'],
+ 'Name': scan_results['name'],
+ 'Status': scan_results['status'],
+ 'Description': scan_results['description'],
+ 'Policy': scan_results['details'],
+ 'Group': scan_results.get('ownerGroup', {}).get('name'),
+ 'Checks': scan_results['completedChecks'],
+ 'StartTime': timestamp_to_utc(scan_results['startTime']),
+ 'EndTime': timestamp_to_utc(scan_results['finishTime']),
+ 'Duration': scan_duration_to_demisto_format(scan_results['scanDuration']),
+ 'ImportTime': timestamp_to_utc(scan_results['importStart']),
+ 'ScannedIPs': scan_results['scannedIPs'],
+ 'Owner': scan_results['owner'].get('username'),
+ 'RepositoryName': scan_results['repository'].get('name')
+ }
+
+ hr = tableToMarkdown('Tenable.sc Scan ' + mapped_results['ID'] + ' Report',
+ mapped_results, headers, removeNull=True)
+
+ if len(vulnerabilities_to_get) > 0:
+ vulns = get_vulnerabilities(scan_results_id)
+
+ if isinstance(vulns, list):
+ vulnerabilities = list(filter(lambda v: v['Severity'] in vulnerabilities_to_get, vulns))
+ if vulnerabilities and len(vulnerabilities) > 0:
+ hr += tableToMarkdown('Vulnerabilities', vulnerabilities, vuln_headers, removeNull=True)
+ mapped_results['Vulnerability'] = vulnerabilities
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': {
+ 'TenableSC.ScanResults(val.ID===obj.ID)': createContext(mapped_results, removeNull=True)
+ }
+ })
+
+
+def get_scan_report(scan_results_id):
+ path = 'scanResult/' + scan_results_id
+
+ params = {
+ 'fields': 'name,description,details,status,scannedIPs,progress,startTime,scanDuration,importStart,'
+ 'finishTime,completedChecks,owner,ownerGroup,repository,policy'
+ }
+
+ return send_request(path, params=params)
+
+
+def list_plugins_command():
+ name = demisto.args().get('name')
+ cve = demisto.args().get('cve')
+ plugin_type = demisto.args().get('type')
+
+ res = list_plugins(name, plugin_type, cve)
+
+ if not res or 'response' not in res:
+ return_message('No plugins found')
+
+ plugins = res['response']
+
+ headers = ['ID', 'Name', 'Type', 'Description', 'Family']
+ mapped_plugins = [{
+ 'ID': p['id'],
+ 'Name': p['name'],
+ 'Type': p['type'],
+ 'Description': p['description'],
+ 'Family': p['family'].get('name')
+ } for p in plugins]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Plugins', mapped_plugins, headers=headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.Plugin(val.ID===obj.ID)': createContext(mapped_plugins, removeNull=True)
+ }
+ })
+
+
+def list_plugins(name, plugin_type, cve):
+ path = 'plugin'
+
+ params = {
+ 'fields': 'id,type,name,description,family'
+ }
+
+ if cve:
+ params['filterField'] = 'xrefs:CVE'
+ params['op'] = 'eq'
+ params['value'] = cve
+
+ if plugin_type:
+ params['type'] = plugin_type
+
+ return send_request(path, params=params)
+
+
+def get_vulnerabilities(scan_results_id):
+ query = create_query(scan_results_id, 'vulnipdetail')
+
+ if not query or 'response' not in query:
+ return 'Could not get vulnerabilities query'
+
+ analysis = get_analysis(query['response']['id'], scan_results_id)
+
+ if not analysis or 'response' not in analysis:
+ return 'Could not get vulnerabilities analysis'
+
+ results = analysis['response']['results']
+
+ if not results or len(results) == 0:
+ return 'No vulnerabilities found'
+
+ mapped_vulns = []
+
+ for vuln in results:
+ mapped_vuln = {
+ 'ID': vuln['pluginID'],
+ 'Name': vuln['name'],
+ 'Description': vuln['pluginDescription'],
+ 'Family': vuln['family'].get('name'),
+ 'Severity': vuln['severity'].get('name'),
+ 'Total': vuln['total']
+ }
+
+ mapped_vulns.append(mapped_vuln)
+
+ sv_level = {
+ 'Critical': 4,
+ 'High': 3,
+ 'Medium': 2,
+ 'Low': 1,
+ 'Info': 0
+ }
+
+ mapped_vulns.sort(key=lambda r: sv_level[r['Severity']])
+
+ return mapped_vulns
+
+
+def create_query(scan_id, tool, query_filters=None):
+ path = 'query'
+
+ body = {
+ 'name': 'scan ' + scan_id + ' query',
+ 'type': 'vuln',
+ 'tool': tool,
+ 'scanID': scan_id
+ }
+
+ if query_filters:
+ body['filters'] = query_filters
+
+ return send_request(path, method='post', body=body)
+
+
+def get_analysis(query, scan_results_id):
+ path = 'analysis'
+
+ # This function accepts 'query' either as a dict (as in get_vulnerability_command)
+ # or as the ID of an existing query (as in get_vulnerabilities). Either way, the
+ # query field in the request body must be a dict, so a bare ID is wrapped below.
+ if not isinstance(query, dict):
+ query = {'id': query}
+
+ body = {
+ 'type': 'vuln',
+ 'query': query,
+ 'sourceType': 'individual',
+ 'scanID': scan_results_id,
+ 'view': 'all'
+ }
+
+ return send_request(path, method='post', body=body)
+
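+# Illustrative (hypothetical ID): get_analysis accepts a saved query ID,
+# get_analysis('417', scan_results_id), or a full inline query dict as built
+# in get_vulnerability_command; a bare ID is wrapped into {'id': '417'}.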
+
+def get_vulnerability_command():
+ vuln_id = demisto.args()['vulnerability_id']
+ scan_results_id = demisto.args()['scan_results_id']
+ page = int(demisto.args().get('page', '0'))
+ limit = int(demisto.args().get('limit', '50'))
+ if limit > 200:
+ limit = 200
+
+ vuln_filter = [{
+ 'filterName': 'pluginID',
+ 'operator': '=',
+ 'value': vuln_id
+ }]
+
+ query = {
+ 'scanID': scan_results_id,
+ 'filters': vuln_filter,
+ 'tool': 'vulndetails',
+ 'type': 'vuln',
+ 'startOffset': page, # Lower bound for the results list (must be specified)
+ 'endOffset': page + limit # Upper bound for the results list (must be specified)
+ }
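+ # Illustrative: with the YAML defaults (page=0, limit=50) this requests
+ # results 0-50; passing page=50 requests the next chunk, 50-100.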
+
+ analysis = get_analysis(query, scan_results_id)
+
+ if not analysis or 'response' not in analysis:
+ return_error('Error: Could not get vulnerability analysis')
+
+ results = analysis['response']['results']
+
+ if not results or len(results) == 0:
+ return_error('Error: Vulnerability not found in the scan results')
+
+ vuln_response = get_vulnerability(vuln_id)
+
+ if not vuln_response or 'response' not in vuln_response:
+ return_message('Vulnerability not found')
+
+ vuln = vuln_response['response']
+ vuln['severity'] = results[0]['severity'] # The vulnerability severity is the same in all the results
+
+ hosts = get_vulnerability_hosts_from_analysis(results)
+
+ cves = None
+ cves_output = [] # type: List[dict]
+ if vuln.get('xrefs'):
+ # Extract CVE
+ cve_filter = list(filter(lambda x: x.strip().startswith('CVE'), vuln['xrefs'].split(',')))
+ if cve_filter and len(cve_filter) > 0:
+ cves = list(map(lambda c: c.replace('CVE:', '').strip(), cve_filter))
+ cves_output += map(lambda c: {
+ 'ID': c
+ }, cves)
+
+ mapped_vuln = {
+ 'ID': vuln['id'],
+ 'Name': vuln['name'],
+ 'Description': vuln['description'],
+ 'Type': vuln['type'],
+ 'Severity': vuln['severity'].get('name'),
+ 'Synopsis': vuln['synopsis'],
+ 'Solution': vuln['solution']
+ }
+
+ vuln_info = {
+ 'Published': timestamp_to_utc(vuln['vulnPubDate']),
+ 'CPE': vuln['cpe'],
+ 'CVE': cves
+ }
+
+ exploit_info = {
+ 'ExploitAvailable': vuln['exploitAvailable'],
+ 'ExploitEase': vuln['exploitEase']
+ }
+
+ risk_info = {
+ 'RiskFactor': vuln['riskFactor'],
+ 'CVSSBaseScore': vuln['baseScore'],
+ 'CVSSTemporalScore': vuln['temporalScore'],
+ 'CVSSVector': vuln['cvssVector']
+ }
+
+ plugin_details = {
+ 'Family': vuln['family'].get('name'),
+ 'Published': timestamp_to_utc(vuln['pluginPubDate']),
+ 'Modified': timestamp_to_utc(vuln['pluginModDate']),
+ 'CheckType': vuln['checkType']
+ }
+
+ hr = '## Vulnerability: {} ({})\n'.format(mapped_vuln['Name'], mapped_vuln['ID'])
+ hr += '### Synopsis\n{}\n### Description\n{}\n### Solution\n{}\n'.format(
+ mapped_vuln['Synopsis'], mapped_vuln['Description'], mapped_vuln['Solution'])
+ hr += tableToMarkdown('Hosts', hosts, removeNull=True)
+ hr += tableToMarkdown('Risk Information', risk_info, removeNull=True)
+ hr += tableToMarkdown('Exploit Information', exploit_info, removeNull=True)
+ hr += tableToMarkdown('Plugin Details', plugin_details, removeNull=True)
+ hr += tableToMarkdown('Vulnerability Information', vuln_info, removeNull=True)
+
+ mapped_vuln.update(vuln_info)
+ mapped_vuln.update(exploit_info)
+ mapped_vuln.update(risk_info)
+ mapped_vuln['PluginDetails'] = plugin_details
+ mapped_vuln['Host'] = hosts
+
+ scan_result = {
+ 'ID': scan_results_id,
+ 'Vulnerability': mapped_vuln,
+ }
+
+ context = {}
+
+ context['TenableSC.ScanResults(val.ID===obj.ID)'] = createContext(scan_result, removeNull=True)
+ if len(cves_output) > 0:
+ context['CVE(val.ID===obj.ID)'] = createContext(cves_output)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': vuln_response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': context
+ })
+
+
+def get_vulnerability(vuln_id):
+ path = 'plugin/' + vuln_id
+
+ params = {
+ 'fields': 'name,description,family,type,cpe,riskFactor,solution,synopsis,exploitEase,exploitAvailable,'
+ 'cvssVector,baseScore,pluginPubDate,pluginModDate,vulnPubDate,temporalScore,xrefs,checkType'
+ }
+
+ return send_request(path, params=params)
+
+
+def get_vulnerability_hosts_from_analysis(results):
+ return [{
+ 'IP': host['ip'],
+ 'MAC': host['macAddress'],
+ 'Port': host['port'],
+ 'Protocol': host['protocol']
+ } for host in results]
+
+
+def stop_scan_command():
+ scan_results_id = demisto.args()['scanResultsID']
+
+ res = change_scan_status(scan_results_id, 'stop')
+
+ if not res:
+ return_error('Error: Could not stop the scan')
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Scan successfully stopped'
+ })
+
+
+def pause_scan_command():
+ scan_results_id = demisto.args()['scanResultsID']
+
+ res = change_scan_status(scan_results_id, 'pause')
+
+ if not res:
+ return_error('Error: Could not pause the scan')
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Successfully paused the scan'
+ })
+
+
+def resume_scan_command():
+ scan_results_id = demisto.args()['scanResultsID']
+
+ res = change_scan_status(scan_results_id, 'resume')
+
+ if not res:
+ return_error('Error: Could not resume the scan')
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Scan successfully resumed'
+ })
+
+
+def change_scan_status(scan_results_id, status):
+ path = 'scanResult/' + scan_results_id + '/' + status
+
+ return send_request(path, method='post')
+
+
+def delete_scan_command():
+ scan_id = demisto.args()['scan_id']
+
+ res = delete_scan(scan_id)
+
+ if not res:
+ return_error('Error: Could not delete the scan')
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['text'],
+ 'HumanReadable': 'Scan successfully deleted'
+ })
+
+
+def delete_scan(scan_id):
+ path = 'scan/' + scan_id
+
+ return send_request(path, method='delete')
+
+
+def get_device_command():
+ uuid = demisto.args().get('uuid')
+ ip = demisto.args().get('ip')
+ dns_name = demisto.args().get('dns_name')
+ repo = demisto.args().get('repository_id')
+
+ res = get_device(uuid, ip, dns_name, repo)
+
+ if not res or 'response' not in res:
+ return_message('Device not found')
+
+ device = res['response']
+
+ headers = [
+ 'IP',
+ 'UUID',
+ 'MacAddress',
+ 'RepositoryID',
+ 'RepositoryName',
+ 'NetbiosName',
+ 'DNSName',
+ 'OS',
+ 'OsCPE',
+ 'LastScan',
+ 'TotalScore',
+ 'LowSeverity',
+ 'MediumSeverity',
+ 'HighSeverity',
+ 'CriticalSeverity'
+ ]
+
+ mapped_device = {
+ 'IP': device['ip'],
+ 'UUID': device.get('uuid'),
+ 'MacAddress': device.get('macAddress'),
+ 'RepositoryID': device.get('repository', {}).get('id'),
+ 'RepositoryName': device.get('repository', {}).get('name'),
+ 'NetbiosName': device.get('netbiosName'),
+ 'DNSName': device.get('dnsName'),
+ 'OS': re.sub('<[^<]+?>', ' ', device['os']).lstrip() if device.get('os') else '',
+ 'OsCPE': device.get('osCPE'),
+ 'LastScan': timestamp_to_utc(device['lastScan']),
+ 'TotalScore': device.get('total'),
+ 'LowSeverity': device.get('severityLow'),
+ 'MediumSeverity': device.get('severityMedium'),
+ 'HighSeverity': device.get('severityHigh'),
+ 'CriticalSeverity': device.get('severityCritical')
+ }
+
+ endpoint = {
+ 'IPAddress': mapped_device['IP'],
+ 'MACAddress': mapped_device['MacAddress'],
+ 'Hostname': mapped_device['DNSName'],
+ 'OS': mapped_device['OS']
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Device', mapped_device, headers=headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.Device(val.UUID===obj.UUID)': createContext(mapped_device, removeNull=True),
+ 'Endpoint(val.IP===obj.IP)': createContext(endpoint, removeNull=True)
+ }
+ })
+
+
+def get_device(uuid, ip, dns_name, repo):
+ path = 'repository/' + repo + '/' if repo else ''
+ path += 'deviceInfo'
+ params = {
+ 'fields': 'ip,uuid,macAddress,netbiosName,dnsName,os,osCPE,lastScan,repository,total,severityLow,'
+ 'severityMedium,severityHigh,severityCritical'
+ }
+ if uuid:
+ params['uuid'] = uuid
+ else:
+ params['ip'] = ip
+ if dns_name:
+ params['dnsName'] = dns_name
+
+ return send_request(path, params=params)
+
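+# Illustrative (hypothetical values): a lookup scoped to repository 2 becomes
+# GET {server}/rest/repository/2/deviceInfo?ip=10.0.0.5&fields=...
+# uuid takes precedence over ip when both are supplied.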
+
+def list_users_command():
+ user_id = demisto.args().get('id')
+ username = demisto.args().get('username')
+ email = demisto.args().get('email')
+
+ res = get_users('id,username,firstname,lastname,title,email,createdTime,modifiedTime,lastLogin,role', user_id)
+
+ if not res or 'response' not in res:
+ return_message('No users found')
+
+ users = res['response']
+
+ if not isinstance(users, list):
+ users = [users]
+
+ if not user_id:
+ if username:
+ users = list(filter(lambda u: u['username'] == username, users))
+ elif email:
+ users = list(filter(lambda u: u['email'] == email, users))
+
+ if len(users) == 0:
+ return_message('No users found')
+
+ headers = [
+ 'ID',
+ 'Username',
+ 'Firstname',
+ 'Lastname',
+ 'Title',
+ 'Email',
+ 'Created',
+ 'Modified',
+ 'LastLogin',
+ 'Role'
+ ]
+
+ mapped_users = [{
+ 'ID': u['id'],
+ 'Username': u['username'],
+ 'FirstName': u['firstname'],
+ 'LastName': u['lastname'],
+ 'Title': u['title'],
+ 'Email': u['email'],
+ 'Created': timestamp_to_utc(u['createdTime']),
+ 'Modified': timestamp_to_utc(u['modifiedTime']),
+ 'LastLogin': timestamp_to_utc(u['lastLogin']),
+ 'Role': u['role'].get('name')
+ } for u in users]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Users', mapped_users, headers=headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.User(val.ID===obj.ID)': createContext(mapped_users, removeNull=True)
+ }
+ })
+
+
+def get_users(fields, user_id):
+ path = 'user'
+
+ if user_id:
+ path += '/' + user_id
+
+ params = None
+
+ if fields:
+ params = {
+ 'fields': fields
+ }
+
+ return send_request(path, params=params)
+
+
+def get_system_licensing_command():
+ res = get_system_licensing()
+
+ if not res or 'response' not in res:
+ return_error('Error: Could not retrieve system licensing')
+
+ status = res['response']
+
+ mapped_licensing = {
+ 'License': status['licenseStatus'],
+ 'LicensedIPS': status['licensedIPs'],
+ 'ActiveIPS': status['activeIPs']
+ }
+
+ headers = [
+ 'License',
+ 'LicensedIPS',
+ 'ActiveIPS'
+ ]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Licensing information',
+ mapped_licensing, headers=headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.Status': createContext(mapped_licensing, removeNull=True)
+ }
+ })
+
+
+def get_system_licensing():
+ path = 'status'
+
+ return send_request(path)
+
+
+def get_system_information_command():
+ sys_res = get_system()
+
+ if not sys_res or 'response' not in sys_res:
+ return_error('Error: Could not retrieve system information')
+
+ diag_res = get_system_diagnostics()
+
+ if not diag_res or 'response' not in diag_res:
+ return_error('Error: Could not retrieve system information')
+
+ system = sys_res['response']
+ diagnostics = diag_res['response']
+ # Merge the diagnostics into sys_res only after extracting both payloads -
+ # dict.update() overwrites the 'response' key in sys_res.
+ sys_res.update(diag_res)
+
+ mapped_information = {
+ 'Version': system['version'],
+ 'BuildID': system['buildID'],
+ 'ReleaseID': system['releaseID'],
+ 'License': system['licenseStatus'],
+ 'RPMStatus': diagnostics['statusRPM'],
+ 'JavaStatus': diagnostics['statusJava'],
+ 'DiskStatus': diagnostics['statusDisk'],
+ 'DiskThreshold': diagnostics['statusThresholdDisk'],
+ 'LastCheck': timestamp_to_utc(diagnostics['statusLastChecked']),
+ }
+
+ headers = [
+ 'Version',
+ 'BuildID',
+ 'ReleaseID',
+ 'License',
+ 'RPMStatus',
+ 'JavaStatus',
+ 'DiskStatus',
+ 'DiskThreshold',
+ 'LastCheck'
+ ]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': sys_res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc System information',
+ mapped_information, headers=headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.System(val.BuildID===obj.BuildID)': createContext(mapped_information, removeNull=True)
+ }
+ })
+
+
+def get_system_diagnostics():
+ path = 'system/diagnostics'
+
+ return send_request(path)
+
+
+def get_system():
+ path = 'system'
+
+ return send_request(path)
+
+
+def list_alerts_command():
+ res = get_alerts(fields='id,name,description,didTriggerLastEvaluation,lastTriggered,'
+ 'action,lastEvaluated,ownerGroup,owner')
+ manageable = demisto.args().get('manageable', 'false').lower()
+
+ if not res or 'response' not in res or not res['response']:
+ return_message('No alerts found')
+
+ alerts = get_elements(res['response'], manageable)
+
+ if len(alerts) == 0:
+ return_message('No alerts found')
+
+ headers = ['ID', 'Name', 'Actions', 'State', 'LastTriggered', 'LastEvaluated', 'Group', 'Owner']
+ mapped_alerts = [{
+ 'ID': a['id'],
+ 'Name': a['name'],
+ 'State': 'Triggered' if a['didTriggerLastEvaluation'] == 'true' else 'Not Triggered',
+ 'Actions': demisto.dt(a['action'], 'type'),
+ 'LastTriggered': timestamp_to_utc(a['lastTriggered'], default_returned_value='Never'),
+ 'LastEvaluated': timestamp_to_utc(a['lastEvaluated']),
+ 'Group': a['ownerGroup'].get('name'),
+ 'Owner': a['owner'].get('username')
+ } for a in alerts]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Tenable.sc Alerts', mapped_alerts, headers=headers, removeNull=True),
+ 'EntryContext': {
+ 'TenableSC.Alert(val.ID===obj.ID)': createContext(mapped_alerts, removeNull=True)
+ }
+ })
+
+
+def get_alert_command():
+ alert_id = demisto.args()['alert_id']
+ res = get_alerts(alert_id=alert_id)
+
+ if not res or 'response' not in res or not res['response']:
+ return_message('Alert not found')
+
+ alert = res['response']
+ query_res = get_query(alert['query'].get('id'))
+ query = query_res.get('response')
+
+ alert_headers = ['ID', 'Name', 'Description', 'LastTriggered', 'State', 'Behavior', 'Actions']
+ query_headers = ['Trigger', 'Query']
+ action_headers = ['Type', 'Values']
+
+ filter_headers = ['Name', 'Values']
+ mapped_alert = {
+ 'ID': alert['id'],
+ 'Name': alert['name'],
+ 'Description': alert['description'],
+ 'LastTriggered': timestamp_to_utc(alert['lastTriggered'], default_returned_value='Never'),
+ 'State': 'Triggered' if alert['didTriggerLastEvaluation'] == 'true' else 'Not Triggered',
+ 'Behavior': 'Execute on every trigger' if alert['executeOnEveryTrigger'] == 'true'
+ else 'Execute only on first trigger'
+ }
+
+ mapped_condition = {
+ 'Trigger': '{} {} {}'.format(alert['triggerName'], alert['triggerOperator'], alert['triggerValue']),
+ 'Query': alert['query'].get('name')
+ }
+
+ mapped_filters = None
+ if query:
+ mapped_filters = [{
+ 'Name': f['filterName'],
+ 'Values': demisto.dt(f['value'], 'name') if isinstance(f['value'], list) else f['value']
+ } for f in query.get('filters', [])]
+ mapped_condition['Filter'] = mapped_filters
+
+ mapped_actions = [{
+ 'Type': a['type'],
+ 'Values': demisto.dt(a, '{}.{}'.format('definition', ACTION_TYPE_TO_VALUE[a['type']]))
+ } for a in alert['action']]
+
+ hr = tableToMarkdown('Tenable.sc Alert', mapped_alert, headers=alert_headers, removeNull=True)
+ hr += tableToMarkdown('Condition', mapped_condition, headers=query_headers, removeNull=True)
+ if mapped_filters:
+ hr += tableToMarkdown('Filters', mapped_filters, headers=filter_headers, removeNull=True)
+ if mapped_actions:
+ hr += tableToMarkdown('Actions', mapped_actions, headers=action_headers, removeNull=True)
+ mapped_alert['Action'] = mapped_actions
+
+ mapped_alert['Condition'] = mapped_condition
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': {
+ 'TenableSC.Alert(val.ID===obj.ID)': createContext(mapped_alert, removeNull=True)
+ }
+ })
+
+
+def get_alerts(fields=None, alert_id=None):
+ path = 'alert'
+ params = {} # type: Dict[str, Any]
+
+ if alert_id:
+ path += '/' + alert_id
+
+ if fields:
+ params = {
+ 'fields': fields
+ }
+
+ return send_request(path, params=params)
+
+
+def get_query(query_id):
+ path = 'query/' + query_id
+
+ return send_request(path)
+
+
+def fetch_incidents():
+ incidents = []
+ last_run = demisto.getLastRun()
+ if not last_run:
+ last_run = {}
+ if 'time' not in last_run:
+ # get timestamp in seconds
+ timestamp, _ = parse_date_range(FETCH_TIME, to_timestamp=True)
+ timestamp /= 1000
+ else:
+ timestamp = last_run['time']
+
+ max_timestamp = timestamp
+ res = get_alerts(
+ fields='id,name,description,lastTriggered,triggerName,triggerOperator,'
+ 'triggerValue,action,query,owner,ownerGroup,schedule,canManage')
+
+ alerts = get_elements(res.get('response', {}), manageable='false')
+ for alert in alerts:
+ # lastTriggered is 0 for alerts that have never been triggered
+ last_triggered = int(alert.get('lastTriggered', 0))
+ if last_triggered > timestamp:
+ incidents.append({
+ 'name': 'Tenable.sc Alert Triggered - ' + alert['name'],
+ 'occurred': timestamp_to_utc(alert['lastTriggered']),
+ 'rawJSON': json.dumps(alert)
+ })
+
+ if last_triggered > max_timestamp:
+ max_timestamp = last_triggered
+
+ demisto.incidents(incidents)
+ demisto.setLastRun({'time': max_timestamp})
+
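+# Illustrative fetch cycle (not executed): on the first run getLastRun() is
+# empty, so the window opens FETCH_TIME (default '3 days') back; each alert
+# whose lastTriggered falls inside the window becomes an incident, and the
+# newest lastTriggered is persisted as the start of the next window.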
+
+def get_all_scan_results():
+ path = 'scanResult'
+ params = {
+ 'fields': 'name,description,details,status,scannedIPs,startTime,scanDuration,importStart,'
+ 'finishTime,completedChecks,owner,ownerGroup,repository'
+ }
+ return send_request(path, params=params)
+
+
+def get_all_scan_results_command():
+ res = get_all_scan_results()
+ get_manageable_results = demisto.args().get('manageable', 'false').lower() # 'true' or 'false'
+ page = int(demisto.args().get('page', '0'))
+ limit = int(demisto.args().get('limit', '50'))
+ if limit > 200:
+ limit = 200
+
+ if not res or 'response' not in res or not res['response']:
+ return_message('Scan results not found')
+
+ elements = get_elements(res['response'], get_manageable_results)
+
+ headers = ['ID', 'Name', 'Status', 'Description', 'Policy', 'Group', 'Owner', 'ScannedIPs',
+ 'StartTime', 'EndTime', 'Duration', 'Checks', 'ImportTime', 'RepositoryName']
+
+ scan_results = [{
+ 'ID': elem['id'],
+ 'Name': elem['name'],
+ 'Status': elem['status'],
+ 'Description': elem.get('description', None),
+ 'Policy': elem['details'],
+ 'Group': elem.get('ownerGroup', {}).get('name'),
+ 'Checks': elem.get('completedChecks', None),
+ 'StartTime': timestamp_to_utc(elem['startTime']),
+ 'EndTime': timestamp_to_utc(elem['finishTime']),
+ 'Duration': scan_duration_to_demisto_format(elem['scanDuration']),
+ 'ImportTime': timestamp_to_utc(elem['importStart']),
+ 'ScannedIPs': elem['scannedIPs'],
+ 'Owner': elem['owner'].get('username'),
+ 'RepositoryName': elem['repository'].get('name')
+ } for elem in elements[page:page + limit]]
+
+ readable_title = 'Tenable.sc Scan results - {0}-{1}'.format(page, page + limit - 1)
+ hr = tableToMarkdown(readable_title, scan_results, headers, removeNull=True,
+ metadata='Total number of elements is {}'.format(len(elements)))
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': hr,
+ 'EntryContext': {
+ 'TenableSC.ScanResults(val.ID===obj.ID)': createContext(scan_results, removeNull=True)
+ }
+ })
+
+
+def timestamp_to_utc(timestamp_str, default_returned_value=''):
+ if timestamp_str and (int(timestamp_str) > 0): # '-1' (and 0) denote no value
+ return datetime.utcfromtimestamp(int(timestamp_str)).strftime(
+ '%Y-%m-%dT%H:%M:%SZ')
+ return default_returned_value
+
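+# Illustrative: timestamp_to_utc('1500000000') -> '2017-07-14T02:40:00Z';
+# timestamp_to_utc('-1') falls back to default_returned_value.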
+
+def scan_duration_to_demisto_format(duration, default_returned_value=''):
+ if duration:
+ return float(duration) / 60
+ return default_returned_value
+
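+# Illustrative, assuming the API reports scanDuration in seconds (as the /60
+# conversion suggests): a 90-second scan is rendered as a duration of 1.5.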
+
+''' LOGIC '''
+
+LOG('Executing command ' + demisto.command())
+
+
+try:
+ if not TOKEN or not COOKIE:
+ login()
+
+ if demisto.command() == 'test-module':
+ demisto.results('ok')
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents()
+ elif demisto.command() == 'tenable-sc-list-scans':
+ list_scans_command()
+ elif demisto.command() == 'tenable-sc-list-policies':
+ list_policies_command()
+ elif demisto.command() == 'tenable-sc-list-repositories':
+ list_repositories_command()
+ elif demisto.command() == 'tenable-sc-list-credentials':
+ list_credentials_command()
+ elif demisto.command() == 'tenable-sc-list-zones':
+ list_zones_command()
+ elif demisto.command() == 'tenable-sc-list-report-definitions':
+ list_report_definitions_command()
+ elif demisto.command() == 'tenable-sc-list-assets':
+ list_assets_command()
+ elif demisto.command() == 'tenable-sc-list-plugins':
+ list_plugins_command()
+ elif demisto.command() == 'tenable-sc-get-asset':
+ get_asset_command()
+ elif demisto.command() == 'tenable-sc-create-asset':
+ create_asset_command()
+ elif demisto.command() == 'tenable-sc-delete-asset':
+ delete_asset_command()
+ elif demisto.command() == 'tenable-sc-create-scan':
+ create_scan_command()
+ elif demisto.command() == 'tenable-sc-launch-scan':
+ launch_scan_command()
+ elif demisto.command() == 'tenable-sc-get-scan-status':
+ get_scan_status_command()
+ elif demisto.command() == 'tenable-sc-get-scan-report':
+ get_scan_report_command()
+ elif demisto.command() == 'tenable-sc-get-vulnerability':
+ get_vulnerability_command()
+ elif demisto.command() == 'tenable-sc-delete-scan':
+ delete_scan_command()
+ elif demisto.command() == 'tenable-sc-get-device':
+ get_device_command()
+ elif demisto.command() == 'tenable-sc-list-users':
+ list_users_command()
+ elif demisto.command() == 'tenable-sc-list-alerts':
+ list_alerts_command()
+ elif demisto.command() == 'tenable-sc-get-alert':
+ get_alert_command()
+ elif demisto.command() == 'tenable-sc-get-system-information':
+ get_system_information_command()
+ elif demisto.command() == 'tenable-sc-get-system-licensing':
+ get_system_licensing_command()
+ elif demisto.command() == 'tenable-sc-get-all-scan-results':
+ get_all_scan_results_command()
+except Exception as e:
+ LOG(e)
+ LOG.print_log(False)
+ return_error(str(e))
+finally:
+ logout()
diff --git a/Integrations/Tenable_sc/Tenable_sc.yml b/Integrations/Tenable_sc/Tenable_sc.yml
new file mode 100644
index 000000000000..ccbce68383a9
--- /dev/null
+++ b/Integrations/Tenable_sc/Tenable_sc.yml
@@ -0,0 +1,1131 @@
+category: Vulnerability Management
+commonfields:
+ id: Tenable.sc
+ version: -1
+configuration:
+- display: Server URL (e.g. https://192.168.0.1)
+ name: server
+ required: true
+ type: 0
+- display: Username
+ name: credentials
+ required: true
+ type: 9
+- display: Trust any certificate (not secure)
+ name: unsecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: 3 days
+ display: First fetch timestamp (<number> <time unit>, e.g., 12 hours, 7 days,
+ 3 months, 1 year)
+ name: fetch_time
+ required: false
+ type: 0
+description: With Tenable.sc (formerly SecurityCenter) you get a real-time, continuous
+ assessment of your security posture so you can find and fix vulnerabilities faster.
+display: Tenable.sc
+name: Tenable.sc
+script:
+ commands:
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Whether to return only manageable scans. By default, returns usable
+ scans.
+ isArray: false
+ name: manageable
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Get a list of Tenable.sc existing scans
+ execution: false
+ name: tenable-sc-list-scans
+ outputs:
+ - contextPath: TenableSC.Scan.Name
+ description: Scan name
+ type: string
+ - contextPath: TenableSC.Scan.ID
+ description: Scan ID
+ type: number
+ - contextPath: TenableSC.Scan.Description
+ description: Scan description
+ type: string
+ - contextPath: TenableSC.Scan.Policy
+ description: Scan policy name
+ type: string
+ - contextPath: TenableSC.Scan.Group
+ description: Scan policy owner group name
+ type: string
+ - contextPath: TenableSC.Scan.Owner
+ description: Scan policy owner user name
+ type: string
+ - arguments:
+ - default: false
+ description: Scan ID (can be retrieved from the tenable-sc-list-scans command)
+ isArray: false
+ name: scan_id
+ required: true
+ secret: false
+ - default: false
+ description: Valid IP/Hostname of a specific target to scan. Must be provided
+ together with diagnostic_password.
+ isArray: false
+ name: diagnostic_target
+ required: false
+ secret: false
+ - default: false
+ description: Non-empty password string. Must be provided together with diagnostic_target.
+ isArray: false
+ name: diagnostic_password
+ required: false
+ secret: false
+ deprecated: false
+ description: Launch an existing scan from Tenable.sc
+ execution: false
+ name: tenable-sc-launch-scan
+ outputs:
+ - contextPath: TenableSC.ScanResults.Name
+ description: Scan name
+ type: string
+ - contextPath: TenableSC.ScanResults.ID
+ description: Scan Results ID
+ type: string
+ - contextPath: TenableSC.ScanResults.OwnerID
+ description: Scan owner ID
+ type: string
+ - contextPath: TenableSC.ScanResults.JobID
+ description: Job ID
+ type: string
+ - contextPath: TenableSC.ScanResults.Status
+ description: Scan status
+ type: string
+ - arguments:
+ - default: false
+ description: Vulnerability ID from the scan-report command
+ isArray: false
+ name: vulnerability_id
+ required: true
+ secret: false
+ - default: false
+ description: Scan results ID from the scan-report command
+ isArray: false
+ name: scan_results_id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '50'
+ description: The number of objects to return in one response (maximum limit
+ is 200).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: The page to return, starting from 0.
+ isArray: false
+ name: page
+ required: false
+ secret: false
+ deprecated: false
+ description: Get details about a given vulnerability from a given Tenable.sc scan
+ execution: false
+ name: tenable-sc-get-vulnerability
+ outputs:
+ - contextPath: TenableSC.ScanResults.ID
+ description: Scan results ID
+ type: number
+ - contextPath: TenableSC.ScanResults.Vulnerability.ID
+ description: Vulnerability plugin ID
+ type: number
+ - contextPath: TenableSC.ScanResults.Vulnerability.Name
+ description: Vulnerability name
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.Description
+ description: Vulnerability description
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.Type
+ description: Vulnerability type
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.Severity
+ description: Vulnerability Severity
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.Synopsis
+ description: Vulnerability Synopsis
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.Solution
+ description: Vulnerability Solution
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.Published
+ description: Vulnerability publish date
+ type: date
+ - contextPath: TenableSC.ScanResults.Vulnerability.CPE
+ description: Vulnerability CPE
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.CVE
+ description: Vulnerability CVE
+ type: Unknown
+ - contextPath: TenableSC.ScanResults.Vulnerability.ExploitAvailable
+ description: Vulnerability exploit available
+ type: boolean
+ - contextPath: TenableSC.ScanResults.Vulnerability.ExploitEase
+ description: Vulnerability exploit ease
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.RiskFactor
+ description: Vulnerability risk factor
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.CVSSBaseScore
+ description: Vulnerability CVSS base score
+ type: number
+ - contextPath: TenableSC.ScanResults.Vulnerability.CVSSTemporalScore
+ description: Vulnerability CVSS temporal score
+ type: number
+ - contextPath: TenableSC.ScanResults.Vulnerability.CVSSVector
+ description: Vulnerability CVSS vector
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.PluginDetails
+ description: Vulnerability plugin details
+ type: Unknown
+ - contextPath: CVE.ID
+ description: CVE ID
+ type: Unknown
+ - contextPath: TenableSC.ScanResults.Vulnerability.Host.IP
+ description: Vulnerability Host IP
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.Host.MAC
+ description: Vulnerability Host MAC
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.Host.Port
+ description: Vulnerability Host Port
+ type: number
+ - contextPath: TenableSC.ScanResults.Vulnerability.Host.Protocol
+ description: Vulnerability Host Protocol
+ type: string
+ - arguments:
+ - default: false
+ description: Scan results ID from the launch-scan command
+ isArray: true
+ name: scan_results_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Get the status of a specific scan in Tenable.sc
+ execution: false
+ name: tenable-sc-get-scan-status
+ outputs:
+ - contextPath: TenableSC.ScanResults.Status
+ description: Scan status
+ type: string
+ - contextPath: TenableSC.ScanResults.Name
+ description: Scan Name
+ type: string
+ - contextPath: TenableSC.ScanResults.Description
+ description: Scan description
+ type: Unknown
+ - contextPath: TenableSC.ScanResults.ID
+ description: Scan results ID
+ type: Unknown
+ - arguments:
+ - default: false
+ description: Scan results ID
+ isArray: false
+ name: scan_results_id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: Critical,High,Medium,Low,Info
+ description: Comma-separated list of severity values of vulnerabilities to retrieve
+ isArray: true
+ name: vulnerability_severity
+ required: false
+ secret: false
+ deprecated: false
+ description: Get a single report with Tenable.sc scan results
+ execution: false
+ name: tenable-sc-get-scan-report
+ outputs:
+ - contextPath: TenableSC.ScanResults.ID
+ description: Scan results ID
+ type: number
+ - contextPath: TenableSC.ScanResults.Name
+ description: Scan name
+ type: string
+ - contextPath: TenableSC.ScanResults.Status
+ description: Scan status
+ type: string
+ - contextPath: TenableSC.ScanResults.ScannedIPs
+ description: Scan number of scanned IPs
+ type: number
+ - contextPath: TenableSC.ScanResults.StartTime
+ description: Scan start time
+ type: date
+ - contextPath: TenableSC.ScanResults.EndTime
+ description: Scan end time
+ type: date
+ - contextPath: TenableSC.ScanResults.Checks
+ description: Scan completed checks
+ type: number
+ - contextPath: TenableSC.ScanResults.RepositoryName
+ description: Scan repository name
+ type: string
+ - contextPath: TenableSC.ScanResults.Description
+ description: Scan description
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.ID
+ description: Scan vulnerability ID
+ type: number
+ - contextPath: TenableSC.ScanResults.Vulnerability.Name
+ description: Scan vulnerability Name
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.Family
+ description: Scan vulnerability family
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.Severity
+ description: Scan vulnerability severity
+ type: string
+ - contextPath: TenableSC.ScanResults.Vulnerability.Total
+ description: Scan vulnerability total hosts
+ type: number
+ - contextPath: TenableSC.ScanResults.Policy
+ description: Scan policy
+ type: string
+ - contextPath: TenableSC.ScanResults.Group
+ description: Scan owner group name
+ type: string
+ - contextPath: TenableSC.ScanResults.Owner
+ description: Scan owner user name
+ type: string
+ - contextPath: TenableSC.ScanResults.Duration
+ description: Scan duration in minutes
+ type: number
+ - contextPath: TenableSC.ScanResults.ImportTime
+ description: Scan import time
+ type: date
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Whether to return only manageable scan credentials. Returns both
+ usable and manageable by default.
+ isArray: false
+ name: manageable
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Get a list of Tenable.sc credentials
+ execution: false
+ name: tenable-sc-list-credentials
+ outputs:
+ - contextPath: TenableSC.Credential.Name
+ description: Credential name
+ type: string
+ - contextPath: TenableSC.Credential.ID
+ description: Credential ID
+ type: number
+ - contextPath: TenableSC.Credential.Description
+ description: Credential description
+ type: string
+ - contextPath: TenableSC.Credential.Type
+ description: Credential type
+ type: string
+ - contextPath: TenableSC.Credential.Tag
+ description: Credential tag
+ type: string
+ - contextPath: TenableSC.Credential.Group
+ description: Credential owner group name
+ type: string
+ - contextPath: TenableSC.Credential.Owner
+ description: Credential owner user name
+ type: string
+ - contextPath: TenableSC.Credential.LastModified
+ description: Credential last modified time
+ type: date
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Whether to return only manageable scan policies. Returns both usable
+ and manageable by default.
+ isArray: false
+ name: manageable
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Get a list of Tenable.sc scan policies
+ execution: false
+ name: tenable-sc-list-policies
+ outputs:
+ - contextPath: TenableSC.ScanPolicy.Name
+ description: Scan policy name
+ type: string
+ - contextPath: TenableSC.ScanPolicy.ID
+ description: Scan policy ID
+ type: number
+ - contextPath: TenableSC.ScanPolicy.Description
+ description: Scan policy description
+ type: string
+ - contextPath: TenableSC.ScanPolicy.Tag
+ description: Scan policy tag
+ type: string
+ - contextPath: TenableSC.ScanPolicy.Group
+ description: Scan policy owner group name
+ type: string
+ - contextPath: TenableSC.ScanPolicy.Owner
+ description: Scan policy owner user name
+ type: string
+ - contextPath: TenableSC.ScanPolicy.LastModified
+ description: Scan policy last modified time
+ type: date
+ - contextPath: TenableSC.ScanPolicy.Type
+ description: Scan policy type
+ type: string
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Whether to return only manageable reports. Returns both usable
+ and manageable by default.
+ isArray: false
+ name: manageable
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Get a list of Tenable.sc report definitions
+ execution: false
+ name: tenable-sc-list-report-definitions
+ outputs:
+ - contextPath: TenableSC.ReportDefinition.Name
+ description: Report definition name
+ type: string
+ - contextPath: TenableSC.ReportDefinition.ID
+ description: Report definition ID
+ type: number
+ - contextPath: TenableSC.ReportDefinition.Description
+ description: Report definition description
+ type: string
+ - contextPath: TenableSC.ReportDefinition.Type
+ description: Report definition type
+ type: string
+ - contextPath: TenableSC.ReportDefinition.Group
+ description: Report definition owner group name
+ type: string
+ - contextPath: TenableSC.ReportDefinition.Owner
+ description: Report definition owner user name
+ type: string
+ - deprecated: false
+ description: Get a list of Tenable.sc scan repositories
+ execution: false
+ name: tenable-sc-list-repositories
+ outputs:
+ - contextPath: TenableSC.ScanRepository.Name
+ description: Scan Repository name
+ type: string
+ - contextPath: TenableSC.ScanRepository.ID
+ description: Scan Repository ID
+ type: number
+ - contextPath: TenableSC.ScanRepository.Description
+ description: Scan Repository description
+ type: string
+ - deprecated: false
+ description: Get a list of Tenable.sc scan zones
+ execution: false
+ name: tenable-sc-list-zones
+ outputs:
+ - contextPath: TenableSC.ScanZone.Name
+ description: Scan Zone name
+ type: string
+ - contextPath: TenableSC.ScanZone.ID
+ description: Scan Zone ID
+ type: number
+ - contextPath: TenableSC.ScanZone.Description
+ description: Scan Zone description
+ type: string
+ - contextPath: TenableSC.ScanZone.IPList
+ description: Scan Zone IP list
+ type: unknown
+ - contextPath: TenableSC.ScanZone.ActiveScanners
+ description: Scan Zone active scanners
+ type: number
+ - arguments:
+ - default: false
+ description: Scan name
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: Policy ID, can be retrieved from list-policies command
+ isArray: false
+ name: policy_id
+ required: true
+ secret: false
+ - default: false
+ description: Scan description
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: Scan Repository ID, can be retrieved from list-repositories command
+ isArray: false
+ name: repository_id
+ required: true
+ secret: false
+ - default: false
+ description: Scan zone ID (default is all zones), can be retrieved from list-zones
+ command
+ isArray: false
+ name: zone_id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Schedule for the scan
+ isArray: false
+ name: schedule
+ predefined:
+ - dependent
+ - ical
+ - never
+ - rollover
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Either all assets or comma-separated asset IDs to scan, can be
+ retrieved from list-assets command
+ isArray: true
+ name: asset_ids
+ predefined:
+ - All
+ - AllManageable
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether to include virtual hosts, default is false
+ isArray: false
+ name: scan_virtual_hosts
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: Comma-separated IPs to scan, e.g., 10.0.0.1,10.0.0.2
+ isArray: false
+ name: ip_list
+ required: false
+ secret: false
+ - default: false
+ description: Comma-separated list of report definition IDs to create post-scan,
+ can be retrieved from list-report-definitions command
+ isArray: true
+ name: report_ids
+ required: false
+ secret: false
+ - default: false
+ description: Comma-separated credential IDs to use, can be retrieved from list-credentials
+ command
+ isArray: true
+ name: credentials
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Scan timeout action, default is import
+ isArray: false
+ name: timeout_action
+ predefined:
+ - discard
+ - import
+ - rollover
+ required: false
+ secret: false
+ - default: false
+ description: Maximum scan run time in hours, default is 1
+ isArray: false
+ name: max_scan_time
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Track hosts that have been issued a new IP address (e.g., via DHCP)
+ isArray: false
+ name: dhcp_tracking
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Scan rollover type
+ isArray: false
+ name: rollover_type
+ predefined:
+ - nextDay
+ required: false
+ secret: false
+ - default: false
+ description: Dependent scan ID in case of a dependent schedule, can be retrieved
+ from list-scans command
+ isArray: false
+ name: dependent_id
+ required: false
+ secret: false
+ deprecated: false
+ description: Create a scan on Tenable.sc
+ execution: false
+ name: tenable-sc-create-scan
+ outputs:
+ - contextPath: TenableSC.Scan.ID
+ description: Scan ID
+ type: string
+ - contextPath: TenableSC.Scan.CreatorID
+ description: Scan's creator ID
+ type: string
+ - contextPath: TenableSC.Scan.Name
+ description: Scan Name
+ type: string
+ - contextPath: TenableSC.Scan.Type
+ description: Scan type
+ type: string
+ - contextPath: TenableSC.Scan.CreatedTime
+ description: Scan creation time
+ type: date
+ - contextPath: TenableSC.Scan.OwnerName
+ description: Scan owner Username
+ type: string
+ - contextPath: TenableSC.Scan.Reports
+ description: Scan report definition IDs
+ type: unknown
+ - arguments:
+ - default: false
+ description: Scan ID, can be retrieved from the list-scans command
+ isArray: false
+ name: scan_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Delete a scan in Tenable.sc
+ execution: false
+ name: tenable-sc-delete-scan
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Whether to return only manageable assets. Returns both usable and
+ manageable by default.
+ isArray: false
+ name: manageable
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: Get a list of Tenable.sc Assets
+ execution: false
+ name: tenable-sc-list-assets
+ outputs:
+ - contextPath: TenableSC.Asset.ID
+ description: Asset ID
+ type: string
+ - contextPath: TenableSC.Asset.Name
+ description: Asset Name
+ type: string
+ - contextPath: TenableSC.Asset.HostCount
+ description: Asset host IPs count
+ type: number
+ - contextPath: TenableSC.Asset.Type
+ description: Asset type
+ type: string
+ - contextPath: TenableSC.Asset.Tag
+ description: Asset tag
+ type: string
+ - contextPath: TenableSC.Asset.Owner
+ description: Asset owner username
+ type: string
+ - contextPath: TenableSC.Asset.Group
+ description: Asset group
+ type: string
+ - contextPath: TenableSC.Asset.LastModified
+ description: Asset last modified time
+ type: date
+ - arguments:
+ - default: false
+ description: Asset Name
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: Asset description
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: Asset owner ID, default is the Session User ID, can be retrieved
+ from the list-users command
+ isArray: false
+ name: owner_id
+ required: false
+ secret: false
+ - default: false
+ description: Asset tag
+ isArray: true
+ name: tag
+ required: false
+ secret: false
+ - default: false
+ description: Comma-separated list of IPs to include in the asset, e.g., 10.0.0.2,10.0.0.4
+ isArray: false
+ name: ip_list
+ required: true
+ secret: false
+ deprecated: false
+ description: Create an Asset in Tenable.sc with provided IP addresses
+ execution: false
+ name: tenable-sc-create-asset
+ outputs:
+ - contextPath: TenableSC.Asset.Name
+ description: Asset Name
+ type: string
+ - contextPath: TenableSC.Asset.ID
+ description: Asset ID
+ type: string
+ - contextPath: TenableSC.Asset.OwnerName
+ description: Asset owner name
+ type: string
+ - contextPath: TenableSC.Asset.Tags
+ description: Asset tags
+ type: string
+ - arguments:
+ - default: false
+ description: Asset ID that can be retrieved from the list-assets command
+ isArray: false
+ name: asset_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Get details for a given asset in Tenable.sc
+ execution: false
+ name: tenable-sc-get-asset
+ outputs:
+ - contextPath: TenableSC.Asset.ID
+ description: Asset ID
+ type: number
+ - contextPath: TenableSC.Asset.Name
+ description: Asset name
+ type: string
+ - contextPath: TenableSC.Asset.Description
+ description: Asset description
+ type: string
+ - contextPath: TenableSC.Asset.Tag
+ description: Asset tag
+ type: string
+ - contextPath: TenableSC.Asset.Modified
+ description: Asset last modified time
+ type: date
+ - contextPath: TenableSC.Asset.Owner
+ description: Asset owner user name
+ type: string
+ - contextPath: TenableSC.Asset.Group
+ description: Asset owner group
+ type: string
+ - contextPath: TenableSC.Asset.IPs
+ description: Asset viewable IPs
+ type: unknown
+ - arguments:
+ - default: false
+ description: Asset ID
+ isArray: false
+ name: asset_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Delete the Asset with the given ID from Tenable.sc
+ execution: true
+ name: tenable-sc-delete-asset
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: Whether to return only manageable alerts. Returns both usable and
+ manageable by default.
+ isArray: false
+ name: manageable
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ deprecated: false
+ description: List alerts from Tenable.sc
+ execution: false
+ name: tenable-sc-list-alerts
+ outputs:
+ - contextPath: TenableSC.Alert.ID
+ description: Alert ID
+ type: string
+ - contextPath: TenableSC.Alert.Name
+ description: Alert name
+ type: string
+ - contextPath: TenableSC.Alert.Description
+ description: Alert description
+ type: string
+ - contextPath: TenableSC.Alert.State
+ description: Alert state
+ type: string
+ - contextPath: TenableSC.Alert.Actions
+ description: Alert Actions
+ type: string
+ - contextPath: TenableSC.Alert.LastTriggered
+ description: Alert last triggered time
+ type: date
+ - contextPath: TenableSC.Alert.LastEvaluated
+ description: Alert last evaluated time
+ type: date
+ - contextPath: TenableSC.Alert.Group
+ description: Alert owner group name
+ type: string
+ - contextPath: TenableSC.Alert.Owner
+ description: Alert owner user name
+ type: string
+ - arguments:
+ - default: false
+ description: Alert ID, can be retrieved from list-alerts command
+ isArray: false
+ name: alert_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Get information about a given alert in Tenable.sc
+ execution: false
+ name: tenable-sc-get-alert
+ outputs:
+ - contextPath: TenableSC.Alert.ID
+ description: Alert ID
+ type: string
+ - contextPath: TenableSC.Alert.Name
+ description: Alert name
+ type: string
+ - contextPath: TenableSC.Alert.Description
+ description: Alert description
+ type: string
+ - contextPath: TenableSC.Alert.State
+ description: Alert state
+ type: string
+ - contextPath: TenableSC.Alert.Condition.Trigger
+ description: Alert trigger
+ type: string
+ - contextPath: TenableSC.Alert.LastTriggered
+ description: Alert last triggered time
+ type: date
+ - contextPath: TenableSC.Alert.Condition.Query
+ description: Alert query name
+ type: string
+ - contextPath: TenableSC.Alert.Condition.Filter.Name
+ description: Alert query filter name
+ type: string
+ - contextPath: TenableSC.Alert.Condition.Filter.Values
+ description: Alert query filter values
+ type: Unknown
+ - contextPath: TenableSC.Alert.Action.Type
+ description: Alert action type
+ type: string
+ - contextPath: TenableSC.Alert.Action.Values
+ description: Alert action values
+ type: Unknown
+ - arguments:
+ - default: false
+ description: A valid IP address of a device
+ isArray: false
+ name: ip
+ required: false
+ secret: false
+ - default: false
+ description: DNS name of a device
+ isArray: false
+ name: dns_name
+ required: false
+ secret: false
+ - default: false
+ description: Repository ID to get the device from, can be retrieved from list-repositories
+ command
+ isArray: false
+ name: repository_id
+ required: false
+ secret: false
+ deprecated: false
+ description: Gets the specified device information
+ execution: false
+ name: tenable-sc-get-device
+ outputs:
+ - contextPath: TenableSC.Device.IP
+ description: Device IP address
+ type: string
+ - contextPath: TenableSC.Device.UUID
+ description: Device UUID
+ type: string
+ - contextPath: TenableSC.Device.RepositoryID
+ description: Device repository ID
+ type: string
+ - contextPath: TenableSC.Device.MacAddress
+ description: Device Mac address
+ type: string
+ - contextPath: TenableSC.Device.NetbiosName
+ description: Device Netbios name
+ type: string
+ - contextPath: TenableSC.Device.DNSName
+ description: Device DNS name
+ type: string
+ - contextPath: TenableSC.Device.OS
+ description: Device Operating System
+ type: string
+ - contextPath: TenableSC.Device.OsCPE
+ description: Device Common Platform Enumeration
+ type: string
+ - contextPath: TenableSC.Device.LastScan
+ description: Device's last scan time
+ type: date
+ - contextPath: TenableSC.Device.RepositoryName
+ description: Device repository name
+ type: string
+ - contextPath: TenableSC.Device.TotalScore
+ description: Device total threat score
+ type: number
+ - contextPath: TenableSC.Device.LowSeverity
+ description: Device total threat scores with low severity
+ type: number
+ - contextPath: TenableSC.Device.MediumSeverity
+ description: Device total threat scores with medium severity
+ type: number
+ - contextPath: TenableSC.Device.HighSeverity
+ description: Device total threat scores with high severity
+ type: number
+ - contextPath: TenableSC.Device.CriticalSeverity
+ description: Device total threat scores with critical severity
+ type: number
+ - contextPath: Endpoint.IPAddress
+ description: Endpoint IP address
+ type: string
+ - contextPath: Endpoint.Hostname
+ description: Endpoint DNS name
+ type: string
+ - contextPath: Endpoint.MACAddress
+ description: Endpoint mac address
+ type: string
+ - contextPath: Endpoint.OS
+ description: Endpoint OS
+ type: string
+ - arguments:
+ - default: false
+ description: Filter by user ID
+ isArray: false
+ name: id
+ required: false
+ secret: false
+ - default: false
+ description: Filter by user username
+ isArray: false
+ name: username
+ required: false
+ secret: false
+ - default: false
+ description: Filter by user email address
+ isArray: false
+ name: email
+ required: false
+ secret: false
+ deprecated: false
+ description: List users in Tenable.sc
+ execution: false
+ name: tenable-sc-list-users
+ outputs:
+ - contextPath: TenableSC.User.ID
+ description: User ID
+ type: string
+ - contextPath: TenableSC.User.Username
+ description: Username
+ type: string
+ - contextPath: TenableSC.User.FirstName
+ description: User first name
+ type: string
+ - contextPath: TenableSC.User.LastName
+ description: User last name
+ type: string
+ - contextPath: TenableSC.User.Title
+ description: User title
+ type: string
+ - contextPath: TenableSC.User.Email
+ description: User email address
+ type: string
+ - contextPath: TenableSC.User.Created
+ description: The creation time of the user
+ type: date
+ - contextPath: TenableSC.User.Modified
+ description: Last modification time of the user
+ type: date
+ - contextPath: TenableSC.User.Login
+ description: User last login
+ type: date
+ - contextPath: TenableSC.User.Role
+ description: User role name
+ type: string
+ - deprecated: false
+ description: Retrieve licensing information from Tenable.sc
+ execution: false
+ name: tenable-sc-get-system-licensing
+ outputs:
+ - contextPath: TenableSC.Status.ActiveIPS
+ description: Number of active IP addresses
+ type: number
+ - contextPath: TenableSC.Status.LicensedIPS
+ description: Number of licensed IP addresses
+ type: Unknown
+ - contextPath: TenableSC.Status.License
+ description: License status
+ type: Unknown
+ - deprecated: false
+ description: Get the system information and diagnostics from Tenable.sc
+ execution: false
+ name: tenable-sc-get-system-information
+ outputs:
+ - contextPath: TenableSC.System.Version
+ description: System version
+ type: string
+ - contextPath: TenableSC.System.BuildID
+ description: System build ID
+ type: string
+ - contextPath: TenableSC.System.ReleaseID
+ description: System release ID
+ type: string
+ - contextPath: TenableSC.System.License
+ description: System license status
+ type: string
+ - contextPath: TenableSC.System.JavaStatus
+ description: Server java status
+ type: boolean
+ - contextPath: TenableSC.System.RPMStatus
+ description: Server RPM status
+ type: boolean
+ - contextPath: TenableSC.System.DiskStatus
+ description: Server disk status
+ type: boolean
+ - contextPath: TenableSC.System.DiskThreshold
+ description: Remaining space on the system disk
+ type: number
+ - contextPath: TenableSC.System.LastCheck
+ description: System last check time
+ type: date
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'false'
+ description: |-
+ Filter only manageable scan results. By default, returns both usable and
+ manageable scan results.
+ isArray: false
+ name: manageable
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '0'
+ description: The page to return, starting from 0.
+ isArray: false
+ name: page
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '50'
+ description: The number of objects to return in one response (maximum limit
+ is 200).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns all scan results in Tenable.sc.
+ execution: false
+ name: tenable-sc-get-all-scan-results
+ outputs:
+ - contextPath: TenableSC.ScanResults.ID
+ description: Scan ID.
+ type: Number
+ - contextPath: TenableSC.ScanResults.Name
+ description: Scan name.
+ type: string
+ - contextPath: TenableSC.ScanResults.Status
+ description: Scan status.
+ type: string
+ - contextPath: TenableSC.ScanResults.Description
+ description: Scan description.
+ type: string
+ - contextPath: TenableSC.ScanResults.Policy
+ description: Scan policy.
+ type: string
+ - contextPath: TenableSC.ScanResults.Group
+ description: Scan group name.
+ type: string
+ - contextPath: TenableSC.ScanResults.Checks
+ description: Scan completed number of checks.
+ type: number
+ - contextPath: TenableSC.ScanResults.StartTime
+ description: Scan results start time.
+ type: date
+ - contextPath: TenableSC.ScanResults.EndTime
+ description: Scan results end time.
+ type: date
+ - contextPath: TenableSC.ScanResults.Duration
+ description: Scan duration in minutes.
+ type: number
+ - contextPath: TenableSC.ScanResults.ImportTime
+ description: Scan import time.
+ type: date
+ - contextPath: TenableSC.ScanResults.ScannedIPs
+ description: Number of scanned IPs.
+ type: number
+ - contextPath: TenableSC.ScanResults.Owner
+ description: Scan owner name.
+ type: string
+ - contextPath: TenableSC.ScanResults.RepositoryName
+ description: Scan repository name.
+ type: string
+ isfetch: true
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- tenable-sc-test
diff --git a/Integrations/Tenable_sc/Tenable_sc_image.png b/Integrations/Tenable_sc/Tenable_sc_image.png
new file mode 100644
index 000000000000..536ddebde5fb
Binary files /dev/null and b/Integrations/Tenable_sc/Tenable_sc_image.png differ
diff --git a/Integrations/ThinkstCanary/CHANGELOG.md b/Integrations/ThinkstCanary/CHANGELOG.md
new file mode 100644
index 000000000000..3cf20d57b0b8
--- /dev/null
+++ b/Integrations/ThinkstCanary/CHANGELOG.md
@@ -0,0 +1 @@
+-
\ No newline at end of file
diff --git a/Integrations/ThinkstCanary/ThinkstCanary.py b/Integrations/ThinkstCanary/ThinkstCanary.py
new file mode 100644
index 000000000000..7c367d1f447d
--- /dev/null
+++ b/Integrations/ThinkstCanary/ThinkstCanary.py
@@ -0,0 +1,395 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+import requests
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+# remove proxy settings if not set to true in params; pop() avoids a
+# KeyError when a variable is absent from the environment
+if not demisto.params().get('proxy', False):
+ for env_var in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
+ os.environ.pop(env_var, None)
+
+''' GLOBALS'''
+
+SERVER = demisto.params().get('server').rstrip('/') + '/api/v1/'
+VERIFY_CERTIFICATE = not demisto.params().get('insecure', False)
+FETCH_DELTA = demisto.params().get('fetchDelta', '24 hours').strip()
+RELEVANT_DEVICE_ENTRIES = {
+ 'description': 'Description',
+ 'id': 'ID',
+ 'ip_address': 'Address',
+ 'last_seen': 'LastSeen',
+ 'live': 'Status',
+ 'location': 'Location',
+ 'name': 'Name',
+ 'updated_std': 'LastUpdated',
+ 'version': 'Version'
+}
+RELEVANT_TOKEN_ENTRIES = {
+ 'canarytoken': 'CanaryToken',
+ 'created_printable': 'CreatedTime',
+ 'enabled': 'Enabled',
+ 'kind': 'Kind',
+ 'triggered_count': 'Triggered',
+ 'doc_name': 'DocName',
+ 'url': 'TokenURL'
+}
+DEF_PARAMS = {
+ 'auth_token': demisto.params().get('auth_token')
+}
+'''HELPER FUNCTIONS'''
+
+
+def http_request(method, url, params=None):
+ """
+ HTTP request helper function
+ """
+ if params is None:
+ params = DEF_PARAMS
+ else:
+ params.update(DEF_PARAMS)
+ res = requests.request(
+ method=method,
+ url=url,
+ params=params,
+ verify=VERIFY_CERTIFICATE
+ )
+
+ if not res.ok:
+ try:
+ res_json = res.json()
+ if 'message' in res_json:
+ LOG(str(res.text))
+ LOG(res_json.get('message'))
+ return_error(res_json.get('message'))
+ except ValueError:
+ LOG(str(res.text))
+ return_error(str(res.text))
+ except Exception as ex:
+ LOG(res.text)
+ return_error(str(ex))
+
+ try:
+ res_json = res.json()
+ return res_json
+ except Exception as ex:
+ demisto.debug(str(ex))
+ return_error(str(ex))
+
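+# A minimal usage sketch (illustration only, not wired to any command): the
+# auth_token from DEF_PARAMS is merged into every request, so callers only
+# pass endpoint-specific query parameters. The endpoint and value below
+# mirror the fetch flow later in this file.
+def _example_http_request():
+ return http_request('GET', SERVER + 'incidents/unacknowledged', params={'newer_than': '2019-01-01-00:00:00'})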
+
+def get_alerts(last_fetch=None):
+ """
+ Retrieve all unacknowledged alerts from Canary Tools
+ :param last_fetch: Last fetch incidents time
+ """
+
+ if last_fetch:
+ params = {
+ 'newer_than': last_fetch
+ }
+ res = http_request('GET', SERVER + 'incidents/unacknowledged', params)
+ else:
+ res = http_request('GET', SERVER + 'incidents/unacknowledged')
+ alerts = res.get('incidents')
+ return alerts
+
+
+def create_incident(alert):
+ """
+ Turns an alert from Canary Tools to the incident structure in Demisto
+ :return: Demisto incident, e.g., CanaryToken triggered
+ """
+ incident = {
+ 'name': demisto.get(alert, 'description.description'),
+ 'occurred': timestamp_to_datestring(1000 * (int(demisto.get(alert, 'description.created')))),
+ 'rawJSON': json.dumps(alert)
+ }
+ return incident
+
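+# Worked example (sketch; values are hypothetical): for an alert such as
+# {'description': {'description': 'Canarytoken triggered', 'created': '1548593719'}}
+# create_incident() yields {'name': 'Canarytoken triggered',
+# 'occurred': timestamp_to_datestring(1548593719000), 'rawJSON': json.dumps(alert)}.
+# The 'created' field is epoch seconds, hence the multiplication by 1000
+# before the conversion to a date string.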
+
+'''COMMANDS'''
+
+
+def test_module():
+ try:
+ res = requests.request('GET', SERVER + 'ping', params=DEF_PARAMS, verify=VERIFY_CERTIFICATE)
+ if not res.ok:
+ try:
+ res_json = res.json()
+ return_error('Could not connect, reason: {}'.format(res_json.get('message')))
+
+ except Exception as ex:
+ demisto.debug(str(ex))
+ return_error('Could not parse server response, please verify instance parameters')
+ demisto.results('ok')
+ except Exception as ex:
+ demisto.debug(str(ex))
+ return_error('Failed to establish new connection, please verify instance parameters')
+
+
+def list_canaries():
+ """
+ Retrieve all Canaries available in Canary Tools
+ :return: json response, a list of all devices
+ """
+ res = http_request('GET', SERVER + 'devices/all')
+ new_devices = [
+ {new_key: device[old_key] if old_key in device else None for old_key, new_key in
+ RELEVANT_DEVICE_ENTRIES.items()} for
+ device in res['devices']]
+ return res, new_devices
+
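+# Mapping sketch (hypothetical device record): RELEVANT_DEVICE_ENTRIES renames
+# API fields to context keys and fills absent fields with None, e.g.
+# {'id': 'node-1', 'live': True, 'version': '2.1'} becomes
+# {'ID': 'node-1', 'Status': True, 'Version': '2.1', 'Name': None, ...};
+# the None values are later stripped by createContext(removeNull=True).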
+
+def list_canaries_command():
+ """
+ Retrieve all Canaries available in Canary Tools
+ """
+ res_json, new_devices = list_canaries()
+ context = createContext(new_devices, removeNull=True)
+ headers = [
+ 'ID',
+ 'Name',
+ 'Description',
+ 'Address',
+ 'Status',
+ 'Location',
+ 'Version',
+ 'LastSeen',
+ 'LastUpdated'
+ ]
+ contents = res_json
+ human_readable = tableToMarkdown('Canary Devices', new_devices, headers=headers)
+ outputs = {'CanaryTools.Device(val.ID && val.ID === obj.ID)': context}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+
+def list_tokens():
+ """
+ Retrieve all Canary Tokens available in Canary Tools
+ :return: json response, a list of all tokens
+ """
+ res = http_request('GET', SERVER + 'canarytokens/fetch')
+ new_tokens = []
+ for token in res['tokens']:
+ new_tokens.append({new_key: token[old_key] if old_key in token else None for old_key, new_key in
+ RELEVANT_TOKEN_ENTRIES.items()})
+ return res, new_tokens
+
+
+def list_tokens_command():
+ """
+ Retrieve all Canary Tokens available in Canary Tools
+ """
+ res_json, new_tokens = list_tokens()
+ headers = sorted(new_tokens[0].keys()) if new_tokens else None
+ context = createContext(new_tokens, removeNull=True)
+
+ contents = res_json
+ human_readable = tableToMarkdown('Canary Tools Tokens', new_tokens, headers=headers)
+ outputs = {'CanaryTools.Token(val.CanaryToken && val.CanaryToken === obj.CanaryToken)': context}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+
+def get_token_command():
+ """
+ Fetch a Canary Token from the Canary Tools server
+ :return: Canary Token information or file
+ """
+ token = demisto.args().get('token')
+ params = {
+ 'canarytoken': token
+ }
+ res = http_request('GET', SERVER + 'canarytoken/fetch', params=params)
+ context = demisto.get(res, 'token.canarytoken')
+ contents = res
+ human_readable = 'File Fetched Successfully'
+ outputs = {'CanaryTools.Token(val.CanaryToken && val.CanaryToken === obj.CanaryToken)': context}
+
+ if demisto.get(res, 'token.doc'):
+ name = demisto.get(res, 'token.doc_name')
+ content = demisto.get(res, 'token.doc')
+ token_file = fileResult(name, content)
+ demisto.results(token_file)
+ if demisto.get(res, 'token.web_image'):
+ name = demisto.get(res, 'token.web_image_name')
+ content = demisto.get(res, 'token.web_image')
+ token_file = fileResult(name, content)
+ demisto.results(token_file)
+ else:
+ human_readable = tableToMarkdown('Canary Tools Tokens', res.get('token'))
+
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
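+# File handling note: fileResult(name, content) packages the returned content
+# as a war-room file entry, so tokens carrying a document (token.doc) or a
+# web image (token.web_image) come back as downloadable files, while other
+# token kinds fall back to a metadata table.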
+
+def check_whitelist(ip, port):
+ """
+ Check if a given IP address is whitelisted in Canary Tools
+ :return: json response
+ """
+ params = {
+ 'src_ip': ip,
+ 'dst_port': port
+ }
+
+ res = http_request('GET', SERVER + 'settings/is_ip_whitelisted', params=params)
+ return res
+
+
+def check_whitelist_command():
+ """
+ Check if a given IP address is whitelisted in Canary Tools
+ """
+ ip = demisto.args().get('ip')
+ port = demisto.args().get('port')
+ res = check_whitelist(ip, port)
+
+ if not port:
+ port = 'Any'
+ context = {
+ 'Address': str(ip),
+ 'Port': str(port),
+ 'Whitelisted': str(res.get('is_ip_whitelisted'))
+ }
+ contents = res
+ context = createContext(context, removeNull=True)
+ outputs = {'CanaryTools.IP(val.Address && val.Address===obj.Address && val.Port && val.Port===obj.Port)': context}
+
+ if res.get('is_ip_whitelisted'):
+ human_readable = 'The IP address {}:{} is Whitelisted'.format(ip, port)
+ else:
+ human_readable = 'The IP address {}:{} is not Whitelisted'.format(ip, port)
+
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+
+def whitelist_ip(ip, port):
+ """
+ Whitelist an IP address in Canary Tools
+ :return: json response
+ """
+ params = {
+ 'src_ip': ip,
+ 'dst_port': port
+ }
+
+ res = http_request('POST', SERVER + 'settings/whitelist_ip_port', params=params)
+ return res
+
+
+def whitelist_ip_command():
+ """
+ Whitelist an IP address in Canary Tools
+ """
+ ip = demisto.args().get('ip')
+ port = demisto.args().get('port')
+ res = whitelist_ip(ip, port)
+
+ if not port:
+ port = 'Any'
+
+ result_status = res.get('result')
+ if result_status == 'success':
+ context = {
+ 'Address': str(ip),
+ 'Port': str(port),
+ 'Whitelisted': 'True'
+ }
+ context = createContext(context, removeNull=True)
+ contents = res
+ human_readable = 'The IP address {}:{} was added to the Whitelist'.format(ip, port)
+ outputs = {
+ 'CanaryTools.IP(val.Address && val.Address===obj.Address && val.Port && val.Port===obj.Port)': context}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+ elif result_status == 'failure':
+ return_outputs(readable_output=res.get('message'), outputs={}, raw_response=res)
+
+ elif result_status == 'error':
+ return_error(res.get('message'))
+
+
+def alert_status_command():
+ """
+ Acknowledge or unacknowledge an alert in Canary Tools
+ """
+ args = demisto.args()
+ alert = args.get('alert_id')
+ status = args.get('status')
+ context = {
+ 'ID': str(alert),
+ 'Status': str(status),
+ }
+ context = createContext(context, removeNull=True)
+ params = {
+ 'incident': alert
+ }
+ if status == 'Acknowledge':
+ res = http_request('POST', SERVER + 'incident/acknowledge', params=params)
+ if res.get('action') == 'acknowledged':
+ contents = res
+ human_readable = 'The Alert {} was '.format(alert) + res.get('action')
+ outputs = {'CanaryTools.Alert(val.ID && val.ID === obj.ID)': context}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+
+ elif status == 'Unacknowledge':
+ res = http_request('POST', SERVER + 'incident/unacknowledge', params=params)
+ if res.get('action') == 'unacknowledged':
+ contents = res
+ human_readable = 'The Alert {} was '.format(alert) + res.get('action')
+ outputs = {'CanaryTools.Alert(val.ID && val.ID === obj.ID)': context}
+ return_outputs(readable_output=human_readable, outputs=outputs, raw_response=contents)
+ else:
+ return_error('Unsupported alert status: {}'.format(status))
+
+
+def fetch_incidents_command():
+ """
+ Fetch alerts from Canary Tools as incidents in Demisto
+ last_fetch: The latest fetched alert creation time
+ """
+ last_fetch = demisto.getLastRun().get('time')
+
+ if last_fetch is None:
+ last_fetch = parse_date_range(FETCH_DELTA, '%Y-%m-%d-%H:%M:%S')[0]
+
+ # All alerts retrieved from get_alerts are newer than last_fetch and are in a chronological order
+ alerts = get_alerts(last_fetch)
+
+ incidents = []
+ current_fetch = last_fetch
+ for alert in alerts:
+ current_fetch = 1000 * (int(demisto.get(alert, 'description.created')))
+ current_fetch = timestamp_to_datestring(current_fetch, '%Y-%m-%d-%H:%M:%S')
+ incident = create_incident(alert)
+ incidents.append(incident)
+
+ demisto.incidents(incidents)
+ demisto.setLastRun({'time': current_fetch})
+
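+# Fetch bookkeeping sketch: the marker round-trips through setLastRun and
+# getLastRun as a formatted string, e.g.
+# demisto.setLastRun({'time': '2019-01-28-13:35:19'})
+# demisto.getLastRun().get('time') # -> '2019-01-28-13:35:19'
+# Since get_alerts() only returns alerts newer than that timestamp, each
+# alert is fetched exactly once, even across restarts.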
+
+# Execution Code
+try:
+ if demisto.command() == 'test-module':
+ test_module()
+ elif demisto.command() == 'canarytools-list-tokens':
+ list_tokens_command()
+ elif demisto.command() == 'canarytools-get-token':
+ get_token_command()
+ elif demisto.command() == 'canarytools-list-canaries':
+ list_canaries_command()
+ elif demisto.command() == 'canarytools-check-whitelist':
+ check_whitelist_command()
+ elif demisto.command() == 'canarytools-whitelist-ip':
+ whitelist_ip_command()
+ elif demisto.command() == 'canarytools-edit-alert-status':
+ alert_status_command()
+ elif demisto.command() == 'fetch-incidents':
+ fetch_incidents_command()
+except Exception as e:
+ return_error('Unable to perform command: {}, Reason: {}'.format(demisto.command(), e))
diff --git a/Integrations/ThinkstCanary/ThinkstCanary.yml b/Integrations/ThinkstCanary/ThinkstCanary.yml
new file mode 100644
index 000000000000..effff64d69bc
--- /dev/null
+++ b/Integrations/ThinkstCanary/ThinkstCanary.yml
@@ -0,0 +1,206 @@
+commonfields:
+ id: Thinkst Canary
+ version: -1
+name: Thinkst Canary
+display: Thinkst Canary
+category: Deception
+description: By presenting itself as an apparently benign and legitimate service,
+ the Canary draws the attention of unwanted activity. When someone trips one of
+ the Canary's triggers, an alert is sent to notify the responsible parties so that
+ action can be taken before valuable systems in your network are compromised.
+configuration:
+- display: Canary Server URL (e.g., https://***.canary.tools)
+ name: server
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: API Authentication Token
+ name: auth_token
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Fetch incidents
+ name: isFetch
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: First fetch time (<number> <time unit>, e.g., 12 hours, 7 days, 3 months, 1 year)
+ name: fetchDelta
+ defaultvalue: "24 hours"
+ type: 0
+ required: false
+- display: Incident type
+ name: incidentType
+ defaultvalue: ""
+ type: 13
+ required: false
+script:
+ script: ''
+ type: python
+ commands:
+ - name: canarytools-list-canaries
+ arguments: []
+ outputs:
+ - contextPath: CanaryTools.Device.ID
+ description: Device ID
+ type: string
+ - contextPath: CanaryTools.Device.Name
+ description: Device name
+ type: string
+ - contextPath: CanaryTools.Device.Description
+ description: Device description
+ type: string
+ - contextPath: CanaryTools.Device.Address
+ description: Device IP address
+ type: string
+ - contextPath: CanaryTools.Device.Status
+ description: Device status - Live (True/False)
+ type: boolean
+ - contextPath: CanaryTools.Device.Location
+ description: Device location
+ type: string
+ - contextPath: CanaryTools.Device.Version
+ description: Device version
+ type: string
+ - contextPath: CanaryTools.Device.LastSeen
+ description: Device last seen time
+ type: date
+ - contextPath: CanaryTools.Device.LastUpdated
+ description: Device last updated time
+ type: date
+ description: Lists all registered Canaries.
+ - name: canarytools-list-tokens
+ arguments: []
+ outputs:
+ - contextPath: CanaryTools.Token.CanaryToken
+ description: Canary Token
+ type: string
+ - contextPath: CanaryTools.Token.CreatedTime
+ description: Token Created time
+ type: date
+ - contextPath: CanaryTools.Token.Enabled
+ description: Token status - Enabled (True / False)
+ type: boolean
+ - contextPath: CanaryTools.Token.Kind
+ description: Token Kind
+ type: string
+ - contextPath: CanaryTools.Token.Triggered
+ description: Token triggered count
+ type: number
+ - contextPath: CanaryTools.Token.DocName
+ description: Token document name (if the token is of type document)
+ type: string
+ - contextPath: CanaryTools.Token.TokenURL
+ description: Token URL (How the token is presented)
+ type: string
+ description: Lists all Canary tokens.
+ - name: canarytools-check-whitelist
+ arguments:
+ - name: ip
+ required: true
+ description: IP address
+ - name: port
+ description: Destination port
+ outputs:
+ - contextPath: CanaryTools.IP.Address
+ description: IP address
+ type: string
+ - contextPath: CanaryTools.IP.Port
+ description: Destination port for the IP whitelist
+ type: string
+ - contextPath: CanaryTools.IP.Whitelisted
+ description: Is the IP address whitelisted (true/false)
+ type: boolean
+ description: Checks whether a given IP address and port are whitelisted.
+ - name: canarytools-whitelist-ip
+ arguments:
+ - name: ip
+ required: true
+ description: IP address to whitelist
+ - name: port
+ description: Destination port to whitelist
+ outputs:
+ - contextPath: CanaryTools.IP.Address
+ description: IP address
+ type: string
+ - contextPath: CanaryTools.IP.Port
+ description: Destination port for the IP whitelist
+ type: string
+ - contextPath: CanaryTools.IP.Whitelisted
+ description: Is the IP address whitelisted (True/False)
+ type: boolean
+ description: Adds an IP address to the whitelist in Canary.
+ - name: canarytools-edit-alert-status
+ arguments:
+ - name: alert_id
+ required: true
+ description: 'Alert ID (e.g., incident:canarytoken:d6fe0ae4dfd36cc3cc6d9d4f::1548593719, can be retrieved through
+ fetch incidents)'
+ - name: status
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - Acknowledge
+ - Unacknowledge
+ description: Required status for the alert (Acknowledge, Unacknowledge)
+ outputs:
+ - contextPath: CanaryTools.Alert.ID
+ description: Alert ID
+ type: string
+ - contextPath: CanaryTools.Alert.Status
+ description: Alert status
+ type: string
+ description: Edits the status for an alert in Canary Tools.
+ - name: canarytools-get-token
+ arguments:
+ - name: token
+ required: true
+ description: Canary Token, can be retrieved through the list-tokens command
+ outputs:
+ - contextPath: CanaryTools.Token.CanaryToken
+ description: Canary Token
+ type: string
+ - contextPath: File.Size
+ description: File Size
+ type: string
+ - contextPath: File.SHA1
+ description: File SHA-1
+ type: string
+ - contextPath: File.SHA256
+ description: File SHA-256
+ type: string
+ - contextPath: File.Name
+ description: File name
+ type: string
+ - contextPath: File.SSDeep
+ description: File SSDeep
+ type: string
+ - contextPath: File.EntryID
+ description: File EntryID
+ type: string
+ - contextPath: File.Info
+ description: File info
+ type: string
+ - contextPath: File.Type
+ description: File type
+ type: string
+ - contextPath: File.MD5
+ description: File MD5
+ type: string
+ - contextPath: File.Extension
+ description: File extension
+ type: string
+ description: Fetches a Canary Token file from the Canary Tools server.
+ isfetch: true
+ runonce: false
+tests:
+- CanaryTools Test
diff --git a/Integrations/ThinkstCanary/ThinkstCanary_Image.png b/Integrations/ThinkstCanary/ThinkstCanary_Image.png
new file mode 100644
index 000000000000..73545f84e6b1
Binary files /dev/null and b/Integrations/ThinkstCanary/ThinkstCanary_Image.png differ
diff --git a/Integrations/ThinkstCanary/ThinkstCanary_description.md b/Integrations/ThinkstCanary/ThinkstCanary_description.md
new file mode 100644
index 000000000000..aa11aa602f86
--- /dev/null
+++ b/Integrations/ThinkstCanary/ThinkstCanary_description.md
@@ -0,0 +1,2 @@
+Generate your API Authentication Token from Canary Tools:
+ https://help.canary.tools/knowledge_base/topics/forget-logging-in-what-apis-does-canary-support
\ No newline at end of file
diff --git a/Integrations/ThreatConnect/CHANGELOG.md b/Integrations/ThreatConnect/CHANGELOG.md
new file mode 100644
index 000000000000..6c54dfc19234
--- /dev/null
+++ b/Integrations/ThreatConnect/CHANGELOG.md
@@ -0,0 +1,25 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+Added 8 new commands.
+ - ***tc-get-group***
+ - ***tc-get-group-attributes***
+ - ***tc-get-group-security-labels***
+ - ***tc-get-group-tags***
+ - ***tc-download-document***
+ - ***tc-get-group-indicators***
+ - ***tc-get-associated-groups***
+ - ***tc-associate-group-to-group***
+
+
+## [19.8.2] - 2019-08-22
+Added 8 new commands.
+ - ***tc-get-groups***
+ - ***tc-add-group-security-label***
+ - ***tc-add-group-tag***
+ - ***tc-get-indicator-types***
+ - ***tc-group-associate-indicator***
+ - ***tc-get-events***
+ - ***tc-add-group-attribute***
+ - ***tc-create-document-group***
\ No newline at end of file
diff --git a/Integrations/ThreatConnect/ThreatConnect.py b/Integrations/ThreatConnect/ThreatConnect.py
new file mode 100644
index 000000000000..bd5e1a0c4ef7
--- /dev/null
+++ b/Integrations/ThreatConnect/ThreatConnect.py
@@ -0,0 +1,1812 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+from urlparse import urlparse
+from datetime import timedelta
+from distutils.util import strtobool
+from threatconnect import ThreatConnect
+from threatconnect.RequestObject import RequestObject
+from threatconnect.Config.ResourceType import ResourceType
+from threatconnect.Config.FilterOperator import FilterOperator
+
+'''GLOBAL VARS'''
+FRESHNESS = int(demisto.params()['freshness'])
+MAX_CONTEXT = 100
+
+''' HELPER FUNCTIONS '''
+
+
+def get_client():
+ params = demisto.params()
+ access = params['accessId']
+ secret = params['secretKey']
+ default_org = params.get('defaultOrg')
+ url = params['baseUrl']
+ proxy_ip = params['proxyIp']
+ proxy_port = params['proxyPort']
+
+ tc = ThreatConnect(access, secret, default_org, url)
+ if proxy_ip and proxy_port: # truthiness already rejects empty strings
+ tc.set_proxies(proxy_ip, int(proxy_port))
+
+ return tc
+
+
+def calculate_freshness_time(freshness):
+ t = datetime.now() - timedelta(days=freshness)
+ return t.strftime('%Y-%m-%dT00:00:00Z')
+
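+# Worked example: with freshness=7 and a current date of 2019-09-18,
+# calculate_freshness_time(7) returns '2019-09-11T00:00:00Z'; the time of
+# day is floored to midnight by the literal 00:00:00 in the format string.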
+
+def create_context(indicators, include_dbot_score=False):
+ context = {
+ 'DBotScore': [],
+ outputPaths['ip']: [],
+ outputPaths['url']: [],
+ outputPaths['domain']: [],
+ outputPaths['file']: [],
+ 'TC.Indicator(val.ID && val.ID === obj.ID)': [],
+ } # type: dict
+ tc_type_to_demisto_type = {
+ 'Address': 'ip',
+ 'URL': 'url',
+ 'Host': 'domain',
+ 'File': 'file'
+ }
+ type_to_value_field = {
+ 'Address': 'ip',
+ 'URL': 'text',
+ 'Host': 'hostName',
+ 'File': 'md5'
+ }
+
+ for ind in indicators:
+ indicator_type = tc_type_to_demisto_type.get(ind['type'], ind['type'])
+ value_field = type_to_value_field.get(ind['type'], 'summary')
+ value = ind.get(value_field, ind.get('summary', ''))
+
+ if ind.get('confidence') is not None: # returned in specific indicator request - SDK
+ confidence = int(ind['confidence'])
+ else:
+ # returned in general indicator request - REST API
+ confidence = int(ind.get('threatAssessConfidence', 0))
+
+ if ind.get('rating') is not None: # returned in specific indicator request - SDK
+ rating = int(ind['rating'])
+ else:
+ # returned in general indicator request - REST API
+ rating = int(ind.get('threatAssessRating', 0))
+
+ # score as malicious when both instance thresholds are met
+ if confidence >= int(demisto.params()['confidence']) and rating >= int(demisto.params()['rating']):
+ dbot_score = 3
+ # indicators are plain dicts at this point, so read the description with .get()
+ desc = ind.get('description') or ''
+ mal = {
+ 'Malicious': {
+ 'Vendor': 'ThreatConnect',
+ 'Description': desc,
+ }
+ }
+ if indicator_type == 'ip':
+ mal['Address'] = value
+
+ elif indicator_type == 'file':
+ mal['MD5'] = value
+ mal['SHA1'] = ind.get('sha1')
+ mal['SHA256'] = ind.get('sha256')
+
+ elif indicator_type == 'url':
+ mal['Data'] = value
+
+ elif indicator_type == 'domain':
+ mal['Name'] = value
+
+ context_path = outputPaths.get(indicator_type)
+ if context_path is not None:
+ context[context_path].append(mal)
+
+ elif rating >= 1:
+ dbot_score = 2
+ else:
+ dbot_score = 1
+
+ if include_dbot_score:
+ context['DBotScore'].append({
+ 'Indicator': value,
+ 'Score': dbot_score,
+ 'Type': indicator_type,
+ 'Vendor': 'ThreatConnect'
+ })
+
+ context['TC.Indicator(val.ID && val.ID === obj.ID)'].append({
+ 'ID': ind['id'],
+ 'Name': value,
+ 'Type': ind['type'],
+ 'Owner': ind['ownerName'],
+ 'Description': ind.get('description'),
+ 'CreateDate': ind['dateAdded'],
+ 'LastModified': ind['lastModified'],
+ 'Rating': rating,
+ 'Confidence': confidence,
+
+ # relevant for domain
+ 'Active': ind.get('whoisActive'),
+
+ # relevant for file
+ 'File.MD5': ind.get('md5'),
+ 'File.SHA1': ind.get('sha1'),
+ 'File.SHA256': ind.get('sha256'),
+ })
+
+ context = {k: createContext(v, removeNull=True)[:MAX_CONTEXT] for k, v in context.items() if len(v) > 0}
+ return context, context.get('TC.Indicator(val.ID && val.ID === obj.ID)', [])
+
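+# Scoring sketch (thresholds come from the instance parameters): an indicator
+# meeting both thresholds is scored 3 and added to the matching standard
+# context path; rating >= 1 scores 2; anything else scores 1. For a
+# hypothetical Address indicator
+# {'id': 7, 'type': 'Address', 'ip': '198.51.100.1', 'ownerName': 'Org',
+# 'dateAdded': '2019-01-01', 'lastModified': '2019-02-01',
+# 'threatAssessRating': 5, 'threatAssessConfidence': 90}
+# create_context([...], include_dbot_score=True) emits a DBotScore entry
+# {'Indicator': '198.51.100.1', 'Score': 3, 'Type': 'ip', 'Vendor': 'ThreatConnect'}
+# plus a Malicious entry under outputPaths['ip'].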
+
+# pylint: disable=E1101
+def get_indicators(indicator_value=None, indicator_type=None, owners=None, rating_threshold=-1, confidence_threshold=-1,
+ freshness=None):
+ tc = get_client()
+ indicators_obj = tc.indicators()
+ _filter = indicators_obj.add_filter()
+
+ if indicator_value is not None:
+ _filter.add_indicator(indicator_value)
+ if indicator_type is not None:
+ _filter.add_pf_type(indicator_type, FilterOperator.EQ)
+
+ if owners is not None:
+ owners = owners.split(",")
+ _filter.add_owner(owners)
+
+ if rating_threshold != -1:
+ _filter.add_pf_rating(rating_threshold, FilterOperator.GE)
+ if confidence_threshold != -1:
+ _filter.add_pf_confidence(confidence_threshold, FilterOperator.GE)
+ if freshness is not None:
+ _filter.add_pf_last_modified(calculate_freshness_time(freshness), FilterOperator.GE)
+
+ raw_indicators = indicators_obj.retrieve()
+
+ indicators = [json.loads(indicator.json) for indicator in raw_indicators]
+
+ return indicators
+
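+# Usage sketch (the owner name is hypothetical): the SDK filter narrows
+# results server-side, e.g. all Host indicators owned by "Example Org" with
+# rating >= 3 that were modified within the freshness window:
+# get_indicators(indicator_type='Host', owners='Example Org', rating_threshold=3, freshness=FRESHNESS)
+# Arguments left at None / -1 simply add no filter clause.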
+
+''' FUNCTIONS '''
+
+
+def ip_command():
+ args = demisto.args()
+ owners = args.get('owners', demisto.params().get('defaultOrg'))
+ if not owners:
+ return_error('You must specify an owner in the command, or by using the Organization parameter.')
+ rating_threshold = int(args.get('ratingThreshold', -1))
+ confidence_threshold = int(args.get('confidenceThreshold', -1))
+ ip_addr = args['ip']
+
+ ec, indicators = ip(ip_addr, owners, rating_threshold, confidence_threshold)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': indicators,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ThreatConnect IP Reputation for: {}'.format(ip_addr), indicators,
+ headerTransform=pascalToSpace),
+ 'EntryContext': ec
+ })
+
+
+@logger
+def ip(ip_addr, owners, rating_threshold, confidence_threshold):
+ indicators = get_indicators(ip_addr, 'Address', owners, rating_threshold, confidence_threshold)
+
+ if not indicators:
+ demisto.results('Make sure that the indicator exists in your ThreatConnect environment')
+ ec, indicators = create_context(indicators, include_dbot_score=True)
+
+ return ec, indicators
+
+
+def url_command():
+ args = demisto.args()
+ owners = args.get('owners', demisto.params().get('defaultOrg'))
+ if not owners:
+ return_error('You must specify an owner in the command, or by using the Organization parameter.')
+ url_addr = args['url']
+ parsed_url = urlparse(url_addr)
+ if not parsed_url.scheme:
+ return_error('Please provide a valid URL including a protocol (http/https)')
+ rating_threshold = int(args.get('ratingThreshold', -1))
+ confidence_threshold = int(args.get('confidenceThreshold', -1))
+
+ ec, indicators = url(url_addr, owners, rating_threshold, confidence_threshold)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': indicators,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ThreatConnect URL Reputation for: {}'.format(url_addr), indicators,
+ headerTransform=pascalToSpace),
+ 'EntryContext': ec
+ })
+
+
+@logger
+def url(url_addr, owners, rating_threshold, confidence_threshold):
+ indicators = get_indicators(url_addr, 'URL', owners, rating_threshold, confidence_threshold)
+ if not indicators:
+ demisto.results('Make sure that the indicator exists in your ThreatConnect environment')
+ ec, indicators = create_context(indicators, include_dbot_score=True)
+
+ return ec, indicators
+
+
+def file_command():
+ args = demisto.args()
+ owners = args.get('owners', demisto.params().get('defaultOrg'))
+ if not owners:
+ return_error('You must specify an owner in the command, or by using the Organization parameter.')
+ file_name = args['file']
+ rating_threshold = int(args.get('ratingThreshold', -1))
+ confidence_threshold = int(args.get('confidenceThreshold', -1))
+
+ ec, indicators = _file(file_name, owners, rating_threshold, confidence_threshold)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': indicators,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ThreatConnect File Report for: {}'.format(file_name), indicators,
+ headerTransform=pascalToSpace),
+ 'EntryContext': ec
+ })
+
+
+@logger
+def _file(file_hash, owners, rating_threshold, confidence_threshold):
+    indicators = get_indicators(file_hash, 'File', owners, rating_threshold, confidence_threshold)
+    if not indicators:
+        demisto.results('No indicators found. Make sure that the indicator exists in your ThreatConnect environment.')
+ ec, indicators = create_context(indicators, include_dbot_score=True)
+
+ return ec, indicators
+
+
+def domain_command():
+ args = demisto.args()
+ owners = args.get('owners', demisto.params().get('defaultOrg'))
+ if not owners:
+ return_error('You must specify an owner in the command, or by using the Organization parameter.')
+ rating_threshold = int(args.get('ratingThreshold', -1))
+ confidence_threshold = int(args.get('confidenceThreshold', -1))
+ domain_addr = args['domain']
+
+ ec, indicators = domain(domain_addr, owners, rating_threshold, confidence_threshold)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': indicators,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ThreatConnect Domain Reputation for: {}'.format(domain_addr), indicators,
+ headerTransform=pascalToSpace),
+ 'EntryContext': ec
+ })
+
+
+@logger
+def domain(domain_addr, owners, rating_threshold, confidence_threshold):
+    indicators = get_indicators(domain_addr, 'Host', owners, rating_threshold, confidence_threshold)
+    if not indicators:
+        demisto.results('No indicators found. Make sure that the indicator exists in your ThreatConnect environment.')
+    ec, indicators = create_context(indicators, include_dbot_score=True)
+
+    return ec, indicators
+
+
+def tc_owners_command():
+ raw_owners = tc_owners()
+ owners = []
+ for owner in raw_owners['data']['owner']:
+ owners.append({
+ 'ID': owner['id'],
+ 'Type': owner['type'],
+ 'Name': owner['name']
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_owners,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ThreatConnect Owners:', owners),
+ 'EntryContext': {'TC.Owner(val.ID && val.ID === obj.ID)': owners}
+ })
+
+
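+# Endpoints that the ThreatConnect SDK does not wrap as objects are called
+# through a raw RequestObject: set the HTTP method and URI, send it with
+# tc.api_request(), and parse the JSON body of the response.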
+@logger
+def tc_owners():
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('GET')
+ ro.set_request_uri('/v2/owners')
+ results = tc.api_request(ro)
+
+ return results.json()
+
+
+def tc_indicators_command():
+ args = demisto.args()
+ limit = int(args.get('limit', 500))
+    owners = args.get('owner')  # the YAML argument for this command is named 'owner'
+ ec, indicators, raw_response = tc_indicators(owners, limit)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_response,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ThreatConnect Indicators:', indicators, headerTransform=pascalToSpace),
+ 'EntryContext': ec
+ })
+
+
+@logger
+def tc_indicators(owners, limit):
+ tc = get_client()
+ tc.set_api_result_limit(limit)
+ ro = RequestObject()
+ ro.set_http_method('GET')
+ ro.set_request_uri('/v2/indicators?resultLimit={}'.format(limit))
+
+ if owners is not None:
+ ro.set_owner(owners)
+ ro.set_owner_allowed(True)
+
+ response = tc.api_request(ro).json()
+ indicators = response['data']['indicator']
+ ec, indicators = create_context(indicators, include_dbot_score=True)
+
+ return ec, indicators, response
+
+
+def tc_get_tags_command():
+ raw_response = tc_get_tags()
+ tags = [t['name'] for t in raw_response['data']['tag']]
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_response,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ThreatConnect Tags:', tags, headers='Name'),
+ 'EntryContext': {'TC.Tags': tags}
+ })
+
+
+@logger
+def tc_get_tags():
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('GET')
+ ro.set_request_uri('/v2/tags')
+
+ return tc.api_request(ro).json()
+
+
+def tc_tag_indicator_command():
+ args = demisto.args()
+ indicator = args['indicator']
+ tag = args['tag']
+ owners = args.get('owner')
+ indicators = tc_tag_indicator(indicator, tag, owners)
+
+ md = []
+ for ind in indicators:
+        md.append('Indicator {} with ID {} was tagged with: {}'.format(indicator, ind.id, tag))
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': '\n'.join(md)
+ })
+
+
+def tc_tag_indicator(indicator, tag, owners=None):
+ tc = get_client()
+ indicators = tc.indicators()
+ filter1 = indicators.add_filter()
+ filter1.add_indicator(indicator)
+
+ if owners is not None:
+ owners = owners.split(",")
+ filter1.add_owner(owners)
+
+    indicators = indicators.retrieve()
+    # use a distinct loop variable so the indicator argument is not shadowed
+    for ind in indicators:
+        ind.add_tag(tag)
+        ind.commit()
+
+ return indicators
+
+
+def tc_get_indicator_command():
+ args = demisto.args()
+ owners = args.get('owners', demisto.params().get('defaultOrg'))
+ if not owners:
+ return_error('You must specify an owner in the command, or by using the Organization parameter.')
+ rating_threshold = int(args.get('ratingThreshold', -1))
+ confidence_threshold = int(args.get('confidenceThreshold', -1))
+ indicator = args['indicator']
+
+ ec, indicators, raw_indicators = tc_get_indicator(indicator, owners, rating_threshold, confidence_threshold)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_indicators,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ThreatConnect indicator for: {}'.format(indicator), indicators,
+ headerTransform=pascalToSpace),
+ 'EntryContext': ec
+ })
+
+
+@logger
+def tc_get_indicator(indicator, owners, rating_threshold, confidence_threshold):
+ raw_indicators = get_indicators(indicator, owners=owners, rating_threshold=rating_threshold,
+ confidence_threshold=confidence_threshold)
+ ec, indicators = create_context(raw_indicators, include_dbot_score=True)
+
+ return ec, indicators, raw_indicators
+
+
+def tc_get_indicators_by_tag_command():
+ args = demisto.args()
+ tag = args['tag']
+ owner = args.get('owner')
+ response = tc_get_indicators_by_tag(tag, owner)
+ raw_indicators = response['data']['indicator']
+ ec, indicators = create_context(raw_indicators, include_dbot_score=True)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': response,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('ThreatConnect Indicators with tag: {}'.format(tag), indicators,
+ headerTransform=pascalToSpace),
+ 'EntryContext': ec
+ })
+
+
+@logger
+def tc_get_indicators_by_tag(tag, owner):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('GET')
+ cmd = '/v2/tags/{}/indicators'.format(tag)
+ if owner is not None:
+ cmd += '?owner={}'.format(owner)
+
+ ro.set_request_uri(cmd)
+
+ return tc.api_request(ro).json()
+
+
+def tc_add_indicator_command():
+ args = demisto.args()
+ indicator = args['indicator']
+ owner = args.get('owner', demisto.params().get('defaultOrg'))
+ if not owner:
+ return_error('You must specify an owner in the command, or by using the Organization parameter.')
+
+ rating = int(args.get('rating', 0))
+ confidence = int(args.get('confidence', 0))
+
+ tc_add_indicator(indicator, owner, rating, confidence)
+ # get the indicator for full object data
+ raw_indicators = get_indicators(indicator)
+ ec, indicators = create_context(raw_indicators)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_indicators,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Created new indicator successfully:', indicators,
+ headerTransform=pascalToSpace),
+ 'EntryContext': ec
+ })
+
+
+@logger
+def tc_add_indicator(indicator, organization, rating=0, confidence=0):
+ tc = get_client()
+ indicators = tc.indicators()
+ indicator = indicators.add(indicator, organization)
+ indicator.set_rating(rating)
+ indicator.set_confidence(confidence)
+
+ return json.loads(indicator.commit().json)
+
+
+def tc_create_incident_command():
+ args = demisto.args()
+ incident_name = args['incidentName']
+    owner = args.get('owner', demisto.params().get('defaultOrg'))
+ if not owner:
+ return_error('You must specify an owner in the command, or by using the Organization parameter.')
+
+ event_date = args.get('eventDate', datetime.utcnow().isoformat().split('.')[0] + 'Z')
+ tag = args.get('tag')
+ security_label = args.get('securityLabel')
+ description = args.get('description')
+
+ raw_incident = tc_create_incident(incident_name, owner, event_date, tag, security_label, description)
+ ec = {
+ 'ID': raw_incident['id'],
+ 'Name': raw_incident['name'],
+ 'Owner': raw_incident['ownerName'],
+ 'EventDate': raw_incident['eventDate'],
+ 'Tag': tag,
+ 'SecurityLabel': security_label
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_incident,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'Incident {} Created Successfully'.format(incident_name),
+ 'EntryContext': {
+ 'TC.Incident(val.ID && val.ID === obj.ID)': createContext([ec], removeNull=True)
+ }
+ })
+
+
+@logger
+def tc_create_incident(incident_name, owner, event_date, tag=None, security_label=None, description=None):
+ tc = get_client()
+ incidents = tc.incidents()
+ incident = incidents.add(incident_name, owner)
+ incident.set_event_date(event_date)
+ if tag is not None:
+ incident.add_tag(tag)
+ if security_label is not None:
+ incident.set_security_label(security_label)
+ if description is not None:
+ incident.add_attribute('Description', description)
+
+ return json.loads(incident.commit().json)
+
+
+def tc_fetch_incidents_command():
+ args = demisto.args()
+ incident_id = args.get('incidentId')
+ incident_name = args.get('incidentName')
+ owner = args.get('owner')
+
+ raw_incidents = tc_fetch_incidents(incident_id, incident_name, owner)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_incidents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Incidents:', raw_incidents, headerTransform=pascalToSpace),
+ 'EntryContext': {
+ 'TC.Incident(val.ID && val.ID === obj.ID)': createContext(raw_incidents, removeNull=True),
+ 'ThreatConnect.incidents': raw_incidents # backward compatible
+ }
+ })
+
+
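+# An SDK filter is added only when at least one of incident_id, owner, or
+# incident_name is supplied; otherwise every incident visible to the API user
+# is retrieved.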
+@logger
+def tc_fetch_incidents(incident_id, incident_name, owner):
+ tc = get_client()
+ incidents = tc.incidents()
+ if any((incident_id, owner, incident_name)):
+ filter1 = incidents.add_filter()
+ if incident_id is not None:
+ filter1.add_id(int(incident_id))
+ if owner is not None:
+ filter1.add_owner(owner)
+ if incident_name is not None:
+ filter1.add_pf_name(incident_name)
+
+ incidents.retrieve()
+ return [json.loads(incident.json) for incident in incidents]
+
+
+def tc_get_incident_associate_indicators_command():
+ args = demisto.args()
+ incident_id = int(args['incidentId'])
+ owners = args.get('owner')
+ if owners is not None:
+ owners = owners.split(",")
+
+ raw_indicators = tc_get_incident_associate_indicators(incident_id, owners)
+ ec, indicators = create_context(raw_indicators, include_dbot_score=True)
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_indicators,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown('Incident Associated Indicators:', indicators, headerTransform=pascalToSpace),
+ 'EntryContext': ec
+ })
+
+
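+# Two-step lookup: first collect the incident's indicator associations, then
+# re-query the indicators endpoint for the full objects. File associations
+# only carry hashes, so the MD5 is used as the lookup key.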
+@logger
+def tc_get_incident_associate_indicators(incident_id, owners):
+ tc = get_client()
+ incidents = tc.incidents()
+ _filter = incidents.add_filter()
+ _filter.add_id(incident_id)
+
+ incidents = incidents.retrieve()
+ indicators = []
+ for incident in incidents:
+ for ind in incident.indicator_associations:
+ if ind.type == 'File':
+ indicators.append(ind.indicator['md5'])
+ else:
+ indicators.append(ind.indicator)
+    if not indicators:
+        return []
+
+ indicators_obj = tc.indicators()
+ _filter = indicators_obj.add_filter()
+ if owners is not None:
+ _filter.add_owner(owners)
+ for ind in indicators:
+ _filter.add_indicator(ind)
+
+ raw_indicators = indicators_obj.retrieve()
+ return [json.loads(indicator.json) for indicator in raw_indicators]
+
+
+def tc_incident_associate_indicator_command():
+ args = demisto.args()
+ incident_id = int(args['incidentId'])
+ indicator = args['indicator']
+ types = {
+ 'ADDRESSES': ResourceType.ADDRESSES,
+ 'EMAIL_ADDRESSES': ResourceType.EMAIL_ADDRESSES,
+ 'FILES': ResourceType.FILES,
+ 'HOSTS': ResourceType.HOSTS,
+ 'URLS': ResourceType.URLS,
+ }
+ indicator_type = types.get(args['indicatorType'], args['indicatorType'])
+ owners = args.get('owner')
+ if owners is not None:
+ owners = owners.split(",")
+
+ incidents = tc_incident_associate_indicator(incident_id, indicator_type, indicator, owners)
+ md = []
+ for inc in incidents:
+        md.append('Incident {} with ID {} was associated with indicator: {}'.format(inc['name'], inc['id'], indicator))
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': incidents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': '\n'.join(md),
+ 'EntryContext': {'TC.Incident(val.ID && val.ID === obj.ID)': createContext(incidents, removeNull=True)}
+ })
+
+
+@logger
+def tc_incident_associate_indicator(incident_id, indicator_type, indicator, owners):
+ tc = get_client()
+ incidents = tc.incidents()
+ filter1 = incidents.add_filter()
+ filter1.add_id(incident_id)
+ if owners is not None:
+ filter1.add_owner(owners)
+ raw_incidents = incidents.retrieve()
+
+ incidents = []
+ for incident in raw_incidents:
+ incident.associate_indicator(indicator_type, indicator)
+ incidents.append(json.loads(incident.commit().json))
+
+ return incidents
+
+
+def tc_update_indicator_command():
+ args = demisto.args()
+ indicator = args['indicator']
+ rating = args.get('rating')
+ confidence = args.get('confidence')
+ size = args.get('size')
+ dns_active = args.get('dnsActive')
+ whois_active = args.get('whoisActive')
+    false_positive = args.get('falsePositive', 'False').lower() == 'true'
+ observations = int(args.get('observations', 0))
+ security_label = args.get('securityLabel')
+ threat_assess_confidence = int(args.get('threatAssessConfidence', -1))
+ threat_assess_rating = int(args.get('threatAssessRating', -1))
+
+ raw_indicators = tc_update_indicator(indicator, rating=rating, confidence=confidence, size=size,
+ dns_active=dns_active, whois_active=whois_active,
+ false_positive=false_positive, observations=observations,
+ security_label=security_label,
+ threat_assess_confidence=threat_assess_confidence,
+ threat_assess_rating=threat_assess_rating)
+ ec, indicators = create_context(raw_indicators)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_indicators,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': '\n'.join('Indicator {} Updated Successfully'.format(ind['ID']) for ind in indicators),
+ 'EntryContext': ec
+ })
+
+
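+# Updates are applied per retrieved indicator: generic fields first, then
+# type-specific ones (size for File, dnsActive/whoisActive for Host), and
+# each indicator is committed individually.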
+@logger
+def tc_update_indicator(indicator, rating=None, confidence=None, size=None, dns_active=None, whois_active=None,
+ false_positive=False, observations=0, security_label=None, threat_assess_confidence=-1,
+ threat_assess_rating=-1):
+ tc = get_client()
+ indicators = tc.indicators()
+ filter1 = indicators.add_filter()
+ filter1.add_indicator(indicator)
+
+ raw_indicators = []
+ for ind in indicators.retrieve():
+ if rating is not None:
+ ind.set_rating(rating)
+ if confidence is not None:
+ ind.set_confidence(int(confidence))
+ if false_positive:
+ ind.add_false_positive()
+ if observations != 0:
+ ind.add_observation(observations)
+ if security_label is not None:
+ ind.add_security_label(security_label)
+ if threat_assess_confidence != -1:
+ ind.set_threat_assess_confidence(threat_assess_confidence)
+ if threat_assess_rating != -1:
+ ind.set_threat_assess_rating(threat_assess_rating)
+
+ if ind.type == 'File' and size is not None:
+ ind.add_size(size)
+ if ind.type == 'Host' and dns_active is not None:
+ ind.set_dns_active(dns_active)
+ if ind.type == 'Host' and whois_active is not None:
+ ind.set_whois_active(whois_active)
+
+ raw_indicators.append(json.loads(ind.commit().json))
+
+ return raw_indicators
+
+
+def tc_delete_indicator_command():
+ args = demisto.args()
+ indicator = args['indicator']
+
+ tc_delete_indicator(indicator)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': 'Indicator {} removed Successfully'.format(indicator)
+ })
+
+
+@logger
+def tc_delete_indicator(indicator):
+ tc = get_client()
+ indicators = tc.indicators()
+ filter1 = indicators.add_filter()
+ filter1.add_indicator(indicator)
+ indicators = indicators.retrieve()
+ for ind in indicators:
+ ind.delete()
+
+
+def tc_delete_indicator_tag_command():
+ args = demisto.args()
+ indicator = args['indicator']
+ tag = args['tag']
+
+ indicators = tc_delete_indicator_tag(indicator, tag)
+ raw_indicators = [json.loads(ind.json) for ind in indicators]
+ ec, _ = create_context(raw_indicators)
+
+ md = []
+ for ind in indicators:
+ md.append('Removed tag {} from indicator {}.'.format(tag, ind.indicator))
+ if len(md) == 0:
+ md.append('No indicators found')
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': raw_indicators,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': '\n'.join(md),
+ 'EntryContext': ec
+ })
+
+
+@logger
+def tc_delete_indicator_tag(indicator, tag, owners=None):
+ tc = get_client()
+ indicators = tc.indicators()
+ filter1 = indicators.add_filter()
+ filter1.add_indicator(indicator)
+
+ if owners is not None:
+ owners = owners.split(",")
+ filter1.add_owner(owners)
+
+    indicators = indicators.retrieve()
+    # use a distinct loop variable so the indicator argument is not shadowed
+    for ind in indicators:
+        ind.delete_tag(tag)
+        ind.commit()
+
+ return indicators
+
+
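+# Group-creation commands (campaign, event, threat) default the owner to the
+# integration's Organization parameter and their date fields to the current
+# UTC time in ISO-8601 format with a trailing 'Z'.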
+def tc_create_campaign_command():
+ args = demisto.args()
+ name = args['name']
+    owner = args.get('owner', demisto.params().get('defaultOrg'))
+    if not owner:
+ return_error('You must specify an owner in the command, or by using the Organization parameter.')
+
+ first_seen = args.get('firstSeen', datetime.utcnow().isoformat().split('.')[0] + 'Z')
+ tag = args.get('tag')
+ security_label = args.get('securityLabel')
+ description = args.get('description')
+
+ raw_campaign = tc_create_campaign(name, owner, first_seen, tag, security_label, description)
+ ec = {
+ 'ID': raw_campaign['id'],
+ 'Name': raw_campaign['name'],
+ 'Owner': raw_campaign['owner']['name'],
+ 'FirstSeen': raw_campaign['firstSeen'],
+ 'Tag': tag,
+ 'SecurityLabel': security_label
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_campaign,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'Campaign {} Created Successfully'.format(name),
+ 'EntryContext': {
+ 'TC.Campaign(val.ID && val.ID === obj.ID)': createContext([ec], removeNull=True)
+ }
+ })
+
+
+@logger
+def tc_create_campaign(name, owner, first_seen, tag=None, security_label=None, description=None):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('POST')
+ ro.set_request_uri('/v2/groups/campaigns')
+ body = {
+ 'name': name,
+ 'firstSeen': first_seen,
+ }
+ ro.set_body(json.dumps(body))
+ response = tc.api_request(ro).json()
+
+    if response.get('status') == 'Success':
+        output = response.get('data', {}).get('campaign', {})
+        campaign_id = output['id']
+        if description is not None:
+            # attach the description as a displayed attribute of the campaign
+            ro = RequestObject()
+            ro.set_http_method('POST')
+            ro.set_request_uri('/v2/groups/campaigns/{}/attributes'.format(campaign_id))
+            body = {
+                'type': 'Description',
+                'value': description,
+                'displayed': 'true'
+            }
+            ro.set_body(json.dumps(body))
+            tc.api_request(ro).json()
+        if tag is not None:
+            # tag the new campaign
+            ro = RequestObject()
+            ro.set_http_method('POST')
+            ro.set_request_uri('/v2/groups/campaigns/{}/tags/{}'.format(campaign_id, tag))
+            tc.api_request(ro)
+        if security_label is not None:
+            # apply the security label to the new campaign
+            ro = RequestObject()
+            ro.set_http_method('POST')
+            ro.set_request_uri('/v2/groups/campaigns/{}/securityLabels/{}'.format(campaign_id, security_label))
+            tc.api_request(ro)
+
+        return output
+    else:
+        return_error('Failed to create campaign')
+
+
+def tc_create_event_command():
+ args = demisto.args()
+ name = args['name']
+ event_date = args.get('EventDate', datetime.utcnow().isoformat().split('.')[0] + 'Z')
+ status = args.get('status')
+    owner = args.get('owner', demisto.params().get('defaultOrg'))
+    if not owner:
+ return_error('You must specify an owner in the command, or by using the Organization parameter.')
+
+ description = args.get('description')
+ tag = args.get('tag')
+
+ raw_event = tc_create_event(name, owner, event_date, tag, status, description)
+ ec = {
+ 'ID': raw_event['id'],
+ 'Name': raw_event['name'],
+ 'Owner': raw_event['owner']['name'],
+ 'Date': raw_event['eventDate'],
+ 'Tag': tag,
+ 'Status': status
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_event,
+ 'ReadableContentsFormat': formats['markdown'],
+        'HumanReadable': 'Event {} Created Successfully'.format(name),
+ 'EntryContext': {
+ 'TC.Event(val.ID && val.ID === obj.ID)': createContext([ec], removeNull=True)
+ }
+ })
+
+
+def tc_create_event(name, owner, event_date, tag=None, status=None, description=None):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('POST')
+ ro.set_request_uri('/v2/groups/events')
+ body = {
+ 'name': name,
+ 'eventDate': event_date,
+ 'status': status
+ }
+ ro.set_body(json.dumps(body))
+ response = tc.api_request(ro).json()
+
+    if response.get('status') == 'Success':
+        output = response.get('data', {}).get('event', {})
+        event_id = output['id']
+        if description is not None:
+            # attach the description as a displayed attribute of the event
+            ro = RequestObject()
+            ro.set_http_method('POST')
+            ro.set_request_uri('/v2/groups/events/{}/attributes'.format(event_id))
+            body = {
+                'type': 'Description',
+                'value': description,
+                'displayed': 'true'
+            }
+            ro.set_body(json.dumps(body))
+            tc.api_request(ro).json()
+        if tag is not None:
+            # tag the new event
+            ro = RequestObject()
+            ro.set_http_method('POST')
+            ro.set_request_uri('/v2/groups/events/{}/tags/{}'.format(event_id, tag))
+            tc.api_request(ro)
+
+        return output
+    else:
+        return_error('Failed to create event')
+
+
+def tc_create_threat_command():
+ args = demisto.args()
+ name = args['name']
+ date = args.get('dateAdded', datetime.utcnow().isoformat().split('.')[0] + 'Z')
+    owner = args.get('owner', demisto.params().get('defaultOrg'))
+    if not owner:
+ return_error('You must specify an owner in the command, or by using the Organization parameter.')
+
+ raw_threat = tc_create_threat(name, owner, date)
+ ec = {
+ 'ID': raw_threat['id'],
+ 'Name': raw_threat['name']
+ }
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': raw_threat,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': 'Threat {} Created Successfully'.format(name),
+ 'EntryContext': {
+ 'TC.Threat(val.ID && val.ID === obj.ID)': createContext([ec], removeNull=True)
+ }
+ })
+
+
+def tc_create_threat(name, owner, date):
+ tc = get_client()
+ threats = tc.threats()
+ threat = threats.add(name, owner)
+ threat.set_date_added(date)
+
+ return json.loads(threat.commit().json)
+
+
+def tc_delete_group_command():
+ args = demisto.args()
+ group_id = int(args['groupID'])
+ group_type = args['type']
+
+ success = tc_delete_group(group_id, group_type.lower())
+ if success:
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': '{} {} deleted Successfully'.format(group_type.lower(), group_id)
+ })
+ else:
+ return_error('Failed to delete {} {}'.format(group_type, group_id))
+
+
+def tc_delete_group(group_id, group_type):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('DELETE')
+ ro.set_request_uri('/v2/groups/{}/{}'.format(group_type, group_id))
+ response = tc.api_request(ro).json()
+
+ return response['status'] == 'Success'
+
+
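+# The group endpoints below follow a request/command pair convention: the
+# *_request helper issues the raw /v2/groups call and returns the parsed
+# JSON, and the command function shapes that JSON into war-room output and
+# entry context.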
+def tc_add_group_attribute_request(group_type, group_id, attribute_type, attribute_value):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('POST')
+ ro.set_request_uri('/v2/groups/{}/{}/attributes'.format(group_type, group_id))
+ body = {
+ 'type': attribute_type,
+ 'value': attribute_value,
+ 'displayed': 'true'
+ }
+ ro.set_body(json.dumps(body))
+ response = tc.api_request(ro).json()
+
+ return response
+
+
+def tc_add_group_attribute():
+ group_id = int(demisto.args().get('group_id'))
+ group_type = demisto.args().get('group_type')
+ attribute_type = demisto.args().get('attribute_type')
+ attribute_value = demisto.args().get('attribute_value')
+ headers = ['Type', 'Value', 'ID', 'DateAdded', 'LastModified']
+ attribute = tc_add_group_attribute_request(group_type, group_id, attribute_type, attribute_value)
+    data = attribute.get('data', {}).get('attribute', {})
+ contents = {
+ 'Type': data.get('type'),
+ 'Value': data.get('value'),
+ 'ID': data.get('id'),
+ 'DateAdded': data.get('dateAdded'),
+ 'LastModified': data.get('lastModified')
+ }
+ context = {
+ 'TC.Group(val.ID && val.ID === obj.ID)': contents
+ }
+
+ return_outputs(
+ tableToMarkdown('The attribute was added successfully to group {}'.format(group_id), contents, headers,
+ removeNull=True),
+ context,
+ attribute
+ )
+
+
+def add_group_security_label_request(group_type, group_id, security_label):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('POST')
+ ro.set_request_uri('/v2/groups/{}/{}/securityLabels/{}'.format(group_type, group_id, security_label))
+
+ response = tc.api_request(ro).json()
+
+ return response.get('status') == 'Success'
+
+
+def add_group_security_label():
+ group_id = int(demisto.args().get('group_id'))
+ group_type = demisto.args().get('group_type')
+ security_label = demisto.args().get('security_label_name')
+
+ add_group_security_label_request(group_type, group_id, security_label)
+
+ demisto.results('The security label {} was added successfully to {} {}'.format(security_label, group_type,
+ group_id))
+
+
+def add_group_tags_request(group_type, group_id, tag_name):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('POST')
+ ro.set_request_uri('/v2/groups/{}/{}/tags/{}'.format(group_type, group_id, tag_name))
+
+ response = tc.api_request(ro).json()
+
+ return response.get('status') == 'Success'
+
+
+def add_group_tag():
+ group_id = int(demisto.args().get('group_id'))
+ group_type = demisto.args().get('group_type')
+ tag_name = demisto.args().get('tag_name')
+
+ add_group_tags_request(group_type, group_id, tag_name)
+
+    demisto.results('The tag {} was added successfully to {} {}'.format(tag_name, group_type, group_id))
+
+
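+# get_events_request lists every event visible to the API user; tc_get_events
+# trims the raw JSON down to the table columns and the TC.Event context.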
+def get_events_request():
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('GET')
+ ro.set_request_uri('/v2/groups/events')
+
+ return tc.api_request(ro).json()
+
+
+def tc_get_events():
+ raw_response = get_events_request()
+ data = raw_response.get('data', {}).get('event', [])
+ content = []
+ headers = ['ID', 'Name', 'OwnerName', 'EventDate', 'DateAdded', 'Status']
+
+ for event in data:
+ content.append({
+ 'ID': event.get('id'),
+ 'Name': event.get('name'),
+ 'OwnerName': event.get('ownerName'),
+ 'DateAdded': event.get('dateAdded'),
+ 'EventDate': event.get('eventDate'),
+ 'Status': event.get('status')
+ })
+ context = {
+ 'TC.Event(val.ID && val.ID === obj.ID)': content
+ }
+
+ return_outputs(
+ tableToMarkdown('ThreatConnect Events', content, headers, removeNull=True),
+ context,
+ raw_response
+ )
+
+
+def tc_get_indicator_types_request():
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('GET')
+ ro.set_request_uri('/v2/types/indicatorTypes')
+
+ return tc.api_request(ro).json()
+
+
+def tc_get_indicator_types():
+ raw_response = tc_get_indicator_types_request()
+ data = raw_response.get('data', {}).get('indicatorType', [])
+ content = []
+ headers = ['Name', 'Custom', 'Parsable', 'ApiBranch', 'CasePreference', 'value1Label', 'Value1Type']
+
+ for type_ in data:
+ content.append({
+ 'Custom': type_.get('custom'),
+ 'Name': type_.get('name'),
+ 'Parsable': type_.get('parsable'),
+ 'ApiBranch': type_.get('apiBranch'),
+ 'ApiEntity': type_.get('apiEntity'),
+ 'CasePreference': type_.get('casePreference'),
+ 'Value1Label': type_.get('value1Label'),
+ 'Value1Type': type_.get('value1Type')
+ })
+ context = {
+ 'TC.IndicatorType(val.Name && val.Name === obj.Name)': content
+ }
+
+ return_outputs(
+ tableToMarkdown('ThreatConnect indicator types', content, headers, removeNull=True),
+ context,
+ raw_response
+ )
+
+
+def associate_indicator_request(indicator_type, indicator, group_type, group_id):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('POST')
+ ro.set_request_uri('/v2/indicators/{}/{}/groups/{}/{}'.format(indicator_type, indicator, group_type, group_id))
+ response = tc.api_request(ro).json()
+
+ return response
+
+
+def associate_indicator():
+ group_id = int(demisto.args().get('group_id'))
+ group_type = demisto.args().get('group_type')
+ indicator_type = demisto.args().get('indicator_type')
+ indicator = demisto.args().get('indicator')
+
+ response = associate_indicator_request(indicator_type, indicator, group_type, group_id)
+
+ if response.get('status') == 'Success':
+ contents = {
+ 'IndicatorType': indicator_type,
+ 'Indicator': indicator,
+ 'GroupType': group_type,
+ 'GroupID': group_id
+ }
+ else:
+ return_error(response.get('message'))
+
+ context = {
+ 'TC.Group(val.Indicator && val.Indicator === obj.Indicator)': contents
+ }
+
+ return_outputs(
+ tableToMarkdown('The indicator was associated successfully', contents, removeNull=True),
+ context
+ )
+
+
+def get_groups_request(group_type):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('GET')
+ ro.set_request_uri('/v2/groups/{}'.format(group_type))
+
+ return tc.api_request(ro).json()
+
+
+def tc_get_groups():
+ group_type = demisto.args().get('group_type')
+ raw_response = get_groups_request(group_type)
+ headers = ['ID', 'Name', 'OwnerName', 'EventDate', 'DateAdded', 'Status']
+    # The API nests results under the singular form of the group type, e.g.
+    # /v2/groups/adversaries returns {'data': {'adversary': [...]}}.
+    group_type_to_key = {
+        'adversaries': 'adversary',
+        'campaigns': 'campaign',
+        'documents': 'document',
+        'emails': 'email',
+        'events': 'event',
+        'incidents': 'incident',
+        'intrusionSets': 'intrusionSet',
+        'reports': 'report',
+        'signatures': 'signature',
+        'threats': 'threat',
+    }
+    data = raw_response.get('data', {}).get(group_type_to_key.get(group_type, group_type), [])
+
+ content = []
+
+ for group in data:
+ content.append({
+ 'ID': group.get('id'),
+ 'Name': group.get('name'),
+ 'OwnerName': group.get('ownerName'),
+ 'DateAdded': group.get('dateAdded'),
+ 'EventDate': group.get('eventDate'),
+ 'Status': group.get('status')
+ })
+ context = {
+ 'TC.Group(val.ID && val.ID === obj.ID)': content
+ }
+
+ return_outputs(
+ tableToMarkdown('ThreatConnect {}'.format(group_type), content, headers, removeNull=True),
+ context,
+ raw_response
+ )
+
+
+def get_group_request(group_type, group_id):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('GET')
+ ro.set_request_uri('/v2/groups/{}/{}'.format(group_type, group_id))
+
+ return tc.api_request(ro).json()
+
+
+def get_group():
+ """
+ Retrieve a single Group
+ """
+ group_type = demisto.args().get('group_type')
+ try:
+ group_id = int(demisto.args().get('group_id'))
+ except TypeError as t:
+ return_error('group_id must be a number', t)
+
+ response = get_group_request(group_type, group_id).get('data', {})
+    # Same singular-key convention as in tc_get_groups: the group is nested
+    # under the singular form of its type.
+    group_type_to_key = {
+        'adversaries': 'adversary',
+        'campaigns': 'campaign',
+        'documents': 'document',
+        'emails': 'email',
+        'events': 'event',
+        'incidents': 'incident',
+        'intrusionSets': 'intrusionSet',
+        'reports': 'report',
+        'signatures': 'signature',
+        'threats': 'threat',
+    }
+    data = response.get(group_type_to_key.get(group_type, group_type), {})
+
+    owner = {
+        'Name': data.get('owner', {}).get('name'),
+        'ID': data.get('owner', {}).get('id'),
+        'Type': data.get('owner', {}).get('type')
+    }
+ contents = {
+ 'ID': data.get('id'),
+ 'Name': data.get('name'),
+ 'Owner': owner,
+ 'DateAdded': data.get('dateAdded'),
+ 'EventDate': data.get('eventDate'),
+ 'Status': data.get('status')
+ }
+
+ context = {
+ 'TC.Group(val.ID && val.ID === obj.ID)': contents
+ }
+
+ return_outputs(
+ tableToMarkdown('ThreatConnect Group information', contents, removeNull=True),
+ context,
+ response
+ )
+
+
+def get_group_attributes_request(group_type, group_id):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('GET')
+ ro.set_request_uri('/v2/groups/{}/{}/attributes'.format(group_type, group_id))
+
+ return tc.api_request(ro).json()
+
+
+def get_group_attributes():
+ """
+ Retrieve a Group's Attributes
+ """
+ group_type = demisto.args().get('group_type')
+ try:
+ group_id = int(demisto.args().get('group_id'))
+ except TypeError as t:
+ return_error('group_id must be a number', t)
+ contents = []
+ headers = ['AttributeID', 'Type', 'Value', 'DateAdded', 'LastModified', 'Displayed']
+ response = get_group_attributes_request(group_type, group_id)
+ data = response.get('data', {}).get('attribute', [])
+
+ if response.get('status') == 'Success':
+ for attribute in data:
+ contents.append({
+ 'GroupID': group_id,
+ 'AttributeID': attribute.get('id'),
+ 'Type': attribute.get('type'),
+ 'Value': attribute.get('value'),
+ 'DateAdded': attribute.get('dateAdded'),
+ 'LastModified': attribute.get('lastModified'),
+ 'Displayed': attribute.get('displayed')
+ })
+
+ else:
+ return_error(response.get('message'))
+
+ context = {
+ 'TC.Group.Attribute(val.GroupID && val.GroupID === obj.GroupID && val.AttributeID && val.AttributeID ==='
+ ' obj.AttributeID)': contents
+ }
+
+ return_outputs(
+ tableToMarkdown('ThreatConnect Group Attributes', contents, headers, removeNull=True),
+ context,
+ response
+ )
+
+
+def get_group_security_labels_request(group_type, group_id):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('GET')
+ ro.set_request_uri('/v2/groups/{}/{}/securityLabels'.format(group_type, group_id))
+
+ return tc.api_request(ro).json()
+
+
+def get_group_security_labels():
+ """
+ Retrieve a Group's Security Labels
+ """
+ group_type = demisto.args().get('group_type')
+ try:
+ group_id = int(demisto.args().get('group_id'))
+ except TypeError as t:
+ return_error('group_id must be a number', t)
+ contents = []
+ headers = ['Name', 'Description', 'DateAdded']
+ response = get_group_security_labels_request(group_type, group_id)
+ data = response.get('data', {}).get('securityLabel', [])
+
+ if response.get('status') == 'Success':
+ for security_label in data:
+ contents.append({
+ 'GroupID': group_id,
+ 'Name': security_label.get('name'),
+ 'Description': security_label.get('description'),
+ 'DateAdded': security_label.get('dateAdded')
+ })
+
+ else:
+ return_error(response.get('message'))
+
+ context = {
+ 'TC.Group.SecurityLabel(val.GroupID && val.GroupID === obj.GroupID && val.Name && val.Name === '
+ 'obj.Name)': contents
+ }
+
+ return_outputs(
+ tableToMarkdown('ThreatConnect Group Security Labels', contents, headers, removeNull=True),
+ context
+ )
+
+
+def get_group_tags_request(group_type, group_id):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('GET')
+ ro.set_request_uri('/v2/groups/{}/{}/tags'.format(group_type, group_id))
+
+ return tc.api_request(ro).json()
+
+
+def get_group_tags():
+ """
+ Retrieve the Tags for a Group
+ """
+ group_type = demisto.args().get('group_type')
+ try:
+ group_id = int(demisto.args().get('group_id'))
+ except TypeError as t:
+ return_error('group_id must be a number', t)
+ contents = []
+ context_entries = []
+ response = get_group_tags_request(group_type, group_id)
+ data = response.get('data', {}).get('tag', [])
+
+ if response.get('status') == 'Success':
+ for tags in data:
+ contents.append({
+ 'Name': tags.get('name')
+ })
+
+ context_entries.append({
+ 'GroupID': group_id,
+ 'Name': tags.get('name')
+ })
+ else:
+ return_error(response.get('message'))
+
+ context = {
+ 'TC.Group.Tag(val.GroupID && val.GroupID === obj.GroupID && val.Name && val.Name === obj.Name)': context_entries
+ }
+
+ return_outputs(
+ tableToMarkdown('ThreatConnect Group Tags', contents, removeNull=True),
+ context,
+ response
+ )
+
+
+def get_group_indicator_request(group_type, group_id):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('GET')
+ ro.set_request_uri('/v2/groups/{}/{}/indicators'.format(group_type, group_id))
+
+ return tc.api_request(ro).json()
+
+
+def get_group_indicator():
+ """
+ View Indicators associated with a given Group
+ """
+ group_type = demisto.args().get('group_type')
+ try:
+ group_id = int(demisto.args().get('group_id'))
+ except TypeError as t:
+ return_error('group_id must be a number', t)
+ contents = []
+ response = get_group_indicator_request(group_type, group_id)
+ data = response.get('data', {}).get('indicator', [])
+
+ if response.get('status') == 'Success':
+ for indicator in data:
+ contents.append({
+ 'GroupID': group_id,
+ 'IndicatorID': indicator.get('id'),
+ 'OwnerName': indicator.get('ownerName'),
+ 'Type': indicator.get('type'),
+ 'DateAdded': indicator.get('dateAdded'),
+ 'LastModified': indicator.get('lastModified'),
+ 'Rating': indicator.get('rating'),
+ 'Confidence': indicator.get('confidence'),
+ 'ThreatAssertRating': indicator.get('threatAssessRating'),
+ 'ThreatAssessConfidence': indicator.get('threatAssessConfidence'),
+ 'Summary': indicator.get('summary')
+ })
+
+ else:
+ return_error(response.get('message'))
+
+ context = {
+ 'TC.Group.Indicator(val.GroupID && val.GroupID === obj.GroupID && val.IndicatorID && val.IndicatorID === '
+ 'obj.IndicatorID)': contents
+ }
+
+ return_outputs(
+ tableToMarkdown('ThreatConnect Group Indicators', contents, removeNull=True),
+ context,
+ response
+ )
+
+
+def get_group_associated_request(group_type, group_id):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('GET')
+ ro.set_request_uri('/v2/groups/{}/{}/groups'.format(group_type, group_id))
+
+ return tc.api_request(ro).json()
+
+
+def get_group_associated():
+ """
+ View Indicators associated with a given Group
+ """
+ group_type = demisto.args().get('group_type')
+ try:
+ group_id = int(demisto.args().get('group_id'))
+ except TypeError as t:
+ return_error('group_id must be a number', t)
+ contents = []
+ headers = ['GroupID', 'Name', 'Type', 'OwnerName', 'DateAdded']
+ response = get_group_associated_request(group_type, group_id)
+ data = response.get('data', {}).get('group', [])
+
+ if response.get('status') == 'Success':
+ for group in data:
+ contents.append({
+ 'GroupID': group.get('id'),
+ 'Name': group.get('name'),
+ 'Type': group.get('type'),
+ 'DateAdded': group.get('dateAdded'),
+ 'OwnerName': group.get('ownerName')
+ })
+
+ else:
+ return_error(response.get('message'))
+
+ context = {
+ 'TC.Group.AssociatedGroup(val.GroupID && val.GroupID === obj.GroupID)': contents
+ }
+
+ return_outputs(
+ tableToMarkdown('ThreatConnect Associated Groups', contents, headers, removeNull=True),
+ context,
+ response
+ )
+
+
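+# Group-to-group association is a bare POST to
+# /v2/groups/<type>/<id>/groups/<associated-type>/<associated-id>; a
+# 'Success' status in the response body is the only confirmation returned.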
+def associate_group_to_group_request(group_type, group_id, associated_group_type, associated_group_id):
+ tc = get_client()
+ ro = RequestObject()
+ ro.set_http_method('POST')
+ ro.set_request_uri('/v2/groups/{}/{}/groups/{}/{}'.format(group_type, group_id, associated_group_type,
+ associated_group_id))
+
+ return tc.api_request(ro).json()
+
+
+def associate_group_to_group():
+ """
+ Associate one Group with another
+ """
+
+ group_type = demisto.args().get('group_type')
+ associated_group_type = demisto.args().get('associated_group_type')
+ try:
+ group_id = int(demisto.args().get('group_id'))
+ except TypeError as t:
+ return_error('group_id must be a number', t)
+ try:
+ associated_group_id = int(demisto.args().get('associated_group_id'))
+ except TypeError as t:
+ return_error('associated_group_id must be a number', t)
+
+ response = associate_group_to_group_request(group_type, group_id, associated_group_type, associated_group_id)
+
+ if response.get('status') == 'Success':
+ context_entries = {
+ 'GroupID': group_id,
+ 'GroupType': group_type,
+ 'AssociatedGroupID': associated_group_id,
+ 'AssociatedGroupType': associated_group_type
+ }
+ context = {
+ 'TC.Group.AssociatedGroup(val.GroupID && val.GroupID === obj.GroupID)': context_entries
+ }
+ return_outputs('The group {} was associated successfully.'.format(associated_group_id),
+ context,
+ response)
+ else:
+ return_error(response.get('message'))
+
+
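+# Document groups are created through the SDK rather than a raw RequestObject
+# so that document.upload() can attach the file contents before the commit.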
+def create_document_group_request(contents, file_name, name, owner, malware, password, security_label,
+                                  description):
+ tc = get_client()
+ documents = tc.documents()
+
+ document = documents.add(name, owner)
+ document.set_file_name(file_name)
+
+ # upload the contents of the file into the Document
+ document.upload(contents)
+ if malware:
+ document.set_malware(True)
+ document.set_password(password)
+ if security_label:
+ document.set_security_label(security_label)
+ if description:
+ document.add_attribute('Description', description)
+
+ return json.loads(document.commit().json)
+
+
+def create_document_group():
+ file_name = demisto.args().get('file_name')
+ name = demisto.args().get('name')
+    malware = bool(strtobool(demisto.args().get('malware', 'false')))
+ password = demisto.args().get('password')
+ res = demisto.getFilePath(demisto.args()['entry_id'])
+ owner = demisto.args().get('owner', demisto.params().get('defaultOrg'))
+ if not owner:
+ return_error('You must specify an owner in the command, or by using the Organization parameter.')
+
+ security_label = demisto.args().get('securityLabel')
+ description = demisto.args().get('description')
+
+    # read the uploaded file's contents from the war room entry
+    with open(res['path'], 'rb') as f:
+        contents = f.read()
+
+    raw_document = create_document_group_request(contents, file_name, name, owner, malware, password,
+                                                 security_label, description)
+ content = {
+ 'ID': raw_document.get('id'),
+ 'Name': raw_document.get('name'),
+ 'Owner': raw_document.get('ownerName'),
+ 'EventDate': raw_document.get('eventDate'),
+ 'Description': description,
+ 'SecurityLabel': security_label
+ }
+ context = {
+ 'TC.Group(val.ID && val.ID === obj.ID)': content
+ }
+ return_outputs(tableToMarkdown('ThreatConnect document group was created successfully', content, removeNull=True),
+ context,
+ raw_document)
+
+
+def get_document_request(document_id):
+    tc = get_client()
+    documents = tc.documents()
+    # set a filter to retrieve only the Document with the requested ID
+    filter1 = documents.add_filter()
+    filter1.add_id(document_id)
+    try:
+        # retrieve the Document
+        documents.retrieve()
+    except RuntimeError as e:
+        return_error('Error: {0}'.format(str(e)))
+
+    # there should be at most one matching Document; return the first one
+    # whose contents download successfully
+    for document in documents:
+        document.download()
+        if document.contents is not None:
+            return document
+
+    return_error('No document was found.')
+
+
+def download_document():
+ """
+ Download the contents of a Document
+ """
+ try:
+ document_id = int(demisto.args().get('document_id'))
+ except TypeError as t:
+ return_error('document_id must be a number', t)
+ document = get_document_request(document_id)
+
+ file_name = document.file_name
+ file_content = document.contents
+ demisto.results(fileResult(file_name, file_content))
+
+
+def test_integration():
+ tc = get_client()
+ owners = tc.owners()
+ owners.retrieve()
+ demisto.results('ok')
+
+
+''' EXECUTION CODE '''
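+# Maps each integration command name to its handler; the dispatcher below
+# looks up the current command here and invokes it.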
+COMMANDS = {
+ 'test-module': test_integration,
+ 'ip': ip_command,
+ 'url': url_command,
+ 'file': file_command,
+ 'domain': domain_command,
+
+ 'tc-owners': tc_owners_command,
+ 'tc-indicators': tc_indicators_command,
+ 'tc-get-tags': tc_get_tags_command,
+ 'tc-tag-indicator': tc_tag_indicator_command,
+ 'tc-get-indicator': tc_get_indicator_command,
+ 'tc-get-indicators-by-tag': tc_get_indicators_by_tag_command,
+ 'tc-add-indicator': tc_add_indicator_command,
+
+ 'tc-create-incident': tc_create_incident_command,
+ 'tc-fetch-incidents': tc_fetch_incidents_command,
+ 'tc-get-incident-associate-indicators': tc_get_incident_associate_indicators_command,
+ 'tc-incident-associate-indicator': tc_incident_associate_indicator_command,
+ 'tc-update-indicator': tc_update_indicator_command,
+ 'tc-delete-indicator': tc_delete_indicator_command,
+ 'tc-delete-indicator-tag': tc_delete_indicator_tag_command,
+ 'tc-create-campaign': tc_create_campaign_command,
+ 'tc-create-event': tc_create_event_command,
+ 'tc-get-events': tc_get_events,
+ 'tc-add-group-attribute': tc_add_group_attribute,
+ 'tc-create-threat': tc_create_threat_command,
+ 'tc-delete-group': tc_delete_group_command,
+ 'tc-get-groups': tc_get_groups,
+ 'tc-add-group-security-label': add_group_security_label,
+ 'tc-add-group-tag': add_group_tag,
+ 'tc-get-indicator-types': tc_get_indicator_types,
+ 'tc-group-associate-indicator': associate_indicator,
+ 'tc-create-document-group': create_document_group,
+ 'tc-get-group': get_group,
+ 'tc-get-group-attributes': get_group_attributes,
+ 'tc-get-group-security-labels': get_group_security_labels,
+ 'tc-get-group-tags': get_group_tags,
+ 'tc-download-document': download_document,
+ 'tc-get-group-indicators': get_group_indicator,
+ 'tc-get-associated-groups': get_group_associated,
+ 'tc-associate-group-to-group': associate_group_to_group
+}
+
+try:
+    command = demisto.command()
+    LOG('command is %s' % (command,))
+    if command in COMMANDS:
+        COMMANDS[command]()
+
+except Exception as e:
+    LOG(str(e))
+    LOG.print_log()
+    return_error('An error has occurred: {}'.format(str(e)))
diff --git a/Integrations/ThreatConnect/ThreatConnect.yml b/Integrations/ThreatConnect/ThreatConnect.yml
new file mode 100644
index 000000000000..b42f7af2ef35
--- /dev/null
+++ b/Integrations/ThreatConnect/ThreatConnect.yml
@@ -0,0 +1,2379 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: ThreatConnect
+ version: -1
+configuration:
+- display: Access ID
+ name: accessId
+ required: true
+ type: 0
+- display: Secret Key
+ name: secretKey
+ required: true
+ type: 4
+- defaultvalue: https://api.threatconnect.com
+  display: Base URL
+ name: baseUrl
+ required: true
+ type: 0
+- display: Default Organization
+ name: defaultOrg
+ required: false
+ type: 0
+- display: Proxy IP (or http://${ip})
+ name: proxyIp
+ required: false
+ type: 0
+- display: ProxyPort
+ name: proxyPort
+ required: false
+ type: 0
+- defaultvalue: '3'
+ display: Rating threshold for Malicious Indicators
+ name: rating
+ required: false
+ type: 0
+- defaultvalue: '50'
+ display: Confidence threshold for Malicious Indicators
+ name: confidence
+ required: false
+ type: 0
+- defaultvalue: '7'
+ display: Indicator Reputation Freshness (in days)
+ name: freshness
+ required: false
+ type: 0
+description: Threat intelligence platform.
+display: ThreatConnect
+name: ThreatConnect
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: The IPv4 or IPv6 address.
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ - default: false
+    description: A CSV list of a client's organizations, sources, or communities to which the user has permissions. Users with admin permissions can search for indicators belonging to all owners.
+ isArray: false
+ name: owners
+ required: false
+ secret: false
+ - default: false
+    description: Filters results to indicators whose threat rating is greater than or equal to the specified value. Can be "0" - "Unknown", "1" - "Suspicious", "2" - "Low", "3" - "Moderate", "4" - "High", or "5" - "Critical".
+ isArray: false
+ name: ratingThreshold
+ required: false
+ secret: false
+ - default: false
+    description: Filters results to indicators whose confidence rating is greater than or equal to the specified value. Can be "0%" - "Unknown", "1%" - "Discredited", "2-29%" - "Improbable", "30-49%" - "Doubtful", "50-69%" - "Possible", "70-89%" - "Probable", or "90-100%" - "Confirmed".
+ isArray: false
+ name: confidenceThreshold
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for an indicator of type IP address.
+ execution: false
+ name: ip
+ outputs:
+ - contextPath: TC.Indicator.Name
+ description: The name of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Type
+ description: The type of the indicator.
+ type: string
+ - contextPath: TC.Indicator.ID
+ description: The ID of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Description
+ description: The description of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Owner
+ description: The owner of the indicator.
+ type: string
+ - contextPath: TC.Indicator.CreateDate
+ description: The date on which the indicator was created.
+ type: date
+ - contextPath: TC.Indicator.LastModified
+    description: The date on which the indicator was last modified.
+ type: date
+ - contextPath: TC.Indicator.Rating
+ description: The threat rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.Confidence
+ description: The confidence rating of the indicator.
+ type: number
+ - contextPath: DBotScore.Indicator
+ description: The value assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The score assigned by DBot for the indicator.
+ type: number
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ type: string
+ - contextPath: IP.Address
+ description: The IP address of the indicator.
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision.
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the full description.
+ type: string
+ - arguments:
+ - default: true
+    description: The URL for which to search, including the protocol. For example, "https://www.demisto.com".
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ - default: false
+    description: A CSV list of a client's organizations, sources, or communities to which the client's API user has been granted permission. For example, "owner1,owner2,owner3".
+ isArray: false
+ name: owners
+ required: false
+ secret: false
+ - default: false
+    description: Filters results to indicators whose threat rating is greater than or equal to the specified value. Can be "0" - "Unknown", "1" - "Suspicious", "2" - "Low", "3" - "Moderate", "4" - "High", or "5" - "Critical".
+ isArray: false
+ name: ratingThreshold
+ required: false
+ secret: false
+ - default: false
+    description: Filters results to indicators whose confidence rating is greater than or equal to the specified value. Can be "0%" - "Unknown", "1%" - "Discredited", "2-29%" - "Improbable", "30-49%" - "Doubtful", "50-69%" - "Possible", "70-89%" - "Probable", or "90-100%" - "Confirmed".
+ isArray: false
+ name: confidenceThreshold
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for an indicator of type URL.
+ execution: false
+ name: url
+ outputs:
+ - contextPath: TC.Indicator.Name
+ description: The name of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Type
+ description: The type of the indicator.
+ type: string
+ - contextPath: TC.Indicator.ID
+ description: The ID of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Description
+ description: The description of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Owner
+ description: The owner of the indicator.
+ type: string
+ - contextPath: TC.Indicator.CreateDate
+ description: The date on which the indicator was created.
+ type: date
+ - contextPath: TC.Indicator.LastModified
+ description: The date on which the indicator was last modified.
+ type: date
+ - contextPath: TC.Indicator.Rating
+ description: The threat rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.Confidence
+ description: The confidence rating of the indicator.
+ type: number
+ - contextPath: DBotScore.Indicator
+ description: The value assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The score assigned by DBot for the indicator.
+ type: number
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ type: string
+ - contextPath: URL.Data
+ description: The data of the URL indicator.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the full description.
+ type: string
+ - arguments:
+ - default: true
+ description: The hash of the file. Can be "MD5", "SHA-1", or "SHA-256".
+ isArray: false
+ name: file
+ required: true
+ secret: false
+ - default: false
+    description: A CSV list of a client's organizations, sources, or communities to which the user has permissions. Users with admin permissions can search for indicators belonging to all owners.
+ isArray: false
+ name: owners
+ required: false
+ secret: false
+ - default: false
+    description: Filters results to indicators whose threat rating is greater than or equal to the specified value. Can be "0" - "Unknown", "1" - "Suspicious", "2" - "Low", "3" - "Moderate", "4" - "High", or "5" - "Critical".
+ isArray: false
+ name: ratingThreshold
+ required: false
+ secret: false
+ - default: false
+    description: Filters results to indicators whose confidence rating is greater than or equal to the specified value. Can be "0%" - "Unknown", "1%" - "Discredited", "2-29%" - "Improbable", "30-49%" - "Doubtful", "50-69%" - "Possible", "70-89%" - "Probable", or "90-100%" - "Confirmed".
+ isArray: false
+ name: confidenceThreshold
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for an indicator of type file.
+ execution: false
+ name: file
+ outputs:
+ - contextPath: TC.Indicator.Name
+ description: The name of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Type
+ description: The type of the indicator.
+ type: string
+ - contextPath: TC.Indicator.ID
+ description: The ID of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Description
+ description: The description of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Owner
+ description: The owner of the indicator.
+ type: string
+ - contextPath: TC.Indicator.CreateDate
+ description: The date on which the indicator was created.
+ type: date
+ - contextPath: TC.Indicator.LastModified
+    description: The date on which the indicator was last modified.
+ type: date
+ - contextPath: TC.Indicator.Rating
+ description: The threat rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.Confidence
+ description: The confidence rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.File.MD5
+ description: The MD5 hash of the indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA1
+ description: The SHA1 hash of the indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA256
+ description: The SHA256 hash of the indicator.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The value assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The score assigned by DBot for the indicator.
+ type: number
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 hash of the indicator.
+ type: string
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the indicator.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the indicator.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the full description.
+ type: string
+ - deprecated: false
+ description: Retrieves all owners for the current account.
+ execution: false
+ name: tc-owners
+ outputs:
+ - contextPath: TC.Owner.Name
+ description: The name of the owner.
+ type: string
+ - contextPath: TC.Owner.ID
+ description: The ID of the owner.
+ type: string
+ - contextPath: TC.Owner.Type
+ description: The type of the owner.
+ type: string
+ - arguments:
+ - default: false
+ description: A list of results filtered by the owner of the indicator.
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ - default: false
+ description: The maximum number of results that can be returned. The default is 500.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves a list of all indicators.
+ execution: false
+ name: tc-indicators
+ outputs:
+ - contextPath: TC.Indicator.Name
+ description: The name of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Type
+ description: The type of the indicator.
+ type: string
+ - contextPath: TC.Indicator.ID
+ description: The ID of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Description
+ description: The description of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Owner
+ description: The owner of the indicator.
+ type: string
+ - contextPath: TC.Indicator.CreateDate
+ description: The date on which the indicator was created.
+ type: date
+ - contextPath: TC.Indicator.LastModified
+ description: The last date on which the indicator was modified.
+ type: date
+ - contextPath: TC.Indicator.Rating
+ description: The threat rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.Confidence
+ description: The confidence rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.WhoisActive
+ description: The active indicator (for domains only).
+ type: string
+ - contextPath: TC.Indicator.File.MD5
+ description: The MD5 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA1
+ description: The SHA1 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA256
+ description: The SHA256 hash of the file indicator.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The value assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The score assigned by DBot for the indicator.
+ type: number
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ type: string
+ - contextPath: IP.Address
+ description: The IP address of the indicator.
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision.
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the full description.
+ type: string
+ - contextPath: URL.Data
+ description: The data of the URL of the indicator.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the full description.
+ type: string
+ - contextPath: Domain.Name
+ description: The name of the domain.
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision.
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the full description.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 hash of the file.
+ type: string
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the full description.
+ type: string
+ - deprecated: false
+ description: Returns a list of all ThreatConnect tags.
+ execution: false
+ name: tc-get-tags
+ outputs:
+ - contextPath: TC.Tags
+ description: A list of tags.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: The name of the tag.
+ isArray: false
+ name: tag
+ required: true
+ secret: false
+ - default: false
+ description: The indicator to tag. For example, for an IP indicator, "8.8.8.8".
+ isArray: false
+ name: indicator
+ required: true
+ secret: false
+ - default: false
+ description: A list of indicators filtered by the owner.
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds a tag to an existing indicator.
+ execution: false
+ name: tc-tag-indicator
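+ # A hypothetical usage sketch; the tag and indicator values are assumptions:
+ # !tc-tag-indicator tag="Phishing" indicator="8.8.8.8"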
+ - arguments:
+ - default: true
+ description: The name of the indicator by which to search. The command retrieves information from all owners. Can be an IP address, a URL, or a file hash.
+ isArray: false
+ name: indicator
+ required: true
+ secret: false
+ - default: false
+ description: A list of results filtered by indicators whose threat rating is greater than the specified value. Can be "0" - "Unknown", "1" - "Suspicious", "2" - "Low", "3" - "Moderate", "4" - "High", or "5" - "Critical".
+ isArray: false
+ name: ratingThreshold
+ required: false
+ secret: false
+ - default: false
+ description: A list of results filtered by indicators whose confidence rating is greater than the specified value. Can be "0%" - "Unknown", "1%" - "Discredited", "2-29%" - "Improbable", "30-49%" - "Doubtful", "50-69%" - "Possible", "70-89%" - "Probable", or "90-100%" - "Confirmed".
+ isArray: false
+ name: confidenceThreshold
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves information about an indicator.
+ execution: false
+ name: tc-get-indicator
+ outputs:
+ - contextPath: TC.Indicator.Name
+ description: The name of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Type
+ description: The type of the indicator.
+ type: string
+ - contextPath: TC.Indicator.ID
+ description: The ID of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Description
+ description: The description of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Owner
+ description: The owner of the indicator.
+ type: string
+ - contextPath: TC.Indicator.CreateDate
+ description: The date on which the indicator was created.
+ type: date
+ - contextPath: TC.Indicator.LastModified
+ description: The last date on which the indicator was modified.
+ type: date
+ - contextPath: TC.Indicator.Rating
+ description: The threat rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.Confidence
+ description: The confidence rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.WhoisActive
+ description: The active indicator (for domains only).
+ type: string
+ - contextPath: TC.Indicator.File.MD5
+ description: The MD5 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA1
+ description: The SHA1 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA256
+ description: The SHA256 hash of the file indicator.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The value assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The score assigned by DBot for the indicator.
+ type: number
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ type: string
+ - contextPath: IP.Address
+ description: The IP address of the indicator.
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision.
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the full description.
+ type: string
+ - contextPath: URL.Data
+ description: The data of the URL of the indicator.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the full description.
+ type: string
+ - contextPath: Domain.Name
+ description: The domain name of the indicator.
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision.
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the full description.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 hash of the file.
+ type: string
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the full description.
+ type: string
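+ # A hypothetical usage sketch; the indicator value is an assumption:
+ # !tc-get-indicator indicator="example.com" ratingThreshold="2"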
+ - arguments:
+ - default: true
+ description: The name of the tag by which to filter.
+ isArray: false
+ name: tag
+ required: true
+ secret: false
+ - default: false
+ description: A list of indicators filtered by the owner.
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ deprecated: false
+ description: Fetches all indicators that have a tag.
+ execution: false
+ name: tc-get-indicators-by-tag
+ outputs:
+ - contextPath: TC.Indicator.Name
+ description: The name of the tagged indicator.
+ type: string
+ - contextPath: TC.Indicator.Type
+ description: The type of the tagged indicator.
+ type: string
+ - contextPath: TC.Indicator.ID
+ description: The ID of the tagged indicator.
+ type: string
+ - contextPath: TC.Indicator.Description
+ description: The description of the tagged indicator.
+ type: string
+ - contextPath: TC.Indicator.Owner
+ description: The owner of the tagged indicator.
+ type: string
+ - contextPath: TC.Indicator.CreateDate
+ description: The date on which the tagged indicator was created.
+ type: date
+ - contextPath: TC.Indicator.LastModified
+ description: The last date on which the tagged indicator was modified.
+ type: date
+ - contextPath: TC.Indicator.Rating
+ description: The threat rating of the tagged indicator.
+ type: number
+ - contextPath: TC.Indicator.Confidence
+ description: The confidence rating of the tagged indicator.
+ type: number
+ - contextPath: TC.Indicator.WhoisActive
+ description: The active indicator (for domains only).
+ type: string
+ - contextPath: TC.Indicator.File.MD5
+ description: The MD5 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA1
+ description: The SHA1 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA256
+ description: The SHA256 hash of the file indicator.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The value assigned by DBot for the tagged indicator.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type assigned by DBot for the tagged indicator.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The score assigned by DBot for the tagged indicator.
+ type: number
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ type: string
+ - contextPath: IP.Address
+ description: The IP address of the tagged indicator.
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision.
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the full description.
+ type: string
+ - contextPath: URL.Data
+ description: The data of the URL of the tagged indicator.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the full description.
+ type: string
+ - contextPath: Domain.Name
+ description: The domain name of the tagged indicator.
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision.
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the full description.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 hash of the file.
+ type: string
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the full description.
+ type: string
+ - arguments:
+ - default: false
+ description: The indicator to add.
+ isArray: false
+ name: indicator
+ required: true
+ secret: false
+ - default: false
+ description: The threat rating of the indicator. Can be "0" - "Unknown", "1" - "Suspicious", "2" - "Low", "3" - "Moderate", "4" - "High", or "5" - "Critical".
+ isArray: false
+ name: rating
+ required: false
+ secret: false
+ - default: false
+ description: The confidence rating of the indicator. Can be "0%" - "Unknown", "1%" - "Discredited", "2-29%" - "Improbable", "30-49%" - "Doubtful", "50-69%" - "Possible", "70-89%" - "Probable", or "90-100%" - "Confirmed".
+ isArray: false
+ name: confidence
+ required: false
+ secret: false
+ - default: false
+ description: The owner of the new indicator. The default is the "defaultOrg" parameter.
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds a new indicator to ThreatConnect.
+ execution: false
+ name: tc-add-indicator
+ outputs:
+ - contextPath: TC.Indicator.Name
+ description: The name of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Type
+ description: The type of the indicator.
+ type: string
+ - contextPath: TC.Indicator.ID
+ description: The ID of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Description
+ description: The description of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Owner
+ description: The owner of the indicator.
+ type: string
+ - contextPath: TC.Indicator.CreateDate
+ description: The date on which the added indicator was created.
+ type: date
+ - contextPath: TC.Indicator.LastModified
+ description: The last date on which the added indicator was modified.
+ type: date
+ - contextPath: TC.Indicator.Rating
+ description: The threat rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.Confidence
+ description: The confidence rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.WhoisActive
+ description: The active indicator (for domains only).
+ type: string
+ - contextPath: TC.Indicator.File.MD5
+ description: The MD5 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA1
+ description: The SHA1 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA256
+ description: The SHA256 hash of the file indicator.
+ type: string
+ - contextPath: IP.Address
+ description: The IP address of the indicator.
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision.
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the full description.
+ type: string
+ - contextPath: URL.Data
+ description: The data of the URL of the indicator.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the full description.
+ type: string
+ - contextPath: Domain.Name
+ description: The domain name of the added indicator.
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision.
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the full description.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 hash of the file.
+ type: string
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the full description.
+ type: string
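+ # A hypothetical usage sketch; all argument values are assumptions:
+ # !tc-add-indicator indicator="8.8.8.8" rating="3" confidence="50" owner="MyOrg"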
+ - arguments:
+ - default: false
+ description: The owner of the new incident. The default is the "defaultOrg" parameter.
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ - default: true
+ description: The name of the incident group.
+ isArray: false
+ name: incidentName
+ required: true
+ secret: false
+ - default: false
+ description: The creation time of an incident in the "2017-03-21T00:00:00Z" format.
+ isArray: false
+ name: eventDate
+ required: false
+ secret: false
+ - default: false
+ description: The tag applied to the incident.
+ isArray: false
+ name: tag
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The security label applied to the incident. Can be "TLP:RED", "TLP:GREEN", "TLP:AMBER", or "TLP:WHITE".
+ isArray: false
+ name: securityLabel
+ predefined:
+ - TLP:RED
+ - TLP:GREEN
+ - TLP:AMBER
+ - TLP:WHITE
+ required: false
+ secret: false
+ - default: false
+ description: The description of the incident.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new incident group.
+ execution: false
+ name: tc-create-incident
+ outputs:
+ - contextPath: TC.Incident.Name
+ description: The name of the new incident group.
+ type: string
+ - contextPath: TC.Incident.Owner
+ description: The owner of the new incident.
+ type: string
+ - contextPath: TC.Incident.EventDate
+ description: The date of the event associated with the new incident.
+ type: date
+ - contextPath: TC.Incident.Tag
+ description: The name of the tag of the new incident.
+ type: string
+ - contextPath: TC.Incident.SecurityLabel
+ description: The security label of the new incident.
+ type: string
+ - contextPath: TC.Incident.ID
+ description: The ID of the new incident.
+ type: Unknown
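+ # A hypothetical usage sketch; the incident name is an assumption and the date follows the format described above:
+ # !tc-create-incident incidentName="Suspicious Login Wave" eventDate="2017-03-21T00:00:00Z" securityLabel="TLP:AMBER"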
+ - arguments:
+ - default: true
+ description: The ID by which to filter fetched incidents.
+ isArray: false
+ name: incidentId
+ required: false
+ secret: false
+ - default: false
+ description: The owner by which to filter fetched incidents.
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ - default: false
+ description: The incident name by which to filter fetched incidents.
+ isArray: false
+ name: incidentName
+ required: false
+ secret: false
+ deprecated: false
+ description: Fetches incidents from ThreatConnect.
+ execution: false
+ name: tc-fetch-incidents
+ outputs:
+ - contextPath: TC.Incident
+ description: The name of the group of fetched incidents.
+ type: string
+ - contextPath: TC.Incident.ID
+ description: The ID of the fetched incidents.
+ type: string
+ - contextPath: TC.Incident.Owner
+ description: The owner of the fetched incidents.
+ type: string
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The type of the indicator. Can be "ADDRESSES", "EMAIL_ADDRESSES", "URLS", "HOSTS", "FILES", or "CUSTOM_INDICATORS".
+ isArray: false
+ name: indicatorType
+ predefined:
+ - ADDRESSES
+ - EMAIL_ADDRESSES
+ - URLS
+ - HOSTS
+ - FILES
+ - CUSTOM_INDICATORS
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the incident to which the indicator is associated.
+ isArray: false
+ name: incidentId
+ required: true
+ secret: false
+ - default: true
+ description: The name of the indicator.
+ isArray: false
+ name: indicator
+ required: true
+ secret: false
+ - default: false
+ description: A list of indicators filtered by the owner.
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ deprecated: false
+ description: Associates an indicator with an existing incident. The indicator must exist before running this command. To add an indicator, run the tc-add-indicator command.
+ execution: false
+ name: tc-incident-associate-indicator
+ outputs:
+ - contextPath: TC.Indicator.Name
+ description: The name of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Type
+ description: The type of the indicator.
+ type: string
+ - contextPath: TC.Indicator.ID
+ description: The ID of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Description
+ description: The description of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Owner
+ description: The owner of the indicator.
+ type: string
+ - contextPath: TC.Indicator.CreateDate
+ description: The date on which the associated indicator was created.
+ type: date
+ - contextPath: TC.Indicator.LastModified
+ description: The last date on which the associated indicator was modified.
+ type: date
+ - contextPath: TC.Indicator.Rating
+ description: The threat rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.Confidence
+ description: The confidence rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.WhoisActive
+ description: The active indicator (for domains only).
+ type: string
+ - contextPath: TC.Indicator.File.MD5
+ description: The MD5 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA1
+ description: The SHA1 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA256
+ description: The SHA256 hash of the file indicator.
+ type: string
+ - contextPath: IP.Address
+ description: The IP address of the associated indicator.
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision.
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the full description.
+ type: string
+ - contextPath: URL.Data
+ description: The data of the URL of the associated indicator.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the full description.
+ type: string
+ - contextPath: Domain.Name
+ description: The domain name of the indicator.
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision.
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the full description.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 hash of the file.
+ type: string
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the full description.
+ type: string
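+ # A hypothetical usage sketch; the incident ID and indicator values are assumptions:
+ # !tc-incident-associate-indicator indicatorType="ADDRESSES" incidentId="12345" indicator="8.8.8.8"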
+ - arguments:
+ - default: true
+ description: The name of the domain.
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ - default: false
+ description: A CSV list of a client's organizations, sources, or communities to which a user has permissions. For example, users with admin permissions can search for indicators belonging to all owners.
+ isArray: false
+ name: owners
+ required: false
+ secret: false
+ - default: false
+ description: A list of results filtered by indicators whose threat rating is greater than the specified value. Can be "0" - "Unknown", "1" - "Suspicious", "2" - "Low", "3" - "Moderate", "4" - "High", or "5" - "Critical".
+ isArray: false
+ name: ratingThreshold
+ required: false
+ secret: false
+ - default: false
+ description: A list of results filtered by indicators whose confidence rating is greater than the specified value. Can be "0%" - "Unknown", "1%" - "Discredited", "2-29%" - "Improbable", "30-49%" - "Doubtful", "50-69%" - "Possible", "70-89%" - "Probable", or "90-100%" - "Confirmed".
+ isArray: false
+ name: confidenceThreshold
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for an indicator of type domain.
+ execution: false
+ name: domain
+ outputs:
+ - contextPath: TC.Indicator.Name
+ description: The name of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Type
+ description: The type of the domain.
+ type: string
+ - contextPath: TC.Indicator.ID
+ description: The ID of the domain.
+ type: string
+ - contextPath: TC.Indicator.Description
+ description: The description of the domain.
+ type: string
+ - contextPath: TC.Indicator.Owner
+ description: The owner of the domain.
+ type: string
+ - contextPath: TC.Indicator.CreateDate
+ description: The date on which the indicator of the domain was created.
+ type: date
+ - contextPath: TC.Indicator.LastModified
+ description: The last date on which the indicator of the domain was modified.
+ type: date
+ - contextPath: TC.Indicator.Rating
+ description: The threat rating of the domain.
+ type: number
+ - contextPath: TC.Indicator.Confidence
+ description: The confidence rating of the domain.
+ type: number
+ - contextPath: TC.Indicator.WhoisActive
+ description: The active indicator (for domains only).
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The value assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The score assigned by DBot for the indicator.
+ type: number
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ type: string
+ - contextPath: Domain.Name
+ description: The name of the domain.
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision.
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the full description.
+ type: string
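+ # A hypothetical usage sketch; the domain value is an assumption:
+ # !domain domain="example.com" ratingThreshold="3"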
+ - arguments:
+ - default: true
+ description: The ID of the incident.
+ isArray: false
+ name: incidentId
+ required: true
+ secret: false
+ - default: false
+ description: A list of indicators filtered by the owner.
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns indicators that are related to a specific incident.
+ execution: false
+ name: tc-get-incident-associate-indicators
+ outputs:
+ - contextPath: TC.Indicator.Name
+ description: The name of the returned indicator.
+ type: string
+ - contextPath: TC.Indicator.Type
+ description: The type of the returned indicator.
+ type: string
+ - contextPath: TC.Indicator.ID
+ description: The ID of the returned indicator.
+ type: string
+ - contextPath: TC.Indicator.Description
+ description: The description of the returned indicator.
+ type: string
+ - contextPath: TC.Indicator.Owner
+ description: The owner of the returned indicator.
+ type: string
+ - contextPath: TC.Indicator.CreateDate
+ description: The date on which the returned indicator was created.
+ type: date
+ - contextPath: TC.Indicator.LastModified
+ description: The last date on which the returned indicator was modified.
+ type: date
+ - contextPath: TC.Indicator.Rating
+ description: The threat rating of the returned indicator.
+ type: number
+ - contextPath: TC.Indicator.Confidence
+ description: The confidence rating of the returned indicator.
+ type: number
+ - contextPath: TC.Indicator.WhoisActive
+ description: The active indicator (for domains only).
+ type: string
+ - contextPath: TC.Indicator.File.MD5
+ description: The MD5 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA1
+ description: The SHA1 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA256
+ description: The SHA256 hash of the file indicator.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The value assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type assigned by DBot for the indicator.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The score assigned by DBot for the indicator.
+ type: number
+ - contextPath: DBotScore.Vendor
+ description: The vendor used to calculate the score.
+ type: string
+ - contextPath: IP.Address
+ description: The IP address of the returned indicator.
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision.
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the full description.
+ type: string
+ - contextPath: URL.Data
+ description: The data of the URL of the returned indicator.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the full description.
+ type: string
+ - contextPath: Domain.Name
+ description: The name of the domain.
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision.
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the full description.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 hash of the file.
+ type: string
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the full description.
+ type: string
+ - arguments:
+ - default: false
+ description: The name of the updated indicator.
+ isArray: false
+ name: indicator
+ required: true
+ secret: false
+ - default: false
+ description: The threat rating of the updated indicator.
+ isArray: false
+ name: rating
+ required: false
+ secret: false
+ - default: false
+ description: The confidence rating of the updated indicator.
+ isArray: false
+ name: confidence
+ required: false
+ secret: false
+ - default: false
+ description: The size of the file of the updated indicator.
+ isArray: false
+ name: size
+ required: false
+ secret: false
+ - default: false
+ description: The active DNS indicator (only for hosts).
+ isArray: false
+ name: dnsActive
+ required: false
+ secret: false
+ - default: false
+ description: The active indicator (only for hosts).
+ isArray: false
+ name: whoisActive
+ required: false
+ secret: false
+ - default: false
+ description: A CSV list of field:value pairs to update. For example, "rating=3", "confidence=42", and "description=helloWorld".
+ isArray: false
+ name: updatedValues
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether to set the updated indicator as a false positive. Can be "True" or "False".
+ isArray: false
+ name: falsePositive
+ predefined:
+ - 'True'
+ - 'False'
+ required: false
+ secret: false
+ - default: false
+ description: The number of observations of the updated indicator.
+ isArray: false
+ name: observations
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The security label applied to the indicator. Can be "TLP:RED", "TLP:GREEN", "TLP:AMBER", or "TLP:WHITE".
+ isArray: false
+ name: securityLabel
+ predefined:
+ - TLP:RED
+ - TLP:GREEN
+ - TLP:AMBER
+ - TLP:WHITE
+ required: false
+ secret: false
+ - default: false
+ description: Assesses the confidence rating of the indicator.
+ isArray: false
+ name: threatAssessConfidence
+ required: false
+ secret: false
+ - default: false
+ description: Assesses the threat rating of the indicator.
+ isArray: false
+ name: threatAssessRating
+ required: false
+ secret: false
+ deprecated: false
+ description: Updates the indicator in ThreatConnect.
+ execution: false
+ name: tc-update-indicator
+ outputs:
+ - contextPath: TC.Indicator.Name
+ description: The name of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Type
+ description: The type of the indicator.
+ type: string
+ - contextPath: TC.Indicator.ID
+ description: The ID of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Description
+ description: The description of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Owner
+ description: The owner of the indicator.
+ type: string
+ - contextPath: TC.Indicator.CreateDate
+ description: The date on which the indicator was created.
+ type: date
+ - contextPath: TC.Indicator.LastModified
+ description: The last date on which the indicator was modified.
+ type: date
+ - contextPath: TC.Indicator.Rating
+ description: The threat rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.Confidence
+ description: The confidence rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.WhoisActive
+ description: The active indicator (for domains only).
+ type: string
+ - contextPath: TC.Indicator.File.MD5
+ description: The MD5 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA1
+ description: The SHA1 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA256
+ description: The SHA256 hash of the file indicator.
+ type: string
+ - contextPath: IP.Address
+ description: The IP address of the indicator.
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision.
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the full description.
+ type: string
+ - contextPath: URL.Data
+ description: The data of the URL of the indicator.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the full description.
+ type: string
+ - contextPath: Domain.Name
+ description: The domain name of the indicator.
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision.
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the full description.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 hash of the file.
+ type: string
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the full description.
+ type: string
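+ # A hypothetical usage sketch; the indicator is an assumption, with updatedValues in the CSV form described above:
+ # !tc-update-indicator indicator="8.8.8.8" updatedValues="rating=3,confidence=42,description=helloWorld"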
+ - arguments:
+ - default: false
+ description: The name of the indicator from which to remove a tag.
+ isArray: false
+ name: indicator
+ required: true
+ secret: false
+ - default: false
+ description: The name of the tag to remove from the indicator.
+ isArray: false
+ name: tag
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes a tag from a specified indicator.
+ execution: false
+ name: tc-delete-indicator-tag
+ outputs:
+ - contextPath: TC.Indicator.Name
+ description: The name of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Type
+ description: The type of the indicator.
+ type: string
+ - contextPath: TC.Indicator.ID
+ description: The ID of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Description
+ description: The description of the indicator.
+ type: string
+ - contextPath: TC.Indicator.Owner
+ description: The owner of the indicator.
+ type: string
+ - contextPath: TC.Indicator.CreateDate
+ description: The date on which the indicator was created.
+ type: date
+ - contextPath: TC.Indicator.LastModified
+ description: The last date on which the indicator was modified.
+ type: date
+ - contextPath: TC.Indicator.Rating
+ description: The threat rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.Confidence
+ description: The confidence rating of the indicator.
+ type: number
+ - contextPath: TC.Indicator.WhoisActive
+ description: The active indicator (for domains only).
+ type: string
+ - contextPath: TC.Indicator.File.MD5
+ description: The MD5 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA1
+ description: The SHA1 hash of the file indicator.
+ type: string
+ - contextPath: TC.Indicator.File.SHA256
+ description: The SHA256 hash of the file indicator.
+ type: string
+ - contextPath: IP.Address
+ description: The IP address of the indicator.
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IP addresses, the vendor that made the decision.
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IP addresses, the full description.
+ type: string
+ - contextPath: URL.Data
+ description: The data of the URL of the indicator.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the full description.
+ type: string
+ - contextPath: Domain.Name
+ description: The domain name of the indicator.
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision.
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the full description.
+ type: string
+ - contextPath: File.MD5
+ description: The MD5 hash of the file.
+ type: string
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision.
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the full description.
+ type: string
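+ # A hypothetical usage sketch; the indicator and tag values are assumptions:
+ # !tc-delete-indicator-tag indicator="8.8.8.8" tag="Phishing"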
+ - arguments:
+ - default: false
+ description: The name of the indicator to delete.
+ isArray: false
+ name: indicator
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes an indicator from ThreatConnect.
+ execution: false
+ name: tc-delete-indicator
+ - arguments:
+ - default: false
+ description: The name of the campaign group.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The earliest date on which the campaign was seen.
+ isArray: false
+ name: firstSeen
+ required: false
+ secret: false
+ - default: false
+ description: The owner of the new campaign. The default is the "defaultOrg" parameter.
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ - default: false
+ description: The description of the campaign.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The name of the tag to apply to the campaign.
+ isArray: false
+ name: tag
+ required: false
+ secret: false
+ - default: false
+ description: The security label of the campaign. For example, "TLP:GREEN".
+ isArray: false
+ name: securityLabel
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a group based on the "Campaign" type.
+ execution: false
+ name: tc-create-campaign
+ outputs:
+ - contextPath: TC.Campaign.Name
+ description: The name of the campaign.
+ type: string
+ - contextPath: TC.Campaign.Owner
+ description: The owner of the campaign.
+ type: string
+ - contextPath: TC.Campaign.FirstSeen
+ description: The earliest date on which the campaign was seen.
+ type: date
+ - contextPath: TC.Campaign.Tag
+ description: The tag of the campaign.
+ type: string
+ - contextPath: TC.Campaign.SecurityLevel
+ description: The security label of the campaign.
+ type: string
+ - contextPath: TC.Campaign.ID
+ description: The ID of the campaign.
+ type: string
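+ # A hypothetical usage sketch; the name, date, and label values are assumptions:
+ # !tc-create-campaign name="Example Campaign" firstSeen="2018-01-01T00:00:00Z" securityLabel="TLP:GREEN"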
+ - arguments:
+ - default: false
+ description: The name of the event group.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The date on which the event occurred. If the date is not specified, the current date is used.
+ isArray: false
+ name: eventDate
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The status of the event. Can be "Needs Review", "False Positive", "No Further Action", or "Escalated".
+ isArray: false
+ name: status
+ predefined:
+ - Needs Review
+ - False Positive
+ - No Further Action
+ - Escalated
+ required: false
+ secret: false
+ - default: false
+ description: The owner of the event.
+ isArray: false
+ name: owner
+ required: false
+ secret: false
+ - default: false
+ description: The description of the event.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The tag of the event.
+ isArray: false
+ name: tag
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a group based on the "Event" type.
+ execution: false
+ name: tc-create-event
+ outputs:
+ - contextPath: TC.Event.Name
+ description: The name of the event.
+ type: string
+ - contextPath: TC.Event.Date
+ description: The date of the event.
+ type: date
+ - contextPath: TC.Event.Status
+ description: The status of the event.
+ type: string
+ - contextPath: TC.Event.Owner
+ description: The owner of the event.
+ type: string
+ - contextPath: TC.Event.Tag
+ description: The tag of the event.
+ type: string
+ - contextPath: TC.Event.ID
+ description: The ID of the event.
+ type: string
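+ # A hypothetical usage sketch; the name and tag values are assumptions:
+ # !tc-create-event name="Example Event" status="Needs Review" tag="Phishing"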
+ - arguments:
+ - default: false
+ description: The name of the threat group.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Creates a group based on the "Threats" type.
+ execution: false
+ name: tc-create-threat
+ outputs:
+ - contextPath: TC.Threat.Name
+ description: The name of the threat.
+ type: string
+ - contextPath: TC.Threat.ID
+ description: The ID of the threat.
+ type: string
+ - arguments:
+ - default: false
+ description: The ID of the group to delete.
+ isArray: false
+ name: groupID
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The type of the group to delete. Can be "Incidents", "Events", "Campaigns", or "Threats".
+ isArray: false
+ name: type
+ predefined:
+ - Incidents
+ - Events
+ - Campaigns
+ - Threats
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes a group.
+ execution: false
+ name: tc-delete-group
+ - arguments:
+ - default: false
+ description: The ID of the group to which to add attributes. To get the ID of the group, run the tc-get-groups command.
+ isArray: false
+ name: group_id
+ required: true
+ secret: false
+ - default: false
+ description: The type of attribute to add to the group. The type is located in the UI in a specific group or under Org Config.
+ isArray: false
+ name: attribute_type
+ required: true
+ secret: false
+ - default: false
+ description: The value of the attribute.
+ isArray: false
+ name: attribute_value
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The type of the group. Can be "adversaries", "campaigns", "documents", "emails", "events", "incidents", "intrusionSets", "reports", "signatures", or "threats".
+ isArray: false
+ name: group_type
+ predefined:
+ - adversaries
+ - campaigns
+ - documents
+ - emails
+ - events
+ - incidents
+ - intrusionSets
+ - reports
+ - signatures
+ - threats
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds an attribute to a specified group.
+ execution: false
+ name: tc-add-group-attribute
+ outputs:
+ - contextPath: TC.Group.DateAdded
+ description: The date on which the attribute was added.
+ type: Date
+ - contextPath: TC.Group.LastModified
+ description: The date on which the added attribute was last modified.
+ type: Date
+ - contextPath: TC.Group.Type
+ description: The type of the group to which the attribute was added.
+ type: String
+ - contextPath: TC.Group.Value
+ description: The value of the attribute added to the group.
+ type: String
+ - contextPath: TC.Group.ID
+ description: The group ID to which the attribute was added.
+ type: Number
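+ # A hypothetical usage sketch; the group ID and attribute values are assumptions:
+ # !tc-add-group-attribute group_id="12345" group_type="incidents" attribute_type="Description" attribute_value="Example value"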
+ - deprecated: false
+ description: Returns a list of events.
+ execution: false
+ name: tc-get-events
+ outputs:
+ - contextPath: TC.Event.DateAdded
+ description: The date on which the event was added.
+ type: Date
+ - contextPath: TC.Event.EventDate
+ description: The date on which the event occurred.
+ type: Date
+ - contextPath: TC.Event.ID
+ description: The ID of the event.
+ type: Number
+ - contextPath: TC.Event.OwnerName
+ description: The name of the owner of the event.
+ type: String
+ - contextPath: TC.Event.Status
+ description: The status of the event.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The type of the group. Can be "adversaries", "campaigns", "documents", "emails", "events", "incidents", "intrusionSets", "reports", "signatures", or "threats".
+ isArray: false
+ name: group_type
+ predefined:
+ - adversaries
+ - campaigns
+ - documents
+ - emails
+ - events
+ - incidents
+ - intrusionSets
+ - reports
+ - signatures
+ - threats
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns all groups, filtered by the group type.
+ execution: false
+ name: tc-get-groups
+ outputs:
+ - contextPath: TC.Group.DateAdded
+ description: The date on which the group was added.
+ type: Date
+ - contextPath: TC.Group.EventDate
+ description: The date on which the event occurred.
+ type: Date
+ - contextPath: TC.Group.Name
+ description: The name of the group.
+ type: String
+ - contextPath: TC.Group.OwnerName
+ description: The name of the owner of the group.
+ type: String
+ - contextPath: TC.Group.Status
+ description: The status of the group.
+ type: String
+ - contextPath: TC.Group.ID
+ description: The ID of the group.
+ type: Number
+ - arguments:
+ - default: false
+ description: The ID of the group to which to add the security label. To get the ID, run the tc-get-groups command.
+ isArray: false
+ name: group_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The type of the group to which to add the security label. Can be "adversaries", "campaigns", "documents", "emails", "events", "incidents", "intrusionSets", "reports", "signatures", or "threats".
+ isArray: false
+ name: group_type
+ predefined:
+ - adversaries
+ - campaigns
+ - documents
+ - emails
+ - events
+ - incidents
+ - intrusionSets
+ - reports
+ - signatures
+ - threats
+ required: true
+ secret: false
+ - default: false
+ description: The name of the security label to add to the group. For example, "TLP:GREEN".
+ isArray: false
+ name: security_label_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds a security label to a group.
+ execution: false
+ name: tc-add-group-security-label
+ - arguments:
+ - default: false
+ description: The ID of the group to which to add the tag. To get the ID, run the tc-get-groups command.
+ isArray: false
+ name: group_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The type of the group to which to add the tag. Can be "adversaries", "campaigns", "documents", "emails", "events", "incidents", "intrusionSets", "reports", "signatures", or "threats".
+ isArray: false
+ name: group_type
+ predefined:
+ - adversaries
+ - campaigns
+ - documents
+ - emails
+ - events
+ - incidents
+ - intrusionSets
+ - reports
+ - signatures
+ - threats
+ required: true
+ secret: false
+ - default: false
+ description: The name of the tag to add to the group.
+ isArray: false
+ name: tag_name
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds tags to a specified group.
+ execution: false
+ name: tc-add-group-tag
+ - deprecated: false
+ description: Returns all available indicator types.
+ execution: false
+ name: tc-get-indicator-types
+ outputs:
+ - contextPath: TC.IndicatorType.ApiBranch
+ description: The branch of the API.
+ type: String
+ - contextPath: TC.IndicatorType.ApiEntity
+ description: The entity of the API.
+ type: String
+ - contextPath: TC.IndicatorType.CasePreference
+ description: The case preference of the indicator. For example, "sensitive", "upper", or "lower".
+ type: String
+ - contextPath: TC.IndicatorType.Custom
+ description: Whether the indicator is a custom indicator.
+ type: Boolean
+ - contextPath: TC.IndicatorType.Parsable
+ description: Whether the indicator can be parsed.
+ type: Boolean
+ - contextPath: TC.IndicatorType.Value1Type
+ description: The name of the indicator.
+ type: String
+ - contextPath: TC.IndicatorType.Value1Label
+ description: The value label of the indicator.
+ type: String
+ - arguments:
+ - default: false
+ description: The type of the indicator. To get the available types, run the tc-get-indicator-types command. The type must be spelled as displayed in the ApiBranch column of the UI.
+ isArray: false
+ name: indicator_type
+ required: true
+ secret: false
+ - default: false
+ description: The name of the indicator. For example, if "indicator_type=emailAddresses", then "indicator=a@a.co.il".
+ isArray: false
+ name: indicator
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The type of the group. Can be "adversaries", "campaigns", "documents", "emails", "events", "incidents", "intrusionSets", "reports", "signatures", or "threats".
+ isArray: false
+ name: group_type
+ predefined:
+ - adversaries
+ - campaigns
+ - documents
+ - emails
+ - events
+ - incidents
+ - intrusionSets
+ - reports
+ - signatures
+ - threats
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the group. To get the ID of the group, run the tc-get-groups command.
+ isArray: false
+ name: group_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Associates an indicator with a group.
+ execution: false
+ name: tc-group-associate-indicator
+ outputs:
+ - contextPath: TC.Group.GroupID
+ description: The ID of the group.
+ type: Number
+ - contextPath: TC.Group.GroupType
+ description: The type of the group.
+ type: String
+ - contextPath: TC.Group.Indicator
+ description: The name of the indicator.
+ type: String
+ - contextPath: TC.Group.IndicatorType
+ description: The type of the indicator.
+ type: String
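+ # A hypothetical usage sketch; the group ID is an assumption and the indicator pairing follows the example above:
+ # !tc-group-associate-indicator indicator_type="emailAddresses" indicator="a@a.co.il" group_type="incidents" group_id="12345"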
+ - arguments:
+ - default: false
+ description: The name of the file to display in the UI.
+ isArray: false
+ name: file_name
+ required: true
+ secret: false
+ - default: false
+ description: The name of the file.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether the file is malware. If "true", ThreatConnect creates a password-protected ZIP file on your local machine that contains the sample and uploads the ZIP file.
+ isArray: false
+ name: malware
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: false
+ description: The password of the ZIP file.
+ isArray: false
+ name: password
+ required: false
+ secret: false
+ - default: false
+ description: The security label of the group.
+ isArray: false
+ name: security_label
+ required: false
+ secret: false
+ - default: false
+ description: A description of the group.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - default: false
+ description: The entry ID of the file, as displayed in the War Room.
+ isArray: false
+ name: entry_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Creates a document group.
+ execution: false
+ name: tc-create-document-group
+ outputs:
+ - contextPath: TC.Group.Name
+ description: The name of the group.
+ type: String
+ - contextPath: TC.Group.Owner
+ description: The owner of the group.
+ type: String
+ - contextPath: TC.Group.EventDate
+ description: The date on which the group was created.
+ type: Date
+ - contextPath: TC.Group.Description
+ description: The description of the group.
+ type: String
+ - contextPath: TC.Group.SecurityLabel
+ description: The security label of the group.
+ type: String
+ - contextPath: TC.Group.ID
+ description: The ID of the group.
+ type: Number
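+ # A hypothetical usage sketch; the file name and entry ID are assumptions:
+ # !tc-create-document-group file_name="report.pdf" name="Example Report" entry_id="3@1a2b3c" malware="false"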
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The type of group for which to return the ID. Can be "adversaries", "campaigns", "documents", "emails", "events", "incidents", "intrusionSets", "reports", "signatures", or "threats".
+ isArray: false
+ name: group_type
+ predefined:
+ - adversaries
+ - campaigns
+ - documents
+ - emails
+ - events
+ - incidents
+ - intrusionSets
+ - reports
+ - signatures
+ - threats
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the group to retrieve. To get the ID, run the tc-get-groups command.
+ isArray: false
+ name: group_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves a single group.
+ execution: false
+ name: tc-get-group
+ outputs:
+ - contextPath: TC.Group.DateAdded
+ description: The date on which the group was added.
+ type: Date
+ - contextPath: TC.Group.EventDate
+ description: The date on which the event occurred.
+ type: Date
+ - contextPath: TC.Group.Name
+ description: The name of the group.
+ type: String
+ - contextPath: TC.Group.Owner.ID
+ description: The ID of the group owner.
+ type: Number
+ - contextPath: TC.Group.Owner.Name
+ description: The name of the group owner.
+ type: String
+ - contextPath: TC.Group.Owner.Type
+ description: The type of the owner.
+ type: String
+ - contextPath: TC.Group.Status
+ description: The status of the group.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The type of group for which to return the attribute. Can be "adversaries", "campaigns", "documents", "emails", "events", "incidents", "intrusionSets", "reports", "signatures", or "threats".
+ isArray: false
+ name: group_type
+ predefined:
+ - adversaries
+ - campaigns
+ - documents
+ - emails
+ - events
+ - incidents
+ - intrusionSets
+ - reports
+ - signatures
+ - threats
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the group for which to return the attribute. To get the ID, run the tc-get-groups command.
+ isArray: false
+ name: group_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the attribute of a group.
+ execution: false
+ name: tc-get-group-attributes
+ outputs:
+ - contextPath: TC.Group.Attribute.DateAdded
+ description: The date on which the group was added.
+ type: Date
+ - contextPath: TC.Group.Attribute.Displayed
+ description: Whether the attribute is displayed on the UI.
+ type: Boolean
+ - contextPath: TC.Group.Attribute.AttributeID
+ description: The ID of the attribute.
+ type: Number
+ - contextPath: TC.Group.Attribute.LastModified
+ description: The date on which the attribute was last modified.
+ type: Date
+ - contextPath: TC.Group.Attribute.Type
+ description: The type of the attribute.
+ type: String
+ - contextPath: TC.Group.Attribute.Value
+ description: The value of the attribute.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The type of group for which to return the security labels. Can be "adversaries", "campaigns", "documents", "emails", "events", "incidents", "intrusionSets", "reports", "signatures", or "threats".
+ isArray: false
+ name: group_type
+ predefined:
+ - adversaries
+ - campaigns
+ - documents
+ - emails
+ - events
+ - incidents
+ - intrusionSets
+ - reports
+ - signatures
+ - threats
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the group for which to return the security labels. To get the ID, run the tc-get-groups command.
+ isArray: false
+ name: group_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the security labels of a group.
+ execution: false
+ name: tc-get-group-security-labels
+ outputs:
+ - contextPath: TC.Group.SecurityLabel.Name
+ description: The name of the security label.
+ type: String
+ - contextPath: TC.Group.SecurityLabel.Description
+ description: The description of the security label.
+ type: String
+ - contextPath: TC.Group.SecurityLabel.DateAdded
+ description: The date on which the security label was added.
+ type: Date
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The type of group for which to return the tags. Can be "adversaries", "campaigns", "documents", "emails", "events", "incidents", "intrusionSets", "reports", "signatures", or "threats".
+ isArray: false
+ name: group_type
+ predefined:
+ - adversaries
+ - campaigns
+ - documents
+ - emails
+ - events
+ - incidents
+ - intrusionSets
+ - reports
+ - signatures
+ - threats
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the group for which to return the tags. To get the ID, run the tc-get-groups command.
+ isArray: false
+ name: group_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the tags of a group.
+ execution: false
+ name: tc-get-group-tags
+ outputs:
+ - contextPath: TC.Group.Tag.Name
+ description: The name of the tag.
+ type: String
+ - arguments:
+ - default: false
+ description: The ID of the document.
+ isArray: false
+ name: document_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Downloads the contents of a document.
+ execution: false
+ name: tc-download-document
+ outputs:
+ - contextPath: File.Size
+ description: The size of the file.
+ type: Number
+ - contextPath: File.SHA1
+ description: The SHA1 hash of the file.
+ type: String
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: String
+ - contextPath: File.Name
+ description: The name of the file.
+ type: String
+ - contextPath: File.SSDeep
+ description: The ssdeep hash of the file (same as displayed in file entries).
+ type: String
+ - contextPath: File.EntryID
+ description: The entry ID of the file.
+ type: String
+ - contextPath: File.Info
+ description: The information of the file.
+ type: String
+ - contextPath: File.Type
+ description: The type of the file.
+ type: String
+ - contextPath: File.MD5
+ description: The MD5 hash of the file.
+ type: String
+ - contextPath: File.Extension
+ description: The extension of the file.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The type of the group for which to return the indicators. Can be "adversaries", "campaigns", "documents", "emails", "events", "incidents", "intrusionSets", "reports", "signatures", or "threats".
+ isArray: false
+ name: group_type
+ predefined:
+ - adversaries
+ - campaigns
+ - documents
+ - emails
+ - events
+ - incidents
+ - intrusionSets
+ - reports
+ - signatures
+ - threats
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the group for which to return the indicators. To get the ID, run the tc-get-groups command.
+ isArray: false
+ name: group_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns indicators associated with a group.
+ execution: false
+ name: tc-get-group-indicators
+ outputs:
+ - contextPath: TC.Group.Indicator.Summary
+ description: The summary of the indicator.
+ type: String
+ - contextPath: TC.Group.Indicator.ThreatAssessConfidence
+ description: The confidence rating of the indicator.
+ type: String
+ - contextPath: TC.Group.Indicator.IndicatorID
+ description: The ID of the indicator.
+ type: Number
+ - contextPath: TC.Group.Indicator.DateAdded
+ description: The date on which the indicator was added.
+ type: Date
+ - contextPath: TC.Group.Indicator.Type
+ description: The type of the indicator.
+ type: String
+ - contextPath: TC.Group.Indicator.Rating
+ description: The threat rating of the indicator.
+ type: Number
+ - contextPath: TC.Group.Indicator.ThreatAssertRating
+ description: The threat assess rating of the indicator.
+ type: Number
+ - contextPath: TC.Group.Indicator.OwnerName
+ description: The name of the owner of the indicator.
+ type: String
+ - contextPath: TC.Group.Indicator.LastModified
+ description: The date on which the indicator was last modified.
+ type: Date
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The type of group. Can be "adversaries", "campaigns", "documents", "emails", "events", "incidents", "intrusionSets", "reports", "signatures", or "threats".
+ isArray: false
+ name: group_type
+ predefined:
+ - adversaries
+ - campaigns
+ - documents
+ - emails
+ - events
+ - incidents
+ - intrusionSets
+ - reports
+ - signatures
+ - threats
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the group. To get the ID, run the tc-get-groups command.
+ isArray: false
+ name: group_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns groups associated with a specified group.
+ execution: false
+ name: tc-get-associated-groups
+ outputs:
+ - contextPath: TC.Group.AssociatedGroup.DateAdded
+ description: The date on which the group was added.
+ type: Date
+ - contextPath: TC.Group.AssociatedGroup.GroupID
+ description: The ID of the group.
+ type: Number
+ - contextPath: TC.Group.AssociatedGroup.Name
+ description: The name of the group.
+ type: String
+ - contextPath: TC.Group.AssociatedGroup.OwnerName
+ description: The name of the owner of the group.
+ type: String
+ - contextPath: TC.Group.AssociatedGroup.Type
+ description: The type of the group.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: The type of the group. Can be "adversaries", "campaigns", "documents", "emails", "events", "incidents", "intrusionSets", "reports", "signatures", or "threats".
+ isArray: false
+ name: group_type
+ predefined:
+ - adversaries
+ - campaigns
+ - documents
+ - emails
+ - events
+ - incidents
+ - intrusionSets
+ - reports
+ - signatures
+ - threats
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the group. To get the ID of the group, run the tc-get-groups command.
+ isArray: false
+ name: group_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The type of group to associate. Can be "adversaries", "campaigns", "documents", "emails", "events", "incidents", "intrusionSets", "reports", "signatures", or "threats".
+ isArray: false
+ name: associated_group_type
+ predefined:
+ - adversaries
+ - campaigns
+ - documents
+ - emails
+ - events
+ - incidents
+ - intrusionSets
+ - reports
+ - signatures
+ - threats
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the group to associate.
+ isArray: false
+ name: associated_group_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Associates one group with another group.
+ execution: false
+ name: tc-associate-group-to-group
+ outputs:
+ - contextPath: TC.Group.AssociatedGroup.AssociatedGroupID
+ description: The ID of the associated group.
+ type: Number
+ - contextPath: TC.Group.AssociatedGroup.AssociatedGroupType
+ description: The type of the associated group.
+ type: String
+ - contextPath: TC.Group.AssociatedGroup.GroupID
+ description: The ID of the group to associate to.
+ type: Number
+ - contextPath: TC.Group.AssociatedGroup.GroupType
+ description: The type of the group to associate to.
+ type: String
+ dockerimage: demisto/threatconnect-sdk
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python2
+tests:
+- test-ThreatConnect
diff --git a/Integrations/ThreatConnect/ThreatConnect_description.md b/Integrations/ThreatConnect/ThreatConnect_description.md
new file mode 100644
index 000000000000..0519ecba6ea9
--- /dev/null
+++ b/Integrations/ThreatConnect/ThreatConnect_description.md
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/Integrations/ThreatConnect/ThreatConnect_image.png b/Integrations/ThreatConnect/ThreatConnect_image.png
new file mode 100644
index 000000000000..002918ed21c0
Binary files /dev/null and b/Integrations/ThreatConnect/ThreatConnect_image.png differ
diff --git a/Integrations/ThreatQ_v2/CHANGELOG.md b/Integrations/ThreatQ_v2/CHANGELOG.md
new file mode 100644
index 000000000000..3de599cd9e8a
--- /dev/null
+++ b/Integrations/ThreatQ_v2/CHANGELOG.md
@@ -0,0 +1,11 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+Fixed results numbering for the following commands:
+ - ***threatq-get-all-adversaries***
+ - ***threatq-get-all-indicators***
+ - ***threatq-get-all-events***
+
+## [19.9.1] - 2019-09-18
+ - Added the ThreatQ v2 integration.
diff --git a/Integrations/ThreatQ_v2/Pipfile b/Integrations/ThreatQ_v2/Pipfile
new file mode 100644
index 000000000000..e6770cd572fd
--- /dev/null
+++ b/Integrations/ThreatQ_v2/Pipfile
@@ -0,0 +1,17 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "==5.0.1"
+pytest-mock = "*"
+requests-mock = "*"
+pytest-asyncio = "*"
+
+[packages]
+unittest-mock = "*"
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/ThreatQ_v2/Pipfile.lock b/Integrations/ThreatQ_v2/Pipfile.lock
new file mode 100644
index 000000000000..72ea61abd4b5
--- /dev/null
+++ b/Integrations/ThreatQ_v2/Pipfile.lock
@@ -0,0 +1,244 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "af46d4f6dfef85012879217ba21602edd980f9e21ee096dd3e399d20c32ccc7d"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:9ff1b1c5a354142de080b8a4e9803e5d0d59283c93aed808617c787d16768375",
+ "sha256:b7143592e374e50584564794fcb8aaf00a23025f9db866627f89a21491847a8d"
+ ],
+ "version": "==0.20"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:02b260c8deb80db09325b99edf62ae344ce9bc64d68b7a634410b8e9a568edbf",
+ "sha256:18f9c401083a4ba6e162355873f906315332ea7035803d0fd8166051e3d402e3",
+ "sha256:1f2c6209a8917c525c1e2b55a716135ca4658a3042b5122d4e3413a4030c26ce",
+ "sha256:2f06d97f0ca0f414f6b707c974aaf8829c2292c1c497642f63824119d770226f",
+ "sha256:616c94f8176808f4018b39f9638080ed86f96b55370b5a9463b2ee5c926f6c5f",
+ "sha256:63b91e30ef47ef68a30f0c3c278fbfe9822319c15f34b7538a829515b84ca2a0",
+ "sha256:77b454f03860b844f758c5d5c6e5f18d27de899a3db367f4af06bec2e6013a8e",
+ "sha256:83fe27ba321e4cfac466178606147d3c0aa18e8087507caec78ed5a966a64905",
+ "sha256:84742532d39f72df959d237912344d8a1764c2d03fe58beba96a87bfa11a76d8",
+ "sha256:874ebf3caaf55a020aeb08acead813baf5a305927a71ce88c9377970fe7ad3c2",
+ "sha256:9f5caf2c7436d44f3cec97c2fa7791f8a675170badbfa86e1992ca1b84c37009",
+ "sha256:a0c8758d01fcdfe7ae8e4b4017b13552efa7f1197dd7358dc9da0576f9d0328a",
+ "sha256:a4def978d9d28cda2d960c279318d46b327632686d82b4917516c36d4c274512",
+ "sha256:ad4f4be843dace866af5fc142509e9b9817ca0c59342fdb176ab6ad552c927f5",
+ "sha256:ae33dd198f772f714420c5ab698ff05ff900150486c648d29951e9c70694338e",
+ "sha256:b4a2b782b8a8c5522ad35c93e04d60e2ba7f7dcb9271ec8e8c3e08239be6c7b4",
+ "sha256:c462eb33f6abca3b34cdedbe84d761f31a60b814e173b98ede3c81bb48967c4f",
+ "sha256:fd135b8d35dfdcdb984828c84d695937e58cc5f49e1c854eb311c4d6aa03f4f1"
+ ],
+ "version": "==1.4.2"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832",
+ "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"
+ ],
+ "version": "==7.2.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
+ "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
+ ],
+ "version": "==19.1"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80",
+ "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4"
+ ],
+ "version": "==2.4.2"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6ef6d06de77ce2961156013e9dff62f1b2688aa04d0dc244299fe7d67e09370d",
+ "sha256:a736fed91c12681a7b34617c8fcefe39ea04599ca72c608751c31d89579a3f77"
+ ],
+ "index": "pypi",
+ "version": "==5.0.1"
+ },
+ "pytest-asyncio": {
+ "hashes": [
+ "sha256:9fac5100fd716cbecf6ef89233e8590a4ad61d729d1732e0a96b84182df1daaf",
+ "sha256:d734718e25cfc32d2bf78d346e99d33724deeba774cc4afdf491530c6184b63b"
+ ],
+ "index": "pypi",
+ "version": "==0.10.0"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "version": "==2.22.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:510df890afe08d36eca5bb16b4aa6308a6f85e3159ad3013bac8b9de7bd5a010",
+ "sha256:88d3402dd8b3c69a9e4f9d3a73ad11b15920c6efd36bc27bf1f701cf4a8e4646"
+ ],
+ "index": "pypi",
+ "version": "==1.7.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
+ "sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e",
+ "sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0",
+ "sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c",
+ "sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631",
+ "sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4",
+ "sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34",
+ "sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b",
+ "sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a",
+ "sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233",
+ "sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1",
+ "sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36",
+ "sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d",
+ "sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a",
+ "sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.4.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
+ "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
+ ],
+ "version": "==0.6.0"
+ }
+ }
+}
\ No newline at end of file
diff --git a/Integrations/ThreatQ_v2/ThreatQ_v2.py b/Integrations/ThreatQ_v2/ThreatQ_v2.py
new file mode 100644
index 000000000000..1fb2075cceb7
--- /dev/null
+++ b/Integrations/ThreatQ_v2/ThreatQ_v2.py
@@ -0,0 +1,1247 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+import requests
+import json
+import shutil
+import os
+from typing import Dict, Any
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARIABLES '''
+
+SERVER_URL = demisto.params()['serverUrl'].rstrip('/')
+API_URL = SERVER_URL + '/api'
+CLIENT_ID = demisto.params()['client_id']
+EMAIL = demisto.getParam('credentials').get('identifier')
+PASSWORD = demisto.getParam('credentials').get('password')
+USE_SSL = not demisto.params().get('insecure', False)
+THRESHOLD = int(demisto.params().get('threshold', '0'))
+
+domain_regex = r'(?i)(?:(?:https?|ftp|hxxps?):\/\/|www\[?\.\]?|ftp\[?\.\]?)(?:[-A-Z0-9]+\[?\.\]?)+[-A-Z0-9]+' \
+ r'(?::[0-9]+)?(?:(?:\/|\?)[-A-Z0-9+&@#\/%=~_$?!:,.\(\);\*|]*[-A-Z0-9+&@#\/%=~_$\(\);\*|])?|' \
+ r'\b[-A-Za-z0-9._%+\*|]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\b'
+
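+# Note: emailRegex, urlRegex and regexFlags used below are expected to come from
+# the CommonServerPython wildcard import at the top of this file.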
+REGEX_MAP = {
+ 'email': re.compile(emailRegex, regexFlags),
+ 'url': re.compile(urlRegex, regexFlags),
+ 'md5': re.compile(r'\b[0-9a-fA-F]{32}\b', regexFlags),
+ 'sha1': re.compile(r'\b[0-9a-fA-F]{40}\b', regexFlags),
+ 'sha256': re.compile(r'\b[0-9a-fA-F]{64}\b', regexFlags),
+ 'domain': re.compile(domain_regex, regexFlags)
+}
+
+TQ_TO_DEMISTO_INDICATOR_TYPES = {
+ 'IP Address': 'ip',
+ 'IPv6 Address': 'ip',
+ 'Email Address': 'email',
+ 'URL': 'url',
+ 'MD5': 'file',
+ 'SHA-1': 'file',
+ 'SHA-256': 'file',
+ 'FQDN': 'domain'
+}
+
+STATUS_ID_TO_STATUS = {
+ 1: 'Active',
+ 2: 'Expired',
+ 3: 'Indirect',
+ 4: 'Review',
+ 5: 'Whitelisted'
+}
+
+TYPE_ID_TO_INDICATOR_TYPE = {
+ 1: 'Binary String',
+ 2: 'CIDR Block',
+ 3: 'CVE',
+ 4: 'Email Address',
+ 5: 'Email Attachment',
+ 6: 'Email Subject',
+ 7: 'File Mapping',
+ 8: 'File Path',
+ 9: 'Filename',
+ 10: 'FQDN',
+ 11: 'Fuzzy Hash',
+ 12: 'GOST Hash',
+ 13: 'Hash ION',
+ 14: 'IP Address',
+ 15: 'IPv6 Address',
+ 16: 'MD5',
+ 17: 'Mutex',
+ 18: 'Password',
+ 19: 'Registry Key',
+ 20: 'Service Name',
+ 21: 'SHA-1',
+ 22: 'SHA-256',
+ 23: 'SHA-384',
+ 24: 'SHA-512',
+ 25: 'String',
+ 26: 'x509 Serial',
+ 27: 'x509 Subject',
+ 28: 'URL',
+ 29: 'URL Path',
+ 30: 'User-agent',
+ 31: 'Username',
+ 32: 'X-Mailer'
+}
+
+TYPE_ID_TO_EVENT_TYPE = {
+ 1: 'Spearphish',
+ 2: 'Watering Hole',
+ 3: 'SQL Injection Attack',
+ 4: 'DoS Attack',
+ 5: 'Malware',
+ 6: 'Watchlist',
+ 7: 'Command and Control',
+ 8: 'Anonymization',
+ 9: 'Exfiltration',
+ 10: 'Host Characteristics',
+ 11: 'Compromised PKI Certificate',
+ 12: 'Login Compromise',
+ 13: 'Incident'
+}
+
+TYPE_ID_TO_FILE_TYPE = {
+ 1: 'Cuckoo',
+ 2: 'CrowdStrike Intelligence',
+ 3: 'Early Warning and Indicator Notice (EWIN)',
+ 4: 'FireEye Analysis',
+ 5: 'FBI FLASH',
+ 6: 'Generic Text',
+ 7: 'Intelligence Whitepaper',
+ 8: 'iSight Report',
+ 9: 'iSight ThreatScape Intelligence Report',
+ 10: 'IB',
+ 11: 'AEC',
+ 12: 'Malware Analysis Report',
+ 13: 'Malware Initial Findings Report (MFIR)',
+ 14: 'Malware Sample',
+ 15: 'Packet Capture',
+ 16: 'Palo Alto Networks WildFire XML',
+ 17: 'PCAP',
+ 18: 'PDF',
+ 19: 'Private Industry Notification (PIN)',
+ 20: 'Spearphish Attachment',
+ 21: 'STIX',
+ 22: 'ThreatAnalyzer Analysis',
+ 23: 'ThreatQ CSV File',
+ 24: 'Whitepaper'
+}
+
+TABLE_HEADERS = {
+ 'indicator': ['ID', 'Type', 'Value', 'Description', 'Status',
+ 'TQScore', 'CreatedAt', 'UpdatedAt', 'URL'],
+ 'adversary': ['ID', 'Name', 'CreatedAt', 'UpdatedAt', 'URL'],
+ 'event': ['ID', 'Type', 'Title', 'Description', 'Occurred', 'CreatedAt', 'UpdatedAt', 'URL'],
+ 'attachment': ['ID', 'Name', 'Title', 'Type', 'Size', 'Description', 'MD5', 'CreatedAt', 'UpdatedAt',
+ 'MalwareLocked', 'ContentType', 'URL'],
+ 'attributes': ['ID', 'Name', 'Value'],
+ 'sources': ['ID', 'Name']
+}
+
+OBJ_DIRECTORY = {
+ 'indicator': 'indicators',
+ 'adversary': 'adversaries',
+ 'event': 'events',
+ 'attachment': 'attachments'
+}
+
+RELATED_KEY = {
+ 'indicator': 'RelatedIndicator',
+ 'adversary': 'RelatedAdversary',
+ 'event': 'RelatedEvent'
+}
+
+CONTEXT_PATH = {
+ 'indicator': 'ThreatQ.Indicator(val.ID === obj.ID)',
+ 'adversary': 'ThreatQ.Adversary(val.ID === obj.ID)',
+ 'event': 'ThreatQ.Event(val.ID === obj.ID)',
+ 'attachment': 'ThreatQ.File(val.ID === obj.ID)'
+}
+
+
+''' HELPER FUNCTIONS '''
+
+
+def get_errors_string_from_bad_request(bad_request_results, status_code):
+ if status_code == 404:
+ return 'Object does not exist.\n'
+
+ # Errors could be retrieved in two forms:
+ # 1. A dictionary of fields and errors list related to the fields, all under 'data' key in the response json object
+ # 2. A list, directly within the response object
+
+ errors_string = 'Errors from service:\n\n'
+
+ # First form
+ errors_dict = bad_request_results.json().get('data', {}).get('errors', {})
+ if errors_dict:
+ for error_num, (key, lst) in enumerate(errors_dict.items(), 1):
+ curr_error_string = '\n'.join(lst) + '\n\n'
+ errors_string += 'Error #{0}. In \'{1}\':\n{2}'.format(error_num, key, curr_error_string)
+ return errors_string
+
+ # Second form
+ errors_list = bad_request_results.json().get('errors', [])
+ if errors_list:
+ for error_num, error in enumerate(errors_list, 1):
+ if isinstance(error, str):
+ errors_string += 'Error #{0}: {1}\n'.format(error_num, error)
+ else: # error is a list
+ for i in range(len(error)):
+ errors_string += 'Error #{0}.{1}: {2}\n'.format(error_num, i, error[i])
+ return errors_string
+
+ return ''  # The service did not provide any errors.
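+
+# Illustrative (hypothetical) payloads covered by the two forms above:
+#   Form 1: {"data": {"errors": {"value": ["The value field is required."]}}}
+#   Form 2: {"errors": ["Invalid indicator type."]}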
+
+
+def tq_request(method, url_suffix, params=None, files=None, retrieve_entire_response=False, allow_redirects=True):
+ api_call_headers = None
+ if url_suffix != '/token':
+ access_token = get_access_token()
+ api_call_headers = {'Authorization': 'Bearer ' + access_token}
+
+ if not files:
+ params = json.dumps(params)
+
+ response = requests.request(method, API_URL + url_suffix, data=params, headers=api_call_headers,
+ verify=USE_SSL, files=files, allow_redirects=allow_redirects)
+
+ if response.status_code >= 400:
+ errors_string = get_errors_string_from_bad_request(response, response.status_code)
+ error_message = 'Received an error - status code [{0}].\n{1}'.format(response.status_code, errors_string)
+ return_error(error_message)
+
+ if retrieve_entire_response:
+ return response
+ elif method != 'DELETE': # the DELETE request returns nothing in response
+ return response.json()
+ return None
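+
+# A typical call, for illustration:
+#   res = tq_request('GET', '/indicators/1?with=attributes,sources,score')
+# returns the parsed JSON body, or calls return_error on a 4xx/5xx response.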
+
+
+def request_new_access_token():
+ params = {'grant_type': 'password', 'email': EMAIL, 'password': PASSWORD, 'client_id': CLIENT_ID}
+ access_token_response = tq_request('POST', '/token', params, allow_redirects=False)
+
+ updated_integration_context = {
+ 'access_token': access_token_response['access_token'],
+ 'access_token_creation_time': int(time.time()) - 1, # decrementing one second to be on the safe side
+ 'access_token_expires_in': access_token_response['expires_in']
+ }
+ demisto.setIntegrationContext(updated_integration_context)
+ threatq_access_token = access_token_response['access_token']
+ return threatq_access_token
+
+
+def access_token_not_expired():
+ epoch_time_now = time.time()
+ epoch_time_when_token_granted = demisto.getIntegrationContext().get('access_token_creation_time')
+ token_time_until_expiration = demisto.getIntegrationContext().get('access_token_expires_in')
+ return int(epoch_time_now) - int(epoch_time_when_token_granted) < int(token_time_until_expiration)
+
+
+def get_access_token():
+ existing_access_token = demisto.getIntegrationContext().get('access_token')
+ if existing_access_token and access_token_not_expired():
+ return existing_access_token
+ else:
+ new_access_token = request_new_access_token()
+ return new_access_token
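+
+# Token-flow sketch: the three helpers above implement a cached OAuth password
+# grant. get_access_token() returns the token stored in the integration context
+# while access_token_not_expired() holds; otherwise request_new_access_token()
+# POSTs to /token and caches the new token with its creation time and TTL.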
+
+
+def make_create_object_request(obj_type, params):
+ url_suffix = '/{0}'.format(OBJ_DIRECTORY[obj_type])
+ res = tq_request('POST', url_suffix, params)
+
+ # For some reason, only while creating an indicator, the response data is a list of dicts with size 1.
+ # Creating other objects simply returns one dict, as expected.
+ data = res['data'][0] if obj_type == 'indicator' else res['data']
+ data = data_to_demisto_format(data, obj_type)
+
+ entry_context = {CONTEXT_PATH[obj_type]: createContext(data, removeNull=True)}
+
+ readable_title = '{0} was successfully created.'.format(obj_type.title())
+ readable = build_readable(readable_title, obj_type, data)
+
+ return_outputs(readable, entry_context, res)
+
+
+def make_edit_request_for_an_object(obj_id, obj_type, params):
+ # Remove items with empty values.
+ params = {k: v for k, v in params.items() if v is not None}
+
+ url_suffix = '/{0}/{1}?with=attributes,sources'.format(OBJ_DIRECTORY[obj_type], obj_id)
+ if obj_type == 'indicator':
+ url_suffix += ',score'
+
+ res = tq_request('PUT', url_suffix, params)
+
+ data = data_to_demisto_format(res['data'], obj_type)
+ entry_context = {CONTEXT_PATH[obj_type]: createContext(data, removeNull=True)}
+
+ readable_title = 'Successfully edited {0} with ID {1}'.format(obj_type, obj_id)
+ readable = build_readable(readable_title, obj_type, data)
+
+ return_outputs(readable, entry_context, res)
+
+
+def make_indicator_reputation_request(indicator_type, value, generic_context):
+ # Search for the indicator ID by keyword:
+ url_suffix = '/search?query={0}&limit=1'.format(value)
+ res = tq_request('GET', url_suffix)
+
+ data = {} # type: Dict[str, Any]
+ for obj in res.get('data', []):
+ if obj.get('value') == value and obj.get('object') == 'indicator':
+ # Search for detailed information about the indicator
+ url_suffix = '/indicators/{0}?with=attributes,sources,score,type'.format(obj.get('id'))
+ res = tq_request('GET', url_suffix)
+ data = indicator_data_to_demisto_format(res['data'])
+ break
+
+ dbot_context = create_dbot_context(value, indicator_type, data.get('TQScore', -1))
+ entry_context = set_indicator_entry_context(indicator_type, data, dbot_context, generic_context)
+
+ readable_title = 'Search results for {0} {1}'.format(indicator_type, value)
+ readable = build_readable(readable_title, 'indicator', data)
+
+ return_outputs(readable, entry_context, res)
+
+
+def create_dbot_context(indicator, ind_type, ind_score):
+ """ This function converts a TQ scoring value of an indicator into a DBot score.
+ The default score mapping function is: -1 -> 0, [0,3] -> 1, [4,7] -> 2, [8,10] -> 3.
+
+ If threshold parameter is set manually, it overrides the default function definition for a
+ malicious indicator, such that TQ score >= threshold iff the DBot score == 3.
+
+ Args:
+ indicator (str): The indicator name
+ ind_type (str): The indicator type
+ ind_score (int): The indicator TQ score
+
+ Returns:
+ (dict). The indicator's DBotScore.
+
+ """
+ dbot_score_map = {
+ -1: 0,
+ 0: 1,
+ 1: 1,
+ 2: 1,
+ 3: 1,
+ 4: 2,
+ 5: 2,
+ 6: 2,
+ 7: 2,
+ 8: 2,
+ 9: 2,
+ 10: 2
+ }
+
+ ret = {
+ 'Vendor': 'ThreatQ v2',
+ 'Indicator': indicator,
+ 'Type': ind_type
+ }
+
+ if ind_score >= THRESHOLD:
+ ret['Score'] = 3
+ else:
+ ret['Score'] = dbot_score_map[ind_score]
+
+ return ret
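+
+# Worked example (assuming the default threshold of 8 from the YAML configuration):
+#   create_dbot_context('1.2.3.4', 'ip', 9)['Score'] -> 3 (malicious)
+#   create_dbot_context('1.2.3.4', 'ip', 5)['Score'] -> 2 (suspicious, via dbot_score_map)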
+
+
+def get_tq_score_from_response(score_data):
+ if score_data is None:
+ return None
+ if isinstance(score_data, dict):
+ # score will be max(gen_score, manual_score)
+ gen_score = str(score_data.get('generated_score'))
+ manual_score = score_data.get('manual_score', 0.0)
+ if manual_score is None:
+ manual_score = -1
+ return max(float(gen_score), float(manual_score))
+ else:
+ # score is already defined as a number
+ return float(score_data)
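+
+# For example: get_tq_score_from_response({'generated_score': '7.3', 'manual_score': 9})
+# returns 9.0, since the final score is the higher of the generated and manual scores.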
+
+
+def clean_html_from_string(raw_html):
+ """ This function receives an HTML string of a text, and retrieves a clean string of its content.
+
+ Args:
+ raw_html: An HTML format text
+
+ Returns:
+ (string). A clean text string
+ """
+ if not raw_html:
+ return None
+ clean_r = re.compile('<.*?>')
+ clean_text = re.sub(clean_r, '', raw_html)
+ return clean_text
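+
+# For example: clean_html_from_string('<p>Hello <b>world</b></p>') -> 'Hello world'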
+
+
+def sources_to_request_format(sources):
+ if not sources:
+ return []
+ if isinstance(sources, str):
+ sources = sources.split(',')
+ return [{'name': source} for source in sources]
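+
+# For example (hypothetical source names):
+#   sources_to_request_format('AlienVault,VirusTotal')
+#   -> [{'name': 'AlienVault'}, {'name': 'VirusTotal'}]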
+
+
+def sources_to_demisto_format(lst):
+ if lst is None:
+ return None
+ return [{
+ 'Name': elem.get('name'),
+ 'ID': elem.get('pivot', {}).get('id')
+ } for elem in lst]
+
+
+def attributes_to_request_format(attributes_names, attributes_values):
+ if not attributes_names and not attributes_values:
+ return []
+ if isinstance(attributes_names, str):
+ attributes_names = attributes_names.split(',')
+ if isinstance(attributes_values, str):
+ attributes_values = attributes_values.split(',')
+ if not attributes_names or not attributes_values or len(attributes_names) != len(attributes_values):
+ return_error('Attributes_names and attributes_values arguments must have the same length.')
+
+ return [{'name': name, 'value': val} for name, val in zip(attributes_names, attributes_values)]
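+
+# For example (hypothetical attribute names and values):
+#   attributes_to_request_format('Country,Region', 'US,EMEA')
+#   -> [{'name': 'Country', 'value': 'US'}, {'name': 'Region', 'value': 'EMEA'}]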
+
+
+def attributes_to_demisto_format(lst):
+ if lst is None:
+ return None
+ return [{
+ 'Name': elem.get('name'),
+ 'Value': elem.get('value'),
+ 'ID': elem.get('id')
+ } for elem in lst]
+
+
+def content_type_to_demisto_format(c_type_id):
+ # content_type is a file object property
+ return 'text/plain' if c_type_id == 1 else 'text/rtf'
+
+
+def malware_locked_to_request_format(state):
+ # malware_locked is a file object property
+ if not state:
+ return None
+ return 1 if state == 'on' else 0
+
+
+def malware_locked_to_demisto_format(state):
+ return 'on' if state == 1 else 'off'
+
+
+def parse_date(text):
+ valid_formats = ['%Y-%m-%d %H:%M:%S', '%Y-%m-%d']
+ for fmt in valid_formats:
+ try:
+ return str(datetime.strptime(text, fmt))
+ except ValueError:
+ pass
+ return_error('Time data \'{0}\' does not match any valid format.'.format(text))
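+
+# For example: parse_date('2019-10-15') -> '2019-10-15 00:00:00',
+# while parse_date('2019-10-15 13:30:00') is returned unchanged.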
+
+
+def data_to_demisto_format(data, obj_type):
+ if obj_type == 'indicator':
+ return indicator_data_to_demisto_format(data)
+ elif obj_type == 'event':
+ return event_data_to_demisto_format(data)
+ elif obj_type == 'adversary':
+ return adversary_data_to_demisto_format(data)
+ elif obj_type == 'attachment':
+ return file_data_to_demisto_format(data)
+
+
+def indicator_data_to_demisto_format(data):
+ ret = {
+ 'ID': data.get('id'),
+ 'UpdatedAt': data.get('updated_at'),
+ 'CreatedAt': data.get('created_at'),
+ 'Value': data.get('value'),
+ 'Status': STATUS_ID_TO_STATUS[data.get('status_id')],
+ 'Type': TYPE_ID_TO_INDICATOR_TYPE[data.get('type_id')],
+ 'URL': '{0}/indicators/{1}/details'.format(SERVER_URL, data.get('id')),
+ 'TQScore': get_tq_score_from_response(data.get('score')),
+ 'Description': clean_html_from_string(data.get('description')),
+ 'Source': sources_to_demisto_format(data.get('sources')),
+ 'Attribute': attributes_to_demisto_format(data.get('attributes'))
+ }
+ return ret
+
+
+def adversary_data_to_demisto_format(data):
+ ret = {
+ 'ID': data.get('id'),
+ 'UpdatedAt': data.get('updated_at'),
+ 'CreatedAt': data.get('created_at'),
+ 'Name': data.get('name'),
+ 'URL': '{0}/adversaries/{1}/details'.format(SERVER_URL, data.get('id')),
+ 'Source': sources_to_demisto_format(data.get('sources')),
+ 'Attribute': attributes_to_demisto_format(data.get('attributes'))
+ }
+ return ret
+
+
+def event_data_to_demisto_format(data):
+ ret = {
+ 'ID': data.get('id'),
+ 'UpdatedAt': data.get('updated_at'),
+ 'CreatedAt': data.get('created_at'),
+ 'Title': data.get('title'),
+ 'Occurred': data.get('happened_at'),
+ 'Type': TYPE_ID_TO_EVENT_TYPE[data.get('type_id')],
+ 'URL': '{0}/events/{1}/details'.format(SERVER_URL, data.get('id')),
+ 'Description': clean_html_from_string(data.get('description')),
+ 'Source': sources_to_demisto_format(data.get('sources')),
+ 'Attribute': attributes_to_demisto_format(data.get('attributes'))
+ }
+ return ret
+
+
+def file_data_to_demisto_format(data):
+ raw = {
+ 'ID': data.get('id'),
+ 'CreatedAt': data.get('created_at'),
+ 'UpdatedAt': data.get('updated_at'),
+ 'Size': data.get('file_size'),
+ 'MD5': data.get('hash'),
+ 'Type': TYPE_ID_TO_FILE_TYPE[data.get('type_id')],
+ 'URL': '{0}/files/{1}/details'.format(SERVER_URL, data.get('id')),
+ 'Name': data.get('name'),
+ 'Title': data.get('title'),
+ 'Description': data.get('description'),
+ 'ContentType': content_type_to_demisto_format(data.get('content_type_id')),
+ 'MalwareLocked': malware_locked_to_demisto_format(data.get('malware_locked')),
+ 'Source': sources_to_demisto_format(data.get('sources')),
+ 'Attribute': attributes_to_demisto_format(data.get('attributes'))
+ }
+
+ return raw
+
+
+def get_pivot_id(obj1_type, obj1_id, obj2_type, obj2_id):
+ # A pivot id represents a connection between two objects.
+
+ url_suffix = '/{0}/{1}/{2}'.format(OBJ_DIRECTORY[obj1_type], obj1_id, OBJ_DIRECTORY[obj2_type])
+ res = tq_request('GET', url_suffix)
+
+ for related_object in res['data']: # res['data'] contains all the related objects of obj_id1
+ if int(related_object.get('id')) == int(obj2_id):
+ return int(related_object['pivot']['id'])
+ else:  # for/else: reached only if the loop found no related object matching obj2_id
+ return_error('Command failed - objects are not related.')
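+
+# Hypothetical response shape this helper walks:
+#   {"data": [{"id": 2, "pivot": {"id": 77}}, ...]}
+# so get_pivot_id('indicator', 1, 'adversary', 2) would return 77.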
+
+
+def add_malicious_data(generic_context, tq_score):
+ generic_context['Malicious'] = {
+ 'Vendor': 'ThreatQ v2',
+ 'Description': 'Score from ThreatQ is {0}'.format(tq_score)
+ }
+
+
+def set_indicator_entry_context(indicator_type, raw, dbot, generic):
+ if dbot.get('Score') == 3:
+ add_malicious_data(generic, raw.get('TQScore', -1))
+
+ ec = {
+ outputPaths[indicator_type]: generic,
+ 'DBotScore': dbot
+ }
+ if raw:
+ ec[CONTEXT_PATH['indicator']] = raw
+ return ec
+
+
+def build_readable_for_search_by_name(indicator_context, event_context, adversary_context, file_context):
+ if not (indicator_context or event_context or adversary_context or file_context):
+ return 'No results.'
+
+ human_readable = ''
+ if indicator_context:
+ human_readable += tableToMarkdown('Search Results - Indicators', indicator_context)
+ if event_context:
+ human_readable += tableToMarkdown('Search Results - Events', event_context)
+ if adversary_context:
+ human_readable += tableToMarkdown('Search Results - Adversaries', adversary_context)
+ if file_context:
+ human_readable += tableToMarkdown('Search Results - Files', file_context)
+
+ return human_readable
+
+
+def build_readable(readable_title, obj_type, data, metadata=None):
+ if isinstance(data, dict): # One object data
+ readable = tableToMarkdown(readable_title, data, headers=TABLE_HEADERS[obj_type],
+ headerTransform=pascalToSpace, removeNull=True, metadata=metadata)
+ if 'Attribute' in data:
+ readable += tableToMarkdown('Attributes', data['Attribute'], headers=TABLE_HEADERS['attributes'],
+ removeNull=True, headerTransform=pascalToSpace, metadata=metadata)
+ if 'Source' in data:
+ readable += tableToMarkdown('Sources', data['Source'], headers=TABLE_HEADERS['sources'],
+ removeNull=True, headerTransform=pascalToSpace, metadata=metadata)
+ if 'URL' in data:
+ url_in_markdown_format = '[{0}]({1})'.format(data['URL'], data['URL'])
+ readable = readable.replace(data['URL'], url_in_markdown_format)
+
+ else: # 'data' is a list of objects
+ readable = tableToMarkdown(readable_title, data, headers=TABLE_HEADERS[obj_type],
+ headerTransform=pascalToSpace, removeNull=True, metadata=metadata)
+ for elem in data:
+ url_in_markdown_format = '[{0}]({1})'.format(elem['URL'], elem['URL'])
+ readable = readable.replace(elem['URL'], url_in_markdown_format)
+
+ return readable
+
+
+''' COMMANDS '''
+
+
+def test_module():
+ token = request_new_access_token()
+ threshold = demisto.params().get('threshold')
+ threshold_is_integer = isinstance(threshold, int) or (isinstance(threshold, str) and threshold.isdigit())
+ if token and threshold_is_integer and 0 <= int(threshold) <= 10:
+ demisto.results('ok')
+
+
+def search_by_name_command():
+ args = demisto.args()
+ name = args.get('name')
+ limit = args.get('limit')
+
+ if limit and isinstance(limit, str) and not limit.isdigit():
+ return_error('limit argument must be an integer.')
+
+ url_suffix = '/search?query={0}&limit={1}'.format(name, limit)
+ res = tq_request('GET', url_suffix)
+
+ indicator_context = [{'ID': e['id'], 'Value': e['value']} for e in res['data'] if e['object'] == 'indicator']
+ event_context = [{'ID': e['id'], 'Title': e['value']} for e in res['data'] if e['object'] == 'event']
+ adversary_context = [{'ID': e['id'], 'Name': e['value']} for e in res['data'] if e['object'] == 'adversary']
+ file_context = [{'ID': e['id'], 'Name': e['value'].split()[1]} for e in res['data'] if e['object'] == 'attachment']
+ # file value in response is returned in the form ["title" name], thus we use the split method above
+
+ entry_context = {
+ CONTEXT_PATH['indicator']: indicator_context,
+ CONTEXT_PATH['event']: event_context,
+ CONTEXT_PATH['adversary']: adversary_context,
+ CONTEXT_PATH['attachment']: file_context
+ }
+
+ # Remove items with empty values:
+ entry_context = {k: v for k, v in entry_context.items() if v}
+
+ readable = build_readable_for_search_by_name(indicator_context, event_context, adversary_context, file_context)
+
+ return_outputs(readable, entry_context, res)
+
+
+def search_by_id_command():
+ args = demisto.args()
+ obj_type = args.get('obj_type')
+ obj_id = args.get('obj_id')
+
+ if isinstance(obj_id, str) and not obj_id.isdigit():
+ return_error('obj_id argument must be an integer.')
+
+ url_suffix = '/{0}/{1}?with=attributes,sources'.format(OBJ_DIRECTORY[obj_type], obj_id)
+ if obj_type == 'indicator':
+ url_suffix += ',score,type'
+
+ res = tq_request('GET', url_suffix)
+ data = data_to_demisto_format(res['data'], obj_type)
+
+ ec = {CONTEXT_PATH[obj_type]: createContext(data, removeNull=True)}
+
+ if obj_type == 'indicator':
+ indicator_type = TQ_TO_DEMISTO_INDICATOR_TYPES.get(data['Type'])
+ if indicator_type is not None:
+ ec['DBotScore'] = create_dbot_context(data['Value'], indicator_type, data['TQScore'])
+
+ readable_title = 'Search results for {0} with ID {1}'.format(obj_type, obj_id)
+ readable = build_readable(readable_title, obj_type, data)
+
+ return_outputs(readable, ec, res)
+
+
+def create_indicator_command():
+ args = demisto.args()
+ indicator_type = args.get('type')
+ status = args.get('status')
+ value = args.get('value')
+ sources = args.get('sources')
+ attributes_names = args.get('attributes_names')
+ attributes_values = args.get('attributes_values')
+
+ params = {
+ 'type': indicator_type,
+ 'status': status,
+ 'value': value,
+ 'sources': sources_to_request_format(sources),
+ 'attributes': attributes_to_request_format(attributes_names, attributes_values)
+ }
+
+ make_create_object_request('indicator', params)
+
+
+def create_adversary_command():
+ args = demisto.args()
+ name = args.get('name')
+ sources = args.get('sources')
+ attributes_names = args.get('attributes_names')
+ attributes_values = args.get('attributes_values')
+
+ params = {
+ 'name': name,
+ 'sources': sources_to_request_format(sources),
+ 'attributes': attributes_to_request_format(attributes_names, attributes_values)
+ }
+
+ make_create_object_request('adversary', params)
+
+
+def create_event_command():
+ args = demisto.args()
+ event_type = args.get('type')
+ title = args.get('title')
+ date = args.get('date')
+ sources = args.get('sources')
+ attributes_names = args.get('attributes_names')
+ attributes_values = args.get('attributes_values')
+
+ params = {
+ 'title': title,
+ 'type': event_type,
+ 'happened_at': parse_date(date),
+ 'sources': sources_to_request_format(sources),
+ 'attributes': attributes_to_request_format(attributes_names, attributes_values)
+ }
+
+ make_create_object_request('event', params)
+
+
+def edit_indicator_command():
+ args = demisto.args()
+ indicator_id = args.get('id')
+ value = args.get('value')
+ indicator_type = args.get('type')
+ description = args.get('description')
+
+ if isinstance(indicator_id, str) and not indicator_id.isdigit():
+ return_error('id argument must be an integer.')
+
+ params = {
+ 'value': value,
+ 'type': indicator_type,
+ 'description': description
+ }
+
+ make_edit_request_for_an_object(indicator_id, 'indicator', params)
+
+
+def edit_adversary_command():
+ args = demisto.args()
+ adversary_id = args.get('id')
+ name = args.get('name')
+
+ if isinstance(adversary_id, str) and not adversary_id.isdigit():
+ return_error('id argument must be an integer.')
+
+ params = {
+ 'name': name
+ }
+
+ make_edit_request_for_an_object(adversary_id, 'adversary', params)
+
+
+def edit_event_command():
+ args = demisto.args()
+ event_id = args.get('id')
+ event_type = args.get('type')
+ title = args.get('title')
+ date = args.get('date')
+ description = args.get('description')
+
+ if isinstance(event_id, str) and not event_id.isdigit():
+ return_error('id argument must be an integer.')
+
+ params = {
+ 'title': title,
+ 'happened_at': parse_date(date) if date else None,
+ 'type': event_type,
+ 'description': description
+ }
+
+ make_edit_request_for_an_object(event_id, 'event', params)
+
+
+def delete_object_command():
+ args = demisto.args()
+ obj_type = args.get('obj_type')
+ obj_id = args.get('obj_id')
+
+ if isinstance(obj_id, str) and not obj_id.isdigit():
+ return_error('obj_id argument must be an integer.')
+
+ url_suffix = '/{0}/{1}'.format(OBJ_DIRECTORY[obj_type], obj_id)
+ tq_request('DELETE', url_suffix)
+ demisto.results('Successfully deleted {0} with ID {1}.'.format(obj_type, obj_id))
+
+
+def get_related_objs_command(related_type):
+ args = demisto.args()
+ obj_type = args.get('obj_type')
+ obj_id = args.get('obj_id')
+
+ if isinstance(obj_id, str) and not obj_id.isdigit():
+ return_error('obj_id argument must be an integer.')
+
+ url_suffix = '/{0}/{1}/{2}?with=sources'.format(OBJ_DIRECTORY[obj_type], obj_id, OBJ_DIRECTORY[related_type])
+ if related_type == 'indicator':
+ url_suffix += ',score'
+ res = tq_request('GET', url_suffix)
+
+ info = [data_to_demisto_format(obj, related_type) for obj in res['data']]
+ info = createContext(info, removeNull=True)
+ data = {
+ RELATED_KEY[related_type]: createContext(info, removeNull=True),
+ 'ID': int(obj_id)
+ }
+ ec = {CONTEXT_PATH[obj_type]: data} if info else {}
+
+ readable_title = 'Related {0} type objects of {1} with ID {2}'.format(related_type, obj_type, obj_id)
+ readable = build_readable(readable_title, related_type, data[RELATED_KEY[related_type]])
+
+ return_outputs(readable, ec, res)
+
+
+def link_objects_command():
+ args = demisto.args()
+ obj1_type = args.get('obj1_type')
+ obj1_id = args.get('obj1_id')
+ obj2_type = args.get('obj2_type')
+ obj2_id = args.get('obj2_id')
+
+ if isinstance(obj1_id, str) and not obj1_id.isdigit() or isinstance(obj2_id, str) and not obj2_id.isdigit():
+ return_error('obj1_id, obj2_id arguments must be integers.')
+
+ if obj1_type == obj2_type and obj1_id == obj2_id:
+ return_error('Cannot link an object to itself.')
+
+ url_suffix = '/{0}/{1}/{2}'.format(OBJ_DIRECTORY[obj1_type], obj1_id, OBJ_DIRECTORY[obj2_type])
+ params = {
+ 'id': obj2_id
+ }
+ tq_request('POST', url_suffix, params)
+ demisto.results(
+ 'Successfully linked {0} with ID {1} and {2} with ID {3}.'.format(obj1_type, obj1_id, obj2_type, obj2_id))
+
+
+def unlink_objects_command():
+ args = demisto.args()
+ obj1_type = args.get('obj1_type')
+ obj1_id = args.get('obj1_id')
+ obj2_type = args.get('obj2_type')
+ obj2_id = args.get('obj2_id')
+
+ if isinstance(obj1_id, str) and not obj1_id.isdigit() or isinstance(obj2_id, str) and not obj2_id.isdigit():
+ return_error('obj1_id, obj2_id arguments must be integers.')
+
+ if obj1_type == obj2_type and obj1_id == obj2_id:
+ return_error('An object cannot be linked to itself.')
+
+ p_id = get_pivot_id(obj1_type, obj1_id, obj2_type, obj2_id)
+ url_suffix = '/{0}/{1}/{2}'.format(OBJ_DIRECTORY[obj1_type], obj1_id, OBJ_DIRECTORY[obj2_type])
+ tq_request('DELETE', url_suffix, params=[p_id])
+ demisto.results(
+ 'Successfully unlinked {0} with ID {1} and {2} with ID {3}.'.format(obj1_type, obj1_id, obj2_type, obj2_id))
+
+
+def update_score_command():
+ # Note: We can't update the DBot score, because the API response doesn't include the indicator value.
+ args = demisto.args()
+ indicator_id = args.get('id')
+ score = args.get('score')
+
+ if isinstance(indicator_id, str) and not indicator_id.isdigit():
+ return_error('id argument must be an integer.')
+
+ if isinstance(score, str) and not score.isdigit(): # User chose 'Generated Score' option
+ manual_score = None
+ else:
+ manual_score = int(score)
+
+ url_suffix = '/indicator/{0}/scores'.format(indicator_id)
+ params = {'manual_score': manual_score}
+
+ res = tq_request('PUT', url_suffix, params)
+
+ data = {
+ 'ID': int(indicator_id),
+ 'TQScore': get_tq_score_from_response(res['data'])
+ }
+
+ ec = {CONTEXT_PATH['indicator']: data}
+
+ readable = 'Successfully updated score of indicator with ID {0} to {1}. '\
+ 'Note that the final score is the maximum of ' \
+ 'the manual and generated scores.'.format(indicator_id, int(data['TQScore']))
+
+ return_outputs(readable, ec, res)
+
+
+def add_source_command():
+ args = demisto.args()
+ source = args.get('source')
+ obj_id = args.get('obj_id')
+ obj_type = args.get('obj_type')
+
+ if isinstance(obj_id, str) and not obj_id.isdigit():
+ return_error('obj_id argument must be an integer.')
+
+ url_suffix = '/{0}/{1}/sources'.format(OBJ_DIRECTORY[obj_type], obj_id)
+ params = {
+ 'name': source
+ }
+
+ tq_request('POST', url_suffix, params)
+ demisto.results('Successfully added source {0} to {1} with ID {2}.'.format(source, obj_type, obj_id))
+
+
+def delete_source_command():
+ args = demisto.args()
+ source_id = args.get('source_id')
+ obj_id = args.get('obj_id')
+ obj_type = args.get('obj_type')
+
+ if isinstance(obj_id, str) and not obj_id.isdigit():
+ return_error('obj_id argument must be an integer.')
+ if isinstance(source_id, str) and not source_id.isdigit():
+ return_error('source_id argument must be an integer.')
+
+ url_suffix = '/{0}/{1}/sources/{2}'.format(OBJ_DIRECTORY[obj_type], obj_id, source_id)
+
+ tq_request('DELETE', url_suffix)
+ demisto.results('Successfully deleted source #{0} from {1} with ID {2}.'.format(source_id, obj_type, obj_id))
+
+
+def add_attribute_command():
+ args = demisto.args()
+ attribute_name = args.get('name')
+ attribute_value = args.get('value')
+ obj_type = args.get('obj_type')
+ obj_id = args.get('obj_id')
+
+ if isinstance(obj_id, str) and not obj_id.isdigit():
+ return_error('obj_id argument must be an integer.')
+
+ url_suffix = '/{0}/{1}/attributes'.format(OBJ_DIRECTORY[obj_type], obj_id)
+ params = {
+ 'name': attribute_name,
+ 'value': attribute_value
+ }
+
+ tq_request('POST', url_suffix, params)
+ demisto.results('Successfully added attribute to {0} with ID {1}.'.format(obj_type, obj_id))
+
+
+def modify_attribute_command():
+ args = demisto.args()
+ attribute_id = args.get('attribute_id')
+ attribute_value = args.get('attribute_value')
+ obj_type = args.get('obj_type')
+ obj_id = args.get('obj_id')
+
+ if isinstance(obj_id, str) and not obj_id.isdigit():
+ return_error('obj_id argument must be an integer.')
+ if isinstance(attribute_id, str) and not attribute_id.isdigit():
+ return_error('attribute_id argument must be an integer.')
+
+ url_suffix = '/{0}/{1}/attributes/{2}'.format(OBJ_DIRECTORY[obj_type], obj_id, attribute_id)
+ params = {'value': attribute_value}
+
+ tq_request('PUT', url_suffix, params)
+
+ demisto.results('Successfully modified attribute #{0} of {1} with ID {2}.'.format(attribute_id, obj_type, obj_id))
+
+
+def delete_attribute_command():
+ args = demisto.args()
+ attribute_id = args.get('attribute_id')
+ obj_type = args.get('obj_type')
+ obj_id = args.get('obj_id')
+
+ if isinstance(obj_id, str) and not obj_id.isdigit():
+ return_error('obj_id argument must be an integer.')
+ if isinstance(attribute_id, str) and not attribute_id.isdigit():
+ return_error('attribute_id argument must be an integer.')
+
+ url_suffix = '/{0}/{1}/attributes/{2}'.format(OBJ_DIRECTORY[obj_type], obj_id, attribute_id)
+
+ tq_request('DELETE', url_suffix)
+ demisto.results('Successfully deleted attribute #{0} from {1} with ID {2}.'.format(attribute_id, obj_type, obj_id))
+
+
+def update_status_command():
+ args = demisto.args()
+ indicator_id = args.get('id')
+ status = args.get('status')
+
+ if isinstance(indicator_id, str) and not indicator_id.isdigit():
+ return_error('id argument must be an integer.')
+
+ url_suffix = '/indicators/{0}'.format(indicator_id)
+ params = {'status': status}
+
+ res = tq_request('PUT', url_suffix, params)
+
+ data = {
+ 'ID': int(indicator_id),
+ 'Status': STATUS_ID_TO_STATUS[res['data'].get('status_id')],
+ }
+
+ ec = {CONTEXT_PATH['indicator']: data}
+
+ readable = 'Successfully updated status of indicator with ID {0} to {1}.'.format(indicator_id, status)
+
+ return_outputs(readable, ec, res)
+
+
+def upload_file_command():
+ args = demisto.args()
+ entry_id = args.get('entry_id')
+ title = args.get('title')
+ malware_safety_lock = args.get('malware_safety_lock')
+ file_category = args.get('file_category')
+
+ file_info = demisto.getFilePath(entry_id)
+
+ if not title:
+ title = file_info['name']
+
+ params = {
+ 'name': file_info['name'],
+ 'title': title,
+ 'type': file_category,
+ 'malware_locked': malware_locked_to_request_format(malware_safety_lock)
+ }
+
+ try:
+ shutil.copy(file_info['path'], file_info['name'])
+ except Exception as e:
+ return_error('Failed to prepare file for upload. Error message: {0}'.format(str(e)))
+
+ try:
+ with open(file_info['name'], 'rb') as f:
+ files = {'file': f}
+ url_suffix = '/attachments'
+ res = tq_request('POST', url_suffix, params, files=files)
+ finally:
+ # Clean up the local copy with os.remove; this is a single file, not a directory
+ # tree, so shutil.rmtree would fail here and (with ignore_errors) leave the file behind.
+ if os.path.exists(file_info['name']):
+ os.remove(file_info['name'])
+
+ data = file_data_to_demisto_format(res['data'])
+
+ ec = {CONTEXT_PATH['attachment']: data}
+
+ readable_title = 'Successfully uploaded file {0}.'.format(file_info['name'])
+ readable = build_readable(readable_title, 'attachment', data)
+
+ return_outputs(readable, ec, res)
+
+
+def download_file_command():
+ args = demisto.args()
+ file_id = args.get('id')
+
+ if isinstance(file_id, str) and not file_id.isdigit():
+ return_error('id argument must be an integer.')
+
+ url_suffix = '/attachments/{0}/download'.format(file_id)
+
+ res = tq_request('GET', url_suffix, retrieve_entire_response=True)
+
+ # 'Content-Disposition' value is of the form: attachment; filename="filename.txt"
+ # Since we don't have the file name anywhere else in the response object, we parse it from this entry.
+ filename = res.headers.get('Content-Disposition', str()).split('\"')[1]
+ content = res.content
+
+ demisto.results(fileResult(filename, content))
+
+
+def get_all_objs_command(obj_type):
+ args = demisto.args()
+ page = int(args.get('page'))
+ limit = int(args.get('limit'))
+ if limit > 200:
+ limit = 200
+
+ url_suffix = '/{0}?with=attributes,sources'.format(OBJ_DIRECTORY[obj_type])
+ if obj_type == 'indicator':
+ url_suffix += ',score'
+ res = tq_request('GET', url_suffix)
+
+ from_index = min(page, len(res['data']))
+ to_index = min(from_index + limit, len(res['data']))
+
+ data = [data_to_demisto_format(obj, obj_type) for obj in res['data'][from_index:to_index]]
+ ec = {CONTEXT_PATH[obj_type]: createContext(data, removeNull=True)} if data else {}
+
+ readable_title = 'List of all objects of type {0} - {1}-{2}'.format(obj_type, from_index, to_index - 1)
+ metadata = 'Total number of objects is {0}'.format(len(res['data']))
+ readable = build_readable(readable_title, obj_type, data, metadata=metadata)
+
+ return_outputs(readable, ec, res)
+
+
+def get_ip_reputation():
+ args = demisto.args()
+ ip = args.get('ip')
+
+ if not is_ip_valid(ip, accept_v6_ips=True):
+ return_error('{0} is not a valid IP address.'.format(ip))
+
+ generic_context = {'Address': ip}
+
+ make_indicator_reputation_request(indicator_type='ip', value=ip, generic_context=generic_context)
+
+
+def get_url_reputation():
+ args = demisto.args()
+ url = args.get('url')
+
+ if not REGEX_MAP['url'].match(url):
+ return_error('{0} is not a valid URL.'.format(url))
+
+ generic_context = {'Data': url}
+
+ make_indicator_reputation_request(indicator_type='url', value=url, generic_context=generic_context)
+
+
+def get_email_reputation():
+ args = demisto.args()
+ email = args.get('email')
+
+ if not REGEX_MAP['email'].match(email):
+ return_error('{0} is not a valid email address.'.format(email))
+
+ generic_context = {'Address': email}
+
+ make_indicator_reputation_request(indicator_type='email', value=email, generic_context=generic_context)
+
+
+def get_domain_reputation():
+ args = demisto.args()
+ domain = args.get('domain')
+
+ if not REGEX_MAP['domain'].match(domain):
+ return_error('{0} is not a valid domain.'.format(domain))
+
+ generic_context = {'Name': domain}
+
+ make_indicator_reputation_request(indicator_type='domain', value=domain, generic_context=generic_context)
+
+
+def get_file_reputation():
+ args = demisto.args()
+ file = args.get('file')
+
+ for fmt in ['md5', 'sha1', 'sha256']:
+ if REGEX_MAP[fmt].match(file):
+ break
+ else:  # for/else: none of the hash patterns matched
+ return_error('{0} is not a valid file format.'.format(file))
+
+ generic_context = createContext({
+ 'MD5': file if fmt == 'md5' else None,
+ 'SHA1': file if fmt == 'sha1' else None,
+ 'SHA256': file if fmt == 'sha256' else None
+ }, removeNull=True)
+
+ make_indicator_reputation_request(indicator_type='file', value=file, generic_context=generic_context)
+
+
+''' EXECUTION CODE '''
+command = demisto.command()
+LOG('command is {0}'.format(demisto.command()))
+try:
+ handle_proxy()
+ if command == 'test-module':
+ test_module()
+ elif command == 'threatq-search-by-name':
+ search_by_name_command()
+ elif command == 'threatq-search-by-id':
+ search_by_id_command()
+ elif command == 'threatq-create-indicator':
+ create_indicator_command()
+ elif command == 'threatq-create-event':
+ create_event_command()
+ elif command == 'threatq-create-adversary':
+ create_adversary_command()
+ elif command == 'threatq-edit-indicator':
+ edit_indicator_command()
+ elif command == 'threatq-edit-event':
+ edit_event_command()
+ elif command == 'threatq-edit-adversary':
+ edit_adversary_command()
+ elif command == 'threatq-delete-object':
+ delete_object_command()
+ elif command == 'threatq-get-related-indicators':
+ get_related_objs_command('indicator')
+ elif command == 'threatq-get-related-events':
+ get_related_objs_command('event')
+ elif command == 'threatq-get-related-adversaries':
+ get_related_objs_command('adversary')
+ elif command == 'threatq-link-objects':
+ link_objects_command()
+ elif command == 'threatq-unlink-objects':
+ unlink_objects_command()
+ elif command == 'threatq-update-score':
+ update_score_command()
+ elif command == 'threatq-add-source':
+ add_source_command()
+ elif command == 'threatq-delete-source':
+ delete_source_command()
+ elif command == 'threatq-add-attribute':
+ add_attribute_command()
+ elif command == 'threatq-modify-attribute':
+ modify_attribute_command()
+ elif command == 'threatq-delete-attribute':
+ delete_attribute_command()
+ elif command == 'threatq-update-status':
+ update_status_command()
+ elif command == 'threatq-upload-file':
+ upload_file_command()
+ elif command == 'threatq-download-file':
+ download_file_command()
+ elif command == 'threatq-get-all-indicators':
+ get_all_objs_command('indicator')
+ elif command == 'threatq-get-all-events':
+ get_all_objs_command('event')
+ elif command == 'threatq-get-all-adversaries':
+ get_all_objs_command('adversary')
+ elif command == 'ip':
+ get_ip_reputation()
+ elif command == 'domain':
+ get_domain_reputation()
+ elif command == 'email':
+ get_email_reputation()
+ elif command == 'url':
+ get_url_reputation()
+ elif command == 'file':
+ get_file_reputation()
+
+except Exception as ex:
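+    # Report any unexpected failure as an error entry; return_error also stops the script.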
+ return_error(str(ex))
diff --git a/Integrations/ThreatQ_v2/ThreatQ_v2.yml b/Integrations/ThreatQ_v2/ThreatQ_v2.yml
new file mode 100644
index 000000000000..e0519815276f
--- /dev/null
+++ b/Integrations/ThreatQ_v2/ThreatQ_v2.yml
@@ -0,0 +1,2190 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: ThreatQ v2
+ version: -1
+configuration:
+- display: ThreatQ server URL (e.g. https://192.168.1.136)
+ name: serverUrl
+ required: true
+ type: 0
+- display: ThreatQ client ID
+ name: client_id
+ required: true
+ type: 0
+- display: Email
+ name: credentials
+ required: true
+ type: 9
+- defaultvalue: '8'
+ display: Indicator threshold (minimum TQ score to consider the indicator malicious).
+ name: threshold
+ required: false
+ type: 0
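+# The reputation commands compare an indicator's ThreatQ score against this threshold when deciding the malicious verdict.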
+- defaultvalue: 'false'
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: 'A threat intelligence platform that collects and interprets intelligence data
+ from open sources and manages indicator scoring, types, and attributes.'
+display: ThreatQ v2
+name: ThreatQ v2
+script:
+ commands:
+ - arguments:
+ - default: true
+    description: Name of the object to search for.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: The maximum number of records to retrieve.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Searches for objects by name in the ThreatQ repository.
+ execution: false
+ name: threatq-search-by-name
+ outputs:
+ - contextPath: ThreatQ.Indicator.ID
+ description: The ID of the Indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Value
+ description: The value of the Indicator.
+ type: String
+ - contextPath: ThreatQ.Event.ID
+ description: The ID of the Event.
+ type: Number
+ - contextPath: ThreatQ.Event.Title
+ description: The title of the Event.
+ type: String
+ - contextPath: ThreatQ.Adversary.ID
+ description: The ID of the Adversary.
+ type: Number
+ - contextPath: ThreatQ.Adversary.Name
+ description: The name of the Adversary.
+ type: String
+ - arguments:
+ - default: true
+ defaultValue: ''
+ description: The IP address to check.
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks the reputation of an IP address in ThreatQ.
+ execution: false
+ name: ip
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The value of the indicator.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The type of the indicator.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: The vendor of the indicator.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The DBot Score of the indicator.
+ type: Number
+ - contextPath: IP.Address
+    description: The IP address.
+    type: String
+  - contextPath: IP.Malicious.Vendor
+    description: The vendor that reported the IP address as malicious.
+    type: String
+  - contextPath: IP.Malicious.Description
+    description: The description of the malicious IP address.
+ type: String
+ - contextPath: ThreatQ.Indicator.ID
+    description: The ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Value
+ description: The value of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Source.ID
+ description: The source ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Source.Name
+ description: The source name of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Attribute.ID
+ description: The attribute ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Attribute.Value
+ description: The attribute value of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Attribute.Name
+ description: The attribute name of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.CreatedAt
+ description: The creation date of the indicator.
+ type: Date
+ - contextPath: ThreatQ.Indicator.UpdatedAt
+ description: The last update date of the indicator.
+ type: Date
+ - contextPath: ThreatQ.Indicator.Status
+ description: The status of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.TQScore
+ description: The ThreatQ score of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Description
+ description: The description of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Type
+ description: The type of the indicator.
+ type: String
+ - arguments:
+ - default: true
+ defaultValue: ''
+    description: The URL to check.
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks the reputation of a URL in ThreatQ.
+ execution: false
+ name: url
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The value of the indicator.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The type of the indicator.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: The vendor of the indicator.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The DBot Score of the indicator.
+ type: Number
+ - contextPath: URL.Data
+ description: The URL.
+ type: String
+ - contextPath: URL.Malicious.Vendor
+ description: The vendor of the malicious URL.
+ type: String
+ - contextPath: URL.Malicious.Description
+ description: The description of the malicious URL.
+ type: String
+ - contextPath: ThreatQ.Indicator.ID
+ description: The ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Value
+ description: The value of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Source.ID
+    description: The source ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Source.Name
+    description: The source name of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Attribute.ID
+ description: The attribute ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Attribute.Value
+ description: The attribute value of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Attribute.Name
+ description: The attribute name of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.CreatedAt
+ description: The creation date of the indicator.
+ type: Date
+ - contextPath: ThreatQ.Indicator.UpdatedAt
+ description: The last update date of the indicator.
+ type: Date
+ - contextPath: ThreatQ.Indicator.Status
+ description: The status of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.TQScore
+ description: The ThreatQ score of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Description
+ description: The description of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Type
+ description: The type of the indicator.
+ type: String
+ - arguments:
+ - default: true
+ defaultValue: ''
+    description: The MD5, SHA-1, or SHA-256 hash of the file to check.
+ isArray: false
+ name: file
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks the reputation of a file in ThreatQ.
+ execution: false
+ name: file
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The value of the indicator.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The type of the indicator.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: The vendor of the indicator.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The DBot Score of the indicator.
+ type: Number
+ - contextPath: File.Name
+ description: The name of the file.
+ type: String
+ - contextPath: File.MD5
+ description: The MD5 of the file.
+ type: String
+ - contextPath: File.SHA1
+ description: The SHA1 of the file.
+ type: String
+ - contextPath: File.SHA256
+ description: The SHA256 of the file.
+ type: String
+ - contextPath: File.SHA512
+ description: The SHA512 of the file.
+ type: String
+ - contextPath: File.Path
+ description: The path of the file.
+ type: String
+ - contextPath: File.Malicious.Vendor
+ description: The vendor of the malicious file.
+ type: String
+ - contextPath: File.Malicious.Description
+ description: The description of the malicious file.
+ type: String
+ - contextPath: ThreatQ.Indicator.ID
+ description: The ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Value
+ description: The value of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Source.ID
+ description: The source ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Source.Name
+ description: The source name of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Attribute.ID
+ description: The attribute ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Attribute.Value
+ description: The attribute value of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Attribute.Name
+ description: The attribute name of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.CreatedAt
+ description: The creation date of the indicator.
+ type: Date
+ - contextPath: ThreatQ.Indicator.UpdatedAt
+ description: The last update date of the indicator.
+ type: Date
+ - contextPath: ThreatQ.Indicator.Status
+ description: The status of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.TQScore
+ description: The ThreatQ score of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Description
+ description: The description of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Type
+ description: The type of the indicator.
+ type: String
+ - arguments:
+ - default: true
+ defaultValue: ''
+ description: The email address to check.
+ isArray: false
+ name: email
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks the reputation of an email in ThreatQ.
+ execution: false
+ name: email
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The value of the indicator.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The type of the indicator.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: The vendor of the indicator.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The DBot Score of the indicator.
+ type: Number
+ - contextPath: Account.Email.Address
+    description: The email address.
+ type: String
+ - contextPath: Account.Malicious.Vendor
+ description: The vendor of the malicious account.
+ type: String
+ - contextPath: Account.Malicious.Description
+ description: The description of the malicious account.
+ type: String
+ - contextPath: ThreatQ.Indicator.ID
+ description: The ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Value
+ description: The value of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Source.ID
+ description: The source ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Source.Name
+ description: The source name of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Attribute.ID
+ description: The attribute ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Attribute.Value
+ description: The attribute value of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Attribute.Name
+ description: The attribute name of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.CreatedAt
+ description: The creation date of the indicator.
+ type: Date
+ - contextPath: ThreatQ.Indicator.UpdatedAt
+ description: The last update date of the indicator.
+ type: Date
+ - contextPath: ThreatQ.Indicator.Status
+ description: The status of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.TQScore
+ description: The ThreatQ score of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Description
+ description: The description of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Type
+ description: The type of the indicator.
+ type: String
+ - arguments:
+ - default: true
+    description: The domain or FQDN to check.
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Checks the reputation of a domain in ThreatQ.
+ execution: false
+ name: domain
+ outputs:
+ - contextPath: DBotScore.Indicator
+ description: The value of the indicator.
+ type: String
+ - contextPath: DBotScore.Vendor
+ description: The vendor of the indicator.
+ type: String
+ - contextPath: DBotScore.Type
+ description: The type of the indicator.
+ type: String
+ - contextPath: DBotScore.Score
+ description: The DBot Score of the indicator.
+ type: Number
+ - contextPath: Domain.Name
+ description: The name of the domain.
+ type: String
+ - contextPath: Domain.Malicious.Vendor
+ description: The vendor of the malicious domain.
+ type: String
+ - contextPath: Domain.Malicious.Description
+ description: The description of the malicious domain.
+ type: String
+ - contextPath: ThreatQ.Indicator.ID
+ description: The ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Value
+ description: The value of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Source.ID
+ description: The source ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Source.Name
+ description: The source name of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Attribute.ID
+ description: The attribute ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Attribute.Value
+ description: The attribute value of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Attribute.Name
+ description: The attribute name of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.CreatedAt
+ description: The creation date of the indicator.
+ type: Date
+ - contextPath: ThreatQ.Indicator.UpdatedAt
+ description: The last update date of the indicator.
+ type: Date
+ - contextPath: ThreatQ.Indicator.Status
+ description: The status of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.TQScore
+ description: The ThreatQ score of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Description
+ description: The description of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Type
+ description: The type of the indicator.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+    description: The type of the indicator, such as Email Address, IP Address, Registry Key,
+      Binary String, and so on.
+ isArray: false
+ name: type
+ predefined:
+ - Binary String
+ - CIDR Block
+ - CVE
+ - Email Address
+ - Email Attachment
+ - Email Subject
+ - File Mapping
+ - File Path
+ - Filename
+ - FQDN
+ - Fuzzy Hash
+ - GOST Hash
+ - Hash ION
+ - IP Address
+ - IPv6 Address
+ - MD5
+ - Mutex
+ - Password
+ - Registry Key
+ - Service Name
+ - SHA-1
+ - SHA-256
+ - SHA-384
+ - SHA-512
+ - String
+ - x509 Serial
+ - x509 Subject
+ - URL
+ - URL Path
+ - User-agent
+ - Username
+ - X-Mailer
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The status of the indicator. Can be: "Active", "Expired", "Indirect",
+ "Review", or "Whitelisted".'
+ isArray: false
+ name: status
+ predefined:
+ - Active
+ - Expired
+ - Indirect
+ - Review
+ - Whitelisted
+ required: true
+ secret: false
+ - default: false
+ description: The value of the indicator.
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ - default: false
+    description: A comma-separated list of source names.
+ isArray: true
+ name: sources
+ required: false
+ secret: false
+ - default: false
+    description: A comma-separated list of attribute names. The i-th element in this
+      list corresponds to the i-th element in the attributes values list.
+ isArray: true
+ name: attributes_names
+ required: false
+ secret: false
+ - default: false
+    description: A comma-separated list of attribute values. The i-th element in this
+      list corresponds to the i-th element in the attributes names list.
+ isArray: true
+ name: attributes_values
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new indicator in ThreatQ.
+ execution: false
+ name: threatq-create-indicator
+ outputs:
+ - contextPath: ThreatQ.Indicator.ID
+ description: The ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Value
+ description: The value of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Source.ID
+ description: The source ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Source.Name
+ description: The source name of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Attribute.ID
+ description: The attribute ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Attribute.Value
+ description: The attribute value of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Attribute.Name
+ description: The attribute name of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.CreatedAt
+ description: The creation date of the indicator.
+ type: Date
+ - contextPath: ThreatQ.Indicator.UpdatedAt
+ description: The last update date of the indicator.
+ type: Date
+ - contextPath: ThreatQ.Indicator.Status
+ description: The status of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.TQScore
+ description: The ThreatQ score of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Description
+ description: The description of the indicator.
+ type: String
+ - contextPath: ThreatQ.Indicator.Type
+ description: The type of the indicator.
+ type: String
+ - arguments:
+ - default: false
+ description: The name of the attribute to add.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The value of the attribute to add.
+ isArray: false
+ name: value
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the object to add. Can be: "indicator", "event", "adversary", or "attachment".'
+ isArray: false
+ name: obj_type
+ predefined:
+ - indicator
+ - event
+ - adversary
+ - attachment
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the Object.
+ isArray: false
+ name: obj_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds an attribute to an object in ThreatQ.
+ execution: false
+ name: threatq-add-attribute
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the object. Can be: "indicator", "event", "adversary", or "attachment".'
+ isArray: false
+ name: obj_type
+ predefined:
+ - indicator
+ - adversary
+ - event
+ - attachment
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the object.
+ isArray: false
+ name: obj_id
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the attribute to modify.
+ isArray: false
+ name: attribute_id
+ required: true
+ secret: false
+ - default: false
+ description: The new value of the attribute.
+ isArray: false
+ name: attribute_value
+ required: true
+ secret: false
+ deprecated: false
+ description: Modifies an attribute for an object in ThreatQ.
+ execution: false
+ name: threatq-modify-attribute
+ - arguments:
+ - default: false
+ description: The ID of the first object.
+ isArray: false
+ name: obj1_id
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the second object.
+ isArray: false
+ name: obj2_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the first object. Can be: "indicator", "adversary", or "event".'
+ isArray: false
+ name: obj1_type
+ predefined:
+ - indicator
+ - adversary
+ - event
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the second object. Can be: "indicator", "adversary", or "event".'
+ isArray: false
+ name: obj2_type
+ predefined:
+ - indicator
+ - adversary
+ - event
+ required: true
+ secret: false
+ deprecated: false
+ description: Links two objects together in ThreatQ.
+ execution: false
+ name: threatq-link-objects
+ - arguments:
+ - default: false
+ description: Name of the adversary to create.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+    description: A comma-separated list of source names.
+ isArray: true
+ name: sources
+ required: false
+ secret: false
+ - default: false
+    description: A comma-separated list of attribute names. The i-th element in this
+      list corresponds to the i-th element in the attributes values list.
+ isArray: true
+ name: attributes_names
+ required: false
+ secret: false
+ - default: false
+    description: A comma-separated list of attribute values. The i-th element in this
+      list corresponds to the i-th element in the attributes names list.
+ isArray: true
+ name: attributes_values
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new adversary in ThreatQ.
+ execution: false
+ name: threatq-create-adversary
+ outputs:
+ - contextPath: ThreatQ.Adversary.Name
+ description: The name of the adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.ID
+ description: The ID of the adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.Source.ID
+ description: The source ID of the adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.Source.Name
+ description: The source name of the adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.Attribute.ID
+ description: The ID of the adversary's attribute.
+ type: number
+ - contextPath: ThreatQ.Adversary.Attribute.Name
+ description: The name of the adversary's attribute.
+ type: string
+ - contextPath: ThreatQ.Adversary.Attribute.Value
+ description: The value of the adversary's attribute.
+ type: string
+  - contextPath: ThreatQ.Adversary.UpdatedAt
+    description: The last update date of the adversary.
+    type: date
+  - contextPath: ThreatQ.Adversary.CreatedAt
+    description: The creation date of the adversary.
+    type: date
+ - arguments:
+ - default: false
+ description: Title of the event.
+ isArray: false
+ name: title
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: The type of the event, such as Malware, Watchlist, Command and Control, and so on.
+ isArray: false
+ name: type
+ predefined:
+ - Spearphish
+ - Watering Hole
+ - SQL Injection Attack
+ - DoS Attack
+ - Malware
+ - Watchlist
+ - Command and Control
+ - Anonymization
+ - Exfiltration
+ - Host Characteristics
+ - Compromised PKI Certificate
+ - Login Compromise
+ - Incident
+ required: true
+ secret: false
+ - default: false
+    description: 'The date the event occurred. Supported formats: YYYY-mm-dd HH:MM:SS or YYYY-mm-dd.'
+ isArray: false
+ name: date
+ required: true
+ secret: false
+ - default: false
+    description: A comma-separated list of source names.
+ isArray: true
+ name: sources
+ required: false
+ secret: false
+ - default: false
+    description: A comma-separated list of attribute names. The i-th element in this
+      list corresponds to the i-th element in the attributes values list.
+ isArray: true
+ name: attributes_names
+ required: false
+ secret: false
+ - default: false
+    description: A comma-separated list of attribute values. The i-th element in this
+      list corresponds to the i-th element in the attributes names list.
+ isArray: true
+ name: attributes_values
+ required: false
+ secret: false
+ deprecated: false
+ description: Creates a new event in ThreatQ.
+ execution: false
+ name: threatq-create-event
+ outputs:
+ - contextPath: ThreatQ.Event.ID
+ description: The ID of the event.
+ type: number
+ - contextPath: ThreatQ.Event.Source.ID
+ description: The source ID of the event.
+ type: number
+ - contextPath: ThreatQ.Event.Source.Name
+ description: The source name of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Attribute.ID
+ description: The ID of the event attribute.
+ type: number
+ - contextPath: ThreatQ.Event.Attribute.Name
+ description: The name of the event attribute.
+ type: string
+ - contextPath: ThreatQ.Event.Attribute.Value
+ description: The attribute value of the event.
+ type: string
+ - contextPath: ThreatQ.Event.UpdatedAt
+ description: The last update date of the event.
+ type: date
+ - contextPath: ThreatQ.Event.CreatedAt
+ description: The creation date of the event.
+ type: date
+ - contextPath: ThreatQ.Event.Type
+ description: The type of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Description
+ description: The description of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Title
+ description: The title of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Occurred
+    description: The date that the event occurred.
+ type: date
+ - arguments:
+ - default: false
+ description: The ID of the object.
+ isArray: false
+ name: obj_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the object. Can be: "indicator", "event", or "adversary".'
+ isArray: false
+ name: obj_type
+ predefined:
+ - indicator
+ - event
+ - adversary
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves related indicators for an object in ThreatQ.
+ execution: false
+ name: threatq-get-related-indicators
+ outputs:
+ - contextPath: ThreatQ.Indicator.RelatedIndicator.ID
+ description: The ID of the related indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.RelatedIndicator.Source.ID
+ description: The source ID of the related indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.RelatedIndicator.Source.Name
+ description: The source name of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedIndicator.Attribute.ID
+ description: The attribute ID of the related indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.RelatedIndicator.Attribute.Name
+ description: The attribute name of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedIndicator.Attribute.Value
+ description: The attribute value of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedIndicator.UpdatedAt
+ description: The last update date of the related indicator.
+ type: date
+ - contextPath: ThreatQ.Indicator.RelatedIndicator.CreatedAt
+ description: The creation date of the related indicator.
+ type: date
+ - contextPath: ThreatQ.Indicator.RelatedIndicator.Type
+ description: The type of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedIndicator.Description
+ description: The description of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedIndicator.Value
+ description: The value of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedIndicator.Status
+ description: The status of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedIndicator.TQScore
+ description: The ThreatQ score of the related indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.ID
+ description: The ID of the indicator.
+ type: number
+ - contextPath: ThreatQ.Event.RelatedIndicator.ID
+ description: The ID of the related indicator.
+ type: number
+ - contextPath: ThreatQ.Event.RelatedIndicator.Source.ID
+ description: The source ID of the related indicator.
+ type: number
+ - contextPath: ThreatQ.Event.RelatedIndicator.Source.Name
+ description: The source name of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedIndicator.Attribute.ID
+ description: The attribute ID of the related indicator.
+ type: number
+ - contextPath: ThreatQ.Event.RelatedIndicator.Attribute.Name
+ description: The attribute name of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedIndicator.Attribute.Value
+ description: The attribute value of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedIndicator.UpdatedAt
+ description: The last update date of the related indicator.
+ type: date
+ - contextPath: ThreatQ.Event.RelatedIndicator.CreatedAt
+ description: The creation date of the related indicator.
+ type: date
+ - contextPath: ThreatQ.Event.RelatedIndicator.Type
+ description: The type of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedIndicator.Description
+ description: The description of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedIndicator.Value
+ description: The value of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedIndicator.Status
+ description: The status of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedIndicator.TQScore
+ description: The ThreatQ score of the related indicator.
+ type: number
+ - contextPath: ThreatQ.Event.ID
+ description: ID of the Event.
+ type: number
+ - contextPath: ThreatQ.Adversary.RelatedIndicator.ID
+ description: ID of the related indicator.
+ type: number
+ - contextPath: ThreatQ.Adversary.RelatedIndicator.Source.ID
+ description: Source ID of the related indicator.
+ type: number
+ - contextPath: ThreatQ.Adversary.RelatedIndicator.Source.Name
+ description: Source name of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedIndicator.Attribute.ID
+    description: The attribute ID of the related indicator.
+ type: number
+ - contextPath: ThreatQ.Adversary.RelatedIndicator.Attribute.Name
+ description: Attribute name of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedIndicator.Attribute.Value
+ description: Attribute value of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedIndicator.UpdatedAt
+ description: The last update date of the related indicator.
+ type: date
+ - contextPath: ThreatQ.Adversary.RelatedIndicator.CreatedAt
+ description: The creation date of the related indicator.
+ type: date
+ - contextPath: ThreatQ.Adversary.RelatedIndicator.Type
+ description: The type of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedIndicator.Description
+ description: Description of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedIndicator.Value
+ description: The value of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedIndicator.Status
+ description: The status of the related indicator.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedIndicator.TQScore
+ description: The ThreatQ score of the related indicator.
+ type: number
+ - contextPath: ThreatQ.Adversary.ID
+ description: ID of the Adversary.
+ type: number
+ - arguments:
+ - default: false
+ description: The ID of the indicator.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The new status of the indicator. Can be: "Active", "Expired",
+ "Indirect", "Review", or "Whitelisted".'
+ isArray: false
+ name: status
+ predefined:
+ - Active
+ - Expired
+ - Indirect
+ - Review
+ - Whitelisted
+ required: true
+ secret: false
+ deprecated: false
+ description: Updates an indicator status in ThreatQ.
+ execution: false
+ name: threatq-update-status
+ outputs:
+ - contextPath: ThreatQ.Indicator.ID
+ description: ID of the indicator.
+ type: Number
+ - contextPath: ThreatQ.Indicator.Status
+ description: Status of the indicator.
+ type: String
+ - arguments:
+ - default: false
+ description: ID of the object.
+ isArray: false
+ name: obj_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the object. Can be: "indicator", "event", or "adversary".'
+ isArray: false
+ name: obj_type
+ predefined:
+ - indicator
+ - event
+ - adversary
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves related events of an object in ThreatQ.
+ execution: false
+ name: threatq-get-related-events
+ outputs:
+ - contextPath: ThreatQ.Indicator.RelatedEvent.ID
+ description: ID of the related event.
+ type: number
+ - contextPath: ThreatQ.Indicator.RelatedEvent.Source.ID
+ description: Source ID of the related event.
+ type: number
+ - contextPath: ThreatQ.Indicator.RelatedEvent.Source.Name
+ description: Source name of the related event.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedEvent.Attribute.ID
+ description: The attribute ID of the related event.
+ type: number
+ - contextPath: ThreatQ.Indicator.RelatedEvent.Attribute.Name
+ description: The attribute name of the related event.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedEvent.Attribute.Value
+ description: The attribute value of the related event.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedEvent.UpdatedAt
+ description: The last update date of the related event.
+ type: date
+ - contextPath: ThreatQ.Indicator.RelatedEvent.CreatedAt
+ description: The creation date of the related event.
+ type: date
+ - contextPath: ThreatQ.Indicator.RelatedEvent.Description
+ description: Description of the related event.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedEvent.Title
+ description: The title of the related event.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedEvent.Occurred
+ description: The date of occurrence of the related event.
+ type: date
+ - contextPath: ThreatQ.Indicator.RelatedEvent.Type
+ description: The type of the related event.
+ type: string
+ - contextPath: ThreatQ.Indicator.ID
+ description: The ID of the Indicator.
+ type: number
+ - contextPath: ThreatQ.Event.RelatedEvent.ID
+ description: The ID of the related event.
+ type: number
+ - contextPath: ThreatQ.Event.RelatedEvent.Source.ID
+ description: The source ID of the related event.
+ type: number
+ - contextPath: ThreatQ.Event.RelatedEvent.Source.Name
+ description: The source name of the related event.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedEvent.Attribute.ID
+ description: The attribute ID of the related event.
+ type: number
+ - contextPath: ThreatQ.Event.RelatedEvent.Attribute.Name
+ description: The attribute name of the related event.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedEvent.Attribute.Value
+ description: The attribute value of the related event.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedEvent.UpdatedAt
+ description: The last update date of the related event.
+ type: date
+ - contextPath: ThreatQ.Event.RelatedEvent.CreatedAt
+ description: The creation date of the related event.
+ type: date
+ - contextPath: ThreatQ.Event.RelatedEvent.Description
+ description: The description of the related event.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedEvent.Title
+ description: The title of the related event.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedEvent.Occurred
+ description: The date of occurrence of the related event.
+ type: date
+ - contextPath: ThreatQ.Event.RelatedEvent.Type
+ description: The type of the related event.
+ type: string
+ - contextPath: ThreatQ.Event.ID
+ description: The ID of the Event.
+ type: number
+ - contextPath: ThreatQ.Adversary.RelatedEvent.ID
+ description: The ID of the related event.
+ type: number
+ - contextPath: ThreatQ.Adversary.RelatedEvent.Source.ID
+ description: The source ID of the related event.
+ type: number
+ - contextPath: ThreatQ.Adversary.RelatedEvent.Source.Name
+ description: The source name of the related event.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedEvent.Attribute.ID
+    description: The attribute ID of the related event.
+ type: number
+ - contextPath: ThreatQ.Adversary.RelatedEvent.Attribute.Name
+ description: The attribute name of the related event.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedEvent.Attribute.Value
+ description: The attribute value of the related event.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedEvent.UpdatedAt
+ description: The last update date of the related event.
+ type: date
+ - contextPath: ThreatQ.Adversary.RelatedEvent.CreatedAt
+ description: The creation date of the related event.
+ type: date
+ - contextPath: ThreatQ.Adversary.RelatedEvent.Description
+ description: The description of the related event.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedEvent.Title
+ description: The title of the related event.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedEvent.Occurred
+ description: The date of occurrence of the related event.
+ type: date
+ - contextPath: ThreatQ.Adversary.RelatedEvent.Type
+ description: The type of the related event.
+ type: string
+ - contextPath: ThreatQ.Adversary.ID
+ description: ID of the Adversary.
+ type: number
+ - arguments:
+ - default: false
+ description: ID of the object.
+ isArray: false
+ name: obj_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the object. Can be: "indicator", "event", or "adversary".'
+ isArray: false
+ name: obj_type
+ predefined:
+ - indicator
+ - event
+ - adversary
+ required: true
+ secret: false
+ deprecated: false
+  description: Retrieves related adversaries of an object in ThreatQ.
+ execution: false
+ name: threatq-get-related-adversaries
+ outputs:
+ - contextPath: ThreatQ.Indicator.RelatedAdversary.ID
+ description: ID of the related adversary.
+ type: number
+ - contextPath: ThreatQ.Indicator.RelatedAdversary.Source.ID
+ description: Source ID of the related adversary.
+ type: number
+ - contextPath: ThreatQ.Indicator.RelatedAdversary.Source.Name
+    description: The source name of the related adversary.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedAdversary.Attribute.ID
+ description: The attribute ID of the related adversary.
+ type: number
+ - contextPath: ThreatQ.Indicator.RelatedAdversary.Attribute.Name
+ description: The attribute name of the related adversary.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedAdversary.Attribute.Value
+ description: The attribute value of the related adversary.
+ type: string
+ - contextPath: ThreatQ.Indicator.RelatedAdversary.UpdatedAt
+ description: The last update date of the related adversary.
+ type: date
+ - contextPath: ThreatQ.Indicator.RelatedAdversary.CreatedAt
+ description: The creation date of the related adversary.
+ type: date
+ - contextPath: ThreatQ.Indicator.RelatedAdversary.Name
+ description: The name of the related adversary.
+ type: string
+ - contextPath: ThreatQ.Indicator.ID
+ description: The ID of the Indicator.
+ type: number
+ - contextPath: ThreatQ.Event.RelatedAdversary.ID
+ description: The ID of the related adversary.
+ type: number
+ - contextPath: ThreatQ.Event.RelatedAdversary.Source.ID
+ description: The source ID of the related adversary.
+ type: number
+ - contextPath: ThreatQ.Event.RelatedAdversary.Source.Name
+ description: The source name of the related adversary.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedAdversary.Attribute.ID
+ description: The attribute ID of the related adversary.
+ type: number
+ - contextPath: ThreatQ.Event.RelatedAdversary.Attribute.Name
+    description: The attribute name of the related adversary.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedAdversary.Attribute.Value
+ description: The attribute value of the related adversary.
+ type: string
+ - contextPath: ThreatQ.Event.RelatedAdversary.UpdatedAt
+ description: The last update date of the related adversary.
+ type: date
+ - contextPath: ThreatQ.Event.RelatedAdversary.CreatedAt
+ description: The creation date of the related adversary.
+ type: date
+ - contextPath: ThreatQ.Event.RelatedAdversary.Name
+ description: The name of the related adversary.
+ type: string
+ - contextPath: ThreatQ.Event.ID
+ description: The ID of the Event.
+ type: number
+ - contextPath: ThreatQ.Adversary.RelatedAdversary.ID
+    description: The ID of the related adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.RelatedAdversary.Source.ID
+ description: The source ID of the related adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.RelatedAdversary.Source.Name
+ description: The source name of the related adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedAdversary.Attribute.ID
+ description: The attribute ID of the related adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.RelatedAdversary.Attribute.Name
+ description: The attribute name of the related adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedAdversary.Attribute.Value
+ description: The attribute value of the related adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.RelatedAdversary.UpdatedAt
+ description: The last update date of the related adversary.
+ type: date
+ - contextPath: ThreatQ.Adversary.RelatedAdversary.CreatedAt
+ description: The creation date of the related adversary.
+ type: date
+ - contextPath: ThreatQ.Adversary.RelatedAdversary.Name
+ description: The name of the related adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.ID
+ description: The ID of the Adversary.
+ type: number
+ - arguments:
+ - default: false
+ description: The file entry ID in Demisto.
+ isArray: false
+ name: entry_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Category of the file, such as CrowdStrike Intelligence, FireEye Analysis, PDF, and so on.
+ isArray: false
+ name: file_category
+ predefined:
+ - Cuckoo
+ - CrowdStrike Intelligence
+ - Early Warning and Indicator Notice (EWIN)
+ - FireEye Analysis
+ - FBI FLASH
+ - Generic Text
+ - Intelligence Whitepaper
+ - iSight Report
+ - iSight ThreatScape Intelligence Report
+ - JIB
+ - MAEC
+ - Malware Analysis Report
+ - Malware Initial Findings Report (MFIR)
+ - Malware Sample
+ - Packet Capture
+ - Palo Alto Networks WildFire XML
+ - PCAP
+ - PDF
+ - Private Industry Notification (PIN)
+ - Spearphish Attachment
+ - STIX
+ - ThreatAnalyzer Analysis
+ - ThreatQ CSV File
+ - Whitepaper
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 'off'
+    description: 'Zips malware files for safer downloading. Can be: "on" or "off". Default is "off".'
+ isArray: false
+ name: malware_safety_lock
+ predefined:
+ - 'on'
+ - 'off'
+ required: false
+ secret: false
+ - default: false
+    description: Title of the file. Default is the file name.
+ isArray: false
+ name: title
+ required: false
+ secret: false
+ deprecated: false
+  description: Uploads a file to ThreatQ.
+ execution: false
+ name: threatq-upload-file
+ outputs:
+ - contextPath: ThreatQ.File.CreatedAt
+ description: Date of the file upload.
+ type: Date
+ - contextPath: ThreatQ.File.Size
+ description: Size (in bytes) of the file.
+ type: Number
+ - contextPath: ThreatQ.File.MD5
+ description: The MD5 of the file.
+ type: String
+ - contextPath: ThreatQ.File.ID
+ description: The File ID in ThreatQ.
+ type: Number
+ - contextPath: ThreatQ.File.Name
+ description: The name of the File.
+ type: String
+ - contextPath: ThreatQ.File.Title
+ description: The title of the file.
+ type: String
+ - contextPath: ThreatQ.File.UpdatedAt
+ description: The last update of the file.
+ type: Date
+ - contextPath: ThreatQ.File.MalwareLocked
+ description: Whether malware files are zipped.
+ type: Number
+ - contextPath: ThreatQ.File.ContentType
+ description: The content type of the file.
+ type: String
+ - contextPath: ThreatQ.File.Type
+ description: The type of the file.
+ type: String
+ - contextPath: ThreatQ.File.Source.ID
+    description: The source ID of the file.
+ type: Number
+ - contextPath: ThreatQ.File.Source.Name
+ description: The source name of the file.
+ type: String
+ - contextPath: ThreatQ.File.Attribute.ID
+ description: The attribute ID of the file.
+ type: Number
+ - contextPath: ThreatQ.File.Attribute.Name
+ description: The attribute name of the file.
+ type: String
+ - contextPath: ThreatQ.File.Attribute.Value
+ description: The attribute value of the file.
+ type: String
+ - arguments:
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the object. Can be: "indicator", "event", "attachment" or "adversary".'
+ isArray: false
+ name: obj_type
+ predefined:
+ - indicator
+ - adversary
+ - event
+ - attachment
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the Object.
+ isArray: false
+ name: obj_id
+ required: true
+ secret: false
+ deprecated: false
+  description: Searches for an object by object type and ID. Generic
+    and DBotScore contexts may also be generated.
+ execution: false
+ name: threatq-search-by-id
+ outputs:
+ - contextPath: ThreatQ.Indicator.ID
+ description: ID of the indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.Source.ID
+ description: Source ID of the indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.Source.Name
+ description: Source name of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Attribute.ID
+ description: Attribute ID of the indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.Attribute.Name
+ description: Attribute name of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Attribute.Value
+ description: Attribute value of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.CreatedAt
+ description: Creation date of the indicator.
+ type: date
+ - contextPath: ThreatQ.Indicator.UpdatedAt
+ description: Last update date of the indicator.
+ type: date
+ - contextPath: ThreatQ.Indicator.Description
+ description: Description of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Value
+ description: The value of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Status
+    description: The status of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Type
+ description: The type of the indicator. For example, IP Address.
+ type: string
+ - contextPath: ThreatQ.Indicator.TQScore
+ description: The ThreatQ Score of the indicator.
+ type: number
+  - contextPath: ThreatQ.Event.ID
+    description: The ID of the event.
+    type: number
+  - contextPath: ThreatQ.Event.Source.ID
+    description: The source ID of the event.
+    type: number
+  - contextPath: ThreatQ.Event.Source.Name
+    description: The source name of the event.
+    type: string
+  - contextPath: ThreatQ.Event.Attribute.ID
+    description: The attribute ID of the event.
+    type: number
+  - contextPath: ThreatQ.Event.Attribute.Name
+    description: The attribute name of the event.
+    type: string
+  - contextPath: ThreatQ.Event.Attribute.Value
+    description: The attribute value of the event.
+    type: string
+ - contextPath: ThreatQ.Event.UpdatedAt
+ description: The last update date of the event.
+ type: date
+ - contextPath: ThreatQ.Event.CreatedAt
+ description: The creation date of the event.
+ type: date
+ - contextPath: ThreatQ.Event.Type
+ description: The type of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Description
+ description: Description of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Title
+ description: The title of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Occurred
+ description: The date that the event happened.
+ type: date
+ - contextPath: ThreatQ.Adversary.Name
+ description: The name of the adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.ID
+ description: The ID of the adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.Source.ID
+    description: The source ID of the adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.Source.Name
+ description: The source name of the adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.Attribute.ID
+ description: The attribute ID of the adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.Attribute.Name
+ description: The attribute name of the adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.Attribute.Value
+ description: The attribute value of the adversary.
+ type: string
+  - contextPath: ThreatQ.Adversary.UpdatedAt
+    description: The last update date of the adversary.
+    type: date
+  - contextPath: ThreatQ.Adversary.CreatedAt
+    description: The creation date of the adversary.
+    type: date
+ - contextPath: ThreatQ.File.CreatedAt
+ description: The date that the file was uploaded.
+ type: Date
+ - contextPath: ThreatQ.File.Size
+ description: The size of the file (in bytes).
+ type: Number
+ - contextPath: ThreatQ.File.MD5
+ description: The MD5 hash of the file.
+ type: String
+ - contextPath: ThreatQ.File.ID
+ description: The file ID in ThreatQ.
+ type: Number
+ - contextPath: ThreatQ.File.Name
+ description: The name of the file.
+ type: String
+ - contextPath: ThreatQ.File.Title
+ description: The title of the file.
+ type: String
+ - contextPath: ThreatQ.File.UpdatedAt
+ description: The last update of the file.
+ type: Date
+ - contextPath: ThreatQ.File.MalwareLocked
+ description: Whether malware files are zipped.
+ type: Number
+ - contextPath: ThreatQ.File.ContentType
+ description: The content type of the file.
+ type: String
+ - contextPath: ThreatQ.File.Type
+ description: The file type.
+ type: String
+ - contextPath: ThreatQ.File.Source.ID
+    description: The source ID of the file.
+ type: Number
+ - contextPath: ThreatQ.File.Source.Name
+ description: The source name of the file.
+ type: String
+ - contextPath: ThreatQ.File.Attribute.ID
+ description: The attribute ID of the file.
+ type: Number
+ - contextPath: ThreatQ.File.Attribute.Name
+ description: The attribute name of the file.
+ type: String
+ - contextPath: ThreatQ.File.Attribute.Value
+ description: The attribute value of the file.
+ type: String
+ - arguments:
+ - default: false
+ description: The ID of the first object.
+ isArray: false
+ name: obj1_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the first object. Can be: "adversary", "indicator", or "event".'
+ isArray: false
+ name: obj1_type
+ predefined:
+ - adversary
+ - indicator
+ - event
+ required: true
+ secret: false
+ - default: false
+ description: The ID of the second object.
+ isArray: false
+ name: obj2_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the second object. Can be: "adversary", "indicator", or "event".'
+ isArray: false
+ name: obj2_type
+ predefined:
+ - adversary
+ - indicator
+ - event
+ required: true
+ secret: false
+ deprecated: false
+ description: Unlinks two objects in ThreatQ.
+ execution: false
+ name: threatq-unlink-objects
+ - arguments:
+ - default: false
+ description: ID of the Object.
+ isArray: false
+ name: obj_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the object. Can be: "indicator", "event", "adversary" or "attachment".'
+ isArray: false
+ name: obj_type
+ predefined:
+ - indicator
+ - event
+ - adversary
+ - attachment
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes an object in ThreatQ.
+ execution: false
+ name: threatq-delete-object
+ - arguments:
+ - default: false
+    description: The ID of the object.
+ isArray: false
+ name: obj_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the object. Can be: "indicator", "event", "adversary", or "attachment".'
+ isArray: false
+ name: obj_type
+ predefined:
+ - indicator
+ - adversary
+ - event
+ - attachment
+ required: true
+ secret: false
+ - default: false
+ description: The source name.
+ isArray: false
+ name: source
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds a source to an object in ThreatQ.
+ execution: false
+ name: threatq-add-source
+ - arguments:
+ - default: false
+ description: ID of the source.
+ isArray: false
+ name: source_id
+ required: true
+ secret: false
+ - default: false
+ description: ID of the object.
+ isArray: false
+ name: obj_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the object. Can be: "indicator", "event", "adversary", or "attachment".'
+ isArray: false
+ name: obj_type
+ predefined:
+ - indicator
+ - adversary
+ - event
+ - attachment
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes a source from an object in ThreatQ.
+ execution: false
+ name: threatq-delete-source
+ - arguments:
+ - default: false
+ description: ID of the attribute.
+ isArray: false
+ name: attribute_id
+ required: true
+ secret: false
+ - default: false
+ description: ID of the object.
+ isArray: false
+ name: obj_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: 'The type of the object. Can be: "indicator", "event", "adversary", or "attachment".'
+ isArray: false
+ name: obj_type
+ predefined:
+ - indicator
+ - adversary
+ - event
+ - attachment
+ required: true
+ secret: false
+ deprecated: false
+ description: Deletes an attribute from an object in ThreatQ.
+ execution: false
+ name: threatq-delete-attribute
+ - arguments:
+ - default: false
+ description: ID of the Adversary to update.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+    description: The new name of the adversary.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: Updates an adversary name in ThreatQ.
+ execution: false
+ name: threatq-edit-adversary
+ outputs:
+ - contextPath: ThreatQ.Adversary.Name
+ description: The name of the adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.ID
+ description: The ID of the adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.Source.ID
+ description: The source ID of the adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.Source.Name
+ description: The source name of the adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.Attribute.ID
+ description: The attribute ID of the adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.Attribute.Name
+ description: The attribute name of the adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.Attribute.Value
+    description: The attribute value of the adversary.
+ type: string
+  - contextPath: ThreatQ.Adversary.UpdatedAt
+    description: The last update date of the adversary.
+    type: date
+  - contextPath: ThreatQ.Adversary.CreatedAt
+    description: The creation date of the adversary.
+    type: date
+ - arguments:
+ - default: false
+ description: The ID of the indicator.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+    description: The new value of the indicator.
+ isArray: false
+ name: value
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: The type of the new indicator, such as Email Address, Filename, Binary String, and so on.
+ isArray: false
+ name: type
+ predefined:
+ - Binary String
+ - CIDR Block
+ - CVE
+ - Email Address
+ - Email Attachment
+ - Email Subject
+ - File Mapping
+ - File Path
+ - Filename
+ - FQDN
+ - Fuzzy Hash
+ - GOST Hash
+ - Hash ION
+ - IP Address
+ - IPv6 Address
+ - MD5
+ - Mutex
+ - Password
+ - Registry Key
+ - Service Name
+ - SHA-1
+ - SHA-256
+ - SHA-384
+ - SHA-512
+ - String
+ - x509 Serial
+ - x509 Subject
+ - URL
+ - URL Path
+ - User-agent
+ - Username
+ - X-Mailer
+ required: false
+ secret: false
+ - default: false
+ description: The description of the indicator.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ deprecated: false
+ description: Updates an indicator in ThreatQ.
+ execution: false
+ name: threatq-edit-indicator
+ outputs:
+ - contextPath: ThreatQ.Indicator.ID
+ description: The ID of the indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.Source.ID
+ description: The source ID of the indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.Source.Name
+ description: The source name of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Attribute.ID
+ description: The attribute ID of the indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.Attribute.Name
+ description: The attribute name of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Attribute.Value
+ description: The attribute value of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.CreatedAt
+ description: The creation date of the indicator.
+ type: date
+ - contextPath: ThreatQ.Indicator.UpdatedAt
+ description: The last update date of the indicator.
+ type: date
+ - contextPath: ThreatQ.Indicator.Description
+ description: The description of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Value
+ description: The value of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Status
+ description: The status of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Type
+ description: The type of the indicator. For example, IP Address.
+ type: string
+ - contextPath: ThreatQ.Indicator.TQScore
+ description: The ThreatQ Score of the indicator.
+ type: number
+ - arguments:
+ - default: false
+ description: The ID of the Event.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - default: false
+      description: The new title of the event.
+ isArray: false
+ name: title
+ required: false
+ secret: false
+ - default: false
+      description: 'The date the event occurred. Supported formats: YYYY-mm-dd HH:MM:SS or YYYY-mm-dd.'
+ isArray: false
+ name: date
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Type of the event, such as DoS Attack, Malware, Watchlist, and so on.
+ isArray: false
+ name: type
+ predefined:
+ - Spearphish
+ - Watering Hole
+ - SQL Injection Attack
+ - DoS Attack
+ - Malware
+ - Watchlist
+ - Command and Control
+ - Anonymization
+ - Exfiltration
+ - Host Characteristics
+ - Compromised PKI Certificate
+ - Login Compromise
+ - Incident
+ required: false
+ secret: false
+ - default: false
+ description: Description of the event.
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ deprecated: false
+ description: Updates an event in ThreatQ.
+ execution: false
+ name: threatq-edit-event
+ outputs:
+ - contextPath: ThreatQ.Event.ID
+ description: The ID of the event.
+ type: number
+ - contextPath: ThreatQ.Event.Source.ID
+ description: The source ID of the event.
+ type: number
+ - contextPath: ThreatQ.Event.Source.Name
+ description: The source name of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Attribute.ID
+ description: The attribute ID of the event.
+ type: number
+ - contextPath: ThreatQ.Event.Attribute.Name
+ description: The attribute name of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Attribute.Value
+ description: The attribute value of the event.
+ type: string
+ - contextPath: ThreatQ.Event.UpdatedAt
+ description: The last update date of the event.
+ type: date
+ - contextPath: ThreatQ.Event.CreatedAt
+ description: The creation date of the event.
+ type: date
+ - contextPath: ThreatQ.Event.Type
+ description: The type of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Description
+ description: The description of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Title
+ description: The title of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Occurred
+ description: The date that the event happened.
+ type: date
+ - arguments:
+ - default: false
+ description: The ID of the indicator.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+      description: 'The manual indicator score. Can be: "Generated Score" or "0", "1", "2",
+        "3", "4", "5", "6", "7", "8", "9", or "10".'
+ isArray: false
+ name: score
+ predefined:
+ - Generated Score
+ - '0'
+ - '1'
+ - '2'
+ - '3'
+ - '4'
+ - '5'
+ - '6'
+ - '7'
+ - '8'
+ - '9'
+ - '10'
+ required: true
+ secret: false
+ deprecated: false
+ description: Modifies an indicator's score in ThreatQ. The final indicator score is the highest of
+ the manual and generated scores.
+ execution: false
+ name: threatq-update-score
+ outputs:
+ - contextPath: ThreatQ.Indicator.ID
+ description: The ID of the indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.Source.ID
+ description: The source ID of the indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.Source.Name
+ description: The source name of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Attribute.ID
+ description: The attribute ID of the indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.Attribute.Name
+ description: The attribute name of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Attribute.Value
+ description: The attribute value of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.CreatedAt
+ description: The creation date of the indicator.
+ type: date
+ - contextPath: ThreatQ.Indicator.UpdatedAt
+ description: The last update date of the indicator.
+ type: date
+ - contextPath: ThreatQ.Indicator.Description
+ description: The description of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Value
+ description: The value of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Status
+      description: The status of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Type
+ description: The type of the indicator. For example, IP Address.
+ type: string
+ - contextPath: ThreatQ.Indicator.TQScore
+ description: The ThreatQ Score of the indicator.
+ type: number
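+  # Hypothetical usage sketch: !threatq-update-score id=2019 score=8 sets the
+  # manual score to 8; ThreatQ keeps the higher of the manual and generated scores.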
+ - arguments:
+ - default: false
+ description: The ID of the file.
+ isArray: false
+ name: id
+ required: true
+ secret: false
+ deprecated: false
+ description: Downloads a file from ThreatQ to Demisto.
+ execution: false
+ name: threatq-download-file
+ - arguments:
+ - default: false
+ defaultValue: '0'
+ description: The result page number to return. Default is 0.
+ isArray: false
+ name: page
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '50'
+      description: The maximum number of indicators to return. Default is 50.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves all indicators in ThreatQ.
+ execution: false
+ name: threatq-get-all-indicators
+ outputs:
+ - contextPath: ThreatQ.Indicator.ID
+ description: ID of the indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.Source.ID
+ description: Source ID of the indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.Source.Name
+ description: Source name of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Attribute.ID
+      description: Attribute ID of the indicator.
+ type: number
+ - contextPath: ThreatQ.Indicator.Attribute.Name
+ description: Attribute name of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Attribute.Value
+ description: Attribute value of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.CreatedAt
+ description: The creation date of the indicator.
+ type: date
+ - contextPath: ThreatQ.Indicator.UpdatedAt
+ description: The last update date of the indicator.
+ type: date
+ - contextPath: ThreatQ.Indicator.Description
+ description: The description of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Value
+ description: The value of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Status
+ description: The status of the indicator.
+ type: string
+ - contextPath: ThreatQ.Indicator.Type
+ description: The type of the indicator. For example, IP Address.
+ type: string
+ - contextPath: ThreatQ.Indicator.TQScore
+ description: The ThreatQ Score of the indicator.
+ type: number
+ - arguments:
+ - default: false
+ defaultValue: '0'
+ description: The result page number to return. Default is 0.
+ isArray: false
+ name: page
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '50'
+ description: The maximum number of events to return. Default is 50.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves all events in ThreatQ.
+ execution: false
+ name: threatq-get-all-events
+ outputs:
+ - contextPath: ThreatQ.Event.ID
+ description: The ID of the event.
+ type: number
+ - contextPath: ThreatQ.Event.Source.ID
+ description: The source ID of the event.
+ type: number
+ - contextPath: ThreatQ.Event.Source.Name
+ description: The source name of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Attribute.ID
+ description: The attribute ID of the event.
+ type: number
+ - contextPath: ThreatQ.Event.Attribute.Name
+ description: The attribute name of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Attribute.Value
+ description: The attribute value of the event.
+ type: string
+ - contextPath: ThreatQ.Event.UpdatedAt
+ description: The last update date of the event.
+ type: date
+ - contextPath: ThreatQ.Event.CreatedAt
+ description: The creation date of the event.
+ type: date
+ - contextPath: ThreatQ.Event.Type
+ description: The type of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Description
+ description: The description of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Title
+ description: The title of the event.
+ type: string
+ - contextPath: ThreatQ.Event.Occurred
+ description: The date the event happened.
+ type: date
+ - arguments:
+ - default: false
+ defaultValue: '0'
+ description: The result page number to return. Default is 0.
+ isArray: false
+ name: page
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '50'
+ description: The maximum number of objects to return in one response (maximum is 200).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+  description: Retrieves all adversaries in ThreatQ.
+ execution: false
+ name: threatq-get-all-adversaries
+ outputs:
+ - contextPath: ThreatQ.Adversary.Name
+ description: The name of the adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.ID
+      description: The ID of the adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.Source.ID
+ description: The source ID of the adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.Source.Name
+ description: The source name of the adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.Attribute.ID
+ description: The attribute ID of the adversary.
+ type: number
+ - contextPath: ThreatQ.Adversary.Attribute.Name
+ description: The attribute name of the adversary.
+ type: string
+ - contextPath: ThreatQ.Adversary.Attribute.Value
+ description: The attribute value of the adversary.
+ type: string
+    - contextPath: ThreatQ.Adversary.UpdatedAt
+      description: The last update date of the adversary.
+      type: date
+    - contextPath: ThreatQ.Adversary.CreatedAt
+      description: The creation date of the adversary.
+      type: date
+ dockerimage: demisto/python3:3.7.4.977
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+ subtype: python3
diff --git a/Integrations/ThreatQ_v2/ThreatQ_v2_description.md b/Integrations/ThreatQ_v2/ThreatQ_v2_description.md
new file mode 100644
index 000000000000..be5c475d3172
--- /dev/null
+++ b/Integrations/ThreatQ_v2/ThreatQ_v2_description.md
@@ -0,0 +1,3 @@
+You must have a ThreatQ user account to retrieve an API token. The API token is required for all API requests.
+ThreatQ weights indicator scores based on contextual information, such as sources, attributes, and indicator types, as indicators are added to ThreatQ.
+For detailed information on ThreatQ scoring, refer to https://helpcenter.threatq.com/
\ No newline at end of file
diff --git a/Integrations/ThreatQ_v2/ThreatQ_v2_image.png b/Integrations/ThreatQ_v2/ThreatQ_v2_image.png
new file mode 100644
index 000000000000..31c75913ba61
Binary files /dev/null and b/Integrations/ThreatQ_v2/ThreatQ_v2_image.png differ
diff --git a/Integrations/ThreatQ_v2/ThreatQ_v2_test.py b/Integrations/ThreatQ_v2/ThreatQ_v2_test.py
new file mode 100644
index 000000000000..816b64b531d5
--- /dev/null
+++ b/Integrations/ThreatQ_v2/ThreatQ_v2_test.py
@@ -0,0 +1,305 @@
+from unittest.mock import Mock
+import demistomock as demisto
+
+MOCK_URL = "http://123-fake-api.com"
+MOCK_API_URL = MOCK_URL + "/api"
+
+MOCK_PARAMS = {
+ "credentials": {
+ "identifier": "mock_email",
+ "password": "mock_pass"
+ },
+ "insecure": True,
+ "proxy": False,
+ "serverUrl": MOCK_URL,
+ "client_id": "mock_cliend_id",
+ "threshold": 6
+}
+
+MOCK_ACCESS_TOKEN = {
+ 'expires_in': 3600,
+ 'access_token': '3220879210'
+}
+
+MOCK_GET_ALL_OBJS_ARGUMENTS = {'limit': '2', 'page': '0'}
+
+MOCK_EMAIL_REPUTATION_ARGUMENTS = {'email': 'foo@demisto.com'}
+
+MOCK_SEARCH_BY_ID_ARGUMENTS = {'obj_id': 2019, 'obj_type': 'event'}
+
+MOCK_RELATED_OBJS_ARGUMENTS = {'obj_type': 'adversary', 'obj_id': '1'}
+
+MOCK_SEARCH_BY_NAME_ARGUMENTS = {'name': 'foo@demisto', 'limit': '10'}
+
+MOCK_FILE_INFO = {'name': 'TestTitle', 'path': 'test_data/testfile.txt'}
+
+MOCK_EDIT_EVENT_ARGUMENTS = {'id': 2019, 'date': '2019-03-01', 'description': 'test ', 'type': 'Spearphish'}
+
+MOCK_UPLOAD_FILE_ARGUMENTS = {
+ 'entry_id': 'mock',
+ 'title': 'TestTitle',
+ 'malware_safety_lock': 'off',
+ 'file_category': 'Cuckoo'
+}
+
+MOCK_CREATE_INDICATOR_ARGUMENTS = {
+ 'type': 'Email Address',
+ 'status': 'Active',
+ 'value': 'foo@demisto.com',
+ 'sources': 'test_source1,test_source2',
+ 'attributes_names': 'test_attribute1,test_attribute2',
+ 'attributes_values': 'test_value1,test_value2'
+}
+
+MOCK_FILE_UPLOAD_RESPONSE = {
+ 'data': {
+ 'type_id': 1,
+ 'name': 'testfile.txt',
+ 'title': 'TestTitle',
+ 'malware_locked': 0,
+ 'content_type_id': 1
+ }
+}
+
+MOCK_INDICATOR_CREATION_RESPONSE = {
+ 'data': [{
+ 'id': 2019,
+ 'type_id': 4, # 'Email Address'
+ 'value': 'foo@demisto.com',
+ 'status_id': 1, # 'Active'
+ 'sources': [
+ {'name': 'test_source1', 'pivot': {'id': 2017}},
+ {'name': 'test_source2', 'pivot': {'id': 2018}}
+ ],
+ 'attributes': [
+ {'name': 'test_attribute1', 'value': 'test_value1', 'id': 2019},
+ {'name': 'test_attribute2', 'value': 'test_value2', 'id': 2020}
+ ],
+ 'score': 6
+ }]
+}
+
+MOCK_GET_INDICATOR_RESPONSE = {
+ 'data': {
+ 'id': 2019,
+ 'type_id': 4, # 'Email Address'
+ 'value': 'foo@demisto.com',
+ 'score': 6,
+ 'status_id': 1 # 'Active'
+ }
+}
+
+MOCK_SEARCH_BY_NAME_RESPONSE = {
+ 'data': [
+ {'id': 2017, 'value': 'foo@demisto.com', 'object': 'event'},
+ {'id': 2018, 'value': 'foo@demisto.com', 'object': 'adversary'},
+ {'id': 2019, 'value': 'foo@demisto.com', 'object': 'indicator'}
+ ]
+}
+
+MOCK_GET_EVENT_RESPONSE = {
+ 'data': {
+ 'id': 2019,
+ 'happened_at': '2019-03-01 00:00:00',
+ 'description': 'test',
+ 'type_id': 1
+ }
+}
+
+MOCK_INDICATOR_LIST_RESPONSE = {
+ 'data': [
+ {'id': 10, 'value': 'foo@demisto.com', 'type_id': 4, 'status_id': 2},
+ {'id': 11, 'value': '8.8.8.8', 'type_id': 14, 'status_id': 3}
+ ]
+}
+
+MOCK_ERROR_RESPONSES = [
+ {
+ "data": {
+ "errors": {
+ "name": ["The name has already been taken."],
+ "test": ["test_error1", "test_error2"]
+ }
+ }
+ },
+ {
+ "errors": [
+ 'First Error',
+ ['Second error - part 1', 'Second error - part 2']
+ ]
+ }
+]
+
+EXPECTED_ERROR_STRINGS = [
+ "Errors from service:\n\n"
+ "Error #1. In 'name':\nThe name has already been taken.\n\n"
+ "Error #2. In 'test':\ntest_error1\ntest_error2\n\n",
+
+ "Errors from service:\n\n"
+ "Error #1: First Error\n"
+ "Error #2.0: Second error - part 1\n"
+ "Error #2.1: Second error - part 2\n"
+]
+
+
+def mock_demisto(mocker, mock_args):
+ mocker.patch.object(demisto, 'params', return_value=MOCK_PARAMS)
+ mocker.patch.object(demisto, 'args', return_value=mock_args)
+ mocker.patch.object(demisto, 'results')
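+
+# Each test below follows the same pattern: patch demisto params/args/results,
+# register the HTTP mocks (including the '/token' call that satisfies ThreatQ
+# authentication), run the command, then assert on demisto.results.call_args.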
+
+
+def test_create_indicator_command(mocker, requests_mock):
+ mock_demisto(mocker, MOCK_CREATE_INDICATOR_ARGUMENTS)
+ requests_mock.post(MOCK_API_URL + '/indicators', json=MOCK_INDICATOR_CREATION_RESPONSE)
+ requests_mock.post(MOCK_API_URL + '/token', json=MOCK_ACCESS_TOKEN)
+
+ from ThreatQ_v2 import create_indicator_command
+ create_indicator_command()
+
+ results = demisto.results.call_args[0]
+ entry_context = results[0]['EntryContext']['ThreatQ.Indicator(val.ID === obj.ID)']
+
+ assert 'Indicator was successfully created.' in results[0]['HumanReadable']
+ assert entry_context['Value'] == 'foo@demisto.com'
+ assert entry_context['Type'] == 'Email Address'
+ assert entry_context['Status'] == 'Active'
+ assert entry_context['Source'][0]['ID'] == 2017
+ assert entry_context['Source'][1]['Name'] == 'test_source2'
+ assert entry_context['Attribute'][0]['Name'] == 'test_attribute1'
+ assert entry_context['Attribute'][1]['Value'] == 'test_value2'
+
+
+def test_edit_event_command(mocker, requests_mock):
+ mock_demisto(mocker, MOCK_EDIT_EVENT_ARGUMENTS)
+ requests_mock.put(MOCK_API_URL + '/events/2019', json=MOCK_GET_EVENT_RESPONSE)
+ requests_mock.post(MOCK_API_URL + '/token', json=MOCK_ACCESS_TOKEN)
+
+ from ThreatQ_v2 import edit_event_command
+ edit_event_command()
+
+ results = demisto.results.call_args[0]
+ entry_context = results[0]['EntryContext']['ThreatQ.Event(val.ID === obj.ID)']
+
+ assert 'Successfully edited event with ID 2019' in results[0]['HumanReadable']
+    assert entry_context['Occurred'] == '2019-03-01 00:00:00'  # date format should be normalized
+    assert entry_context['Description'] == 'test'  # HTML markup should be stripped
+
+
+def test_upload_file_command(mocker, requests_mock):
+ mock_demisto(mocker, MOCK_UPLOAD_FILE_ARGUMENTS)
+ mocker.patch.object(demisto, 'getFilePath', return_value=MOCK_FILE_INFO)
+ requests_mock.post(MOCK_API_URL + '/token', json=MOCK_ACCESS_TOKEN)
+ requests_mock.post(MOCK_API_URL + '/attachments', json=MOCK_FILE_UPLOAD_RESPONSE)
+
+ from ThreatQ_v2 import upload_file_command
+ upload_file_command()
+
+ results = demisto.results.call_args[0]
+ entry_context = results[0]['EntryContext']['ThreatQ.File(val.ID === obj.ID)']
+
+ assert 'Successfully uploaded file TestTitle.' in results[0]['HumanReadable']
+ assert entry_context['MalwareLocked'] == 'off'
+ assert entry_context['Type'] == 'Cuckoo'
+ assert entry_context['ContentType'] == 'text/plain'
+ assert entry_context['Title'] == 'TestTitle'
+ assert entry_context['Name'] == 'testfile.txt'
+
+
+def test_get_email_reputation(mocker, requests_mock):
+ mock_demisto(mocker, MOCK_EMAIL_REPUTATION_ARGUMENTS)
+ requests_mock.post(MOCK_API_URL + '/token', json=MOCK_ACCESS_TOKEN)
+ requests_mock.get(MOCK_API_URL + '/search?query=foo@demisto.com&limit=1',
+ json=MOCK_SEARCH_BY_NAME_RESPONSE)
+ requests_mock.get(MOCK_API_URL + '/indicators/2019?with=attributes,sources,score,type',
+ json=MOCK_GET_INDICATOR_RESPONSE)
+
+ from ThreatQ_v2 import get_email_reputation
+ get_email_reputation()
+
+ results = demisto.results.call_args[0]
+ entry_context = results[0]['EntryContext']['ThreatQ.Indicator(val.ID === obj.ID)']
+ generic_context = results[0]['EntryContext']['Account.Email(val.Address && val.Address == obj.Address)']
+
+ assert 'Search results for email foo@demisto.com' in results[0]['HumanReadable']
+ assert entry_context['Value'] == 'foo@demisto.com'
+ assert generic_context['Address'] == 'foo@demisto.com'
+    assert generic_context['Malicious']['Vendor'] == 'ThreatQ v2'  # indicator should be marked as malicious
+ assert results[0]['EntryContext']['DBotScore']['Score'] == 3
+
+
+def test_get_related_objs_command(mocker, requests_mock):
+ mock_demisto(mocker, MOCK_RELATED_OBJS_ARGUMENTS)
+ requests_mock.post(MOCK_API_URL + '/token', json=MOCK_ACCESS_TOKEN)
+ requests_mock.get(MOCK_API_URL + '/adversaries/1/indicators?with=sources,score', json=MOCK_INDICATOR_LIST_RESPONSE)
+
+ from ThreatQ_v2 import get_related_objs_command
+ get_related_objs_command('indicator')
+
+ results = demisto.results.call_args[0]
+ entry_context = results[0]['EntryContext']['ThreatQ.Adversary(val.ID === obj.ID)']
+
+ assert 'Related indicator type objects of adversary with ID 1' in results[0]['HumanReadable']
+ assert len(entry_context['RelatedIndicator']) == 2
+ assert entry_context['RelatedIndicator'][0]['Type'] == 'Email Address'
+ assert entry_context['RelatedIndicator'][1]['Type'] == 'IP Address'
+
+
+def test_get_all_objs_command(mocker, requests_mock):
+ mock_demisto(mocker, MOCK_GET_ALL_OBJS_ARGUMENTS)
+ requests_mock.post(MOCK_API_URL + '/token', json=MOCK_ACCESS_TOKEN)
+ requests_mock.get(MOCK_API_URL + '/indicators?with=attributes,sources,score', json=MOCK_INDICATOR_LIST_RESPONSE)
+
+ from ThreatQ_v2 import get_all_objs_command
+ get_all_objs_command('indicator')
+
+ results = demisto.results.call_args[0]
+ entry_context = results[0]['EntryContext']['ThreatQ.Indicator(val.ID === obj.ID)']
+
+ assert 'List of all objects of type indicator - 0-1' in results[0]['HumanReadable']
+ assert len(entry_context) == 2
+ assert entry_context[0]['Type'] == 'Email Address'
+ assert entry_context[1]['Type'] == 'IP Address'
+
+
+def test_search_by_name_command(mocker, requests_mock):
+ mock_demisto(mocker, MOCK_SEARCH_BY_NAME_ARGUMENTS)
+ requests_mock.post(MOCK_API_URL + '/token', json=MOCK_ACCESS_TOKEN)
+ requests_mock.get(MOCK_API_URL + '/search?query=foo@demisto&limit=10', json=MOCK_SEARCH_BY_NAME_RESPONSE)
+
+ from ThreatQ_v2 import search_by_name_command
+ search_by_name_command()
+
+ results = demisto.results.call_args[0]
+
+ assert 'Search Results - Indicators' in results[0]['HumanReadable']
+ assert 'Search Results - Adversaries' in results[0]['HumanReadable']
+ assert 'Search Results - Events' in results[0]['HumanReadable']
+ assert 'Search Results - Files' not in results[0]['HumanReadable']
+ assert len(results[0]['EntryContext']) == 3
+
+
+def test_search_by_id_command(mocker, requests_mock):
+ mock_demisto(mocker, MOCK_SEARCH_BY_ID_ARGUMENTS)
+ requests_mock.post(MOCK_API_URL + '/token', json=MOCK_ACCESS_TOKEN)
+ requests_mock.get(MOCK_API_URL + '/events/2019?with=attributes,sources', json=MOCK_GET_EVENT_RESPONSE)
+
+ from ThreatQ_v2 import search_by_id_command
+ search_by_id_command()
+
+ results = demisto.results.call_args[0]
+ entry_context = results[0]['EntryContext']['ThreatQ.Event(val.ID === obj.ID)']
+
+ assert 'Search results for event with ID 2019' in results[0]['HumanReadable']
+ assert entry_context['Description'] == 'test'
+ assert entry_context['Occurred'] == '2019-03-01 00:00:00'
+
+
+def test_get_errors_string_from_bad_request():
+ from ThreatQ_v2 import get_errors_string_from_bad_request
+ from requests.models import Response
+ res = Mock(spec=Response)
+
+ for error_response, expected_result in zip(MOCK_ERROR_RESPONSES, EXPECTED_ERROR_STRINGS):
+ res.json.return_value = error_response
+ actual_result = get_errors_string_from_bad_request(res, 400)
+ assert expected_result in actual_result
diff --git a/Integrations/ThreatQ_v2/test_data/testfile.txt b/Integrations/ThreatQ_v2/test_data/testfile.txt
new file mode 100644
index 000000000000..30d74d258442
--- /dev/null
+++ b/Integrations/ThreatQ_v2/test_data/testfile.txt
@@ -0,0 +1 @@
+test
\ No newline at end of file
diff --git a/Integrations/ThreatX/ThreatX.py b/Integrations/ThreatX/ThreatX.py
new file mode 100644
index 000000000000..773a0c3ed3ec
--- /dev/null
+++ b/Integrations/ThreatX/ThreatX.py
@@ -0,0 +1,592 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+''' IMPORTS '''
+import socket
+import struct
+import time
+from operator import itemgetter
+import requests
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+CUSTOMER_NAME = demisto.params().get('customer_name', None)
+API_KEY = demisto.params().get('api_key', None)
+URL = demisto.params().get('url', None)
+
+if URL[-1] != '/':
+ URL += '/'
+
+BASE_URL = URL + 'tx_api/v1'
+DBOT_THRESHOLD = int(demisto.params().get('dbot_threshold', 70))
+USE_SSL = not demisto.params().get('insecure')
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(url_suffix, commands=None):
+ state = demisto.getIntegrationContext()
+
+ session_token = state.get('session_token')
+
+ if url_suffix != '/login':
+ demisto.info('running request with url=%s with commands=%s' % (BASE_URL + url_suffix, commands))
+ data = {
+ 'token': session_token,
+ 'customer_name': CUSTOMER_NAME
+ }
+ else:
+ demisto.info('running request with url=%s' % (BASE_URL + url_suffix))
+ data = {}
+
+ if commands is not None:
+ data.update(commands)
+
+ res = requests.post(
+ BASE_URL + url_suffix,
+ verify=USE_SSL,
+ json=data
+ )
+
+ if res.status_code != requests.codes.ok:
+ if url_suffix == '/login':
+ demisto.setIntegrationContext({'session_token': None,
+ 'token_expires': None
+ })
+ demisto.info(str(res.status_code) + ' from server during login. Clearing session token cache.')
+
+ return_error('HTTP %d Error in API call to ThreatX service - %s' % (res.status_code, res.text))
+
+    resp_json = {}  # type: dict
+
+    if res.text:
+        try:
+            resp_json = res.json()
+        except ValueError:
+            return_error('Could not parse the response from ThreatX: %s' % (res.text))
+
+ if 'Ok' not in resp_json:
+ if url_suffix == '/login':
+ demisto.setIntegrationContext({'session_token': None,
+ 'token_expires': None
+ })
+ return_error('Login response error - %s.' % (res.text))
+
+ return_error(res.text)
+
+ if url_suffix == '/login':
+ if 'status' in resp_json['Ok']:
+ if resp_json['Ok']['status'] is not True:
+ demisto.setIntegrationContext({'session_token': None,
+ 'token_expires': None
+ })
+ return_error('Invalid credentials.')
+
+ return resp_json['Ok']
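+
+# Protocol sketch (hypothetical values): every non-login call POSTs a body like
+# {'token': '<session_token>', 'customer_name': CUSTOMER_NAME, 'command': ...},
+# and the service wraps successful payloads in an {'Ok': ...} envelope, which
+# this helper unwraps before returning.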
+
+
+@logger
+def initialize():
+ endpoint = '/login'
+ commands = {
+ 'command': 'login',
+ 'api_token': API_KEY
+ }
+
+ state = demisto.getIntegrationContext()
+
+ if not state.get('session_token'):
+ session_token = None
+ token_expires = None
+ else:
+ session_token = state.get('session_token')
+ token_expires = state.get('token_expires')
+
+ demisto.info('Initializing request...')
+
+ if session_token is None or (token_expires is not None and token_expires < int(time.time())):
+ if session_token is None:
+ demisto.info('Session token missing - getting new session token...')
+ elif token_expires is not None and token_expires < int(time.time()):
+ demisto.info('Session token expired - getting new session token...')
+
+ r = http_request(endpoint, commands)
+ demisto.setIntegrationContext({'session_token': r['token'],
+ 'token_expires': int(time.time() + (10 * 60))
+ })
+ return
+
+ demisto.info('Cached session token not expired.')
+ return
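+
+# After a successful login, the cached integration context looks like
+# (hypothetical token): {'session_token': 'abc123', 'token_expires': 1571500000}.
+# Tokens are treated as valid for 10 minutes from issuance and are refreshed
+# lazily on the next call once expired or missing.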
+
+
+def pretty_ip(decimal_ip):
+ """Convert decimal ip to dotted quad format"""
+ packed_ip = struct.pack("!I", decimal_ip)
+ return socket.inet_ntoa(packed_ip)
+
+
+def pretty_time(input_time):
+ """Convert unix epoch time to human readable format"""
+ return time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(input_time))
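+
+# Usage sketch (hypothetical inputs): pretty_ip(3232235777) returns
+# '192.168.1.1', and pretty_time(0) returns '1970-01-01 00:00:00' (GMT).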
+
+
+def set_dbot_score(threatx_score):
+ """Set the DBot Score based on the ThreatX risk score"""
+ if threatx_score >= DBOT_THRESHOLD:
+ return 3
+ elif threatx_score > 10:
+ return 2
+ else:
+ return 0
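+
+# Sanity check of the mapping above (hypothetical scores, assuming the default
+# DBOT_THRESHOLD of 70):
+#   set_dbot_score(85)  # -> 3 (malicious)
+#   set_dbot_score(40)  # -> 2 (suspicious)
+#   set_dbot_score(5)   # -> 0 (unknown)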
+
+
+''' FUNCTIONS '''
+
+
+@logger
+def block_ip(ip):
+ commands = {
+ 'command': 'new_blocklist',
+ 'entry': {
+ 'ip': ip,
+ 'description': 'Added by ThreatX Demisto Integration',
+ 'created': int(time.time())
+ }
+ }
+
+ return http_request('/lists', commands)
+
+
+@logger
+def block_ip_command():
+ ip = demisto.args().get('ip', None)
+ results = block_ip(ip)
+
+ md = tableToMarkdown('Block IP',
+ results,
+ ['Result'],
+ removeNull=True)
+
+ ec = {
+ 'IP(val.Address === obj.Address)': {
+ 'Address': ip
+ }
+ }
+
+ return_outputs(md, ec, results)
+
+
+@logger
+def unblock_ip(ip):
+ commands = {
+ 'command': 'delete_blocklist',
+ 'ip': ip
+ }
+
+ return http_request('/lists', commands)
+
+
+@logger
+def unblock_ip_command():
+ ip = demisto.args().get('ip', None)
+ results = unblock_ip(ip)
+
+ md = tableToMarkdown('Unblock IP',
+ results,
+ ['Result'],
+ removeNull=True)
+
+ ec = {
+ 'IP(val.Address === obj.Address)': {
+ 'Address': ip
+ }
+ }
+
+ return_outputs(md, ec, results)
+
+
+@logger
+def blacklist_ip(ip):
+ commands = {
+ 'command': 'new_blacklist',
+ 'entry': {
+ 'ip': ip,
+ 'description': 'Added by ThreatX Demisto Integration',
+ 'created': int(time.time())
+ }
+ }
+
+ return http_request('/lists', commands)
+
+
+@logger
+def blacklist_ip_command():
+ ip = demisto.args().get('ip', None)
+ results = blacklist_ip(ip)
+
+ md = tableToMarkdown('Blacklist IP',
+ results,
+ ['Result'],
+ removeNull=True)
+
+ ec = {
+ 'IP(val.Address === obj.Address)': {
+ 'Address': ip
+ }
+ }
+
+ return_outputs(md, ec, results)
+
+
+@logger
+def unblacklist_ip(ip):
+ commands = {
+ 'command': 'delete_blacklist',
+ 'ip': ip
+ }
+
+ return http_request('/lists', commands)
+
+
+@logger
+def unblacklist_ip_command():
+ ip = demisto.args().get('ip', None)
+ results = unblacklist_ip(ip)
+
+ md = tableToMarkdown('Unblacklist IP',
+ results,
+ ['Result'],
+ removeNull=True)
+
+ ec = {
+ 'IP(val.Address === obj.Address)': {
+ 'Address': ip
+ }
+ }
+
+ return_outputs(md, ec, results)
+
+
+@logger
+def whitelist_ip(ip):
+ commands = {
+ 'command': 'new_whitelist',
+ 'entry': {
+ 'ip': ip,
+ 'description': 'Added by ThreatX Demisto Integration',
+ 'created': int(time.time())
+ }
+ }
+
+ return http_request('/lists', commands)
+
+
+@logger
+def whitelist_ip_command():
+ ip = demisto.args().get('ip', None)
+ results = whitelist_ip(ip)
+
+ md = tableToMarkdown('Whitelist IP',
+ results,
+ ['Result'],
+ removeNull=True)
+
+ ec = {
+ 'IP(val.Address === obj.Address)': {
+ 'Address': ip
+ }
+ }
+
+ return_outputs(md, ec, results)
+
+
+@logger
+def unwhitelist_ip(ip):
+ commands = {
+ 'command': 'delete_whitelist',
+ 'ip': ip
+ }
+
+ return http_request('/lists', commands)
+
+
+@logger
+def unwhitelist_ip_command():
+ ip = demisto.args().get('ip', None)
+ results = unwhitelist_ip(ip)
+
+ md = tableToMarkdown('Unwhitelist IP',
+ results,
+ ['Result'],
+ removeNull=True)
+
+ ec = {
+ 'IP(val.Address === obj.Address)': {
+ 'Address': ip
+ }
+ }
+
+ return_outputs(md, ec, results)
+
+
+@logger
+def get_entities(entity_name, entity_id, entity_ip, timeframe):
+ commands = {
+ 'command': 'list',
+ 'query': dict()
+ } # type: dict
+
+ if entity_name is not None:
+ entity_names = entity_name.split(',')
+ my_entity_name = {'codenames': entity_names}
+ commands['query'].update(my_entity_name)
+
+ if entity_id is not None:
+ entity_ids = entity_id.split(',')
+ my_entity_id = {'entity_ids': entity_ids}
+ commands['query'].update(my_entity_id)
+
+ if entity_ip is not None:
+ entity_ips = entity_ip.split(',')
+ my_entity_ip = {'ip_addresses': entity_ips}
+ commands['query'].update(my_entity_ip)
+
+ first_seen = None
+
+ if timeframe is not None:
+ if timeframe == '1-Hour':
+ first_seen = int(time.time() - (60 * 60))
+ elif timeframe == '1-Day':
+ first_seen = int(time.time() - (24 * 60 * 60))
+ elif timeframe == '1-Week':
+ first_seen = int(time.time() - (7 * 24 * 60 * 60))
+        elif timeframe == '1-Month':
+            first_seen = int(time.time() - (31 * 24 * 60 * 60))
+        elif timeframe == '3-Months':
+            # The YML exposes a 3-Months option; approximate three months as 93 days
+            first_seen = int(time.time() - (93 * 24 * 60 * 60))
+
+ if first_seen:
+ my_timeframe = {'first_seen': first_seen}
+ commands['query'].update(my_timeframe)
+
+ return http_request('/entities', commands)
+
+
+@logger
+def get_entity_risk(entity_id):
+ commands = {
+ 'command': 'risk_changes',
+ 'id': entity_id
+ }
+
+ return http_request('/entities', commands)
+
+
+@logger
+def get_entities_command():
+ entity_name = demisto.args().get('entity_name', None)
+ entity_id = demisto.args().get('entity_id', None)
+ entity_ip = demisto.args().get('entity_ip', None)
+ timeframe = demisto.args().get('timeframe', None)
+ results = get_entities(entity_name, entity_id, entity_ip, timeframe)
+ dbot_scores = []
+ ip_enrich = []
+ human_readable = []
+ entities_context = []
+ for entity in results:
+ risk_score = 0
+ e_risk = None
+ # Grab the entity risk so we can set the Dbot score for the Actor IPs
+ e_id = entity.get('id')
+
+ if e_id:
+ e_risk = get_entity_risk(e_id)
+
+ if isinstance(e_risk, list) and e_risk:
+ if isinstance(e_risk[-1], dict) and 'risk' in e_risk[-1]:
+ risk_score = e_risk[-1]['risk']
+
+ entity['risk'] = risk_score
+
+ iplist = []
+
+ for actor in entity.get('actors', []):
+            ipdot = None
+            if 'ip_address' in actor:
+                ipdot = pretty_ip(actor['ip_address'])
+                iplist.append(ipdot)
+                actor['ip_address'] = ipdot
+
+ if 'interval_time_start' in actor:
+ actor['interval_time_start'] = pretty_time(actor['interval_time_start'])
+
+ if 'interval_time_stop' in actor:
+ actor['interval_time_stop'] = pretty_time(actor['interval_time_stop'])
+
+ if 'fingerprint' in actor and actor.get('fingerprint') is not None:
+ if 'last_seen' in actor.get('fingerprint', {}):
+ actor['fingerprint']['last_seen'] = pretty_time(actor['fingerprint']['last_seen'])
+
+            if ipdot is None:
+                # Actor without an IP address; nothing to score or enrich
+                continue
+
+            dbscore = set_dbot_score(risk_score)
+
+ dbot_scores.append({
+ 'Vendor': 'ThreatX',
+ 'Indicator': ipdot,
+ 'Type': 'ip',
+ 'Score': dbscore
+ })
+
+ if dbscore == 3:
+ ip_enrich.append({
+ 'Address': ipdot,
+ 'Malicious': {
+ 'Vendor': 'ThreatX',
+ 'Description': 'ThreatX risk score is ' + str(risk_score)
+ }
+ })
+ else:
+ ip_enrich.append({
+ 'Address': ipdot
+ })
+ entities_context.append({
+ 'ID': e_id,
+ 'Name': entity['codename'],
+ 'IP': iplist,
+ 'Risk': risk_score
+ })
+
+ human_readable.append({
+ 'Name': entity['codename'],
+ 'ID': e_id,
+ 'IP Addresses': ', '.join(iplist),
+ 'ThreatX Risk Score': risk_score
+ })
+
+ ec = {
+ 'Threatx.Entity(val.ID && val.ID === obj.ID)': entities_context,
+ 'DBotScore': dbot_scores,
+ 'IP(val.Address === obj.Address)': ip_enrich
+ }
+
+ return_outputs(tableToMarkdown('Entities', human_readable), ec, results)
+
+
+@logger
+def get_entity_notes(entity_id):
+ commands = {
+ 'command': 'notes',
+ 'id': entity_id
+ }
+
+ return http_request('/entities', commands)
+
+
+@logger
+def get_entity_notes_command():
+ entity_id = demisto.args().get('entity_id', None)
+ results = get_entity_notes(entity_id)
+
+ # Reverse sort the list by timestamp
+ sorted_results = sorted(results, key=itemgetter('timestamp'), reverse=True)
+
+ # Replace dates with pretty format
+ for note in sorted_results:
+ if 'timestamp' in note:
+ note['timestamp'] = pretty_time(note['timestamp'])
+
+ md = tableToMarkdown('Entity Notes',
+ sorted_results,
+ headerTransform=string_to_table_header)
+
+ ec = {
+ 'Threatx.Entity(val.ID && val.ID === obj.ID)': {
+ 'ID': entity_id,
+ 'Note': sorted_results
+ }
+ }
+
+ return_outputs(md, ec, sorted_results)
+
+
+@logger
+def add_entity_note(entity_id, message):
+ commands = {
+ 'command': 'new_note',
+ 'note': {
+ 'entity_id': entity_id,
+ 'content': message
+ }
+ }
+
+ return http_request('/entities', commands)
+
+
+@logger
+def add_entity_note_command():
+ entity_id = demisto.args().get('entity_id', None)
+ message = demisto.args().get('message', None)
+ results = add_entity_note(entity_id, message)
+
+ md = tableToMarkdown('New Entity Note',
+ results,
+ ['Result'],
+ removeNull=True)
+
+ return_outputs(md, None, results)
+
+
+@logger
+def test_module():
+ commands = {
+ 'command': 'list'
+ }
+
+ return http_request('/users', commands)
+
+
+@logger
+def test_module_command():
+ results = test_module()
+
+ if isinstance(results, list):
+ if results:
+ if 'username' in results[0]:
+ demisto.results('ok')
+ else:
+ return_error('Unexpected response from ThreatX.')
+ else:
+ return_error('Empty response from ThreatX.')
+ else:
+ return_error('Unrecognized response from ThreatX.')
+
+
+''' EXECUTION CODE '''
+
+
+demisto.info('command is %s' % (demisto.command(),))
+try:
+ handle_proxy()
+ initialize()
+ if demisto.command() == 'test-module':
+ test_module_command()
+ elif demisto.command() == 'threatx-block-ip':
+ block_ip_command()
+ elif demisto.command() == 'threatx-unblock-ip':
+ unblock_ip_command()
+ elif demisto.command() == 'threatx-blacklist-ip':
+ blacklist_ip_command()
+ elif demisto.command() == 'threatx-unblacklist-ip':
+ unblacklist_ip_command()
+ elif demisto.command() == 'threatx-whitelist-ip':
+ whitelist_ip_command()
+ elif demisto.command() == 'threatx-unwhitelist-ip':
+ unwhitelist_ip_command()
+ elif demisto.command() == 'threatx-get-entities':
+ get_entities_command()
+ elif demisto.command() == 'threatx-get-entity-notes':
+ get_entity_notes_command()
+ elif demisto.command() == 'threatx-add-entity-note':
+ add_entity_note_command()
+
+except Exception as e:
+ return_error(str(e))
diff --git a/Integrations/ThreatX/ThreatX.yml b/Integrations/ThreatX/ThreatX.yml
new file mode 100644
index 000000000000..3c9b57f8dd0c
--- /dev/null
+++ b/Integrations/ThreatX/ThreatX.yml
@@ -0,0 +1,234 @@
+category: Network Security
+commonfields:
+ id: ThreatX
+ version: -1
+configuration:
+- display: Customer Name
+ name: customer_name
+ required: true
+ type: 0
+- defaultvalue: https://provision.threatx.io
+ display: ThreatX Server URL (e.g., https://provision.threatx.io)
+ name: url
+ required: true
+ type: 0
+- display: API Key
+ name: api_key
+ required: true
+ type: 4
+- defaultvalue: '70'
+ display: IP Threshold. Minimum risk score from ThreatX to consider the IP malicious.
+ name: dbot_threshold
+ required: true
+ type: 0
+- defaultvalue: 'false'
+ display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+- defaultvalue: 'false'
+ display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+description: The ThreatX integration allows automated enforcement and intel gathering
+ actions.
+display: ThreatX
+name: ThreatX
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: 'IP address or CIDR, for example: "10.1.1.1" or "10.1.1.0/24".'
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: Temporarily blocks an IP address or CIDR. Default is 30 minutes.
+ execution: true
+ name: threatx-block-ip
+ outputs:
+ - contextPath: IP.Address
+ description: IP address or CIDR that was blocked.
+ type: string
+ - arguments:
+ - default: false
+ description: 'IP address or CIDR, for example: "10.1.1.1" or "10.1.1.0/24".'
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: Unblocks a blocked IP address or CIDR.
+ execution: true
+ name: threatx-unblock-ip
+ outputs:
+ - contextPath: IP.Address
+ description: IP address or CIDR that was unblocked.
+      type: string
+ - arguments:
+ - default: false
+ description: 'IP address or CIDR, for example: "10.1.1.1" or "10.1.1.0/24".'
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds an IP address or CIDR to the blacklist.
+ execution: true
+ name: threatx-blacklist-ip
+ outputs:
+ - contextPath: IP.Address
+ description: IP address or CIDR that was added to the blacklist.
+ type: string
+ - arguments:
+ - default: false
+ description: 'IP address or CIDR, for example: "10.1.1.1" or "10.1.1.0/24".'
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes an IP or CIDR from the blacklist.
+ execution: true
+ name: threatx-unblacklist-ip
+ outputs:
+ - contextPath: IP.Address
+ description: IP address or CIDR that was removed from the blacklist.
+ type: string
+ - arguments:
+ - default: false
+ description: 'IP address or CIDR, for example: "10.1.1.1" or "10.1.1.0/24".'
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: Adds an IP address or CIDR to the whitelist.
+ execution: true
+ name: threatx-whitelist-ip
+ outputs:
+ - contextPath: IP.Address
+      description: IP address or CIDR that was added to the whitelist.
+ type: string
+ - arguments:
+ - default: false
+ description: 'IP address or CIDR, for example: "10.1.1.1" or "10.1.1.0/24".'
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ deprecated: false
+ description: Removes an IP address or CIDR from the whitelist.
+ execution: true
+ name: threatx-unwhitelist-ip
+ outputs:
+ - contextPath: IP.Address
+ description: IP address or CIDR that was removed from the whitelist.
+ type: string
+ - arguments:
+ - default: false
+ description: CSV list of Entity names.
+ isArray: false
+ name: entity_name
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of Entity ID hashes.
+ isArray: false
+ name: entity_id
+ required: false
+ secret: false
+ - default: false
+ description: CSV list of Entity IP addresses.
+ isArray: false
+ name: entity_ip
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: 1-Hour
+    description: 'Look-back timeframe for the query. Options are 1-Hour, 1-Day,
+      1-Week, 1-Month, or 3-Months. Note: long look-back timeframes for a large
+      number of Entities can time out.'
+ isArray: false
+ name: timeframe
+ predefined:
+ - 1-Hour
+ - 1-Day
+ - 1-Week
+ - 1-Month
+ - 3-Months
+ required: true
+ secret: false
+ deprecated: false
+  description: 'Get high-level Entity information using the Entity ID, Entity Name,
+    or Entity IP. For example: !threatx-get-entities timeframe=1-Day entity_name=CynicalGraaf,MJ12Bot
+    entity_id=566056709675514809 entity_ip=12.12.12.12,14.14.14.14. Note: long
+    look-back timeframes for a large number of Entities can time out.'
+ execution: false
+ name: threatx-get-entities
+ outputs:
+ - contextPath: Threatx.Entity.ID
+ description: ID hash of the entity.
+ type: string
+ - contextPath: Threatx.Entity.Name
+ description: Name of the entity.
+ type: string
+ - contextPath: Threatx.Entity.IP
+ description: IP address of the entity.
+ type: string
+ - contextPath: Threatx.Entity.Risk
+ description: Risk score of the entity.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: 'ID hash of the Entity. Note: this value can be retrieved using
+ the "!threatx-get-entities" command.'
+ isArray: false
+ name: entity_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns the notes attached to an Entity, by Entity ID.
+ execution: false
+ name: threatx-get-entity-notes
+ outputs:
+    - contextPath: Threatx.Entity.Note
+      description: Notes attached to the entity.
+      type: Unknown
+ - contextPath: Threatx.Entity.ID
+ description: ID hash of the entity.
+ type: Unknown
+ - contextPath: Threatx.Entity.Note.content
+ description: Content of the note.
+ type: Unknown
+ - contextPath: Threatx.Entity.Note.timestamp
+ description: Timestamp of the note.
+ type: Unknown
+ - contextPath: Threatx.Entity.Note.username
+ description: Author of the note.
+ type: Unknown
+ - arguments:
+ - default: false
+ description: 'ID hash of the Entity. To retrieve this value, run the "!threatx-get-entities" command.'
+ isArray: false
+ name: entity_id
+ required: true
+ secret: false
+ - default: false
+      description: Contents of the note.
+ isArray: false
+ name: message
+ required: true
+ secret: false
+ deprecated: false
+ description: 'Add a new note to the entity. For example: !threatx-add-entity-note
+ entity_id=566056709695514809 message="test note"'
+ execution: true
+ name: threatx-add-entity-note
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
diff --git a/Integrations/ThreatX/ThreatX_description.md b/Integrations/ThreatX/ThreatX_description.md
new file mode 100644
index 000000000000..35f70d42af69
--- /dev/null
+++ b/Integrations/ThreatX/ThreatX_description.md
@@ -0,0 +1,3 @@
+Enter your Customer Name and API Key provided by the ThreatX SOC. You can request your Customer Name and API Key by opening a support ticket via support@threatx.com.
+
+Set the DBot Score Threshold to a number between 1 and 100 (default is 70). Any IP address associated with a queried Entity whose ThreatX risk score is greater than or equal to the threshold is assigned a DBot Score of 3 (Malicious).
diff --git a/Integrations/ThreatX/ThreatX_image.png b/Integrations/ThreatX/ThreatX_image.png
new file mode 100644
index 000000000000..8c2b4e091fda
Binary files /dev/null and b/Integrations/ThreatX/ThreatX_image.png differ
diff --git a/Integrations/TruSTAR/CHANGELOG.md b/Integrations/TruSTAR/CHANGELOG.md
new file mode 100644
index 000000000000..cf548c18ce45
--- /dev/null
+++ b/Integrations/TruSTAR/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.10.1] - 2019-10-15
+Fixed an issue where the ***trustar-search-indicator*** command returned an incorrect context output.
diff --git a/Integrations/TruSTAR/TruSTAR.py b/Integrations/TruSTAR/TruSTAR.py
new file mode 100644
index 000000000000..10ece7060089
--- /dev/null
+++ b/Integrations/TruSTAR/TruSTAR.py
@@ -0,0 +1,745 @@
+import demistomock as demisto
+from CommonServerPython import *
+''' IMPORTS '''
+import requests
+import time
+import trustar
+import collections
+from trustar.models.indicator import Indicator
+from trustar.models.page import Page
+
+handle_proxy()
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBAL VARS '''
+SERVER = demisto.params()['server']
+API_KEY = str(demisto.params()['key'])
+API_SECRET = str(demisto.params()['secret'])
+BASE_URL = SERVER + '/api/1.3'
+INSECURE = demisto.params()['insecure']
+
+''' HELPER FUNCTIONS '''
+
+
+def translate_indicators(ts_indicators):
+ indicators = []
+ file_context = []
+ url_context = []
+ ip_context = []
+ email_context = []
+ key_context = []
+ cve_context = []
+ for indicator in ts_indicators:
+ current_indicator = indicator.to_dict(remove_nones=True)
+ indicator_type = current_indicator['indicatorType']
+ priority_level = current_indicator.get('priorityLevel')
+ value = current_indicator['value']
+ if indicator_type == 'SOFTWARE':
+ # Extracts the filename out of file path
+ if "\\" in r"%r" % value:
+ file_name = value.split('\\')[-1] # Handles file path with backslash
+ else:
+ file_name = value.split('/')[-1] # Handles file path with slash
+ current_indicator['value'] = file_name
+ context_dict = {'Name': file_name}
+ if priority_level:
+ context_dict.update({'priorityLevel': priority_level})
+ file_context.append(context_dict)
+ elif indicator_type in {'SHA256', 'SHA1', 'MD5'}:
+ context_dict = {indicator_type: value}
+ if priority_level:
+ context_dict.update({'priorityLevel': priority_level})
+ file_context.append(context_dict)
+ elif indicator_type == 'URL':
+ context_dict = {'Address': value}
+ if priority_level:
+ context_dict.update({'priorityLevel': priority_level})
+ url_context.append(context_dict)
+ elif indicator_type == 'IP':
+ context_dict = {'Address': value}
+ if priority_level:
+ context_dict.update({'priorityLevel': priority_level})
+ ip_context.append(context_dict)
+ elif indicator_type == 'EMAIL_ADDRESS':
+ context_dict = {'Address': value}
+ if priority_level:
+ context_dict.update({'priorityLevel': priority_level})
+ email_context.append(context_dict)
+ elif indicator_type == 'REGISTRY_KEY':
+ context_dict = {'Path': value}
+ if priority_level:
+ context_dict.update({'priorityLevel': priority_level})
+ key_context.append(context_dict)
+ elif indicator_type == 'CVE':
+ context_dict = {'ID': value}
+ if priority_level:
+ context_dict.update({'priorityLevel': priority_level})
+ cve_context.append(context_dict)
+ indicators.append(current_indicator)
+ # Build Entry Context
+ ec = {}
+ if file_context:
+ ec['File(val.Name && val.Name === obj.Name)'] = file_context
+ if url_context:
+ ec['URL(val.Address && val.Address === obj.Address)'] = url_context
+ if ip_context:
+ ec['IP(val.Address && val.Address === obj.Address)'] = ip_context
+ if email_context:
+ ec['Account.Email(val.Address && val.Address === obj.Address)'] = email_context
+ if key_context:
+ ec['RegistryKey(val.Path && val.Path === obj.Path)'] = key_context
+ if cve_context:
+ ec['CVE(val.ID && val.ID === obj.ID)'] = cve_context
+ return indicators, ec
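+
+# A hypothetical return value for a single HIGH-priority IP indicator:
+#   ([{'indicatorType': 'IP', 'value': '8.8.8.8', 'priorityLevel': 'HIGH'}],
+#    {'IP(val.Address && val.Address === obj.Address)':
+#        [{'Address': '8.8.8.8', 'priorityLevel': 'HIGH'}]})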
+
+
+def translate_specific_indicators(ts_indicators, specific_types):
+ res = []
+ for indicator in ts_indicators:
+ current_indicator = indicator.to_dict(remove_nones=True)
+ indicator_type = current_indicator['indicatorType']
+ priority_level = current_indicator.get('priorityLevel')
+ value = current_indicator['value']
+ whitelisted = current_indicator.get('whitelisted')
+ if indicator_type in specific_types:
+ res.append({
+ 'value': value,
+ 'priorityLevel': priority_level,
+ 'whitelisted': whitelisted,
+ 'indicatorType': indicator_type
+ })
+ return res
+
+
+def priority_level_to_score(priority_level):
+ if priority_level == 'LOW':
+ return 1
+ elif priority_level == 'MEDIUM':
+ return 2
+ elif priority_level == 'HIGH':
+ return 3
+ return 0
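+
+# Mapping sketch: LOW -> 1, MEDIUM -> 2, HIGH -> 3, anything else -> 0. The
+# create_*_ec helpers below compare this score with the configured threshold:
+# a score of 0 yields DBot 0 (unknown), a score below the threshold yields 2
+# (suspicious), and a score at or above it yields 3 (malicious).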
+
+
+def normalize_time(timestamp):
+ '''
+ Converts unix epoch time to GMT
+ '''
+ if isinstance(timestamp, str):
+ return timestamp
+ return time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(timestamp / 1000.0))
+
+
+def date_to_unix(timestamp):
+ d = datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S")
+ return int(d.strftime("%s")) * 1000
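+
+# Usage sketch: date_to_unix('2019-10-15 00:00:00') returns the matching epoch
+# timestamp in milliseconds, interpreted in local time (matching the behavior
+# of the glibc-only strftime('%s') it replaces).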
+
+
+def create_file_ec(indicators, file, threshold):
+ if not indicators:
+ return {
+ 'DBotScore': {
+ 'Indicator': file,
+ 'Type': 'file',
+ 'Score': 0,
+ 'Vendor': 'TruSTAR',
+ }
+ }
+ trustar_ec = {}
+ file_ec = {}
+ dbot_ec = {}
+ for indicator in indicators:
+ file_ec.update({
+ indicator['indicatorType']: indicator['value'],
+ })
+ trustar_ec.update({
+ 'Value': indicator['value'],
+ 'Whitelisted': indicator['whitelisted'],
+ 'Priority': indicator['priorityLevel']
+ })
+ indicator_score = priority_level_to_score(indicator['priorityLevel'])
+ dbot_ec.update({
+ 'Indicator': file,
+ 'Type': 'file',
+ 'Vendor': 'TruSTAR',
+ 'Score': 0 if indicator_score == 0 else (2 if threshold > indicator_score else 3)
+ })
+ if threshold <= indicator_score:
+ file_ec.update({
+ 'Malicious': {
+ 'Vendor': 'TruSTAR',
+ 'Description': 'Priority level above {0}'.format(indicator['priorityLevel'])
+ }
+ })
+ return {
+ outputPaths['dbotscore']: dbot_ec,
+ outputPaths['file']: file_ec,
+ 'TruSTAR.File(val.Value === obj.Value)': trustar_ec
+ }
+
+
+def create_ip_ec(indicators, ip, threshold):
+ if not indicators:
+ return {
+ 'DBotScore': {
+ 'Indicator': ip,
+ 'Type': 'ip',
+ 'Score': 0,
+ 'Vendor': 'TruSTAR',
+ }
+ }
+ trustar_ec = {}
+ ip_ec = {}
+ dbot_ec = {}
+ for indicator in indicators:
+ ip_ec.update({
+ 'Address': indicator['value'],
+ })
+ trustar_ec.update({
+ 'Value': indicator['value'],
+ 'Whitelisted': indicator['whitelisted'],
+ 'Priority': indicator['priorityLevel']
+ })
+ indicator_score = priority_level_to_score(indicator['priorityLevel'])
+ dbot_ec.update({
+ 'Indicator': ip,
+ 'Type': 'ip',
+ 'Vendor': 'TruSTAR',
+ 'Score': 0 if indicator_score == 0 else (2 if threshold > indicator_score else 3)
+ })
+ if threshold <= indicator_score:
+ ip_ec.update({
+ 'Malicious': {
+ 'Vendor': 'TruSTAR',
+ 'Description': 'Priority level above {0}'.format(indicator['priorityLevel'])
+ }
+ })
+ return {
+ outputPaths['dbotscore']: dbot_ec,
+ outputPaths['ip']: ip_ec,
+ 'TruSTAR.IP(val.Value === obj.Value)': trustar_ec
+ }
+
+
+def create_url_ec(indicators, url, threshold):
+ if not indicators:
+ return {
+ 'DBotScore': {
+ 'Indicator': url,
+ 'Type': 'url',
+ 'Score': 0,
+ 'Vendor': 'TruSTAR',
+ }
+ }
+ trustar_ec = {}
+ url_ec = {}
+ dbot_ec = {}
+ for indicator in indicators:
+ url_ec.update({
+ 'Data': indicator['value'],
+ })
+ trustar_ec.update({
+ 'Value': indicator['value'],
+ 'Whitelisted': indicator['whitelisted'],
+ 'Priority': indicator['priorityLevel']
+ })
+ indicator_score = priority_level_to_score(indicator['priorityLevel'])
+ dbot_ec.update({
+ 'Indicator': url,
+ 'Type': 'url',
+ 'Vendor': 'TruSTAR',
+ 'Score': 0 if indicator_score == 0 else (2 if threshold > indicator_score else 3)
+ })
+ if threshold <= indicator_score:
+ url_ec.update({
+ 'Malicious': {
+ 'Vendor': 'TruSTAR',
+ 'Description': 'Priority level above {0}'.format(indicator['priorityLevel'])
+ }
+ })
+ return {
+ outputPaths['dbotscore']: dbot_ec,
+ outputPaths['url']: url_ec,
+ 'TruSTAR.URL(val.Value === obj.Value)': trustar_ec
+ }
+
+
+def create_domain_ec(indicators, url, threshold):
+ if not indicators:
+ return {
+ 'DBotScore': {
+ 'Indicator': url,
+ 'Type': 'domain',
+ 'Score': 0,
+ 'Vendor': 'TruSTAR',
+ }
+ }
+ trustar_ec = {}
+ domain_ec = {}
+ dbot_ec = {}
+ for indicator in indicators:
+ domain_ec.update({
+ 'Name': indicator['value'],
+ })
+ trustar_ec.update({
+ 'Value': indicator['value'],
+ 'Whitelisted': indicator['whitelisted'],
+ 'Priority': indicator['priorityLevel']
+ })
+ indicator_score = priority_level_to_score(indicator['priorityLevel'])
+ dbot_ec.update({
+ 'Indicator': url,
+ 'Type': 'domain',
+ 'Vendor': 'TruSTAR',
+ 'Score': 0 if indicator_score == 0 else (2 if threshold > indicator_score else 3)
+ })
+ if threshold <= indicator_score:
+ domain_ec.update({
+ 'Malicious': {
+ 'Vendor': 'TruSTAR',
+ 'Description': 'Priority level above {0}'.format(indicator['priorityLevel'])
+ }
+ })
+ return {
+ outputPaths['dbotscore']: dbot_ec,
+ outputPaths['domain']: domain_ec,
+ 'TruSTAR.Domain(val.Value === obj.Value)': trustar_ec
+ }
+
+
+''' FUNCTIONS '''
+
+
+def get_related_indicators(indicators, enclave_ids, page_size, page_number):
+ # To display priority score
+ items_list = []
+ indicators_json = dict()
+ related_indicator_response = ts.get_related_indicators_page(indicators, enclave_ids, page_size, page_number)
+ for related_indicator in related_indicator_response:
+ current_indicator = related_indicator.to_dict(remove_nones=True)
+ search_indicator_response = ts.search_indicators_page(current_indicator['value'], enclave_ids, page_size,
+ page_number)
+ for found_indicator in search_indicator_response:
+ current_found_indicator = found_indicator.to_dict(remove_nones=True)
+ if current_indicator['value'] == current_found_indicator['value']:
+ current_indicator['priorityLevel'] = current_found_indicator['priorityLevel']
+ break
+ if not current_indicator.get('priorityLevel'):
+ current_indicator['priorityLevel'] = "NOT_FOUND"
+ items_list.append(current_indicator)
+ indicators_json.update({'items': items_list})
+ response = Page.from_dict(indicators_json, content_type=Indicator)
+ related_indicators, ec = translate_indicators(response)
+ if related_indicators:
+ title = 'TruSTAR indicators related to ' + indicators
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': related_indicators,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, related_indicators),
+ 'EntryContext': ec
+ }
+ else:
+ entry = 'No indicators related to ' + indicators + ' were found.'
+ return entry
+
+
+def get_trending_indicators(indicator_type, days_back):
+ if indicator_type == 'other':
+ indicator_type = None
+ response = ts.get_community_trends(indicator_type, days_back)
+ trending_indicators, ec = translate_indicators(response)
+ if trending_indicators:
+ title = 'TruSTAR Community Trending Indicators'
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': trending_indicators,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, trending_indicators),
+ 'EntryContext': ec
+ }
+ return entry
+ return 'No trending indicators were found.'
+
+
+def search_indicators(search_term, enclave_ids, page_size, page_number):
+ response = ts.search_indicators_page(search_term, enclave_ids, page_size, page_number)
+ indicators, ec = translate_indicators(response)
+ if indicators:
+ title = 'TruSTAR indicators that contain the term ' + search_term
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': indicators,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, indicators),
+ 'EntryContext': ec
+ }
+ return entry
+ return 'No indicators were found.'
+
+
+def generic_search_indicator(search_term, threshold, search_type, ec_function):
+ if demisto.args().get('threshold'):
+ threshold = demisto.args().get('threshold')
+ response = ts.search_indicators_page(search_term=search_term)
+ indicators = translate_specific_indicators(response, search_type)
+ threshold = priority_level_to_score(threshold)
+ title = 'TruSTAR results for {0} indicator: {1}'.format(search_type[0], search_term)
+ ec = ec_function(indicators, search_term, threshold)
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': indicators,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, indicators),
+ 'EntryContext': ec
+ }
+ return entry
+
+
+def submit_report(title, report_body, enclave_ids, external_url, time_began, distribution_type):
+ if distribution_type == 'ENCLAVE' and enclave_ids is None:
+ return 'Distribution type is ENCLAVE, but no enclave ID was given.'
+ ts_report = trustar.models.Report(
+ title=title,
+ body=report_body,
+ enclave_ids=[enclave_ids] if enclave_ids else enclave_ids,
+ is_enclave=True if distribution_type == 'ENCLAVE' else False,
+ time_began=time_began,
+ external_url=external_url
+ )
+ response = ts.submit_report(ts_report)
+ deep_link = '{server_url}/constellation/reports/{report_id}'.format(server_url=SERVER, report_id=response.id)
+ report = collections.OrderedDict() # type: OrderedDict
+ report['id'] = response.id
+ report['reportTitle'] = title
+ report['reportDeepLink'] = '[{}]({})'.format(deep_link, deep_link)
+ report['reportBody'] = report_body
+ ec = {
+ 'TruSTAR.Report(val.id && val.id === obj.id)': report
+ }
+ title = 'TruSTAR report was successfully created'
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': report,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, report),
+ 'EntryContext': ec
+ }
+ return entry
+
+
+def update_report(report_id, title, report_body, enclave_ids, external_url, time_began, distribution_type):
+ ts_report = trustar.models.Report(
+ id=report_id,
+ title=title,
+ body=report_body,
+        enclave_ids=enclave_ids.split(',') if enclave_ids else None,  # enclave-ids arrives as a CSV string
+        is_enclave=(distribution_type == 'ENCLAVE'),
+ time_began=time_began,
+ external_url=external_url
+ )
+ ts.update_report(ts_report)
+ deep_link = '{server_url}/constellation/reports/{report_id}'.format(server_url=SERVER, report_id=report_id)
+ report = collections.OrderedDict() # type: OrderedDict
+ report['id'] = report_id
+ report['reportTitle'] = title
+ report['reportDeepLink'] = '[{}]({})'.format(deep_link, deep_link)
+ report['reportBody'] = report_body
+ ec = {
+ 'TruSTAR.Report(val.id && val.id === obj.id)': report
+ }
+ title = 'TruSTAR report was successfully updated'
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': report,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, report),
+ 'EntryContext': ec
+ }
+ return entry
+
+
+def get_report_details(report_id, id_type):
+ response = ts.get_report_details(report_id, id_type)
+ current_report_dict = response.to_dict(remove_nones=True)
+ report_details = collections.OrderedDict() # type: OrderedDict
+ report_details['id'] = current_report_dict['id']
+ report_details['title'] = current_report_dict['title']
+ deep_link = '{server_url}/constellation/reports/{report_id}'.format(server_url=SERVER,
+ report_id=current_report_dict['id'])
+ report_details['reportDeepLink'] = '[{}]({})'.format(deep_link, deep_link)
+ if current_report_dict['enclaveIds']:
+ report_details['enclaveIds'] = ', '.join(current_report_dict['enclaveIds']) # Prettify list of enclave IDs
+ report_details['updated'] = normalize_time(current_report_dict['updated'])
+ report_details['created'] = normalize_time(current_report_dict['created'])
+ report_details['timeBegan'] = normalize_time(current_report_dict['timeBegan'])
+ report_details['distributionType'] = current_report_dict['distributionType']
+ if current_report_dict.get('externalUrl'):
+ report_details['externalUrl'] = current_report_dict['externalUrl']
+ report_details['reportBody'] = current_report_dict['reportBody']
+ report_context = {
+ 'reportTitle': report_details['title'],
+ 'reportBody': report_details['reportBody'],
+ 'id': report_details['id']
+ }
+ ec = {
+ 'TruSTAR.Report(val.id && val.id === obj.id)': report_context
+ }
+ title = 'TruSTAR report ID ' + report_id + ' details'
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': report_details,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, report_details),
+ 'EntryContext': ec
+ }
+ return entry
+
+
+def delete_report(report_id, id_type):
+ ts.delete_report(report_id, id_type)
+ return 'Report ' + report_id + ' was successfully deleted'
+
+
+def get_reports(from_time, to_time, enclave_ids, distribution_type, tags, excluded_tags):
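+    # Normalize the optional time-window bounds with the date_to_unix helper before querying.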
+    is_enclave = distribution_type == 'ENCLAVE'
+ from_time = date_to_unix(from_time) if from_time else from_time
+ to_time = date_to_unix(to_time) if to_time else to_time
+    response = ts.get_reports(is_enclave, enclave_ids, tags, excluded_tags, from_time, to_time)
+ reports = []
+ reports_context = []
+ for report in response:
+ current_report_dict = report.to_dict(remove_nones=True)
+ current_report = collections.OrderedDict() # type: OrderedDict
+ current_report['id'] = current_report_dict['id']
+ current_report['title'] = current_report_dict['title']
+ deep_link = '{server_url}/constellation/reports/{report_id}'.format(
+ server_url=SERVER, report_id=current_report_dict['id'])
+ current_report['reportDeepLink'] = '[{}]({})'.format(deep_link, deep_link)
+ if current_report_dict['enclaveIds']:
+ current_report['enclaveIds'] = ', '.join(current_report_dict['enclaveIds']) # Prettify list of enclave IDs
+ current_report['updated'] = normalize_time(current_report_dict['updated'])
+ current_report['created'] = normalize_time(current_report_dict['created'])
+ current_report['timeBegan'] = normalize_time(current_report_dict['timeBegan'])
+ current_report['distributionType'] = current_report_dict['distributionType']
+ if current_report_dict.get('externalUrl'):
+ current_report['externalUrl'] = current_report_dict['externalUrl']
+ current_report['reportBody'] = current_report_dict['reportBody']
+ reports.append(current_report)
+ reports_context.append({
+ 'reportTitle': current_report['title'],
+ 'reportBody': current_report['reportBody'],
+ 'id': current_report['id']
+ })
+ if reports:
+ ec = {
+ 'TruSTAR.Report(val.id && val.id === obj.id)': reports_context
+ }
+ title = 'TruSTAR reports'
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': reports,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, reports),
+ 'EntryContext': ec
+ }
+ return entry
+ return 'No reports were found.'
+
+
+def get_correlated_reports(indicators, enclave_ids, distribution_type, page_size, page_number):
+ response = ts.get_correlated_reports_page(indicators, enclave_ids, page_number, page_size)
+ correlated_reports = [] # type: List
+ for report in response:
+ current_report = report.to_dict(remove_nones=True)
+ current_report['updated'] = normalize_time(current_report['updated'])
+ current_report['created'] = normalize_time(current_report['created'])
+ current_report['timeBegan'] = normalize_time(current_report['timeBegan'])
+        correlated_reports.append(current_report)
+ if correlated_reports:
+ title = 'TruSTAR correlated reports'
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': correlated_reports,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, correlated_reports)
+ }
+ return entry
+ return 'No reports were found.'
+
+
+def search_reports(search_term, enclave_ids):
+ response = ts.search_reports(search_term, enclave_ids)
+ reports = []
+ report_context = []
+ for i, report in enumerate(response):
+ current_report = report.to_dict(remove_nones=True)
+ current_report['updated'] = normalize_time(current_report['updated'])
+ current_report['created'] = normalize_time(current_report['created'])
+ current_report['timeBegan'] = normalize_time(current_report['timeBegan'])
+ reports.append(current_report)
+ report_context.append({
+ 'reportTitle': current_report['title'],
+ 'id': current_report['id']
+ })
+ if 'reportBody' in current_report:
+ report_context[i]['reportBody'] = current_report['reportBody']
+
+ ec = {
+ 'TruSTAR.Report(val.id && val.id === obj.id)': report_context
+ }
+
+ title = 'TruSTAR reports that contain the term ' + search_term
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': reports,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, reports),
+ 'EntryContext': ec
+ }
+ return entry
+
+
+def add_to_whitelist(indicators):
+ response = ts.add_terms_to_whitelist([indicators])
+ if response:
+ return 'Added to the whitelist successfully'
+ else:
+ return 'Indicator could not be added to the whitelist.'
+
+
+def remove_from_whitelist(indicator, indicator_type):
+ ts_indicator = trustar.models.Indicator(
+ value=indicator,
+ type=indicator_type
+ )
+ response = ts.delete_indicator_from_whitelist(ts_indicator)
+ if response:
+ return 'Removed from the whitelist successfully'
+ else:
+ return 'Indicator could not be removed from the whitelist.'
+
+
+def get_enclaves():
+ response = ts.get_user_enclaves()
+ enclave_ids = []
+ for enclave in response:
+ enclave_ids.append(enclave.to_dict(remove_nones=True))
+ title = 'TruSTAR Enclaves'
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': enclave_ids,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': tableToMarkdown(title, enclave_ids),
+ }
+ return entry
+
+
+''' EXECUTION CODE '''
+config = {
+ 'user_api_key': API_KEY,
+ 'user_api_secret': API_SECRET,
+ 'api_endpoint': BASE_URL,
+ 'verify': INSECURE
+}
+ts = trustar.TruStar(config=config)
+
+LOG('command is %s' % (demisto.command(), ))
+
+try:
+ if demisto.command() == 'test-module':
+ demisto.results('ok')
+
+ elif demisto.command() == 'trustar-related-indicators':
+ enclave_ids = demisto.args().get('enclave-ids', None)
+ demisto.results(get_related_indicators(demisto.args()['indicators'], enclave_ids, demisto.args()[
+ 'page-size'], demisto.args()['page-number']))
+
+ elif demisto.command() == 'trustar-trending-indicators':
+ demisto.results(get_trending_indicators(demisto.args()['type'], demisto.args()['days-back']))
+
+ elif demisto.command() == 'trustar-search-indicators':
+ enclave_ids = demisto.args().get('enclave-ids', None)
+ demisto.results(search_indicators(demisto.args()['search-term'], enclave_ids,
+ demisto.args()['page-size'], demisto.args()['page-number']))
+
+ elif demisto.command() == 'trustar-submit-report':
+ enclave_ids = demisto.args().get('enclave-ids', None)
+ external_url = demisto.args().get('external-url', None)
+ time_began = demisto.args().get('time-began', None)
+ demisto.results(submit_report(demisto.args()['title'], demisto.args()[
+ 'report-body'], enclave_ids, external_url, time_began, demisto.args()['distribution-type']))
+
+ elif demisto.command() == 'trustar-update-report':
+ enclave_ids = demisto.args().get('enclave-ids', None)
+ external_url = demisto.args().get('external-url', None)
+ time_began = demisto.args().get('time-began', None)
+ demisto.results(update_report(demisto.args()['report-id'], demisto.args()['title'],
+ demisto.args()['report-body'], enclave_ids, external_url, time_began,
+ demisto.args()['distribution-type']))
+
+ elif demisto.command() == 'trustar-report-details':
+ demisto.results(get_report_details(demisto.args()['report-id'], demisto.args()['id-type']))
+
+ elif demisto.command() == 'trustar-delete-report':
+ demisto.results(delete_report(demisto.args()['report-id'], demisto.args()['id-type']))
+
+ elif demisto.command() == 'trustar-get-reports':
+ from_time = demisto.args().get('from', None)
+ to_time = demisto.args().get('to', None)
+ enclave_ids = demisto.args().get('enclave-ids', None)
+ tags = demisto.args().get('tags', None)
+ excluded_tags = demisto.args().get('excluded-tags', None)
+ demisto.results(get_reports(from_time, to_time, enclave_ids,
+ demisto.args()['distribution-type'], tags, excluded_tags))
+
+ elif demisto.command() == 'trustar-correlated-reports':
+ enclave_ids = demisto.args().get('enclave-ids', None)
+ demisto.results(get_correlated_reports(demisto.args()['indicators'], enclave_ids, demisto.args()[
+ 'distribution-type'], demisto.args()['page-size'], demisto.args()['page-number']))
+
+ elif demisto.command() == 'trustar-search-reports':
+ enclave_ids = demisto.args().get('enclave-ids', None)
+ demisto.results(search_reports(demisto.args()['search-term'], enclave_ids))
+
+ elif demisto.command() == 'trustar-add-to-whitelist':
+ demisto.results(add_to_whitelist(demisto.args()['indicators']))
+
+ elif demisto.command() == 'trustar-remove-from-whitelist':
+ demisto.results(remove_from_whitelist(demisto.args()['indicator'], demisto.args()['indicator-type']))
+
+ elif demisto.command() == 'trustar-get-enclaves':
+ demisto.results(get_enclaves())
+
+ elif demisto.command() == 'file':
+ demisto.results(generic_search_indicator(demisto.args().get('file'), demisto.params().get(
+ 'file_threshold'), ('File', 'MD5', 'SHA1', 'SHA256'), create_file_ec))
+
+ elif demisto.command() == 'ip':
+ demisto.results(generic_search_indicator(demisto.args().get('ip'),
+ demisto.params().get('ip_threshold'), ('IP',), create_ip_ec))
+
+ elif demisto.command() == 'url':
+ demisto.results(generic_search_indicator(demisto.args().get('url'),
+ demisto.params().get('url_threshold'), ('URL',), create_url_ec))
+
+ elif demisto.command() == 'domain':
+ demisto.results(generic_search_indicator(demisto.args().get('domain'), demisto.params().get('domain_threshold'),
+ ('Domain', 'URL',), create_domain_ec))
+
+except Exception as e:
+ return_error(str(e))
diff --git a/Integrations/TruSTAR/TruSTAR.yml b/Integrations/TruSTAR/TruSTAR.yml
new file mode 100644
index 000000000000..d32088de1325
--- /dev/null
+++ b/Integrations/TruSTAR/TruSTAR.yml
@@ -0,0 +1,624 @@
+commonfields:
+ id: TruSTAR
+ version: -1
+name: TruSTAR
+display: TruSTAR
+category: Data Enrichment & Threat Intelligence
+description: TruSTAR's threat intelligence platform enriches every stage of the security
+  operations workflow with trusted and relevant data sources.
+configuration:
+- display: Server URL (e.g. https://192.168.0.1)
+ name: server
+ defaultvalue: https://api.trustar.co
+ type: 0
+ required: true
+- display: TruSTAR API Key
+ name: key
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: TruSTAR API Secret
+ name: secret
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: "false"
+ type: 8
+ required: false
+- display: File Threshold (LOW, MEDIUM, HIGH). Minimum TruSTAR priority level to consider
+ the file malicious
+ name: file_threshold
+ defaultvalue: MEDIUM
+ type: 0
+ required: false
+- display: URL Threshold (LOW, MEDIUM, HIGH). Minimum TruSTAR priority level to consider
+ the URL malicious
+ name: url_threshold
+ defaultvalue: MEDIUM
+ type: 0
+ required: false
+- display: IP Threshold (LOW, MEDIUM, HIGH). Minimum TruSTAR priority level to consider
+ the IP malicious
+ name: ip_threshold
+ defaultvalue: MEDIUM
+ type: 0
+ required: false
+- display: Domain Threshold (LOW, MEDIUM, HIGH). Minimum TruSTAR priority level to
+ consider the domain malicious
+ name: domain_threshold
+ defaultvalue: MEDIUM
+ type: 0
+ required: false
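+# Note: each reputation command (file/ip/url/domain) also accepts a threshold argument
+# that, when supplied, overrides the matching instance-level default above.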
+script:
+ script: ''
+ type: python
+ subtype: python2
+ commands:
+ - name: trustar-related-indicators
+ arguments:
+ - name: indicators
+ required: true
+ default: true
+    description: Indicator value of any type, e.g. an IP address, email address,
+      URL, MD5, SHA1, SHA256, Registry Key, Malware name, etc.
+ - name: enclave-ids
+    description: Comma-separated list of enclave IDs; only indicators found in reports
+      from these enclaves will be returned. Default is all enclaves the user has
+      READ access to.
+ - name: page-number
+ description: Which page of the result set to get
+ defaultValue: "0"
+ - name: page-size
+ description: The number of results per page.
+ defaultValue: "25"
+ outputs:
+ - contextPath: File.Name
+ description: File name
+ type: string
+ - contextPath: File.MD5
+ description: File MD5
+ type: string
+ - contextPath: File.SHA1
+ description: File SHA1
+ type: string
+ - contextPath: File.SHA256
+ description: File SHA256
+ type: string
+ - contextPath: File.priorityLevel
+ description: File priority level
+ type: string
+ - contextPath: URL.Address
+ description: URL address
+ type: string
+ - contextPath: URL.priorityLevel
+ description: URL priority level
+ type: string
+ - contextPath: IP.Address
+ description: IP address
+ type: string
+ - contextPath: IP.priorityLevel
+ description: IP priority level
+ type: string
+ - contextPath: Account.Email.Address
+ description: Email address
+ type: string
+ - contextPath: Account.Email.priorityLevel
+ description: Email priority level
+ type: string
+ - contextPath: RegistryKey.Path
+ description: Registry key path
+ type: string
+ - contextPath: RegistryKey.priorityLevel
+ description: Registry key priority level
+ type: string
+ - contextPath: CVE.ID
+ description: CVE ID
+ type: string
+ - contextPath: CVE.priorityLevel
+ description: CVE priority level
+ type: string
+ description: Search all TruSTAR incident reports for provided indicators and return
+ all correlated indicators from search results. Two indicators are considered
+      “correlated” if they can be found in a common report.
+ - name: trustar-trending-indicators
+ arguments:
+ - name: type
+ auto: PREDEFINED
+ predefined:
+ - CVE
+ - MALWARE
+ - other
+ description: The types of indicators to be returned. If other, then all indicator
+ types except for CVE and MALWARE will be returned.
+ defaultValue: other
+ - name: days-back
+ description: The number of days back to count correlations for.
+ defaultValue: "3"
+ outputs:
+ - contextPath: File.Name
+ description: File name
+ type: string
+ - contextPath: File.MD5
+ description: File MD5
+ type: string
+ - contextPath: File.SHA1
+ description: File SHA1
+ type: string
+ - contextPath: File.SHA256
+ description: File SHA256
+ type: string
+ - contextPath: URL.Address
+ description: URL address
+ type: string
+ - contextPath: IP.Address
+ description: IP address
+ type: string
+ - contextPath: Account.Email.Address
+ description: Email address
+ type: string
+ - contextPath: RegistryKey.Path
+ description: Registry key path
+ type: string
+ - contextPath: CVE.ID
+ description: CVE ID
+ type: string
+ description: Returns the 10 indicators that have recently appeared in the most
+ community reports. This is analogous to the Community Trends section of the
+ dashboard on Station.
+ - name: trustar-search-indicators
+ arguments:
+ - name: search-term
+ required: true
+ default: true
+ description: The term to search for
+ - name: enclave-ids
+    description: Comma-separated list of enclave IDs; only indicators found in reports
+      from these enclaves will be returned. Default is all enclaves the user has
+      READ access to.
+ - name: page-number
+ description: Which page of the result set to get
+ defaultValue: "0"
+ - name: page-size
+ description: The number of results per page.
+ defaultValue: "25"
+ outputs:
+ - contextPath: File.Name
+ description: File name
+ type: string
+ - contextPath: File.MD5
+ description: File MD5
+ type: string
+ - contextPath: File.SHA1
+ description: File SHA1
+ type: string
+ - contextPath: File.SHA256
+ description: File SHA256
+ type: string
+ - contextPath: URL.Address
+ description: URL address
+ type: string
+ - contextPath: IP.Address
+ description: IP address
+ type: string
+ - contextPath: Account.Email.Address
+ description: Email address
+ type: string
+ - contextPath: RegistryKey.Path
+ description: Registry key path
+ type: string
+ - contextPath: CVE.ID
+ description: CVE ID
+ type: string
+ description: Searches for all indicators that contain the given search term.
+ - name: trustar-submit-report
+ arguments:
+ - name: title
+ required: true
+ description: Title of the report
+ - name: report-body
+ required: true
+ description: Text content of report
+ - name: enclave-ids
+ description: CSV of TruSTAR-generated enclave ids. Use the enclave ID, NOT the
+ enclave name. Mandatory if the distribution type is ENCLAVE.
+ - name: distribution-type
+ auto: PREDEFINED
+ predefined:
+ - COMMUNITY
+ - ENCLAVE
+ description: Distribution type of the report
+ defaultValue: ENCLAVE
+ - name: external-url
+ description: URL for the external report that this originated from, if one exists.
+ Limit 500 alphanumeric characters. Must be unique across all reports for a
+ given company.
+ - name: time-began
+ description: ISO-8601 formatted incident time with timezone, e.g. 2016-09-22T11:38:35+00:00.
+ Default is current time.
+ outputs:
+ - contextPath: TruSTAR.Report.reportTitle
+ description: Title of the report
+ type: string
+ - contextPath: TruSTAR.Report.reportBody
+ description: Body of the report
+ type: string
+ - contextPath: TruSTAR.Report.id
+ description: ID of the report
+ type: string
+ description: Submit a new incident report, and receive the ID it has been assigned
+ in TruSTAR’s system.
+ - name: trustar-update-report
+ arguments:
+ - name: report-id
+ required: true
+ description: TruSTAR report id or external tracking id.
+ - name: title
+ required: true
+ description: Title of the report
+ - name: report-body
+ required: true
+ description: Text content of report
+ - name: enclave-ids
+ description: CSV of TruSTAR-generated enclave ids. Use the enclave ID, NOT the
+ enclave name. Mandatory if the distribution type is ENCLAVE.
+ - name: external-url
+ description: URL for the external report that this originated from, if one exists.
+ Limit 500 alphanumeric characters. Must be unique across all reports for a
+ given company.
+ - name: distribution-type
+ auto: PREDEFINED
+ predefined:
+ - COMMUNITY
+ - ENCLAVE
+ description: Distribution type of the report
+ defaultValue: ENCLAVE
+ - name: time-began
+ description: ISO-8601 formatted incident time with timezone, e.g. 2016-09-22T11:38:35+00:00.
+ Default is current time.
+ outputs:
+ - contextPath: TruSTAR.Report.reportTitle
+ description: Title of the report
+ type: string
+ - contextPath: TruSTAR.Report.reportBody
+ description: Body of the report
+ type: string
+ - contextPath: TruSTAR.Report.id
+ description: ID of the report
+ type: string
+ description: Update the report with the specified ID. Either the internal TruSTAR
+ report ID or an external tracking ID can be used. Only the fields passed will
+ be updated. All others will be left unchanged.
+ - name: trustar-report-details
+ arguments:
+ - name: report-id
+ required: true
+    description: The report ID, either internal (TruSTAR) or external.
+ - name: id-type
+ auto: PREDEFINED
+ predefined:
+ - internal
+ - external
+ description: Type of report ID
+ defaultValue: internal
+ outputs:
+ - contextPath: TruSTAR.Report.reportTitle
+ description: Title of the report
+ type: string
+ - contextPath: TruSTAR.Report.reportBody
+ description: Body of the report
+ type: string
+ - contextPath: TruSTAR.Report.id
+ description: ID of the report
+ type: string
+ description: Finds a report by its internal or external id.
+ - name: trustar-delete-report
+ arguments:
+ - name: report-id
+ required: true
+    description: The report ID, either internal (TruSTAR) or external.
+ - name: id-type
+ auto: PREDEFINED
+ predefined:
+ - internal
+ - external
+ description: Type of report ID
+ defaultValue: internal
+ description: Deletes a report as specified by given id (id can be TruSTAR report
+ id or external id).
+ - name: trustar-get-reports
+ arguments:
+ - name: from
+    description: Start of the time window (format is YYYY-MM-DD HH:MM:SS, e.g. 2018-01-01
+      10:30:00). Based on updated time, not created time. Default is 1 day ago.
+ - name: to
+    description: End of the time window (format is YYYY-MM-DD HH:MM:SS, e.g. 2018-01-01
+      10:30:00). Based on updated time, not created time. Default is current time.
+ - name: distribution-type
+ auto: PREDEFINED
+ predefined:
+ - ENCLAVE
+ - COMMUNITY
+ description: Whether to search for reports in the community, or only in enclaves
+ defaultValue: ENCLAVE
+ - name: enclave-ids
+    description: Comma-separated list of enclave IDs to search for reports in. Even
+      if distribution-type is COMMUNITY, these enclaves will still be searched.
+      Default is all enclaves the user has READ access to.
+ - name: tags
+    description: A list of tag names to filter by; only reports containing ALL
+      of these tags will be returned.
+  - name: excluded-tags
+    description: Reports containing ANY of these tags will be excluded from the
+      results.
+ outputs:
+ - contextPath: TruSTAR.Report.reportTitle
+ description: Title of the report
+ - contextPath: TruSTAR.Report.reportBody
+ description: Body of the report
+ - contextPath: TruSTAR.Report.id
+ description: ID of the report
+ description: 'Returns incident reports matching the specified filters. All parameters
+ are optional: if nothing is specified, the latest 25 reports accessible by the
+ user will be returned (matching the view the user would have by logging into
+ Station).'
+ - name: trustar-correlated-reports
+ arguments:
+ - name: indicators
+ required: true
+    description: Indicator value of any type, e.g. an IP address, email address,
+      URL, MD5, SHA1, SHA256, Registry Key, Malware name, etc.
+ - name: enclave-ids
+    description: Comma-separated list of enclave IDs; only reports from these enclaves
+      will be searched. Default is all enclaves the user has READ access to.
+ - name: page-number
+ description: Which page of the result set to get
+ defaultValue: "0"
+ - name: page-size
+ description: The number of results per page.
+ defaultValue: "25"
+ - name: distribution-type
+ auto: PREDEFINED
+ predefined:
+ - COMMUNITY
+ - ENCLAVE
+ description: Distribution type of the report
+ defaultValue: ENCLAVE
+ description: Returns a paginated list of all reports that contain any of the provided
+ indicator values.
+ - name: trustar-search-reports
+ arguments:
+ - name: search-term
+ required: true
+ default: true
+ description: The term to search for
+ - name: enclave-ids
+    description: Comma-separated list of enclave IDs; only reports from these enclaves
+      will be searched. Default is all enclaves the user has READ access to.
+ description: Searches for all reports that contain the given search term.
+ - name: trustar-add-to-whitelist
+ arguments:
+ - name: indicators
+ required: true
+ default: true
+    description: CSV of indicators to whitelist, e.g. evil.com,101.43.52.224
+ description: Whitelist a list of indicator values for the user’s company.
+ - name: trustar-remove-from-whitelist
+ arguments:
+ - name: indicator
+ required: true
+ description: The value of the indicator to delete.
+ - name: indicator-type
+ required: true
+ auto: PREDEFINED
+ predefined:
+ - URL
+ - IP
+ - SHA256
+ - SHA1
+ - MD5
+ - SOFTWARE
+ - EMAIL_ADDRESS
+ - BITCOIN_ADDRESS
+ - CIDR_BLOCK
+ - CVE
+ - REGISTRY_KEY
+ description: The type of the indicator to delete.
+ description: Delete an indicator from the user’s company whitelist.
+ - name: trustar-get-enclaves
+ arguments: []
+ description: Returns the list of all enclaves that the user has access to, as
+ well as whether they can read, create, and update reports in that enclave.
+ - name: file
+ arguments:
+ - name: file
+ required: true
+ description: File hash - MD5, SHA1 or SHA256
+ - name: threshold
+ auto: PREDEFINED
+ predefined:
+ - LOW
+ - MEDIUM
+ - HIGH
+    description: If the TruSTAR priority level is greater than or equal to the threshold,
+      the file is considered malicious
+ outputs:
+ - contextPath: File.MD5
+ description: File MD5
+ type: string
+ - contextPath: File.SHA1
+ description: File SHA1
+ type: string
+ - contextPath: File.SHA256
+ description: File SHA256
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator we tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type of the indicator
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: TruSTAR.File.Value
+ description: Indicator value
+ type: string
+ - contextPath: TruSTAR.File.Whitelisted
+ description: Is the indicator whitelisted
+ type: boolean
+ - contextPath: TruSTAR.File.Priority
+ description: "Indicator's priority level by TruSTAR - LOW, MEDIUM or HIGH"
+ type: string
+ - contextPath: File.Malicious.Description
+ description: For malicious files, the reason for the vendor to make the decision
+ description: Check file reputation on TruSTAR
+ - name: ip
+ arguments:
+ - name: ip
+ required: true
+ description: IP address (e.g. 8.8.8.8) or a CIDR (e.g. 1.1.1.0/18)
+ - name: threshold
+ auto: PREDEFINED
+ predefined:
+ - LOW
+ - MEDIUM
+ - HIGH
+    description: If the TruSTAR priority level is greater than or equal to the threshold,
+      the IP address is considered malicious
+ outputs:
+ - contextPath: IP.Address
+ description: IP Address
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IPs, the vendor that made the decision
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IPs, the reason for the vendor to make the decision
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator we tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type of the indicator
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+      type: number
+ - contextPath: TruSTAR.IP.Value
+ description: Indicator value
+ type: string
+ - contextPath: TruSTAR.IP.Whitelisted
+ description: Is the indicator whitelisted
+ type: boolean
+ - contextPath: TruSTAR.IP.Priority
+ description: "Indicator's priority level by TruSTAR - LOW, MEDIUM or HIGH"
+    description: Check IP reputation on TruSTAR
+ - name: url
+ arguments:
+ - name: url
+ required: true
+ description: Enter a URL to search
+ - name: threshold
+ auto: PREDEFINED
+ predefined:
+ - LOW
+ - MEDIUM
+ - HIGH
+    description: If the TruSTAR priority level is greater than or equal to the threshold,
+      the URL is considered malicious
+ outputs:
+ - contextPath: URL.Data
+ description: URL data
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the reason for the vendor to make the decision
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator we tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type of the indicator
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+      type: number
+ - contextPath: TruSTAR.URL.Value
+ description: Indicator value
+ type: string
+ - contextPath: TruSTAR.URL.Whitelisted
+ description: Is the indicator whitelisted
+ type: boolean
+ - contextPath: TruSTAR.URL.Priority
+ description: "Indicator's priority level by TruSTAR - LOW, MEDIUM or HIGH"
+ type: string
+ description: Check URL reputation on TruSTAR
+ - name: domain
+ arguments:
+ - name: domain
+ required: true
+ description: Enter domain name to search
+ - name: threshold
+ auto: PREDEFINED
+ predefined:
+ - LOW
+ - MEDIUM
+ - HIGH
+    description: If the TruSTAR priority level is greater than or equal to the threshold,
+      the domain is considered malicious
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain Name
+ type: string
+ - contextPath: Domain.Malicious.Vendor
+ description: For malicious domains, the vendor that made the decision
+ type: string
+ - contextPath: Domain.Malicious.Description
+ description: For malicious domains, the reason for the vendor to make the decision
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator we tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type of the indicator
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+      type: number
+ - contextPath: TruSTAR.Domain.Value
+ description: Indicator value
+ type: string
+ - contextPath: TruSTAR.Domain.Whitelisted
+ description: Is the indicator whitelisted
+ type: boolean
+ - contextPath: TruSTAR.Domain.Priority
+ description: "Indicator's priority level by TruSTAR - LOW, MEDIUM or HIGH"
+ type: string
+    description: Check domain reputation on TruSTAR
+ dockerimage: demisto/trustar
diff --git a/Integrations/TruSTAR/TruSTAR_image.png b/Integrations/TruSTAR/TruSTAR_image.png
new file mode 100644
index 000000000000..f1d9e65b0e5e
Binary files /dev/null and b/Integrations/TruSTAR/TruSTAR_image.png differ
diff --git a/Integrations/URLHaus/URLHaus.py b/Integrations/URLHaus/URLHaus.py
new file mode 100644
index 000000000000..05e65de6b4ea
--- /dev/null
+++ b/Integrations/URLHaus/URLHaus.py
@@ -0,0 +1,511 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+import os
+import traceback
+import requests
+import zipfile
+import io
+from datetime import datetime as dt
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+# Remove trailing slash to prevent wrong URL path to service
+API_URL = demisto.params()['url'].rstrip('/')
+
+# Should we use SSL
+USE_SSL = not demisto.params().get('insecure', False)
+
+# Remove proxy if not set to true in params
+if not demisto.params().get('proxy'):
+ os.environ.pop('HTTP_PROXY', None)
+ os.environ.pop('HTTPS_PROXY', None)
+ os.environ.pop('http_proxy', None)
+ os.environ.pop('https_proxy', None)
+
+THRESHOLD = int(demisto.params().get('threshold', 1))
+
+# disable-secrets-detection-start
+# Whether compromised websites are considered malicious or not. See the blacklists output in
+# https://urlhaus-api.abuse.ch/
+# disable-secrets-detection-end
+COMPROMISED_IS_MALICIOUS = demisto.params().get('compromised_is_malicious', False)
+
+# Headers to be sent in requests
+HEADERS = {
+ 'Content-Type': 'application/x-www-form-urlencoded',
+ 'Accept': 'application/json'
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def http_request(method, command, data=None):
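+    # The URLhaus API takes form-encoded POST bodies (see HEADERS) and answers 200 on
+    # success; any other status code is treated as an error.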
+ url = f'{API_URL}/{command}/'
+ res = requests.request(method,
+ url,
+ verify=USE_SSL,
+ data=data,
+ headers=HEADERS)
+
+ if res.status_code != 200:
+ raise Exception(f'Error in API call {url} [{res.status_code}] - {res.reason}')
+
+ return res
+
+
+def reformat_date(date):
+ try:
+ return dt.strptime(date.rstrip(' UTC'), '%Y-%m-%d %H:%M:%S').strftime('%Y-%m-%dT%H:%M:%S')
+ except Exception:
+ return 'Unknown'
+
+
+def extract_zipped_buffer(buffer):
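+    # Samples arrive as a ZIP archive; return the raw bytes of its first member.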
+ with io.BytesIO() as bio:
+ bio.write(buffer)
+ with zipfile.ZipFile(bio) as z:
+ return z.read(z.namelist()[0])
+
+
+def query_url_information(url):
+ return http_request('POST',
+ 'url',
+ f'url={url}')
+
+
+def query_host_information(host):
+ return http_request('POST',
+ 'host',
+ f'host={host}')
+
+
+def query_payload_information(hash_type, hash):
+ return http_request('POST',
+ 'payload',
+ f'{hash_type}_hash={hash}')
+
+
+def download_malware_sample(sha256):
+ return http_request('GET',
+ f'download/{sha256}')
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+ Performs basic get request to get item samples
+ """
+ http_request('POST', 'url')
+
+
+def calculate_dbot_score(blacklists, threshold, compromised_is_malicious):
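+    """
+    Translate blacklist listings into a DBot score and a human-readable description.
+
+    spamhaus_dbl reports a status string rather than 'listed': a '*_domain' status always
+    counts as a listing, while an 'abused*' (compromised site) status counts only when
+    compromised_is_malicious is set. The score is 3 (Bad) when the number of listings
+    meets the threshold, 2 (Suspicious) for any listing below it, and 1 (Good) otherwise.
+    """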
+ dbot_score = 0
+ description = 'Not listed in any blacklist'
+ blacklist_appearances = []
+ for blacklist, status in blacklists.items():
+ if blacklist == 'spamhaus_dbl':
+ if status.endswith('domain') or (status.startswith('abused') and compromised_is_malicious):
+ blacklist_appearances.append((blacklist, status))
+ elif status == 'listed':
+ blacklist_appearances.append((blacklist, None))
+
+ if len(blacklist_appearances) >= threshold:
+ description = ''
+        for appearance in blacklist_appearances:
+            if appearance[1] is not None:
+                description += f'Listed as {appearance[1]} in {appearance[0]}. '
+            else:
+                description += f'Listed in {appearance[0]}. '
+ dbot_score = 3
+ elif len(blacklist_appearances) > 0:
+ dbot_score = 2
+ else:
+ dbot_score = 1
+
+ return dbot_score, description
+
+
+def url_command():
+ url = demisto.args().get('url')
+ try:
+ url_information = query_url_information(url).json()
+
+ ec = {
+ 'URL': {
+ 'Data': url
+ },
+ 'DBotScore': {
+ 'Type': 'url',
+ 'Vendor': 'URLhaus',
+ 'Indicator': url
+ }
+ }
+
+ if url_information['query_status'] == 'ok':
+ # URLhaus output
+ blacklist_information = []
+ blacklists = url_information.get('blacklists', {})
+ for bl_name, bl_status in blacklists.items():
+ blacklist_information.append({'Name': bl_name,
+ 'Status': bl_status})
+
+ date_added = reformat_date(url_information.get('date_added'))
+ urlhaus_data = {
+ 'ID': url_information.get('id', ''),
+ 'Status': url_information.get('url_status', ''),
+ 'Host': url_information.get('host', ''),
+ 'DateAdded': date_added,
+ 'Threat': url_information.get('threat', ''),
+ 'Blacklist': blacklist_information,
+ 'Tags': url_information.get('tags', [])
+ }
+
+ payloads = []
+ for payload in url_information.get('payloads', []):
+ vt_data = payload.get('virustotal', None)
+ vt_information = None
+ if vt_data:
+ vt_information = {
+ 'Result': float(vt_data.get('percent', 0)),
+ 'Link': vt_data.get('link', '')
+ }
+ payloads.append({
+ 'Name': payload.get('filename', 'unknown'),
+ 'Type': payload.get('file_type', ''),
+ 'MD5': payload.get('response_md5', ''),
+ 'VT': vt_information
+ })
+
+ urlhaus_data['Payload'] = payloads
+
+ # DBot score calculation
+ dbot_score, description = calculate_dbot_score(url_information.get('blacklists', {}), THRESHOLD,
+ COMPROMISED_IS_MALICIOUS)
+
+ ec['DBotScore']['Score'] = dbot_score
+ if dbot_score == 3:
+ ec['URL']['Malicious'] = {
+ 'Vendor': 'URLhaus',
+ 'Description': description
+ }
+
+ ec['URLhaus.URL(val.ID && val.ID === obj.ID)'] = urlhaus_data
+
+ human_readable = tableToMarkdown(f'URLhaus reputation for {url}',
+ {
+ 'URLhaus link': url_information.get("urlhaus_reference", "None"),
+ 'Description': description,
+ 'URLhaus ID': urlhaus_data['ID'],
+ 'Status': urlhaus_data['Status'],
+ 'Threat': url_information.get("threat", ""),
+ 'Date added': date_added
+ })
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': url_information,
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+ elif url_information['query_status'] == 'no_results':
+ ec['DBotScore']['Score'] = 0
+
+ human_readable = f'## URLhaus reputation for {url}\n' \
+ f'No results!'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': url_information,
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+ elif url_information['query_status'] == 'invalid_url':
+ human_readable = f'## URLhaus reputation for {url}\n' \
+ f'Invalid URL!'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': url_information,
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results({
+ 'Type': entryTypes['error'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': f'Query results = {url_information["query_status"]}'
+ })
+
+ except Exception:
+ demisto.debug(traceback.format_exc())
+ return_error('Failed getting url data, please verify the arguments and parameters')
+
+
+def domain_command():
+ domain = demisto.args()['domain']
+
+ try:
+ domain_information = query_host_information(domain).json()
+
+ ec = {
+ 'Domain': {
+ 'Name': domain
+ },
+ 'DBotScore': {
+ 'Type': 'domain',
+ 'Vendor': 'URLhaus',
+ 'Indicator': domain
+ }
+ }
+
+ if domain_information['query_status'] == 'ok':
+ # URLHaus output
+ blacklist_information = []
+ blacklists = domain_information.get('blacklists', {})
+ for bl_name, bl_status in blacklists.items():
+ blacklist_information.append({'Name': bl_name,
+ 'Status': bl_status})
+
+ first_seen = reformat_date(domain_information.get('firstseen'))
+
+ urlhaus_data = {
+ 'FirstSeen': first_seen,
+                'Blacklist': blacklist_information,
+ 'URL': domain_information.get('urls', [])
+ }
+
+ # DBot score calculation
+ dbot_score, description = calculate_dbot_score(domain_information.get('blacklists', {}), THRESHOLD,
+ COMPROMISED_IS_MALICIOUS)
+
+ ec['DBotScore']['Score'] = dbot_score
+ if dbot_score == 3:
+                ec['Domain']['Malicious'] = {
+ 'Vendor': 'URLhaus',
+ 'Description': description
+ }
+
+ ec['URLhaus.Domain(val.Name && val.Name === obj.Name)'] = urlhaus_data
+
+ human_readable = tableToMarkdown(f'URLhaus reputation for {domain}',
+ {
+ 'URLhaus link': domain_information.get('urlhaus_reference', 'None'),
+ 'Description': description,
+ 'First seen': first_seen,
+ })
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': domain_information,
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+ elif domain_information['query_status'] == 'no_results':
+ ec['DBotScore']['Score'] = 0
+
+ human_readable = f'## URLhaus reputation for {domain}\n' \
+ f'No results!'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': domain_information,
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+ elif domain_information['query_status'] == 'invalid_host':
+ human_readable = f'## URLhaus reputation for {domain}\n' \
+ f'Invalid domain!'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': domain_information,
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+ else:
+ demisto.results({
+ 'Type': entryTypes['error'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': f'Query results = {domain_information["query_status"]}'
+ })
+
+ except Exception:
+ demisto.debug(traceback.format_exc())
+ return_error('Failed getting domain data, please verify the arguments and parameters')
+
+
+def file_command():
+ hash = demisto.args()['file']
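+    # Infer the hash type from the hex-digest length: 32 characters for MD5, 64 for SHA256.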
+ if len(hash) == 32:
+ hash_type = 'md5'
+ elif len(hash) == 64:
+ hash_type = 'sha256'
+ else:
+        return_error('Only accepting MD5 (32 characters) or SHA256 (64 characters) hash types')
+
+ try:
+ file_information = query_payload_information(hash_type, hash).json()
+
+ if file_information['query_status'] == 'ok' and file_information['md5_hash']:
+ # URLhaus output
+ first_seen = reformat_date(file_information.get('firstseen'))
+ last_seen = reformat_date(file_information.get('lastseen'))
+
+ urlhaus_data = {
+ 'MD5': file_information.get('md5_hash', ''),
+ 'SHA256': file_information.get('sha256_hash', ''),
+ 'Type': file_information.get('file_type', ''),
+            'Size': int(file_information.get('file_size', 0)),
+ 'Signature': file_information.get('signature', ''),
+ 'FirstSeen': first_seen,
+ 'LastSeen': last_seen,
+ 'DownloadLink': file_information.get('urlhaus_download', ''),
+ 'URL': file_information.get('urls', [])
+ }
+
+ virus_total_data = file_information.get('virustotal')
+ if virus_total_data:
+ urlhaus_data['VirusTotal'] = {
+                'Percent': float(virus_total_data.get('percent', 0)),
+                'Link': virus_total_data.get('link', '')
+ }
+
+ ec = {
+ 'File': {
+ 'Size': urlhaus_data.get('Size', 0),
+ 'MD5': urlhaus_data.get('MD5', ''),
+ 'SHA256': urlhaus_data.get('SHA256')
+ },
+ 'URLhaus.File(val.MD5 && val.MD5 === obj.MD5)': urlhaus_data
+ }
+
+ human_readable = tableToMarkdown(f'URLhaus reputation for {hash_type.upper()} : {hash}',
+ {
+ 'URLhaus link': urlhaus_data.get('DownloadLink', ''),
+ 'Signature': urlhaus_data.get('Signature', ''),
+ 'MD5': urlhaus_data.get('MD5', ''),
+ 'SHA256': urlhaus_data.get('SHA256', ''),
+ 'First seen': first_seen,
+ 'Last seen': last_seen
+ })
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': file_information,
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+ elif (file_information['query_status'] == 'ok' and not file_information['md5_hash']) or \
+ file_information['query_status'] == 'no_results':
+ human_readable = f'## URLhaus reputation for {hash_type.upper()} : {hash}\n' \
+ f'No results!'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': file_information,
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ })
+ elif file_information['query_status'] in ['invalid_md5', 'invalid_sha256']:
+            human_readable = f'## URLhaus reputation for {hash_type.upper()} : {hash}\n' \
+                             f'Invalid {file_information["query_status"].replace("invalid_", "").upper()}!'
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': file_information,
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ })
+ else:
+ demisto.results({
+ 'Type': entryTypes['error'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': f'Query results = {file_information["query_status"]}'
+ })
+
+ except Exception:
+ demisto.debug(traceback.format_exc())
+ return_error('Failed getting file data, please verify the arguments and parameters')
+
+
+def urlhaus_download_sample_command():
+ """
+ The response can be either the zipped sample (content-type = application/zip), or JSON (content-type = text/html)
+ containing the query status.
+ """
+ file_sha256 = demisto.args()['file']
+ res = download_malware_sample(file_sha256)
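+    # An empty body or a JSON 'not_found' status means no sample; a ZIP content type carries the sample.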
+
+ try:
+ if len(res.content) == 0:
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'HumanReadable': f'No results for SHA256: {file_sha256}',
+ 'HumanReadableFormat': formats['markdown']
+ })
+ elif res.headers['content-type'] == 'text/html' and res.json()['query_status'] == 'not_found':
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': res.json(),
+ 'HumanReadable': f'No results for SHA256: {file_sha256}',
+ 'HumanReadableFormat': formats['markdown']
+ })
+ elif res.headers['content-type'] == 'application/zip':
+ demisto.results(fileResult(file_sha256, extract_zipped_buffer(res.content)))
+ else:
+ raise Exception
+ # Handle like an exception
+ except Exception:
+ demisto.results({
+ 'Type': entryTypes['error'],
+ 'ContentsFormat': formats['text'],
+ 'Contents': res.content
+ })
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('Command being called is %s' % (demisto.command()))
+
+try:
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+ demisto.results('ok')
+ elif demisto.command() == 'url':
+ url_command()
+ elif demisto.command() == 'domain':
+ domain_command()
+ elif demisto.command() == 'file':
+ file_command()
+ elif demisto.command() == 'urlhaus-download-sample':
+ urlhaus_download_sample_command()
+
+# Log exceptions
+except Exception as e:
+ LOG(str(e))
+ LOG.print_log()
+ raise
diff --git a/Integrations/URLHaus/URLHaus.yml b/Integrations/URLHaus/URLHaus.yml
new file mode 100644
index 000000000000..d5dfa8495761
--- /dev/null
+++ b/Integrations/URLHaus/URLHaus.yml
@@ -0,0 +1,251 @@
+category: Data Enrichment & Threat Intelligence
+commonfields:
+ id: URLhaus
+ version: -1
+configuration:
+- defaultvalue: https://urlhaus-api.abuse.ch/v1/
+ display: Server URL (e.g. https://192.168.0.1)
+ name: url
+ required: false
+ type: 0
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+- defaultvalue: '1'
+  display: Blacklist appearances threshold
+ name: threshold
+ required: false
+ type: 0
+- display: Compromised (is malicious)
+ name: compromised_is_malicious
+ required: false
+ type: 8
+description: URLhaus shares malicious URLs that are being used for malware
+  distribution.
+display: URLhaus
+name: URLhaus
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: URL to query.
+ isArray: false
+ name: url
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves URL information from URLhaus.
+ execution: false
+ name: url
+ outputs:
+ - contextPath: URL.Data
+ description: The URL.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: Vendor that reported the URL as malicious.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: Description of the malicious URL.
+ type: string
+ - contextPath: URLhaus.URL.ID
+ description: Unique identifier of the URLhaus database entry.
+ type: string
+ - contextPath: URLhaus.URL.Status
+ description: The current status of the URL.
+ type: string
+ - contextPath: URLhaus.URL.Host
+ description: The extracted host of the malware URL (IP address or domain name/FQDN).
+ type: string
+ - contextPath: URLhaus.URL.DateAdded
+ description: Date the URL was added to URLhaus.
+ type: date
+ - contextPath: URLhaus.URL.Threat
+ description: The threat corresponding to this malware URL.
+ type: string
+ - contextPath: URLhaus.URL.Blacklist.Name
+ description: Name of the blacklist.
+ type: String
+ - contextPath: URLhaus.URL.Tags
+ description: A list of tags associated with the queried malware URL.
+ type: string
+ - contextPath: URLhaus.URL.Payload.Name
+ description: Payload file name.
+ type: String
+ - contextPath: URLhaus.URL.Payload.Type
+ description: Payload file type.
+ type: String
+ - contextPath: URLhaus.URL.Payload.MD5
+ description: MD5 hash of the HTTP response body (payload).
+ type: String
+ - contextPath: URLhaus.URL.Payload.VT.Result
+ description: VirusTotal results for the payload.
+ type: Number
+ - contextPath: DBotScore.Type
+ description: Indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: URLhaus.URL.Blacklist.Status
+ description: Status of the URL in the blacklist.
+ type: String
+ - contextPath: URLhaus.URL.Payload.VT.Link
+ description: Link to the VirusTotal report.
+ type: String
+ - arguments:
+ - default: false
+ description: Domain to query.
+ isArray: false
+ name: domain
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves domain information from URLhaus.
+ execution: false
+ name: domain
+ outputs:
+ - contextPath: Domain.Name
+ description: The domain name, for example, google.com.
+ type: String
+ - contextPath: DBotScore.Type
+ description: Indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score.
+ type: number
+ - contextPath: DBotScore.Indicator
+ description: The indicator that was tested.
+ type: string
+ - contextPath: URLhaus.Domain.FirstSeen
+    description: Date that the domain was seen for the first time (UTC).
+ type: Date
+ - contextPath: URLhaus.Domain.Blacklist.Name
+    description: Name of the blacklist.
+ type: String
+ - contextPath: URLhaus.Domain.URL
+ description: URLs observed on this domain.
+ type: String
+ - contextPath: Domain.Malicious.Vendor
+ description: Vendor that reported the domain as malicious.
+ type: String
+ - contextPath: Domain.Malicious.Description
+ description: Description of the malicious domain.
+ type: String
+ - contextPath: URLhaus.Domain.Blacklist.Status
+ description: Status of the URL in the blacklist.
+ type: String
+ - arguments:
+ - default: false
+ description: MD5 hash or SHA256 hash of the file to query.
+ isArray: false
+ name: file
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves file information from URLhaus.
+ execution: false
+ name: file
+ outputs:
+ - contextPath: File.Size
+ description: File size (in bytes).
+ type: Number
+ - contextPath: File.MD5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file.
+ type: String
+ - contextPath: URLhaus.File.MD5
+ description: MD5 hash of the file.
+ type: String
+ - contextPath: URLhaus.File.SHA256
+ description: SHA256 hash of the file.
+ type: String
+ - contextPath: URLhaus.File.Type
+ description: 'File type guessed by URLhaus, for example: .exe, .doc.'
+ type: String
+ - contextPath: URLhaus.File.Size
+ description: File size (in bytes).
+ type: Number
+ - contextPath: URLhaus.File.Signature
+ description: Malware family.
+ type: String
+ - contextPath: URLhaus.File.FirstSeen
+ description: Date and time (UTC) that URLhaus first saw this file (payload).
+ type: Date
+ - contextPath: URLhaus.File.LastSeen
+ description: Date and time (UTC) that URLhaus last saw this file (payload).
+ type: Date
+ - contextPath: URLhaus.File.DownloadLink
+ description: Location (URL) where you can download a copy of this file.
+ type: String
+ - contextPath: URLhaus.File.VirusTotal.Percent
+ description: 'AV detection (percentage), for example: 24.14.'
+ type: Number
+ - contextPath: URLhaus.File.VirusTotal.Link
+ description: Link to the VirusTotal report.
+ type: String
+ - contextPath: URLhaus.File.URL
+ description: A list of malware URLs associated with this payload (max. 100).
+ type: Unknown
+ - arguments:
+ - default: false
+ description: SHA256 hash of the file to download.
+ isArray: false
+ name: file
+ required: true
+ secret: false
+ deprecated: false
+ description: Downloads a malware sample from URLhaus.
+ execution: true
+ name: urlhaus-download-sample
+ outputs:
+ - contextPath: File.Size
+ description: File size.
+ type: number
+ - contextPath: File.SHA1
+ description: SHA1 hash of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: SHA256 hash of the file.
+ type: string
+ - contextPath: File.Name
+ description: File name.
+ type: string
+ - contextPath: File.SSDeep
+ description: SSDeep hash of the file.
+ type: string
+ - contextPath: File.EntryID
+ description: File entry ID.
+ type: string
+ - contextPath: File.Info
+ description: File information.
+ type: string
+ - contextPath: File.Type
+ description: File type.
+ type: string
+ - contextPath: File.MD5
+ description: MD5 hash of the file.
+ type: string
+ - contextPath: File.Extension
+ description: File extension.
+ type: string
+ dockerimage: demisto/python3:3.7.3.286
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
diff --git a/Integrations/URLHaus/URLHaus_description.md b/Integrations/URLHaus/URLHaus_description.md
new file mode 100644
index 000000000000..7245e3236d87
--- /dev/null
+++ b/Integrations/URLHaus/URLHaus_description.md
@@ -0,0 +1,23 @@
+## How DBot Score is Calculated
+
+A URL or domain can have one of the following statuses in a blacklist.
+- Malicious: the site is a known malware site.
+- Compromised: the site is legitimate but has been compromised.
+- Not listed
+
+If the `compromised_is_malicious` parameter is set to True, then compromised URLs or domains are treated as malicious.
+
+If the `compromised_is_malicious` parameter is set to False, then compromised URLs or domains are treated as legitimate.
+
+### DBot Score: Bad
+URLs and domains receive a DBot score of Bad if their total number of appearances in blacklists meets or exceeds the `threshold` parameter.
+
+### DBot Score: Suspicious
+URLs and domains receive a DBot score of Suspicious if they appear on at least one blacklist, but their total number of appearances in blacklists does not meet the `threshold` parameter.
+
+### DBot Score: Good
+URLs and domains receive a DBot score of Good if they do not appear on any blacklists.
+
+### DBot Score: Empty
+If there is no information for the URLs and domains, they will not receive a DBot score.
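+
+For reference, the scoring rule above reduces to a small function. The sketch below is a minimal restatement of that logic, not the integration's actual code; the function name and inputs are illustrative:
+
+```python
+def dbot_score(listings, threshold):
+    """Map a count of blacklist listings to a DBot score."""
+    if listings >= threshold:
+        return 3  # Bad: listings meet or exceed the threshold
+    if listings > 0:
+        return 2  # Suspicious: listed, but below the threshold
+    return 1      # Good: not listed in any blacklist
+
+
+# With the default threshold of 1, a single listing is already Bad:
+assert dbot_score(1, 1) == 3
+assert dbot_score(1, 2) == 2
+assert dbot_score(0, 5) == 1
+```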
diff --git a/Integrations/URLHaus/URLhaus_image.png b/Integrations/URLHaus/URLhaus_image.png
new file mode 100644
index 000000000000..9e11fd3d2375
Binary files /dev/null and b/Integrations/URLHaus/URLhaus_image.png differ
diff --git a/Integrations/Uptycs/CHANGELOG.md b/Integrations/Uptycs/CHANGELOG.md
new file mode 100644
index 000000000000..636ff563890b
--- /dev/null
+++ b/Integrations/Uptycs/CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.8.0] - 2019-08-06
+#### New Integration
+Use the Uptycs integration to fetch data from the Uptycs database.
\ No newline at end of file
diff --git a/Integrations/Uptycs/Pipfile b/Integrations/Uptycs/Pipfile
new file mode 100644
index 000000000000..c97c4be46d5d
--- /dev/null
+++ b/Integrations/Uptycs/Pipfile
@@ -0,0 +1,24 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+
+[packages]
+certifi = "==2018.11.29"
+chardet = "==3.0.4"
+idna = "==2.8"
+pyparsing = "==2.3.1"
+requests = "==2.21.0"
+urllib3 = "==1.24.1"
+virtualenv = "==16.2.0"
+DateRangeParser = "==1.3.1"
+Pygments = "==2.3.1"
+PyJWT = "==1.7.1"
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/Uptycs/Pipfile.lock b/Integrations/Uptycs/Pipfile.lock
new file mode 100644
index 000000000000..0b4a5cec9843
--- /dev/null
+++ b/Integrations/Uptycs/Pipfile.lock
@@ -0,0 +1,274 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "55ab19b70b5ee74e1e92dc9e3d5b689d1e4e8115cd39f4bd3356537ebc4fd0a5"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7",
+ "sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033"
+ ],
+ "index": "pypi",
+ "version": "==2018.11.29"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "daterangeparser": {
+ "hashes": [
+ "sha256:fa23138f7d629dc78ad355366984a30996592ef443eadd4f4338b850b936205a"
+ ],
+ "index": "pypi",
+ "version": "==1.3.1"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "index": "pypi",
+ "version": "==2.8"
+ },
+ "pygments": {
+ "hashes": [
+ "sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a",
+ "sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pyjwt": {
+ "hashes": [
+ "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e",
+ "sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96"
+ ],
+ "index": "pypi",
+ "version": "==1.7.1"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:66c9268862641abcac4a96ba74506e594c884e3f57690a696d21ad8210ed667a",
+ "sha256:f6c5ef0d7480ad048c054c37632c67fca55299990fff127850181659eea33fc3"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
+ "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
+ ],
+ "index": "pypi",
+ "version": "==2.21.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
+ "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
+ ],
+ "index": "pypi",
+ "version": "==1.24.1"
+ },
+ "virtualenv": {
+ "hashes": [
+ "sha256:34b9ae3742abed2f95d3970acf4d80533261d6061b51160b197f84e5b4c98b4c",
+ "sha256:fa736831a7b18bd2bfeef746beb622a92509e9733d645952da136b0639cd40cd"
+ ],
+ "index": "pypi",
+ "version": "==16.2.0"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8",
+ "sha256:80d2de76188eabfbfcf27e6a37342c2827801e59c4cc14b0371c56fed43820e3"
+ ],
+ "version": "==0.19"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832",
+ "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4"
+ ],
+ "version": "==7.2.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:a7ac867b97fdc07ee80a8058fe4435ccd274ecc3b0ed61d852d7d53055528cf9",
+ "sha256:c491ca87294da7cc01902edbe30a5bc6c4c28172b5138ab4e4aa1b9d7bfaeafe"
+ ],
+ "version": "==19.1"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:66c9268862641abcac4a96ba74506e594c884e3f57690a696d21ad8210ed667a",
+ "sha256:f6c5ef0d7480ad048c054c37632c67fca55299990fff127850181659eea33fc3"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6ef6d06de77ce2961156013e9dff62f1b2688aa04d0dc244299fe7d67e09370d",
+ "sha256:a736fed91c12681a7b34617c8fcefe39ea04599ca72c608751c31d89579a3f77"
+ ],
+ "index": "pypi",
+ "version": "==5.0.1"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
+ "sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e",
+ "sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0",
+ "sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c",
+ "sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631",
+ "sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4",
+ "sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34",
+ "sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b",
+ "sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a",
+ "sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233",
+ "sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1",
+ "sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36",
+ "sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d",
+ "sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a",
+ "sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.4.0"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/Uptycs/Uptycs.py b/Integrations/Uptycs/Uptycs.py
new file mode 100644
index 000000000000..ed50d9ba2979
--- /dev/null
+++ b/Integrations/Uptycs/Uptycs.py
@@ -0,0 +1,2371 @@
+import demistomock as demisto
+from CommonServerPython import *
+
+###############################################################################
+# import required packages
+###############################################################################
+
+import os
+import ast
+import json
+import jwt
+from datetime import datetime, timedelta
+import requests
+from typing import List
+from signal import signal, SIGPIPE, SIG_DFL
+signal(SIGPIPE, SIG_DFL)
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+###############################################################################
+# remove proxy environment variables when the proxy option is disabled
+###############################################################################
+
+if not demisto.params().get('proxy', False) \
+        or demisto.params()['proxy'] == 'false':
+    # pop with a default so unset variables do not raise KeyError
+    os.environ.pop('HTTP_PROXY', None)
+    os.environ.pop('HTTPS_PROXY', None)
+    os.environ.pop('http_proxy', None)
+    os.environ.pop('https_proxy', None)
+
+
+"""GLOBAL VARS"""
+
+VERIFY_CERT = not demisto.params().get('insecure')
+KEY = demisto.params().get('key')
+SECRET = demisto.params().get('secret')
+DOMAIN = demisto.params().get('domain')
+CUSTOMER_ID = demisto.params().get('customer_id')
+FETCH_TIME = demisto.params().get('fetch_time')
+
+"""HELPER FUNCTIONS"""
+
+
+def generate_headers(key, secret):
+ header = {}
+ utcnow = datetime.utcnow()
+ date = utcnow.strftime("%a, %d %b %Y %H:%M:%S GMT")
+ auth_var = jwt.encode({'iss': key}, secret, algorithm='HS256').\
+ decode('utf-8')
+ authorization = "Bearer %s" % auth_var
+ header['date'] = date
+ header['Authorization'] = authorization
+ return header
+
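+# Illustrative only (credentials are hypothetical): generate_headers('my-key',
+# 'my-secret') returns something like
+# {'date': 'Tue, 06 Aug 2019 12:00:00 GMT',
+#  'Authorization': 'Bearer <HS256 JWT whose payload is {"iss": "my-key"}>'}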
+
+def restcall(method, api, **kwargs):
+
+ header = generate_headers(KEY, SECRET)
+
+ url = ("https://%s.uptycs.io/public/api/customers/%s%s" %
+ (DOMAIN, CUSTOMER_ID, api))
+
+ try:
+ request_func = getattr(requests, method)
+ except AttributeError:
+ return_error("Invalid method: {0}".format(method))
+
+ try:
+ response = request_func(
+ url,
+ headers=header,
+ verify=VERIFY_CERT,
+ **kwargs)
+ except Exception as e:
+ return_error("Error Connecting to server. Details: {0}".format(str(e)))
+
+ return response.json()
+
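+# Illustrative only: restcall('get', '/users') issues a GET request to
+# https://<DOMAIN>.uptycs.io/public/api/customers/<CUSTOMER_ID>/users and
+# returns the parsed JSON response body.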
+
+def severity_to_int(level_string):
+    # map severity names to integer ranks; unknown values default to 0
+    return {'low': 1, 'medium': 2, 'high': 3}.get(level_string, 0)
+
+
+def remove_context_entries(context, context_entries_to_keep):
+ for index in range(len(context)):
+ for key in list(context[index]):
+ if key not in context_entries_to_keep:
+ context[index].pop(key, None)
+
+ return context
+
+
+def apply_os_cut(query, op_sys):
+    # op_sys avoids shadowing the imported os module; a "/"-separated value
+    # matches any of the listed operating systems
+    if "WHERE" not in query:
+        query = ("%s WHERE" % query)
+    else:
+        query = ("%s AND" % query)
+
+    # parenthesize the OR group so it binds correctly next to other filters
+    conditions = " OR ".join("os LIKE '%%%s%%'" % op
+                             for op in op_sys.split("/"))
+    query = ("%s (%s)" % (query, conditions))
+
+    return query
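+# Illustrative only (values are hypothetical): apply_os_cut(
+#     "SELECT * FROM upt_assets", "linux/darwin") produces
+# "SELECT * FROM upt_assets WHERE (os LIKE '%linux%' OR os LIKE '%darwin%')"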
+
+
+def apply_equals_cuts(query, cuts):
+ if all(value is None for value in cuts.values()):
+ return query
+ else:
+ if "WHERE" not in query:
+ query = ("%s WHERE" % query)
+ else:
+ query = ("%s AND" % query)
+
+ use_and = False
+ for key in cuts:
+ if cuts.get(key) is not None:
+ if use_and:
+ query = ("%s AND" % query)
+ if "time" in key:
+ query = ("%s %s=CAST('%s' AS TIMESTAMP)" % (query, key,
+ cuts.get(key)))
+ use_and = True
+ else:
+                if isinstance(cuts.get(key), str):
+                    query = ("%s %s='%s'" % (query, key, cuts.get(key)))
+                if isinstance(cuts.get(key), int):
+                    query = ("%s %s=%s" % (query, key, cuts.get(key)))
+ use_and = True
+
+ return query
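+# Illustrative only (values are hypothetical): apply_equals_cuts(
+#     "SELECT * FROM upt_alerts", {"code": "OUTBOUND", "pid": 1234}) produces
+# "SELECT * FROM upt_alerts WHERE code='OUTBOUND' AND pid=1234"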
+
+
+def apply_like_cuts(query, cuts):
+    if all(value is None for value in cuts.values()):
+        return query
+
+    if "WHERE" not in query:
+        query = ("%s WHERE" % query)
+    else:
+        query = ("%s AND" % query)
+
+    # join only the non-None cuts so no trailing AND is produced
+    clauses = ["%s LIKE '%%%s%%'" % (key, cuts.get(key))
+               for key in cuts if cuts.get(key) is not None]
+
+    return "%s %s" % (query, " AND ".join(clauses))
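+# Illustrative only: apply_like_cuts("SELECT * FROM upt_assets",
+#     {"host_name": "web"}) produces
+# "SELECT * FROM upt_assets WHERE host_name LIKE '%web%'"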
+
+
+def apply_datetime_cuts(query, name, start, finish):
+ if start is None and finish is None:
+ return query
+
+ if "WHERE" not in query:
+ query = ("%s WHERE" % query)
+ else:
+ query = ("%s AND" % query)
+
+ if finish is None:
+ query = ("%s %s AFTER CAST('%s' AS TIMESTAMP)" % (query, name,
+ start))
+ if start is None:
+ query = ("%s %s BEFORE CAST('%s' AS TIMESTAMP)" % (query, name,
+ finish))
+ if start is not None and finish is not None:
+ query = ("%s %s BETWEEN CAST('%s' AS TIMESTAMP) AND \
+CAST('%s' AS TIMESTAMP)"
+ % (query, name, start, finish))
+
+ return query
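+# Illustrative only (timestamps are hypothetical): with both bounds set,
+# apply_datetime_cuts("SELECT * FROM upt_alerts", "alert_time",
+#     "2019-08-01 00:00:00.000", "2019-08-06 00:00:00.000") produces
+# "... WHERE alert_time BETWEEN CAST('2019-08-01 00:00:00.000' AS TIMESTAMP)
+# AND CAST('2019-08-06 00:00:00.000' AS TIMESTAMP)"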
+
+
+def uptycs_parse_date_range(timeago, start_time, end_time):
+
+    if timeago is None:
+        timeago = "1 day"
+
+    if end_time is not None and start_time is None:
+        # timeago is a string such as "2 hours"; the count must be an int
+        number = int(timeago.split(" ")[0])
+        unit = timeago.split(" ")[1]
+        if unit in ('minutes', 'minute'):
+            delta = timedelta(minutes=number)
+        elif unit in ('hours', 'hour'):
+            delta = timedelta(hours=number)
+        elif unit in ('days', 'day'):
+            delta = timedelta(days=number)
+        elif unit in ('months', 'month'):
+            delta = timedelta(days=number * 30)
+        else:
+            # 'years' or 'year'
+            delta = timedelta(days=number * 365)
+        temp_time_ago = datetime.strftime(
+            datetime.strptime(end_time, "%Y-%m-%d %H:%M:%S.000") - delta,
+            "%Y-%m-%d %H:%M:%S.000")
+    else:
+        temp_time_ago, now = parse_date_range(timeago,
+                                              date_format="%Y-%m-%d \
+%H:%M:%S.000")
+
+    end = (end_time if end_time is not None else now)
+    begin = (start_time if start_time is not None else temp_time_ago)
+
+    return begin, end
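+# Illustrative only: uptycs_parse_date_range("2 hours", None,
+#     "2019-08-06 12:00:00.000") returns
+# ("2019-08-06 10:00:00.000", "2019-08-06 12:00:00.000")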
+
+
+"""COMMAND FUNCTIONS"""
+
+
+def uptycs_run_query():
+ """
+ return results of posted query
+ """
+ http_method = 'post'
+ query = demisto.args().get('query')
+ if demisto.args().get('query_type') == 'global':
+ api_call = '/query'
+ post_data = {
+ 'query': query
+ }
+ else:
+ api_call = '/assets/query'
+ if demisto.args().get('asset_id') is not None:
+ _id = {
+ "_id": {
+ "equals": demisto.args().get('asset_id')
+ }
+ }
+ elif demisto.args().get('host_name_is') is not None:
+ _id = {
+ "host_name": {
+ "equals": demisto.args().get(
+ 'host_name_is')
+ }
+ }
+ elif demisto.args().get('host_name_like') is not None:
+ _id = {
+ "host_name": {
+ "like": "%{0}%".format(demisto.args().get(
+ 'host_name_like'))
+ }
+ }
+ else:
+ _id = {
+ "host_name": {
+ "like": '%%'
+ }
+ }
+
+ post_data = {
+ "query": query,
+ "type": "realtime",
+ "filtering": {
+ "filters": _id
+ }
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_run_query_command():
+ query_results = uptycs_run_query()
+ human_readable = tableToMarkdown('Uptycs Query Result',
+ query_results.get('items'))
+ context = query_results.get('items')
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.QueryResults': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_assets():
+ """
+ return list of assets enrolled in Uptycs
+ """
+ http_method = 'post'
+ api_call = "/query"
+ query = 'SELECT * FROM upt_assets'
+ limit = demisto.args().get('limit')
+
+ equal_cuts = {
+ "id": demisto.args().get('asset_id'),
+ "host_name": demisto.args().get('host_name_is'),
+ "object_group_id": demisto.args().get('object_group_id')
+ }
+ query = apply_equals_cuts(query, equal_cuts)
+ like_cuts = {
+ "host_name": demisto.args().get('host_name_like')
+ }
+ query = apply_like_cuts(query, like_cuts)
+
+ os = demisto.args().get('os')
+ if os:
+ query = apply_os_cut(query, os)
+
+ query = ("%s ORDER BY last_activity_at DESC" % query)
+
+ if limit != -1 and limit is not None:
+ query = ("%s LIMIT %s" % (query, limit))
+
+ query_type = 'global'
+ post_data = {
+ "query": query,
+ "queryType": query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_assets_command():
+ query_results = uptycs_get_assets()
+ human_readable = tableToMarkdown('Uptycs Assets',
+ query_results.get('items'),
+ ['id', 'host_name', 'os', 'os_version',
+ 'osquery_version', 'last_activity_at'])
+ context = query_results.get('items')
+ context_entries_to_keep = ['id', 'location', 'latitude', 'longitude',
+ 'os_flavor', 'os', 'last_enrolled_at',
+ 'status', 'host_name', 'os_version',
+ 'osquery_version', 'last_activity_at',
+ 'upt_asset_id', 'created_at']
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results.get('items'),
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.Assets(val.id == obj.id)': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_alerts():
+ """
+ return list of alerts
+ """
+ http_method = 'post'
+ api_call = "/query"
+ query = 'SELECT a.*, u.host_name FROM upt_alerts a JOIN upt_assets u ON \
+a.upt_asset_id=u.id'
+ limit = demisto.args().get('limit')
+
+ alert_id = demisto.args().get('alert_id')
+ if alert_id is not None:
+ equal_cuts = {
+ "a.id": alert_id
+ }
+
+ query = apply_equals_cuts(query, equal_cuts)
+ else:
+ equal_cuts = {
+ "upt_asset_id": demisto.args().get('asset_id'),
+ "code": demisto.args().get('code'),
+ "host_name": demisto.args().get('host_name_is'),
+ "value": demisto.args().get('value'),
+ "key": demisto.args().get('key')
+ }
+
+ query = apply_equals_cuts(query, equal_cuts)
+ like_cuts = {
+ "host_name": demisto.args().get('host_name_like')
+ }
+ query = apply_like_cuts(query, like_cuts)
+
+ time_ago = demisto.args().get('time_ago')
+ start_window = demisto.args().get('start_window')
+ end_window = demisto.args().get('end_window')
+
+ if time_ago is not None or (start_window is not None
+ or end_window is not None):
+ begin, end = uptycs_parse_date_range(time_ago,
+ start_window, end_window)
+ query = apply_datetime_cuts(query, "alert_time", begin, end)
+
+ query = ("%s ORDER BY a.alert_time DESC" % query)
+
+ if limit != -1 and limit is not None:
+ query = ("%s LIMIT %s" % (query, limit))
+
+ query_type = 'global'
+ post_data = {
+ "query": query,
+ "queryType": query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_alerts_command():
+ query_results = uptycs_get_alerts()
+ context = query_results.get('items')
+ context_entries_to_keep = ['id', 'host_name', 'grouping', 'code',
+ 'assigned_to', 'alert_time', 'updated_at',
+ 'metadata', 'asset', 'status', 'upt_asset_id',
+ 'created_at', 'description', 'severity',
+ 'value', 'key']
+
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+    if context is not None:
+        for index in range(len(context)):
+            # parse the JSON metadata once per alert instead of per field
+            metadata = json.loads(context[index].get('metadata'))
+            if metadata.get('pid'):
+                context[index]['pid'] = metadata.get('pid')
+            else:
+                context[index]['pid'] = 'Not applicable or unknown'
+            if metadata.get('indicatorId'):
+                context[index]['threat_indicator_id'] = \
+                    metadata.get('indicatorId')
+                context[index]['threat_source_name'] = \
+                    metadata.get('indicatorSummary', {}).get(
+                        'threatSourceName')
+            else:
+                context[index]['threat_indicator_id'] = 'No threat indicator \
+for this alert'
+                context[index]['threat_source_name'] = 'No threat source for \
+this alert'
+
+    human_readable = tableToMarkdown('Uptycs Alerts',
+ context,
+ ['upt_asset_id', 'host_name', 'grouping',
+ 'alert_time', 'description', 'value',
+ 'severity', 'threat_indicator_id',
+ 'threat_source_name'])
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.Alerts(val.id == obj.id)': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_events():
+ """
+ return list of events
+ """
+ http_method = 'post'
+ api_call = "/query"
+ query = 'SELECT a.*, u.host_name FROM upt_events a JOIN upt_assets u ON \
+a.upt_asset_id=u.id'
+ limit = demisto.args().get('limit')
+
+ equal_cuts = {
+ "upt_asset_id": demisto.args().get('asset_id'),
+ "code": demisto.args().get('code'),
+ "host_name": demisto.args().get('host_name_is'),
+ "key": demisto.args().get('key'),
+ "value": demisto.args().get('value')
+ }
+ query = apply_equals_cuts(query, equal_cuts)
+ like_cuts = {
+ "host_name": demisto.args().get('host_name_like')
+ }
+ query = apply_like_cuts(query, like_cuts)
+
+ time_ago = demisto.args().get('time_ago')
+ start_window = demisto.args().get('start_window')
+ end_window = demisto.args().get('end_window')
+
+ if time_ago is not None or (start_window is not None
+ or end_window is not None):
+ begin, end = uptycs_parse_date_range(time_ago,
+ start_window, end_window)
+ query = apply_datetime_cuts(query, "event_time", begin, end)
+
+ query = ("%s ORDER BY a.event_time DESC" % query)
+
+ if limit != -1 and limit is not None:
+ query = ("%s LIMIT %s" % (query, limit))
+
+ query_type = 'global'
+ post_data = {
+ "query": query,
+ "queryType": query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_events_command():
+ query_results = uptycs_get_events()
+ context = query_results.get('items')
+ context_entries_to_keep = ['upt_asset_id', 'host_name', 'grouping',
+ 'code', 'assigned_to', 'event_time',
+ 'updated_at', 'metadata', 'asset', 'status',
+ 'id', 'created_at', 'description', 'severity',
+ 'value', 'key']
+
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ human_readable = tableToMarkdown('Uptycs Events',
+ query_results.get('items'),
+ ['host_name', 'grouping', 'event_time',
+ 'description', 'value', 'severity'])
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.Events(val.id == obj.id)': query_results.get('items')
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_alert_rules():
+ """
+ return list of alert rules
+ """
+ http_method = 'get'
+ api_call = "/alertRules"
+ limit = demisto.args().get('limit')
+
+ if limit != -1 and limit is not None:
+ api_call = ("%s?limit=%s" % (api_call, limit))
+
+ return restcall(http_method, api_call)
+
+
+def uptycs_get_alert_rules_command():
+ query_results = uptycs_get_alert_rules()
+ human_readable = tableToMarkdown('Uptycs Alert Rules',
+ query_results.get('items'),
+ ['name', 'description', 'grouping',
+ 'enabled', 'updatedAt', 'code'])
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results.get('items'),
+ 'HumanReadable': human_readable,
+ }
+
+ return entry
+
+
+def uptycs_get_event_rules():
+ """
+ return list of event rules
+ """
+ http_method = 'get'
+ api_call = "/eventRules"
+ limit = demisto.args().get('limit')
+
+ if limit != -1 and limit is not None:
+ api_call = ("%s?limit=%s" % (api_call, limit))
+
+ return restcall(http_method, api_call)
+
+
+def uptycs_get_event_rules_command():
+ query_results = uptycs_get_event_rules()
+ human_readable = tableToMarkdown('Uptycs Event Rules',
+ query_results.get('items'),
+ ['name', 'description', 'grouping',
+ 'enabled', 'updatedAt', 'code'])
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results.get('items'),
+ 'HumanReadable': human_readable,
+ }
+
+ return entry
+
+
+def uptycs_get_process_open_files():
+ """
+ return information for processes which opened a file
+ """
+ http_method = 'post'
+ api_call = '/query'
+ query = "select * from process_open_files"
+ limit = demisto.args().get('limit')
+
+ time = demisto.args().get('time')
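+    # upt_day encodes the date as a YYYYMMDD integer (for example,
+    # "2019-08-06 12:00:00" becomes 20190806); the WHERE upt_day = ... clause
+    # added below restricts the query to a single day's rows.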
+ if time is not None:
+ day = time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+
+ else:
+ temp_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
+ day = temp_time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+
+ query = "%s WHERE upt_day = %s" % (query, uptday)
+
+ equal_cuts = {
+ "upt_asset_id": demisto.args().get('asset_id'),
+ "upt_hostname": demisto.args().get('host_name_is'),
+ "upt_time": time
+ }
+ query = apply_equals_cuts(query, equal_cuts)
+
+ like_cuts = {
+ "upt_hostname": demisto.args().get('host_name_like')
+ }
+ query = apply_like_cuts(query, like_cuts)
+
+ time_ago = demisto.args().get('time_ago')
+ start_window = demisto.args().get('start_window')
+ end_window = demisto.args().get('end_window')
+
+ if time is None and (time_ago is not None or (start_window is not None
+ or end_window is not None)):
+ begin, end = uptycs_parse_date_range(time_ago,
+ start_window, end_window)
+ query = apply_datetime_cuts(query, "upt_time", begin, end)
+
+ query = ("%s ORDER BY upt_time DESC" % query)
+
+ if limit != -1 and limit is not None:
+ query = ("%s LIMIT %s" % (query, limit))
+
+ query_type = 'global'
+ post_data = {
+ 'query': query,
+ 'queryType': query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_process_open_files_command():
+ query_results = uptycs_get_process_open_files()
+    human_readable = tableToMarkdown('Processes which have opened a file',
+ query_results.get('items'),
+ ['upt_hostname', 'pid', 'path', 'fd',
+ 'upt_time'])
+ context = query_results.get('items')
+ context_entries_to_keep = ['upt_hostname', 'upt_asset_id', 'pid',
+ 'path', 'fd', 'upt_time']
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.Files': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_process_open_sockets():
+ """
+ return information for processes which opened a socket
+ """
+ http_method = 'post'
+ api_call = '/query'
+ time = demisto.args().get('time')
+ query = "select * from process_open_sockets"
+ limit = demisto.args().get('limit')
+
+ if time is not None:
+ day = time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+ else:
+ temp_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
+ day = temp_time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+
+ query = "%s WHERE upt_day = %s" % (query, uptday)
+
+ equal_cuts = {
+ "remote_address": demisto.args().get('ip'),
+ "upt_asset_id": demisto.args().get('asset_id'),
+ "upt_hostname": demisto.args().get('host_name_is'),
+ "upt_time": time
+ }
+ query = apply_equals_cuts(query, equal_cuts)
+
+ like_cuts = {
+ "upt_hostname": demisto.args().get('host_name_like')
+ }
+ query = apply_like_cuts(query, like_cuts)
+
+ time_ago = demisto.args().get('time_ago')
+ start_window = demisto.args().get('start_window')
+ end_window = demisto.args().get('end_window')
+
+ if time is None and (time_ago is not None or (start_window is not None
+ or end_window is not None)):
+ begin, end = uptycs_parse_date_range(time_ago,
+ start_window, end_window)
+ query = apply_datetime_cuts(query, "upt_time", begin, end)
+
+ query = ("%s ORDER BY upt_time DESC" % query)
+
+ if limit != -1 and limit is not None:
+ query = ("%s LIMIT %s" % (query, limit))
+
+ query_type = 'global'
+ post_data = {
+ 'query': query,
+ 'queryType': query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_process_open_sockets_command():
+ query_results = uptycs_get_process_open_sockets()
+    human_readable = tableToMarkdown('Process open sockets',
+ query_results.get('items'),
+ ['upt_hostname', 'pid', 'local_address',
+ 'remote_address', 'upt_time',
+ 'local_port', 'remote_port', 'socket'])
+ context = query_results.get('items')
+ context_entries_to_keep = ['upt_hostname', 'upt_asset_id', 'pid',
+ 'local_address', 'remote_address', 'upt_time',
+ 'local_port', 'remote_port', 'socket', 'family',
+ 'path', 'state', 'protocol']
+
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.Sockets': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_socket_events():
+ """
+    return socket events
+ """
+ http_method = 'post'
+ api_call = '/query'
+ time = demisto.args().get('time')
+ query = "select * from socket_events"
+ limit = demisto.args().get('limit')
+
+ if time is not None:
+ day = time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+ else:
+ temp_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
+ day = temp_time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+
+ query = "%s WHERE upt_day = %s" % (query, uptday)
+
+ equal_cuts = {
+ "remote_address": demisto.args().get('ip'),
+ "upt_asset_id": demisto.args().get('asset_id'),
+ "upt_hostname": demisto.args().get('host_name_is'),
+ "upt_time": time
+ }
+ query = apply_equals_cuts(query, equal_cuts)
+
+ like_cuts = {
+ "upt_hostname": demisto.args().get('host_name_like')
+ }
+ query = apply_like_cuts(query, like_cuts)
+
+ time_ago = demisto.args().get('time_ago')
+ start_window = demisto.args().get('start_window')
+ end_window = demisto.args().get('end_window')
+
+ if time is None and (time_ago is not None or (start_window is not None
+ or end_window is not None)):
+ begin, end = uptycs_parse_date_range(time_ago,
+ start_window, end_window)
+ query = apply_datetime_cuts(query, "upt_time", begin, end)
+
+ query = ("%s ORDER BY upt_time DESC" % query)
+
+ if limit != -1 and limit is not None:
+ query = ("%s LIMIT %s" % (query, limit))
+
+ query_type = 'global'
+ post_data = {
+ 'query': query,
+ 'queryType': query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_socket_events_command():
+ query_results = uptycs_get_socket_events()
+ human_readable = tableToMarkdown('Socket events',
+ query_results.get('items'),
+ ['upt_hostname', 'pid', 'local_address',
+ 'remote_address', 'upt_time',
+ 'local_port', 'action'])
+ context = query_results.get('items')
+
+ context_entries_to_keep = ['upt_hostname', 'upt_asset_id', 'pid',
+ 'local_address', 'remote_address', 'upt_time',
+ 'local_port', 'remote_port', 'socket',
+ 'family', 'path', 'action', 'protocol']
+
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.SocketEvents': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_socket_event_information():
+ """
+    return socket event information
+ """
+ http_method = 'post'
+ api_call = '/query'
+ time = demisto.args().get('time')
+
+ if time is not None:
+ day = time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+ else:
+ temp_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
+ day = temp_time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+
+ query = ("SELECT * FROM socket_events WHERE upt_day = %s AND \
+upt_time <= CAST('%s' AS TIMESTAMP) AND remote_address='%s' \
+ORDER BY upt_time DESC LIMIT 1" %
+ (uptday, time, demisto.args().get('ip')))
+
+ equal_cuts = {
+ "upt_asset_id": demisto.args().get('asset_id'),
+ "upt_hostname": demisto.args().get('host_name_is')
+ }
+
+ query = apply_equals_cuts(query, equal_cuts)
+
+ query_type = 'global'
+ post_data = {
+ 'query': query,
+ 'queryType': query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_socket_event_information_command():
+ query_results = uptycs_get_socket_event_information()
+ human_readable = tableToMarkdown('Socket event information',
+ query_results.get('items'),
+ ['upt_hostname', 'pid', 'local_address',
+ 'remote_address', 'upt_time',
+ 'local_port', 'action'])
+ context = query_results.get('items')
+
+ context_entries_to_keep = ['upt_hostname', 'upt_asset_id', 'pid',
+ 'local_address', 'remote_address', 'upt_time',
+ 'local_port', 'remote_port', 'socket',
+ 'family', 'path', 'action', 'protocol']
+
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.SocketEvent': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_processes():
+ """
+    return processes which are running or have run on a registered Uptycs asset
+ """
+ http_method = 'post'
+ api_call = '/query'
+ time = demisto.args().get('time')
+ query = "select * from processes"
+ limit = demisto.args().get('limit')
+
+ if time is not None:
+ day = time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+ else:
+ temp_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
+ day = temp_time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+
+ query = "%s WHERE upt_day = %s" % (query, uptday)
+
+ equal_cuts = {
+ "upt_asset_id": demisto.args().get('asset_id'),
+ "upt_hostname": demisto.args().get('host_name_is'),
+ "upt_time": time
+ }
+ query = apply_equals_cuts(query, equal_cuts)
+
+ like_cuts = {
+ "upt_hostname": demisto.args().get('host_name_like')
+ }
+ query = apply_like_cuts(query, like_cuts)
+
+ time_ago = demisto.args().get('time_ago')
+ start_window = demisto.args().get('start_window')
+ end_window = demisto.args().get('end_window')
+
+ if time is None and (time_ago is not None or (start_window is not None
+ or end_window is not None)):
+ begin, end = uptycs_parse_date_range(time_ago,
+ start_window, end_window)
+ query = apply_datetime_cuts(query, "upt_time", begin, end)
+
+ query = ("%s ORDER BY upt_time DESC" % query)
+
+ if limit != -1 and limit is not None:
+ query = ("%s LIMIT %s" % (query, limit))
+
+ query_type = 'global'
+ post_data = {
+ 'query': query,
+ 'queryType': query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_processes_command():
+ query_results = uptycs_get_processes()
+ human_readable = tableToMarkdown('Processes',
+ query_results.get('items'),
+ ['upt_hostname', 'pid', 'name', 'path',
+ 'upt_time', 'parent', 'cmdline'])
+ context = query_results.get('items')
+
+ context_entries_to_keep = ['upt_hostname', 'upt_asset_id', 'pid', 'name',
+ 'path', 'upt_time', 'parent', 'cmdline',
+ 'pgroup', 'cwd']
+
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.Process': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_process_events():
+    """return process events recorded on a \
+    registered Uptycs asset"""
+ http_method = 'post'
+ api_call = '/query'
+ time = demisto.args().get('time')
+ query = "select * from process_events"
+ limit = demisto.args().get('limit')
+
+ if time is not None:
+ day = time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+ else:
+ temp_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
+ day = temp_time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+
+ query = "%s WHERE upt_day = %s" % (query, uptday)
+
+ equal_cuts = {
+ "upt_asset_id": demisto.args().get('asset_id'),
+ "upt_hostname": demisto.args().get('host_name_is'),
+ "upt_time": time
+ }
+ query = apply_equals_cuts(query, equal_cuts)
+
+ like_cuts = {
+ "upt_hostname": demisto.args().get('host_name_like')
+ }
+ query = apply_like_cuts(query, like_cuts)
+
+ time_ago = demisto.args().get('time_ago')
+ start_window = demisto.args().get('start_window')
+ end_window = demisto.args().get('end_window')
+
+ if time is None and (time_ago is not None or (start_window is not None
+ or end_window is not None)):
+ begin, end = uptycs_parse_date_range(time_ago,
+ start_window, end_window)
+ query = apply_datetime_cuts(query, "upt_time", begin, end)
+
+ query = ("%s ORDER BY upt_time DESC" % query)
+
+ if limit != -1 and limit is not None:
+ query = ("%s LIMIT %s" % (query, limit))
+
+ query_type = 'global'
+ post_data = {
+ 'query': query,
+ 'queryType': query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_process_events_command():
+ query_results = uptycs_get_process_events()
+ human_readable = tableToMarkdown('Process events',
+ query_results.get('items'),
+ ['upt_hostname', 'pid', 'path',
+ 'upt_time', 'parent', 'cmdline'])
+ context = query_results.get('items')
+
+ context_entries_to_keep = ['upt_hostname', 'upt_asset_id', 'pid', 'path',
+ 'upt_time', 'parent', 'cmdline', 'cwd']
+
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.ProcessEvents': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_process_information():
+ """return process information"""
+ http_method = 'post'
+ api_call = '/query'
+ time = demisto.args().get('time')
+ if time is not None:
+ day = time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+ else:
+ temp_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
+ day = temp_time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+
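+    # The CTE below rebuilds process lifetimes from snapshot rows:
+    # upt_added=True rows give the add time, upt_added=False rows (joined on
+    # upt_hash) give the remove time, and current_timestamp stands in when no
+    # removal has been recorded yet. The requested pid is then matched where
+    # the given time falls inside [upt_add_time, upt_remove_time].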
+ query = ("WITH add_times AS (SELECT * FROM processes WHERE upt_added=True), \
+remove_times AS (SELECT upt_time, upt_hash FROM processes WHERE \
+upt_added=False), temp_proc AS (SELECT aa.upt_asset_id, aa.pid, \
+aa.name, aa.path, aa.cmdline, aa.cwd, aa.parent, aa.pgroup, \
+aa.upt_hostname, aa.upt_day, aa.upt_time as upt_add_time, \
+rr.upt_time as temp_remove_time FROM add_times aa LEFT JOIN \
+remove_times rr ON aa.upt_hash=rr.upt_hash), new_proc AS \
+(SELECT upt_asset_id, pid, name, path, cmdline, cwd, parent, \
+pgroup, upt_hostname, upt_day, upt_add_time, \
+coalesce(temp_remove_time, current_timestamp) AS upt_remove_time \
+FROM temp_proc) SELECT * FROM new_proc WHERE pid=%s AND \
+CAST('%s' AS TIMESTAMP) BETWEEN upt_add_time AND upt_remove_time"
+ % (demisto.args().get('pid'), time))
+
+ equal_cuts = {
+ "upt_day": uptday,
+ "upt_asset_id": demisto.args().get('asset_id'),
+ "upt_hostname": demisto.args().get('host_name_is')
+ }
+
+ query = apply_equals_cuts(query, equal_cuts)
+
+ query = ("%s ORDER BY upt_add_time DESC LIMIT 1" % query)
+
+ query_type = 'global'
+ post_data = {
+ 'query': query,
+ 'queryType': query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_process_information_command():
+ query_results = uptycs_get_process_information()
+ human_readable = tableToMarkdown('Process information',
+ query_results.get('items'),
+ ['upt_hostname', 'parent', 'pid',
+ 'name', 'path', 'cmdline'])
+ context = query_results.get('items')
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.Proc': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_process_event_information():
+ """return process event information"""
+ http_method = 'post'
+ api_call = '/query'
+ time = demisto.args().get('time')
+
+ if time is not None:
+ day = time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+ else:
+ temp_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
+ day = temp_time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+
+ query = ("SELECT * FROM process_events WHERE upt_day = %s AND pid=%s AND \
+upt_time<=CAST('%s' AS TIMESTAMP)" %
+ (uptday, demisto.args().get('pid'), time))
+
+ equal_cuts = {
+ "upt_asset_id": demisto.args().get('asset_id'),
+ "upt_hostname": demisto.args().get('host_name_is')
+ }
+
+ query = apply_equals_cuts(query, equal_cuts)
+
+ query = ("%s ORDER BY upt_time DESC LIMIT 1" % query)
+
+ query_type = 'global'
+ post_data = {
+ 'query': query,
+ 'queryType': query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_process_event_information_command():
+ query_results = uptycs_get_process_event_information()
+ human_readable = tableToMarkdown('Process event information',
+ query_results.get('items'),
+ ['upt_hostname', 'parent', 'pid',
+ 'path', 'cmdline'])
+ context = query_results.get('items')
+
+ context_entries_to_keep = ['upt_hostname', 'upt_asset_id', 'pid', 'path',
+ 'upt_time', 'parent', 'cmdline', 'cwd']
+
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.ProcEvent': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_parent_information():
+ """return parent process information"""
+ http_method = 'post'
+ api_call = '/query'
+ child_add_time = demisto.args().get('child_add_time')
+ if child_add_time is not None:
+ day = child_add_time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+ else:
+ temp_child_add_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
+ day = temp_child_add_time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+
+ query = ("WITH add_times AS (SELECT * FROM processes WHERE upt_added=True), \
+remove_times AS (SELECT upt_time, upt_hash FROM processes WHERE \
+upt_added=False), temp_proc AS (SELECT aa.upt_asset_id, aa.pid, \
+aa.name, aa.path, aa.cmdline, aa.cwd, aa.parent, aa.pgroup, \
+aa.upt_hostname, aa.upt_day, aa.upt_time as upt_add_time, \
+rr.upt_time as temp_remove_time FROM add_times aa LEFT JOIN \
+remove_times rr ON aa.upt_hash=rr.upt_hash), new_proc AS \
+(SELECT upt_asset_id, pid, name, path, cmdline, cwd, parent, \
+pgroup, upt_hostname, upt_day, upt_add_time, \
+coalesce(temp_remove_time, current_timestamp) AS upt_remove_time \
+FROM temp_proc) SELECT * FROM new_proc WHERE pid=%s AND \
+CAST('%s' AS TIMESTAMP) BETWEEN upt_add_time AND upt_remove_time AND \
+upt_day <= %s"
+ % (demisto.args().get('parent'), child_add_time, uptday))
+
+ equal_cuts = {
+ "upt_asset_id": demisto.args().get('asset_id'),
+ "upt_hostname": demisto.args().get('host_name_is')
+ }
+
+ query = apply_equals_cuts(query, equal_cuts)
+
+ query = ("%s ORDER BY upt_add_time DESC LIMIT 1" % query)
+
+ query_type = 'global'
+ post_data = {
+ 'query': query,
+ 'queryType': query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_parent_information_command():
+ query_results = uptycs_get_parent_information()
+ human_readable = tableToMarkdown('Parent process information',
+ query_results.get('items'),
+ ['upt_hostname', 'parent', 'pid',
+ 'name', 'path', 'cmdline'])
+ context = query_results.get('items')
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.Parent': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_parent_event_information():
+    """return parent process event information"""
+ http_method = 'post'
+ api_call = '/query'
+ child_add_time = demisto.args().get('child_add_time')
+
+ if child_add_time is not None:
+ day = child_add_time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+ else:
+ temp_child_add_time = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
+ day = temp_child_add_time.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+
+ query = ("SELECT * FROM process_events WHERE upt_day = %s AND pid=%s AND \
+upt_time<=CAST('%s' AS TIMESTAMP)" %
+ (uptday, demisto.args().get('parent'), child_add_time))
+
+ equal_cuts = {
+ "upt_asset_id": demisto.args().get('asset_id'),
+ "upt_hostname": demisto.args().get('host_name_is')
+ }
+
+ query = apply_equals_cuts(query, equal_cuts)
+
+ query = ("%s ORDER BY upt_time DESC LIMIT 1" % query)
+
+ query_type = 'global'
+ post_data = {
+ 'query': query,
+ 'queryType': query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_parent_event_information_command():
+ query_results = uptycs_get_parent_event_information()
+ human_readable = tableToMarkdown('Parent process event information',
+ query_results.get('items'),
+ ['upt_hostname', 'parent', 'pid',
+ 'path', 'cmdline'])
+ context = query_results.get('items')
+
+ context_entries_to_keep = ['upt_hostname', 'upt_asset_id', 'pid', 'path',
+ 'upt_time', 'parent', 'cmdline', 'cwd']
+
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.ParentEvent': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_process_child_processes():
+ """return child processes for a given parent process"""
+ http_method = 'post'
+ api_call = '/query'
+ parent = demisto.args().get('parent')
+ limit = demisto.args().get('limit')
+ asset_id = demisto.args().get('asset_id')
+ parent_start = demisto.args().get('parent_start_time')
+ parent_end = demisto.args().get('parent_end_time')
+ if parent_start is not None:
+ day = parent_start.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+ else:
+ temp_parent_start = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
+ day = temp_parent_start.replace(" ", "-")
+ day_list = day.split("-")
+ uptday = int("%s%s%s" %
+ (str(day_list[0]), str(day_list[1]), str(day_list[2])))
+
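+    # If parent_end_time is not supplied, approximate it below with the first
+    # process event recorded for this pid after parent_start_time (i.e. the
+    # pid has been reused), falling back to the current time.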
+ if parent_end is None:
+ query = ("SELECT upt_time FROM process_events WHERE pid = %s AND \
+upt_asset_id = '%s' AND upt_time > CAST('%s' AS TIMESTAMP) \
+ORDER BY upt_time ASC limit 1" %
+ (parent, asset_id, parent_start))
+ query_type = 'global'
+
+ post_data = {
+ 'query': query,
+ 'queryType': query_type
+ }
+ temp_results = restcall(http_method, api_call, json=post_data)
+ if len(temp_results.get('items')) > 0:
+ parent_end = temp_results.get('items')[0].get('upt_time')
+ else:
+ parent_end = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
+
+ query = ("WITH add_times AS (SELECT * FROM processes WHERE upt_added=True), \
+remove_times AS (SELECT upt_time, upt_hash FROM processes WHERE \
+upt_added=False), temp_proc AS (SELECT aa.upt_asset_id, aa.pid, \
+aa.name, aa.path, aa.cmdline, aa.cwd, aa.parent, aa.pgroup, \
+aa.upt_hostname, aa.upt_day, aa.upt_time as upt_add_time, \
+rr.upt_time as temp_remove_time FROM add_times aa LEFT JOIN \
+remove_times rr on aa.upt_hash=rr.upt_hash), new_proc AS \
+(SELECT upt_asset_id, pid, name, path, cmdline, cwd, parent, \
+pgroup, upt_hostname, upt_day, upt_add_time, \
+coalesce(temp_remove_time, current_timestamp) AS upt_remove_time \
+FROM temp_proc) SELECT * FROM new_proc WHERE upt_day>=%s AND \
+parent = %s AND upt_asset_id = '%s' AND upt_add_time BETWEEN \
+CAST('%s' AS TIMESTAMP) AND CAST('%s' AS TIMESTAMP) ORDER BY \
+upt_add_time DESC"
+ % (uptday, parent, asset_id, parent_start, parent_end))
+
+ if limit != -1 and limit is not None:
+ query = ("%s LIMIT %s" % (query, limit))
+
+ query_type = 'global'
+ post_data = {
+ 'query': query,
+ 'queryType': query_type
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_get_process_child_processes_command():
+ query_results = uptycs_get_process_child_processes()
+ human_readable = tableToMarkdown('Child processes of a specified pid',
+ query_results.get('items'),
+ ['upt_hostname', 'pid', 'name',
+ 'path', 'cmdline', 'upt_add_time'])
+ context = query_results.get('items')
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.Children': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_set_alert_status():
+ """set the status of an alert"""
+ http_method = 'put'
+ api_call = ('/alerts/%s' % demisto.args().get('alert_id'))
+
+ post_data = {
+ 'status': demisto.args().get('status')
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_set_alert_status_command():
+ query_results = uptycs_set_alert_status()
+ human_readable = tableToMarkdown('Uptycs Alert Status',
+ query_results, ['id', 'code', 'status',
+ 'createdAt', 'updatedAt'])
+ context = query_results
+    updated_by_user = context.get('updatedByUser', {})
+    context['updatedBy'] = updated_by_user.get('name')
+    context['updatedByAdmin'] = updated_by_user.get('admin')
+    context['updatedByEmail'] = updated_by_user.get('email')
+ context_entries_to_keep = ['id', 'code', 'status', 'createdAt',
+ 'updatedAt', 'updatedBy', 'updatedByAdmin',
+ 'updatedByEmail']
+ if context is not None:
+ for key in list(context):
+ if key not in context_entries_to_keep:
+ context.pop(key, None)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.AlertStatus': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_asset_tags():
+    """get the tags on an asset"""
+ http_method = 'get'
+ api_call = ('/assets/%s' % demisto.args().get('asset_id'))
+ return restcall(http_method, api_call).get('tags')
+
+
+def uptycs_get_asset_tags_command():
+ query_results = uptycs_get_asset_tags()
+ human_readable = tableToMarkdown('Uptycs Asset Tags for asset id: %s' %
+ demisto.args().get('asset_id'),
+ query_results, 'Tags')
+ context = query_results
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.AssetTags': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_set_asset_tag():
+ """set a tag on an asset"""
+ http_method = 'get'
+ api_call = ('/assets/%s' % demisto.args().get('asset_id'))
+ tags = restcall(http_method, api_call).get('tags')
+ tags.append(demisto.args().get('tag_key') + '=' + demisto.args().get(
+ 'tag_value'))
+
+ http_method = 'put'
+ post_data = {
+ 'tags': tags
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_set_asset_tag_command():
+ query_results = uptycs_set_asset_tag()
+ human_readable = tableToMarkdown('Uptycs Asset Tag',
+ query_results, ['hostName', 'tags'])
+ context = query_results
+ context_entries_to_keep = ['hostName', 'tags']
+
+ if context is not None:
+ for key in list(context):
+ if key not in context_entries_to_keep:
+ context.pop(key, None)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.AssetTags': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_users():
+    """return a list of Uptycs users"""
+ http_method = 'get'
+ api_call = '/users'
+ limit = demisto.args().get('limit')
+
+ if limit != -1 and limit is not None:
+ api_call = ("%s?limit=%s" % (api_call, limit))
+
+ return restcall(http_method, api_call)
+
+
+def uptycs_get_users_command():
+ query_results = uptycs_get_users()
+ human_readable = tableToMarkdown('Uptycs Users',
+ query_results.get(
+ 'items'), ['name', 'email', 'id',
+ 'admin', 'active',
+ 'createdAt', 'updatedAt'])
+ context = query_results.get('items')
+ context_entries_to_keep = ['name', 'email', 'id', 'admin', 'active',
+ 'createdAt', 'updatedAt']
+
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.Users': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_user_information():
+    """return information about a specific Uptycs user"""
+ http_method = 'get'
+ api_call = '/users/%s' % demisto.args().get('user_id')
+
+ return restcall(http_method, api_call)
+
+
+def uptycs_get_user_information_command():
+ query_results = uptycs_get_user_information()
+ human_readable = tableToMarkdown('Uptycs User Information',
+ query_results, ['name', 'email', 'id'])
+ context = query_results
+ context['userRoles'] = {
+ context.get('userRoles')[0].get('role').get('name'):
+ context.get('userRoles')[0].get('role')
+ }
+
+ context_entries_to_keep = ['name', 'email', 'id', 'userRoles',
+ 'userObjectGroups']
+
+ if context is not None:
+ for key in list(context):
+ if key not in context_entries_to_keep:
+ context.pop(key, None)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.UserInfo': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_user_asset_groups():
+ """return a list of users in a particular asset group"""
+ http_method = 'get'
+ api_call = '/users'
+
+ users = restcall(http_method, api_call).get('items')
+ user_ids = []
+ for index in range(len(users)):
+ user_ids.append(users[index].get('id'))
+
+ asset_group_id = demisto.args().get('asset_group_id')
+ users_in_group = {}
+ for user_id in user_ids:
+ http_method = 'get'
+ api_call = '/users/%s' % user_id
+ user_info = restcall(http_method, api_call)
+ obj_groups = user_info.get('userObjectGroups')
+ for obj_group in obj_groups:
+ if obj_group.get('objectGroupId') == asset_group_id:
+ users_in_group[user_info.get('name')] = {
+ 'email': user_info.get('email'),
+ 'id': user_info.get('id')
+ }
+
+ return users_in_group
+
+
+def uptycs_get_user_asset_groups_command():
+ query_results = uptycs_get_user_asset_groups()
+ human_readable = tableToMarkdown('Uptycs User Asset Groups',
+ query_results)
+ context = query_results
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.UserGroups': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_asset_groups():
+ """return a list of asset groups"""
+ http_method = 'get'
+ api_call = '/objectGroups'
+ limit = demisto.args().get('limit')
+
+    if limit is not None and str(limit) != '-1':
+ api_call = ("%s?limit=%s" % (api_call, limit))
+
+ return restcall(http_method, api_call)
+
+
+def uptycs_get_asset_groups_command():
+ query_results = uptycs_get_asset_groups()
+    human_readable = tableToMarkdown('Uptycs Asset Groups',
+ query_results.get('items'),
+ ['id', 'name', 'description',
+ 'objectType', 'custom', 'createdAt',
+ 'updatedAt'])
+ context = query_results.get('items')
+ context_entries_to_keep = ['id', 'name', 'description', 'objectType',
+ 'custom', 'createdAt', 'updatedAt']
+
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.AssetGroups': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_threat_indicators():
+ """return a list of threat indcicators"""
+ http_method = 'get'
+ api_call = '/threatIndicators'
+ limit = demisto.args().get('limit')
+
+    if limit is not None and str(limit) != '-1':
+ api_call = ("%s?limit=%s" % (api_call, limit))
+
+ indicator = demisto.args().get('indicator')
+ if indicator is not None:
+ api_call = '%s?filters={"indicator":{"like":"%s"}}' %\
+ (api_call, indicator)
+
+ return restcall(http_method, api_call)
+
+
+def uptycs_get_threat_indicators_command():
+ query_results = uptycs_get_threat_indicators()
+ human_readable = tableToMarkdown('Uptycs Threat Indicators',
+ query_results.get('items'),
+ ['id', 'indicator', 'description',
+ 'indicatorType', 'createdAt',
+ 'isActive', 'threatId'])
+ context = query_results.get('items')
+ context_entries_to_keep = ['id', 'indicator', 'description',
+ 'indicatorType', 'createdAt', 'isActive',
+ 'threatId']
+
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.ThreatIndicators': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_threat_indicator():
+ """return information about a particular threat indicator"""
+ http_method = 'get'
+ api_call = '/threatIndicators/%s' % demisto.args().get('indicator_id')
+
+ return restcall(http_method, api_call)
+
+
+def uptycs_get_threat_indicator_command():
+ query_results = uptycs_get_threat_indicator()
+ human_readable = tableToMarkdown('Uptycs Threat Indicator',
+ query_results, ['id', 'indicator',
+ 'description',
+ 'indicatorType',
+ 'createdAt', 'isActive',
+ 'threatId'])
+ context = query_results
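+    # Flatten nested threat source details into top-level context keys.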
+ context['threat_source_id'] = context.get('threat').get('threatSourceId')
+ context['threat_vendor_id'] = context.get('threat').get('threatSource').\
+ get('threatVendorId')
+ context['threat_source_name'] = context.get('threat').get('threatSource').\
+ get('name')
+
+ context_entries_to_keep = ['id', 'indicator', 'description',
+ 'indicatorType', 'createdAt', 'updatedAt',
+ 'isActive', 'threatId', 'threat_source_id',
+ 'threat_vendor_id', 'threat_source_name']
+
+ if context is not None:
+ for key in list(context):
+ if key not in context_entries_to_keep:
+ context.pop(key, None)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.ThreatIndicator': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_threat_sources():
+ """return a list of threat sources"""
+ http_method = 'get'
+ api_call = '/threatSources'
+ limit = demisto.args().get('limit')
+
+    if limit is not None and str(limit) != '-1':
+ api_call = ("%s?limit=%s" % (api_call, limit))
+
+ return restcall(http_method, api_call)
+
+
+def uptycs_get_threat_sources_command():
+ query_results = uptycs_get_threat_sources()
+ human_readable = tableToMarkdown('Uptycs Threat Sources',
+ query_results.get('items'),
+ ['name', 'description', 'url', 'enabled',
+ 'custom', 'createdAt', 'lastDownload'])
+ context = query_results.get('items')
+ context_entries_to_keep = ['name', 'description', 'url', 'enabled',
+ 'custom', 'createdAt', 'lastDownload']
+
+ if context is not None:
+ remove_context_entries(context, context_entries_to_keep)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.ThreatSources': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_threat_source():
+ """return information about a particular threat source"""
+ http_method = 'get'
+ api_call = '/threatSources'
+
+ threat_source_id = demisto.args().get('threat_source_id')
+ if threat_source_id is not None:
+ api_call = '%s/%s' % (api_call, threat_source_id)
+
+ return restcall(http_method, api_call)
+
+
+def uptycs_get_threat_source_command():
+ query_results = uptycs_get_threat_source()
+    human_readable = tableToMarkdown('Uptycs Threat Source',
+ query_results,
+ ['name', 'description', 'url', 'enabled',
+ 'custom', 'createdAt', 'lastDownload'])
+ context = query_results
+ context_entries_to_keep = ['name', 'description', 'url', 'enabled',
+ 'custom', 'createdAt', 'lastDownload']
+
+ if context is not None:
+ for key in list(context):
+ if key not in context_entries_to_keep:
+ context.pop(key, None)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.ThreatSources': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_get_threat_vendors():
+ """return a list of threat vendors"""
+ http_method = 'get'
+ api_call = '/threatVendors'
+ limit = demisto.args().get('limit')
+
+    if limit is not None and str(limit) != '-1':
+ api_call = ("%s?limit=%s" % (api_call, limit))
+
+ return restcall(http_method, api_call)
+
+
+def uptycs_get_threat_vendors_command():
+ query_results = uptycs_get_threat_vendors()
+ context = query_results.get('items')
+
+ if context is not None:
+ for index in range(len(context)):
+ context[index].pop('links', None)
+
+ human_readable = tableToMarkdown('Uptycs Threat Vendors',
+ context)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.ThreatVendors': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_post_threat_source():
+ """post a new threat source"""
+
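+    # This endpoint requires a multipart file upload, so it is called with
+    # requests directly instead of going through restcall().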
+ url = ("https://%s.uptycs.io/public/api/customers/%s/threatSources" %
+ (DOMAIN, CUSTOMER_ID))
+ header = generate_headers(KEY, SECRET)
+
+ filepath = demisto.getFilePath(demisto.args().get('entry_id'))
+ post_data = {
+ "name": demisto.args().get('name'),
+ "filename": filepath.get('name'),
+ "description": demisto.args().get('description')
+ }
+
+    # Use a context manager so the uploaded file handle is always closed.
+    with open(filepath.get('path'), 'rb') as upload_file:
+        files = {'file': upload_file}
+        response = requests.post(url, headers=header, data=post_data,
+                                 files=files, verify=VERIFY_CERT)
+
+    return response
+
+
+def uptycs_post_threat_source_command():
+ response = uptycs_post_threat_source()
+ human_readable = 'Uptycs Posted Threat Source'
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': response.json(),
+ 'HumanReadable': human_readable,
+ }
+
+ return entry
+
+
+def uptycs_get_saved_queries():
+ """return a list of threat vendors"""
+ http_method = 'get'
+ api_call = '/queries'
+
+ query_id = demisto.args().get('query_id')
+ if query_id is not None:
+ api_call = '%s/%s' % (api_call, query_id)
+
+ name = demisto.args().get('name')
+ if name is not None:
+ api_call = '%s?name=%s' % (api_call, name)
+
+ return restcall(http_method, api_call)
+
+
+def uptycs_get_saved_queries_command():
+ query_results = uptycs_get_saved_queries()
+ context = query_results.get('items')
+
+ if context is not None:
+ for index in range(len(context)):
+ context[index].pop('links', None)
+
+ human_readable = tableToMarkdown('Uptycs Saved Queries',
+ context,
+ ['name', 'description', 'query',
+ 'executionType', 'grouping', 'id'])
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.SavedQueries': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_run_saved_query():
+ """return a list of threat vendors"""
+ http_method = 'get'
+ api_call = '/queries'
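+    # First look up the saved query (by id or name) to obtain its text and
+    # execution type; it is then posted for execution below.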
+
+ query_id = demisto.args().get('query_id')
+ if query_id is not None:
+ api_call = '%s/%s' % (api_call, query_id)
+
+ name = demisto.args().get('name')
+ if name is not None:
+ api_call = '%s?name=%s' % (api_call, name)
+
+ query_results = restcall(http_method, api_call).get('items')[0]
+ query = query_results.get('query')
+ var_args = demisto.args().get('variable_arguments')
+
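+    # variable_arguments may arrive as a string-encoded dict (possibly
+    # encoded more than once); keep evaluating until a real dict is obtained,
+    # then substitute each placeholder into the query text.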
+ if var_args is not None:
+ while type(var_args) is not dict:
+ var_args = ast.literal_eval(var_args)
+ for key, value in var_args.items():
+ query = query.replace(key, value)
+
+ http_method = 'post'
+
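+    # Realtime queries are posted to /assets/query with an asset filter;
+    # global queries are posted to /query with only the query text.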
+ if query_results.get('executionType') == 'realtime':
+ api_call = '/assets/query'
+ if demisto.args().get('asset_id') is not None:
+ _id = {
+ "id": {
+ "equals": demisto.args().get('asset_id')
+ }
+ }
+ elif demisto.args().get('host_name_is') is not None:
+ _id = {
+ "host_name": {
+ "equals": demisto.args().get('host_name_is')
+ }
+ }
+ elif demisto.args().get('host_name_like') is not None:
+ _id = {
+ "host_name": {
+ "like": '%' + demisto.args().get('host_name_like') + '%'
+ }
+ }
+ else:
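+            # No asset filter was supplied; the '%%' LIKE pattern matches
+            # every hostname.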
+ _id = {
+ "host_name": {
+ "like": '%%'
+ }
+ }
+
+ post_data = {
+ "type": "realtime",
+ "query": query,
+ "filtering": {
+ "filters": _id
+ }
+ }
+ else:
+ post_data = {"query": query}
+ api_call = '/query'
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_run_saved_query_command():
+ query_results = uptycs_run_saved_query()
+ context = query_results.get('items')
+
+ if context is not None:
+ for index in range(len(context)):
+ context[index].pop('links', None)
+
+ human_readable = tableToMarkdown('Uptycs Query Results', context)
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': context,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.RunQuery': context
+ }
+ }
+
+ return entry
+
+
+def uptycs_post_saved_query():
+ """return a list of threat vendors"""
+ http_method = 'post'
+ api_call = '/queries'
+
+ post_data = {
+ "name": demisto.args().get('name'),
+ "type": demisto.args().get('type'),
+ "description": demisto.args().get('description'),
+ "query": demisto.args().get('query'),
+ "executionType": demisto.args().get('execution_type'),
+ "grouping": demisto.args().get('grouping'),
+ "custom": True
+ }
+
+ return restcall(http_method, api_call, json=post_data)
+
+
+def uptycs_post_saved_query_command():
+ query_results = uptycs_post_saved_query()
+ if query_results.get("status") == 500:
+ return_error("Internal Server Error, check whether a query with this \
+ name has already been saved")
+
+ human_readable = tableToMarkdown('Uptycs Posted Query',
+ query_results,
+ ['name', 'type', 'description', 'query',
+ 'executionType', 'grouping', 'custom'])
+
+ entry = {
+ 'ContentsFormat': formats['json'],
+ 'Type': entryTypes['note'],
+ 'Contents': query_results,
+ 'HumanReadable': human_readable,
+ 'EntryContext': {
+ 'Uptycs.PostedQuery': query_results
+ }
+ }
+
+ return entry
+
+
+def uptycs_test_module():
+ """check whether Uptycs API responds correctly"""
+ http_method = 'get'
+ api_call = '/assets?limit=1'
+
+ query_results = restcall(http_method, api_call)
+
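+    # restcall() returning 0 is treated as a failed API call.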
+    return query_results != 0
+
+
+def uptycs_fetch_incidents():
+ """fetch alerts from Uptycs"""
+ this_run = datetime.utcnow().strftime("%m/%d/%y %H:%M:%S")
+    if not demisto.getLastRun():
+ last_run, _ = parse_date_range(FETCH_TIME)
+ else:
+ last_run = demisto.getLastRun()['time']
+
+ http_method = 'get'
+ api_call = ('/alerts?filters={"alertTime":{"between":["%s","%s"]}}'
+ % (last_run, this_run))
+
+ query_results = restcall(http_method, api_call)
+
+    incidents = []  # type: List[dict]
+    items = query_results.get('items')
+    # Guard against a missing 'items' key before taking its length.
+    if items is None or len(items) == 0:
+        return incidents
+    else:
+        for index in range(len(items)):
+            context = items[index]
+ context['alertId'] = context.get('id')
+ context['hostName'] = context.get('asset').get('hostName')
+ if bool(context.get('metadata').get('indicatorId')):
+ context['indicatorId'] = context.get('metadata').\
+ get('indicatorId')
+ context['threatId'] = context.get('metadata').\
+ get('indicatorSummary').get('threatId')
+ context['threatSourceName'] = context.get('metadata').\
+ get('indicatorSummary').get('threatSourceName')
+ context['indicatorType'] = context.get('metadata').\
+ get('indicatorSummary').get('indicatorType')
+
+ context_entries_to_keep = ['id', 'hostName', 'grouping',
+ 'assignedTo', 'alertTime', 'alertId',
+ 'updatedAt', 'status', 'assetId',
+ 'createdAt', 'description', 'severity',
+ 'value', 'threatId',
+ 'threatSourceName', 'indicatorType',
+ 'indicatorId']
+
+ for key in list(context):
+ if key not in context_entries_to_keep:
+ context.pop(key, None)
+
+ alert_time = context.get('alertTime')
+
+ incident = {
+ "Name": "Uptycs Alert: %s for asset: %s" %
+ (context.get('description'), context.get('hostName')),
+ "Occurred": alert_time,
+ "Severity": severity_to_int(context.get('severity')),
+ "Details": context.get('id'),
+ "rawJSON": json.dumps(context)
+ }
+ incidents.insert(0, incident)
+
+ demisto.setLastRun({'time': this_run})
+ return incidents
+
+
+def main():
+ ###########################################################################
+ # main function
+ ###########################################################################
+
+ try:
+ if demisto.command() == 'uptycs-run-query':
+ demisto.results(uptycs_run_query_command())
+
+ if demisto.command() == 'uptycs-get-assets':
+ demisto.results(uptycs_get_assets_command())
+
+ if demisto.command() == 'uptycs-get-alerts':
+ demisto.results(uptycs_get_alerts_command())
+
+ if demisto.command() == 'uptycs-get-events':
+ demisto.results(uptycs_get_events_command())
+
+ if demisto.command() == 'uptycs-get-alert-rules':
+ demisto.results(uptycs_get_alert_rules_command())
+
+ if demisto.command() == 'uptycs-get-event-rules':
+ demisto.results(uptycs_get_event_rules_command())
+
+ if demisto.command() == 'uptycs-get-process-open-files':
+ demisto.results(uptycs_get_process_open_files_command())
+
+ if demisto.command() == 'uptycs-get-socket-events':
+ demisto.results(uptycs_get_socket_events_command())
+
+ if demisto.command() == 'uptycs-get-socket-event-information':
+ demisto.results(uptycs_get_socket_event_information_command())
+
+ if demisto.command() == 'uptycs-get-process-open-sockets':
+ demisto.results(uptycs_get_process_open_sockets_command())
+
+ if demisto.command() == 'uptycs-get-processes':
+ demisto.results(uptycs_get_processes_command())
+
+ if demisto.command() == 'uptycs-get-process-information':
+ demisto.results(uptycs_get_process_information_command())
+
+ if demisto.command() == 'uptycs-get-parent-information':
+ demisto.results(uptycs_get_parent_information_command())
+
+ if demisto.command() == 'uptycs-get-process-child-processes':
+ demisto.results(uptycs_get_process_child_processes_command())
+
+ if demisto.command() == 'uptycs-get-process-events':
+ demisto.results(uptycs_get_process_events_command())
+
+ if demisto.command() == 'uptycs-get-process-event-information':
+ demisto.results(uptycs_get_process_event_information_command())
+
+ if demisto.command() == 'uptycs-get-parent-event-information':
+ demisto.results(uptycs_get_parent_event_information_command())
+
+ if demisto.command() == 'uptycs-set-alert-status':
+ demisto.results(uptycs_set_alert_status_command())
+
+ if demisto.command() == 'uptycs-get-asset-tags':
+ demisto.results(uptycs_get_asset_tags_command())
+
+ if demisto.command() == 'uptycs-set-asset-tag':
+ demisto.results(uptycs_set_asset_tag_command())
+
+ if demisto.command() == 'uptycs-get-users':
+ demisto.results(uptycs_get_users_command())
+
+ if demisto.command() == 'uptycs-get-user-information':
+ demisto.results(uptycs_get_user_information_command())
+
+ if demisto.command() == 'uptycs-get-user-asset-groups':
+ demisto.results(uptycs_get_user_asset_groups_command())
+
+ if demisto.command() == 'uptycs-get-asset-groups':
+ demisto.results(uptycs_get_asset_groups_command())
+
+ if demisto.command() == 'uptycs-get-threat-indicators':
+ demisto.results(uptycs_get_threat_indicators_command())
+
+ if demisto.command() == 'uptycs-get-threat-indicator':
+ demisto.results(uptycs_get_threat_indicator_command())
+
+ if demisto.command() == 'uptycs-get-threat-sources':
+ demisto.results(uptycs_get_threat_sources_command())
+
+ if demisto.command() == 'uptycs-get-threat-source':
+ demisto.results(uptycs_get_threat_source_command())
+
+ if demisto.command() == 'uptycs-get-threat-vendors':
+ demisto.results(uptycs_get_threat_vendors_command())
+
+ if demisto.command() == 'uptycs-get-saved-queries':
+ demisto.results(uptycs_get_saved_queries_command())
+
+ if demisto.command() == 'uptycs-run-saved-query':
+ demisto.results(uptycs_run_saved_query_command())
+
+ if demisto.command() == 'uptycs-post-saved-query':
+ demisto.results(uptycs_post_saved_query_command())
+
+ if demisto.command() == 'uptycs-post-threat-source':
+ demisto.results(uptycs_post_threat_source_command())
+
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ if uptycs_test_module():
+ demisto.results('ok')
+ else:
+ demisto.results('test failed')
+
+ if demisto.command() == 'fetch-incidents':
+ demisto.incidents(uptycs_fetch_incidents())
+
+ except Exception as ex:
+ if demisto.command() == 'fetch-incidents':
+ raise
+
+ return_error(str(ex))
+
+
+if __name__ in ['__main__', '__builtin__', 'builtins']:
+ main()
diff --git a/Integrations/Uptycs/Uptycs.yml b/Integrations/Uptycs/Uptycs.yml
new file mode 100644
index 000000000000..73bc018ee558
--- /dev/null
+++ b/Integrations/Uptycs/Uptycs.yml
@@ -0,0 +1,1781 @@
+category: Analytics & SIEM
+commonfields:
+ id: Uptycs
+ version: -1
+configuration:
+- display: API key
+ name: key
+ required: true
+ type: 4
+- display: API secret
+ name: secret
+ required: true
+ type: 4
+- display: API domain
+ name: domain
+ required: true
+ type: 0
+- display: API customer_id
+ name: customer_id
+ required: true
+ type: 4
+- display: Fetch incidents
+ name: isFetch
+ required: false
+ type: 8
+- display: Incident type
+ name: incidentType
+ required: false
+ type: 13
+- defaultvalue: 'true'
+ display: Trust any certificate (unsecure)
+ name: insecure
+ required: false
+ type: 8
+- defaultvalue: 'true'
+ display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+- defaultvalue: 1 day
+  display: First fetch time (<number> <time unit>, e.g., 12 hours, 7 days)
+ name: fetch_time
+ required: false
+ type: 0
+description: Fetches data from the Uptycs database.
+display: Uptycs
+name: Uptycs
+script:
+ commands:
+ - arguments:
+ - default: false
+    description: Returns assets that belong to this asset group.
+ isArray: false
+ name: asset_group_id
+ required: false
+ secret: false
+ - default: false
+    description: Returns assets with this hostname. Do not use the "host_name_is"
+      and "host_name_like" arguments in the same command.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+    description: Returns assets with this string in the hostname. Use this argument
+      to find a selection of assets with similar hostnames. Do not use the "host_name_is"
+      and "host_name_like" arguments in the same command.
+ isArray: false
+ name: host_name_like
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+    description: The maximum number of entries to return. Use -1 to return all entries (this might cause performance issues or a timeout).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: Returns assets with this type of operating system. Can be "Ubuntu/Debian", "CentOS/RedHat/Fedora/Amazon Linux", "CoreOS Container Linux", "Mac OS X/Apple OS X/macOS", or "Windows".
+ isArray: false
+ name: os
+ predefined:
+ - Ubuntu/Debian
+ - CentOS/RedHat/Fedora/Amazon Linux
+ - CoreOS Container Linux
+ - Mac OS X/Apple OS X/macOS
+ - Windows
+ required: false
+ secret: false
+ - default: false
+ description: Returns the asset with this unique asset ID.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ deprecated: false
+  description: Returns assets enrolled with Uptycs. Do not use the "host_name_is"
+    and "host_name_like" arguments in the same command.
+ execution: false
+ name: uptycs-get-assets
+ outputs:
+ - contextPath: Uptycs.Assets.id
+ description: 'Uptycs asset ID. '
+ type: string
+ - contextPath: Uptycs.Assets.created_at
+ description: Date/time that the asset was enrolled with Uptycs.
+ type: date
+ - contextPath: Uptycs.Assets.host_name
+ description: Hostname in the Uptycs database.
+ type: string
+ - contextPath: Uptycs.Assets.os
+ description: OS installed on the asset (Windows, Linux, Mac OS X).
+ type: string
+ - contextPath: Uptycs.Assets.os_version
+ description: OS version
+ type: string
+ - contextPath: Uptycs.Assets.last_activity_at
+ description: Date/time of the last activity on the asset.
+ type: date
+ - contextPath: Uptycs.Assets.deleted_at
+ description: Date/time that the asset was unenrolled from Uptycs.
+ type: date
+ - contextPath: Uptycs.Assets.osquery_version
+ description: Current version of osquery installed on the asset.
+ type: string
+ - arguments:
+ - default: false
+ description: 'The SQL query against your Uptycs database. Queries should be written
+ for a SQLite database. For example, "SELECT * FROM processes" returns the
+ entire table named "processes". '
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: The type of query to run. Can be "global" or "realtime". "Global" returns results for the entire history
+      stored in the Uptycs database. "Realtime" runs the query on the endpoints at execution time and returns those results.
+ isArray: false
+ name: query_type
+ predefined:
+ - global
+ - realtime
+ required: true
+ secret: false
+ - default: false
+    description: 'Use this argument to run a realtime query on a particular asset. This argument will not work for global queries.'
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - default: false
+    description: 'Use this argument to run a realtime query on assets with this hostname. This argument will not work for global queries.'
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+    description: 'Use this argument to run a realtime query on assets with this string in the hostname. This argument will not work for global queries.'
+ isArray: false
+ name: host_name_like
+ required: false
+ secret: false
+ deprecated: false
+ description: Runs the specified SQL query against your Uptycs database. A list of
+ tables can be found at osquery.io/schema, or by using the query "select * from
+ information_schema.tables".
+ execution: false
+ name: uptycs-run-query
+ outputs:
+ - contextPath: Uptycs.QueryResults
+ description: Results of the executed query.
+ type: unknown
+ - arguments:
+ - default: false
+ description: Unique Uptycs alert ID, which will retrieve a specific alert. Do not pass other arguments with this argument.
+ isArray: false
+ name: alert_id
+ required: false
+ secret: false
+ - default: false
+    description: Return alerts for the asset with this asset ID. Do not use the "asset_id",
+      "host_name_is", and "host_name_like" arguments in the same command.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Return alerts by alert code.
+ isArray: false
+ name: code
+ predefined:
+ - BAD_DOMAIN
+ - CRITICAL_FILE
+ - INBOUND_NETWORK_CONNECTIONS
+ - KERNULE_MODULE_MOD
+ - MANUAL_PACKAGE_INSTALL
+ - OPEN_SOCKET_BAD_IP
+ - OSX_ATTACK
+ - OSX_CRASHES
+ - OUTBOUND_CONNECTION_TO_THREAT_IOC
+ - PRIVILEGED_ACCOUNT_ACTIVITY
+ - PRIVILEGED_REMOTE_LOGIN
+ - PRIVILEGE_ESCALATION
+ - PROCESSES_WITHOUT_ONDISK_FILE
+ - PROCESSES_STARTED_TMP
+ - REMOTE_LOGIN
+ - SERVICE_RUNNING_AS_ROOT
+ - SERVICE_RUNNING_A_SHELL
+ - SOFTWARE_CHANGE
+ - SUSPICIOUS_FILE_PERMISSIONS_CHANGE
+ - SUSPICIOUS_FILE_TRANSFERS
+ - SUSPICIOUS_TOOLS_IN_USE
+ - SYSTEM_FILE_CHANGES
+ - USER_ADDED_TO_ASSET
+ required: false
+ secret: false
+ - default: false
+    description: Return alerts for assets with this hostname. Do not use the "asset_id",
+      "host_name_is", or "host_name_like" arguments in the same command.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+    description: Return alerts for assets with this string in the hostname. Use this
+      to find a selection of assets with similar hostnames. Do not use the "asset_id",
+      "host_name_is", or "host_name_like" arguments in the same command.
+ isArray: false
+ name: host_name_like
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+    description: Maximum number of entries to return. Use -1 to return all entries (this might cause performance issues or a timeout).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+    description: The date/time from which to start the search for alerts. Format is
+ "YYYY-MM-DD HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would
+ be written as "2019-03-15 01:52:36.000".
+ isArray: false
+ name: start_window
+ required: false
+ secret: false
+ - default: false
+    description: The date/time at which to end the search for alerts. Format is "YYYY-MM-DD
+ HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would be written
+ as "2019-03-15 01:52:36.000".
+ isArray: false
+ name: end_window
+ required: false
+ secret: false
+ - default: false
+ defaultValue: 10 days
+ description: 'The date/time range of how far back to search, for example: 2 hours,
+      4 minutes, 6 months, 1 day, and so on. Default is 10 days.'
+ isArray: false
+ name: time_ago
+ required: false
+ secret: false
+ - default: false
+    description: The value for the alert type. Values vary according to the alert type. For example, a Bad IP alert
+      has the IP address as its value, and a program crash alert has the name
+      of the program that crashed.
+ isArray: false
+ name: value
+ required: false
+ secret: false
+ - default: false
+    description: The key for the alert type. This indicates what type of alert
+      identifier is stored in the 'value' column.
+ isArray: false
+ name: key
+ required: false
+ secret: false
+ deprecated: false
+  description: Returns alerts from the Uptycs database. Do not use the "asset_id",
+    "host_name_is", and "host_name_like" arguments in the same command.
+ execution: false
+ name: uptycs-get-alerts
+ outputs:
+ - contextPath: Uptycs.Alerts.description
+ description: Description of the alert.
+ type: string
+ - contextPath: Uptycs.Alerts.upt_asset_id
+ description: Uptycs asset ID.
+ type: string
+ - contextPath: Uptycs.Alerts.code
+ description: Alert code in the Uptycs database.
+ type: string
+ - contextPath: Uptycs.Alerts.severity
+ description: The severity of the alert.
+ type: string
+ - contextPath: Uptycs.Alerts.alert_time
+ description: Time that the alert was created.
+ type: date
+ - contextPath: Uptycs.Alerts.value
+    description: Specific problem that caused an alert, for example an IP address,
+      a program that crashed, a file with a known malware file hash, and so on.
+ type: string
+ - contextPath: Uptycs.Alerts.host_name
+    description: Hostname for the asset that fired the alert.
+ type: string
+ - contextPath: Uptycs.Alerts.id
+ description: Unique Uptycs ID for an alert.
+ type: string
+ - contextPath: Uptycs.Alerts.threat_indicator_id
+ description: Unique Uptycs ID that identifies the threat indicator that triggered this alert.
+ type: string
+ - contextPath: Uptycs.Alerts.threat_source_name
+ description: Name of the source of the threat indicator that triggered this alert.
+ type: string
+ - contextPath: Uptycs.Alerts.pid
+ description: PID of the process that was responsible for firing the alert.
+ type: number
+ - contextPath: Uptycs.Alerts.key
+ description: Type of indicator that is stored in the 'value' column.
+ type: string
+ - arguments:
+ - default: false
+ defaultValue: '10'
+    description: Maximum number of entries to return. Default is 10. Use -1 to return all entries (this might cause performance issues or a timeout).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of alert rules.
+ execution: false
+ name: uptycs-get-alert-rules
+ - arguments:
+ - default: false
+ defaultValue: '10'
+    description: Maximum number of entries to return. Default is 10. Use -1 to return all entries (this might cause performance issues or a timeout).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns a list of event rules.
+ execution: false
+ name: uptycs-get-event-rules
+ - arguments:
+ - default: false
+ description: Return assets with this asset ID. Do not use the "asset_id" argument, "host_name_is" argument,
+ or "host_name_like" argument in the same command.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Event code to specify which types of events to return.
+ isArray: false
+ name: code
+ predefined:
+ - BAD_DOMAIN
+ - CRITICAL_FILE
+ - EXCESSIVE_LOGINS
+ - INBOUND_NETWORK_CONNECTIONS
+ - KERNULE_MODULE_MOD
+ - LOCATION
+ - MANUAL_FILE_EDITS
+ - MANUAL_PACKAGE_INSTALL
+ - OPEN_SOCKET_BAD_IP
+ - OSX_ATTACK
+ - OSX_CRASHES
+ - OUTBOUND_NETWORK_CONNECTIONS
+ - PRIVILEGED_ACCOUNT_ACTIVITY
+ - PRIVILEGED_REMOTE_LOGIN
+ - PRIVILEGE_ESCALATION
+ - PROCESSES_WITHOUT_ONDISK_FILE
+ - PROCESSES_STARTED_TMP
+ - REMOTE_LOGIN
+ - SERVICE_RUNNING_AS_ROOT
+ - SERVICE_RUNNING_A_SHELL
+ - SOFTWARE_CHANGE
+ - SUSPICIOUS_FILE_PERMISSIONS_CHANGE
+ - SUSPICIOUS_FILE_TRANSFERS
+ - SUSPICIOUS_TOOLS_IN_USE
+ - SYSTEM_FILE_CHANGES
+ - USER_ADDED_TO_ASSET
+ required: false
+ secret: false
+ - default: false
+ description: Return assets with this hostname. Do not use the "asset_id" argument, "host_name_is" argument,
+ or "host_name_like" argument in the same command.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+ description: Return assets with this string in the hostname. Use this
+ to find a selection of assets with similar hostnames. Do not use the "asset_id" argument, "host_name_is" argument,
+ or "host_name_like" argument in the same command.
+ isArray: false
+ name: host_name_like
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Maximum number of entries to return. Default is 10. Use -1 to return all entries, which might cause performance issues or a time out.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+    description: The date/time from which to start the search for events. Format is
+ "YYYY-MM-DD HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would
+ be written as "2019-03-15 01:52:36.000".
+ isArray: false
+ name: start_window
+ required: false
+ secret: false
+ - default: false
+    description: The date/time at which to end the search for events. Format is
+ "YYYY-MM-DD HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would
+ be written as "2019-03-15 01:52:36.000".
+ isArray: false
+ name: end_window
+ required: false
+ secret: false
+ - default: false
+ defaultValue: 1 days
+ description: 'Specifies how far back you want to look. Format examples: 2 hours,
+      4 minutes, 6 months, 1 day, etc.'
+ isArray: false
+ name: time_ago
+ required: false
+ secret: false
+ - default: false
+    description: The type of event indicator that is stored in the 'value' column. Varies according to event type.
+ isArray: false
+ name: key
+ required: false
+ secret: false
+ - default: false
+    description: The value for the event type. Values vary according to the event type. For example, a Bad IP event
+      has the IP address as its value, and a program crash event has the name of the program that crashed.
+ isArray: false
+ name: value
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns events from the Uptycs database.
+ execution: false
+ name: uptycs-get-events
+ outputs:
+ - contextPath: Uptycs.Events.description
+ description: Description of the event.
+ type: string
+ - contextPath: Uptycs.Events.upt_asset_id
+ description: Uptycs asset ID.
+ type: string
+ - contextPath: Uptycs.Events.code
+ description: Event code in the Uptycs database.
+ type: string
+ - contextPath: Uptycs.Events.created_at
+ description: Date/time that the event was created.
+ type: date
+ - contextPath: Uptycs.Events.id
+ description: Uptycs event ID for the event.
+ type: string
+ - contextPath: Uptycs.Events.host_name
+ description: Hostname for the assets on which this event occurred.
+ type: string
+ - contextPath: Uptycs.Events.grouping
+ description: Group to which this event belongs.
+ type: string
+ - contextPath: Uptycs.Events.value
+    description: The value differs by event type and identifies what triggered
+      the event. For example, a Bad IP connection has the IP address here, and
+      a program crash has the name of the program that crashed.
+ type: string
+ - contextPath: Uptycs.Events.severity
+ description: The severity of the event.
+ type: string
+ - contextPath: Uptycs.Events.key
+ description: Type of indicator that is stored in the 'value' column.
+ type: string
+ - arguments:
+ - default: false
+ description: Return assets with this asset ID. Do not use the "asset_id" argument, "host_name_is" argument,
+ or "host_name_like" argument in the same command.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - default: false
+ description: Return assets with this hostname. Do not use the "asset_id" argument, "host_name_is" argument,
+ or "host_name_like" argument in the same command.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+ description: Return assets with this string in the hostname. Use this
+ to find a selection of assets with similar hostnames. Do not use the "asset_id" argument, "host_name_is" argument,
+ or "host_name_like" argument in the same command.
+ isArray: false
+ name: host_name_like
+ required: false
+ secret: false
+ - default: false
+ description: IP address to which the process opened a socket.
+ isArray: false
+ name: ip
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Maximum number of entries to return. Default is 10. Use -1 to return all entries, which might cause performance issues or a time out.
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: The date/time from which to start the search for open connections. Format is
+ "YYYY-MM-DD HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would
+ be written as "2019-03-15 01:52:36.000".
+ isArray: false
+ name: start_window
+ required: false
+ secret: false
+ - default: false
+    description: The date/time at which to end the search for open connections. Format is
+ "YYYY-MM-DD HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would
+ be written as "2019-03-15 01:52:36.000".
+ isArray: false
+ name: end_window
+ required: false
+ secret: false
+ - default: false
+ description: Exact time at which the socket was opened.
+ isArray: false
+ name: time
+ required: false
+ secret: false
+ - default: false
+ description: 'Specifies how far back to search. Format examples: 2 hours,
+      4 minutes, 6 months, 1 day, etc.'
+ isArray: false
+ name: time_ago
+ required: false
+ secret: false
+ deprecated: false
+ description: Finds processes that opened a socket.
+ execution: false
+ name: uptycs-get-process-open-sockets
+ outputs:
+ - contextPath: Uptycs.Sockets.pid
+ description: PID of the process that opened a connection to a specified IP address.
+ type: number
+ - contextPath: Uptycs.Sockets.upt_hostname
+ description: Hostname of the asset that ran the specified process.
+ type: string
+ - contextPath: Uptycs.Sockets.upt_time
+ description: Date/time when the connection was opened.
+ type: date
+ - contextPath: Uptycs.Sockets.path
+ description: File path to the process being run.
+ type: string
+ - contextPath: Uptycs.Sockets.local_address
+ description: Local IP address for the specified connection.
+ type: string
+ - contextPath: Uptycs.Sockets.remote_address
+ description: Remote IP address for the specified connection.
+ type: string
+ - contextPath: Uptycs.Sockets.local_port
+ description: Local port for the specified connection.
+ type: number
+ - contextPath: Uptycs.Sockets.remote_port
+ description: Remote port for the specified connection.
+ type: number
+ - contextPath: Uptycs.Sockets.upt_asset_id
+ description: Asset ID for the asset that ran the specified process.
+ type: string
+ - contextPath: Uptycs.Sockets.socket
+ description: Socket used to open the connection.
+ type: number
+ - contextPath: Uptycs.Sockets.family
+ description: Network protocol.
+ type: number
+ - contextPath: Uptycs.Sockets.state
+ description: State of the connection.
+ type: string
+ - contextPath: Uptycs.Sockets.protocol
+ description: Transport protocol.
+ type: number
+ - arguments:
+ - default: false
+ description: Only return assets with this asset id. Do not use arguments "asset_id"
+ and "host_name_is" at the same time.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - default: false
+ description: Hostname for asset which spawned the specified process.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+ description: pid for the process.
+ isArray: false
+ name: pid
+ required: true
+ secret: false
+ - default: false
+ description: Time that the specified process was spawned.
+ isArray: false
+ name: time
+ required: true
+ secret: false
+ deprecated: false
+ description: get information for a particular process
+ execution: false
+ name: uptycs-get-process-information
+ outputs:
+ - contextPath: Uptycs.Proc.pid
+ description: pid for the process
+ type: number
+ - contextPath: Uptycs.Proc.upt_hostname
+ description: hostname for asset which spawned the specified process
+ type: string
+ - contextPath: Uptycs.Proc.upt_asset_id
+ description: asset id for asset which spawned the specified process
+ type: string
+ - contextPath: Uptycs.Proc.parent
+ description: pid for the parent process
+ type: number
+ - contextPath: Uptycs.Proc.upt_add_time
+ description: time that the process was spawned
+ type: date
+ - contextPath: Uptycs.Proc.upt_remove_time
+ description: time that the process was removed
+ type: date
+ - contextPath: Uptycs.Proc.path
+ description: path to the process binary
+ type: string
+ - contextPath: Uptycs.Proc.name
+ description: name of the process
+ type: string
+ - contextPath: Uptycs.Proc.cmdline
+ description: complete argv of the process
+ type: string
+ - contextPath: Uptycs.Proc.pgroup
+ description: process group
+ type: number
+ - contextPath: Uptycs.Proc.cwd
+ description: process current working directory
+ type: string
+ - arguments:
+ - default: false
+ description: Only return assets with this asset_id. Do not use arguments "asset_id"
+ and "host_name_is" at the same time.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - default: false
+ description: hostname for the asset which executed these processes.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Limit the number of entries returned. Use -1 to return all entries
+ (may run slow or cause a time out).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: The pid for which all child processes will be found
+ isArray: false
+ name: parent
+ required: true
+ secret: false
+ - default: false
+ description: time at which the parent process was spawned
+ isArray: false
+ name: parent_start_time
+ required: true
+ secret: false
+ - default: false
+ description: time at which the parent process was killed, if it exists.
+ isArray: false
+ name: parent_end_time
+ required: false
+ secret: false
+ deprecated: false
+ description: get all the child processes for a given parent process
+ execution: false
+ name: uptycs-get-process-child-processes
+ outputs:
+ - contextPath: Uptycs.Children.pid
+ description: pid of a child process
+ type: number
+ - contextPath: Uptycs.Children.upt_asset_id
+ description: asset id for asset which this process was run on
+ type: string
+ - contextPath: Uptycs.Children.upt_hostname
+ description: hostname for asset which spawned the specified process
+ type: string
+ - contextPath: Uptycs.Children.upt_add_time
+ description: time that the process was spawned
+ type: date
+ - contextPath: Uptycs.Children.upt_remove_time
+ description: time that the process was removed
+ type: date
+ - contextPath: Uptycs.Children.path
+ description: path to the process binary
+ type: string
+ - contextPath: Uptycs.Children.parent
+ description: parent pid
+ type: number
+ - contextPath: Uptycs.Children.name
+ description: name of the process
+ type: string
+ - contextPath: Uptycs.Children.cmdline
+ description: complete argv for the process
+ type: string
+ - contextPath: Uptycs.Children.pgroup
+ description: process group
+ type: number
+ - contextPath: Uptycs.Children.cwd
+ description: process current working directory
+ type: string
+ - arguments:
+ - default: false
+ description: Only return assets with this asset id. Do not use arguments "asset_id",
+ "host_name_is" or "host_name_like" at the same time.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - default: false
+ description: Only return assets with this hostname. Do not use arguments "host_name_is"
+ and "host_name_like" at the same time.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+ description: Only return assets with this string in the hostname. Use this
+ to find a selection of assets with similar hostnames. Do not use arguments
+ "host_name_is" and "host_name_like" at the same time.
+ isArray: false
+ name: host_name_like
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Limit the number of entries returned. Use -1 to return all entries
+ (may run slow or cause a time out).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+    description: Beginning of window to search for processes. Format is
+ "YYYY-MM-DD HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would
+ be written as "2019-03-15 01:52:36.000".
+ isArray: false
+ name: start_window
+ required: false
+ secret: false
+ - default: false
+    description: End of window to search for processes. Format is "YYYY-MM-DD
+ HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would be written
+ as "2019-03-15 01:52:36.000".
+ isArray: false
+ name: end_window
+ required: false
+ secret: false
+ - default: false
+ description: Exact time at which the process was spawned.
+ isArray: false
+ name: time
+ required: false
+ secret: false
+ - default: false
+ description: 'Specifies how far back you want to look. Format examples: 2 hours,
+      4 minutes, 6 months, 1 day, etc.'
+ isArray: false
+ name: time_ago
+ required: false
+ secret: false
+ deprecated: false
+ description: find processes which are running or have run on a registered Uptycs
+ asset
+ execution: false
+ name: uptycs-get-processes
+ outputs:
+ - contextPath: Uptycs.Process.pid
+ description: pid for a particular process
+ type: number
+ - contextPath: Uptycs.Process.parent
+ description: pid for the parent of a particular process
+ type: number
+ - contextPath: Uptycs.Process.upt_asset_id
+ description: uptycs asset id for the asset which is running (or ran) the process
+ type: string
+ - contextPath: Uptycs.Process.upt_hostname
+ description: host name for the asset which is running (or ran) the process
+ type: string
+ - contextPath: Uptycs.Process.upt_time
+ description: time at which the process was spawned
+ type: date
+ - contextPath: Uptycs.Process.name
+ description: name of the process
+ type: string
+ - contextPath: Uptycs.Process.path
+ description: path to the process binary
+ type: string
+ - contextPath: Uptycs.Process.cmdline
+    description: complete argv for the process
+ type: string
+ - contextPath: Uptycs.Process.pgroup
+ description: process group
+ type: number
+ - contextPath: Uptycs.Process.cwd
+ description: process current working directory
+ type: string
+ - arguments:
+ - default: false
+ description: Only return assets with this asset id. Do not use arguments "asset_id",
+ "host_name_is" or "host_name_like" at the same time.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - default: false
+ description: Only return assets with this hostname. Do not use arguments "host_name_is"
+ and "host_name_like" at the same time.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+ description: Only return assets with this string in the hostname. Use this
+ to find a selection of assets with similar hostnames. Do not use arguments
+ "host_name_is" and "host_name_like" at the same time.
+ isArray: false
+ name: host_name_like
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Limit the number of entries returned. Use -1 to return all entries
+ (may run slow or cause a time out).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+    description: Beginning of window to search for open files. Format is
+ "YYYY-MM-DD HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would
+ be written as "2019-03-15 01:52:36.000".
+ isArray: false
+ name: start_window
+ required: false
+ secret: false
+ - default: false
+    description: End of window to search for open files. Format is "YYYY-MM-DD
+ HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would be written
+ as "2019-03-15 01:52:36.000".
+ isArray: false
+ name: end_window
+ required: false
+ secret: false
+ - default: false
+ description: Exact time at which the process was spawned.
+ isArray: false
+ name: time
+ required: false
+ secret: false
+ - default: false
+ description: 'Specifies how far back you want to look. Format examples: 2 hours,
+      4 minutes, 6 months, 1 day, etc.'
+ isArray: false
+ name: time_ago
+ required: false
+ secret: false
+ deprecated: false
+ description: find processes which have opened files
+ execution: false
+ name: uptycs-get-process-open-files
+ outputs:
+ - contextPath: Uptycs.Files.pid
+ description: pid for the process which opened a file
+ type: number
+ - contextPath: Uptycs.Files.fd
+ description: process specific file descriptor number
+ type: number
+ - contextPath: Uptycs.Files.upt_asset_id
+    description: Uptycs asset id for the asset on which the file was opened
+ type: string
+ - contextPath: Uptycs.Files.upt_hostname
+ description: Host name for the asset on which the file was opened
+ type: string
+ - contextPath: Uptycs.Files.upt_time
+ description: time at which the file was opened
+ type: date
+ - contextPath: Uptycs.Files.path
+ description: filesystem path of the file descriptor
+ type: string
+ - arguments:
+ - default: false
+ description: Uptycs alert id used to identify a particular alert
+ isArray: false
+ name: alert_id
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+    description: The status to set for the alert. Can be open, assigned, resolved, or closed
+ isArray: false
+ name: status
+ predefined:
+ - open
+ - assigned
+ - closed
+ - resolved
+ required: true
+ secret: false
+ deprecated: false
+  description: Set the status of an alert to open, assigned, resolved, or closed
+ execution: false
+ name: uptycs-set-alert-status
+ - arguments:
+ - default: false
+ description: Uptycs asset id for the asset that the tag should be set on
+ isArray: false
+ name: asset_id
+ required: true
+ secret: false
+ - default: false
+ description: Tag key that will be set on the asset
+ isArray: false
+ name: tag_key
+ required: true
+ secret: false
+ - default: false
+ defaultValue: ''''''
+ description: Tag value that will be set on the asset
+ isArray: false
+ name: tag_value
+ required: true
+ secret: false
+ deprecated: false
+ description: Sets a tag on a particular asset
+ execution: false
+ name: uptycs-set-asset-tag
+ - arguments:
+ - default: false
+ description: Unique Uptycs id for the user
+ isArray: false
+ name: user_id
+ required: true
+ secret: false
+ deprecated: false
+ description: get info for an Uptycs user
+ execution: false
+ name: uptycs-get-user-information
+ outputs:
+ - contextPath: Uptycs.UserInfo.id
+ description: unique Uptycs id for the user
+ type: string
+ - contextPath: Uptycs.UserInfo.name
+ description: Uptycs user's name
+ type: string
+ - contextPath: Uptycs.UserInfo.email
+ description: Uptycs user's email address
+ type: string
+ - arguments:
+ - default: false
+    description: The specific indicator you wish to search for. This can be an
+      IP address, a Bad Domain, etc., as well as any indicators you have added.
+ isArray: false
+ name: indicator
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Limit the number of entries returned. Use -1 to return all entries
+ (may run slow or cause a time out).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: get Uptycs threat indicators
+ execution: false
+ name: uptycs-get-threat-indicators
+ - arguments:
+ - default: false
+ defaultValue: '10'
+ description: Limit the number of entries returned. Use -1 to return all entries
+ (may run slow or cause a time out).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: get Uptycs threat sources
+ execution: false
+ name: uptycs-get-threat-sources
+ - arguments:
+ - default: false
+ defaultValue: '10'
+ description: Limit the number of entries returned. Use -1 to return all entries
+ (may run slow or cause a time out).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: get Uptycs threat vendors
+ execution: false
+ name: uptycs-get-threat-vendors
+ - arguments:
+ - default: false
+ description: Only return assets with this asset id. Do not use arguments "asset_id"
+ and "host_name_is" at the same time.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - default: false
+ description: Time that the specified process was spawned.
+ isArray: false
+ name: child_add_time
+ required: true
+ secret: false
+ - default: false
+ description: Hostname for asset which spawned the specified process.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+ description: pid for the parent process.
+ isArray: false
+ name: parent
+ required: true
+ secret: false
+ deprecated: false
+ description: get the parent process information for a particular child process
+ execution: false
+ name: uptycs-get-parent-information
+ outputs:
+ - contextPath: Uptycs.Parent.pid
+ description: pid of the process (this is the same number as the input argument
+ 'parent')
+ type: number
+ - contextPath: Uptycs.Parent.upt_hostname
+ description: hostname for asset which spawned the specified process
+ type: string
+ - contextPath: Uptycs.Parent.upt_asset_id
+ description: asset id for asset which spawned the specified process
+ type: string
+ - contextPath: Uptycs.Parent.parent
+ description: pid for the parent process (this is the parent of the input argument
+ 'parent')
+ type: number
+ - contextPath: Uptycs.Parent.upt_add_time
+ description: time that the process was spawned
+ type: date
+ - contextPath: Uptycs.Parent.upt_remove_time
+ description: time that the process was removed
+ type: date
+ - contextPath: Uptycs.Parent.name
+ description: name of the process
+ type: string
+ - contextPath: Uptycs.Parent.path
+ description: path to the process binary
+ type: string
+ - contextPath: Uptycs.Parent.cmdline
+ description: complete argv for the process
+ type: string
+ - contextPath: Uptycs.Parent.pgroup
+ description: process group
+ type: number
+ - contextPath: Uptycs.Parent.cwd
+ description: process current working directory
+ type: string
+ - arguments:
+ - default: false
+ defaultValue: Custom threat source pushed from Demisto
+ description: A short description for the threat source
+ isArray: false
+ name: description
+ required: true
+ secret: false
+ - default: false
+ description: entry_id for the file with threat information. This file should
+ be uploaded to demisto in the Playground War Room using the paperclip icon
+ next to the CLI.
+ isArray: false
+ name: entry_id
+ required: true
+ secret: false
+ - default: false
+ description: The name of the file being uploaded
+ isArray: false
+ name: filename
+ required: true
+ secret: false
+ - default: false
+ description: The name for the threat source
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ deprecated: false
+ description: post a new threat source to your threat sources in Uptycs
+ execution: false
+ name: uptycs-post-threat-source
+ - arguments:
+ - default: false
+ defaultValue: '10'
+ description: Limit the number of entries returned. Use -1 to return all entries
+ (may run slow or cause a time out).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: get a list of Uptycs users
+ execution: false
+ name: uptycs-get-users
+ outputs:
+ - contextPath: Uptycs.Users.id
+ description: unique Uptycs id for the user
+ type: string
+ - contextPath: Uptycs.Users.name
+ description: Uptycs user's name
+ type: string
+ - contextPath: Uptycs.Users.email
+ description: Uptycs user's email address
+ type: string
+ - contextPath: Uptycs.Users.createdAt
+ description: datetime this user was added
+ type: date
+ - contextPath: Uptycs.Users.updatedAt
+ description: last time this user was updated
+ type: date
+ - contextPath: Uptycs.Users.admin
+ description: true if this user has admin privileges, false otherwise
+ type: boolean
+ - contextPath: Uptycs.Users.active
+ description: true if this user is currently active, false otherwise
+ type: boolean
+ - arguments:
+ - default: false
+ defaultValue: '10'
+ description: Limit the number of entries returned. Use -1 to return all entries
+ (may run slow or cause a time out).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: get Uptycs asset groups
+ execution: false
+ name: uptycs-get-asset-groups
+ outputs:
+ - contextPath: Uptycs.AssetGroups.id
+ description: unique Uptycs id for a particular object group
+ type: string
+ - contextPath: Uptycs.AssetGroups.custom
+ description: true if this is a custom asset group, false otherwise
+ type: boolean
+ - contextPath: Uptycs.AssetGroups.createdAt
+ description: datetime the group was created
+ type: date
+ - contextPath: Uptycs.AssetGroups.updatedAt
+ description: datetime the group was last updated
+ type: date
+ - arguments:
+ - default: false
+    description: unique Uptycs id for the asset group whose users you want to list
+ isArray: false
+ name: asset_group_id
+ required: true
+ secret: false
+ deprecated: false
+ description: get a list of users in a particular asset group
+ execution: false
+ name: uptycs-get-user-asset-groups
+ - arguments:
+ - default: false
+ description: unique Uptycs id which identifies a specific threat indicator
+ isArray: false
+ name: indicator_id
+ required: true
+ secret: false
+ deprecated: false
+ description: retrieve information about a specific threat indicator using a unique
+ threat indicator id
+ execution: false
+ name: uptycs-get-threat-indicator
+ outputs:
+ - contextPath: Uptycs.ThreatIndicator.threat_source_id
+ description: unique Uptycs id which identifies the source of this specific threat
+ indicator
+ type: string
+ - contextPath: Uptycs.ThreatIndicator.threat_vendor_id
+ description: unique Uptycs id which identifies the vendor of this specific threat
+ source
+ type: string
+ - contextPath: Uptycs.ThreatIndicator.indicatorType
+ description: type of threat indicator (IPv4, domain,...)
+ type: string
+ - contextPath: Uptycs.ThreatIndicator.indicator
+ description: threat indicator
+ type: string
+ - contextPath: Uptycs.ThreatIndicator.createdAt
+ description: datetime the threat indicator was created
+ type: date
+ - contextPath: Uptycs.ThreatIndicator.threadId
+    description: unique id for the group of threat indicators this threat indicator
+      belongs to
+ type: string
+ - contextPath: Uptycs.ThreatIndicator.id
+ description: unique id for this particular threat indicator
+ type: string
+ - arguments:
+ - default: false
+    description: unique Uptycs id for the threat source you wish to retrieve
+ isArray: false
+ name: threat_source_id
+ required: true
+ secret: false
+ deprecated: false
+ description: retrieve information about a specific threat source
+ execution: false
+ name: uptycs-get-threat-source
+ - arguments:
+ - default: false
+ description: Only return assets with this asset id. Do not use arguments "asset_id",
+ "host_name_is" or "host_name_like" at the same time.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - default: false
+ description: Only return assets with this hostname. Do not use arguments "host_name_is"
+ and "host_name_like" at the same time.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+ description: Only return assets with this string in the hostname. Use this
+ to find a selection of assets with similar hostnames. Do not use arguments
+ "host_name_is" and "host_name_like" at the same time.
+ isArray: false
+ name: host_name_like
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Limit the number of entries returned. Use -1 to return all entries
+ (may run slow or cause a time out).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: Beginning of window to search for open connections. Format is
+ "YYYY-MM-DD HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would
+ be written as "2019-03-15 01:52:36.000".
+ isArray: false
+ name: start_window
+ required: false
+ secret: false
+ - default: false
+ description: End of window to search for open connections. Format is "YYYY-MM-DD
+ HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would be written
+ as "2019-03-15 01:52:36.000".
+ isArray: false
+ name: end_window
+ required: false
+ secret: false
+ - default: false
+ description: Exact time at which the process was spawned.
+ isArray: false
+ name: time
+ required: false
+ secret: false
+ - default: false
+    description: 'Specifies how far back you want to look. Format examples: 2 hours,
+      4 minutes, 6 months, 1 day, etc.'
+ isArray: false
+ name: time_ago
+ required: false
+ secret: false
+ deprecated: false
+ description: find process events which are running or have run on a registered
+ Uptycs asset
+ execution: false
+ name: uptycs-get-process-events
+ outputs:
+ - contextPath: Uptycs.ProcessEvents.pid
+ description: pid for a particular process
+ type: number
+ - contextPath: Uptycs.ProcessEvents.parent
+ description: pid for the parent of a particular process
+ type: number
+ - contextPath: Uptycs.ProcessEvents.upt_asset_id
+    description: Uptycs asset id for the asset which is running (or ran) the process
+ type: string
+ - contextPath: Uptycs.ProcessEvents.upt_hostname
+ description: host name for the asset which is running (or ran) the process
+ type: string
+ - contextPath: Uptycs.ProcessEvents.upt_time
+ description: time at which the process was spawned
+ type: date
+ - contextPath: Uptycs.ProcessEvents.path
+ description: path to the process binary
+ type: string
+ - contextPath: Uptycs.ProcessEvents.cmdline
+    description: complete argv for the process
+ type: string
+ - contextPath: Uptycs.ProcessEvents.cwd
+ description: process current working directory
+ type: string
+ - arguments:
+ - default: false
+ description: Only return assets with this asset id. Do not use arguments "asset_id"
+ and "host_name_is" at the same time.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - default: false
+ description: Hostname for asset which spawned the specified process.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+ description: pid for the process.
+ isArray: false
+ name: pid
+ required: true
+ secret: false
+ - default: false
+ description: Time that the specified process was spawned.
+ isArray: false
+ name: time
+ required: true
+ secret: false
+ deprecated: false
+ description: get information for a particular process event
+ execution: false
+ name: uptycs-get-process-event-information
+ outputs:
+ - contextPath: Uptycs.ProcEvent.pid
+ description: pid for the process
+ type: number
+ - contextPath: Uptycs.ProcEvent.upt_hostname
+ description: hostname for asset which spawned the specified process
+ type: string
+ - contextPath: Uptycs.ProcEvent.upt_asset_id
+ description: asset id for asset which spawned the specified process
+ type: string
+ - contextPath: Uptycs.ProcEvent.parent
+ description: pid for the parent process
+ type: number
+ - contextPath: Uptycs.ProcEvent.upt_time
+ description: time that the process was spawned
+ type: date
+ - contextPath: Uptycs.ProcEvent.path
+ description: path to the process binary
+ type: string
+ - contextPath: Uptycs.ProcEvent.cmdline
+    description: complete argv for the process
+ type: string
+ - contextPath: Uptycs.ProcEvent.cwd
+ description: process current working directory
+ type: string
+ - arguments:
+ - default: false
+ description: Only return assets with this asset id. Do not use arguments "asset_id",
+ "host_name_is" or "host_name_like" at the same time.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - default: false
+ description: Only return assets with this hostname. Do not use arguments "host_name_is"
+ and "host_name_like" at the same time.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+ description: Only return assets with this string in the hostname. Use this
+ to find a selection of assets with similar hostnames. Do not use arguments
+ "host_name_is" and "host_name_like" at the same time.
+ isArray: false
+ name: host_name_like
+ required: false
+ secret: false
+ - default: false
+ description: IP address which process opened a socket to.
+ isArray: false
+ name: ip
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '10'
+ description: Limit the number of entries returned. Use -1 to return all entries
+ (may run slow or cause a time out).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ - default: false
+ description: Beginning of window to search for open sockets. Format is "YYYY-MM-DD
+ HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would be written
+ as "2019-03-15 01:52:36.000".
+ isArray: false
+ name: start_window
+ required: false
+ secret: false
+ - default: false
+    description: 'End of window to search for open sockets. Format is "YYYY-MM-DD
+ HH:MM:SS.000", for example, March 15, 2019 at 1:52:36 am would be written
+ as "2019-03-15 01:52:36.000".'
+ isArray: false
+ name: end_window
+ required: false
+ secret: false
+ - default: false
+ description: Exact time at which the socket was opened.
+ isArray: false
+ name: time
+ required: false
+ secret: false
+ - default: false
+    description: 'Specifies how far back you want to look. Format examples: 2 hours,
+      4 minutes, 6 months, 1 day, etc.'
+ isArray: false
+ name: time_ago
+ required: false
+ secret: false
+ deprecated: false
+ description: find processes which opened a socket
+ execution: false
+ name: uptycs-get-socket-events
+ outputs:
+ - contextPath: Uptycs.SocketEvents.pid
+ description: pid of process which opened a connection to a specified IP
+ type: number
+ - contextPath: Uptycs.SocketEvents.upt_hostname
+ description: hostname of the asset which ran the specified process
+ type: string
+ - contextPath: Uptycs.SocketEvents.upt_time
+ description: time at which the connection was opened
+ type: date
+ - contextPath: Uptycs.SocketEvents.path
+ description: file path to the process being run
+ type: string
+ - contextPath: Uptycs.SocketEvents.local_address
+ description: local IP for specified connection
+ type: string
+ - contextPath: Uptycs.SocketEvents.remote_address
+ description: remote IP for specified connection
+ type: string
+ - contextPath: Uptycs.SocketEvents.local_port
+ description: local port for specified connection
+ type: number
+ - contextPath: Uptycs.SocketEvents.remote_port
+ description: remote port for specified connection
+ type: number
+ - contextPath: Uptycs.SocketEvents.upt_asset_id
+ description: asset id for asset which ran the specified process
+ type: string
+ - contextPath: Uptycs.SocketEvents.socket
+ description: socket used to open the connection
+ type: number
+ - contextPath: Uptycs.SocketEvents.family
+ description: network protocol
+ type: number
+ - contextPath: Uptycs.SocketEvents.action
+ description: type of socket event (accept, connect, or bind)
+ type: string
+ - contextPath: Uptycs.SocketEvents.protocol
+ description: transfer protocol
+ type: number
+ - arguments:
+ - default: false
+ description: Only return assets with this asset id. Do not use arguments "asset_id"
+ and "host_name_is" at the same time.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - default: false
+ description: Time that the specified process was spawned.
+ isArray: false
+ name: child_add_time
+ required: true
+ secret: false
+ - default: false
+ description: Hostname for asset which spawned the specified process.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+ description: pid for the parent process.
+ isArray: false
+ name: parent
+ required: true
+ secret: false
+ deprecated: false
+ description: find information for parent process events which are running or have
+    run on a registered Uptycs asset
+ execution: false
+ name: uptycs-get-parent-event-information
+ outputs:
+ - contextPath: Uptycs.ParentEvent.pid
+ description: pid of the process (this is the same number as the input argument
+ 'parent')
+ type: number
+ - contextPath: Uptycs.ParentEvent.upt_hostname
+ description: hostname for asset which spawned the specified process
+ type: string
+ - contextPath: Uptycs.ParentEvent.upt_asset_id
+ description: asset id for asset which spawned the specified process
+ type: string
+ - contextPath: Uptycs.ParentEvent.parent
+ description: pid for the parent process (this is the parent of the input argument
+ 'parent')
+ type: number
+ - contextPath: Uptycs.ParentEvent.upt_time
+ description: time that the process was spawned
+ type: date
+ - contextPath: Uptycs.ParentEvent.path
+ description: path to the parent process binary
+ type: string
+ - contextPath: Uptycs.ParentEvent.cmdline
+ description: complete argv for the parent process
+ type: string
+ - contextPath: Uptycs.ParentEvent.cwd
+    description: parent process current working directory
+ type: string
+ - arguments:
+ - default: false
+ description: Only return assets with this asset id. Do not use arguments "asset_id"
+ and "host_name_is" at the same time.
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - default: false
+ description: Hostname for asset which spawned the specified process.
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+ description: IP address which process opened a socket to.
+ isArray: false
+ name: ip
+ required: true
+ secret: false
+ - default: false
+ description: Time that the specified connection was opened.
+ isArray: false
+ name: time
+ required: true
+ secret: false
+ deprecated: false
+ description: get information for a particular socket event
+ execution: false
+ name: uptycs-get-socket-event-information
+ outputs:
+ - contextPath: Uptycs.SocketEvents.pid
+ description: pid of process which opened a connection to a specified IP
+ type: number
+ - contextPath: Uptycs.SocketEvents.upt_hostname
+ description: hostname of the asset which ran the specified process
+ type: string
+ - contextPath: Uptycs.SocketEvents.upt_time
+ description: time at which the connection was opened
+ type: date
+ - contextPath: Uptycs.SocketEvents.path
+ description: file path to the process being run
+ type: string
+ - contextPath: Uptycs.SocketEvents.local_address
+ description: local IP for specified connection
+ type: string
+ - contextPath: Uptycs.SocketEvents.remote_address
+ description: remote IP for specified connection
+ type: string
+ - contextPath: Uptycs.SocketEvents.local_port
+ description: local port for specified connection
+ type: number
+ - contextPath: Uptycs.SocketEvents.remote_port
+ description: remote port for specified connection
+ type: number
+ - contextPath: Uptycs.SocketEvents.upt_asset_id
+ description: asset id for asset which ran the specified process
+ type: string
+ - contextPath: Uptycs.SocketEvents.action
+ description: type of socket event (accept, connect, or bind)
+ type: string
+ - contextPath: Uptycs.SocketEvents.family
+ description: network protocol
+ type: number
+ - contextPath: Uptycs.SocketEvents.socket
+ description: socket used to open the connection
+ type: number
+ - contextPath: Uptycs.SocketEvents.protocol
+ description: transfer protocol
+ type: number
+ - arguments:
+ - default: false
+ description: Uptycs asset id for the asset you are looking for.
+ isArray: false
+ name: asset_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieve a list of tags for a particular asset
+ execution: false
+ name: uptycs-get-asset-tags
+ - arguments:
+ - default: false
+ description: Only return the query with this unique id
+ isArray: false
+ name: query_id
+ required: false
+ secret: false
+ - default: false
+ description: Only return the query with this name
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieve a saved query or list of all saved queries
+ execution: false
+ name: uptycs-get-saved-queries
+ outputs:
+ - contextPath: Uptycs.SavedQueries.id
+    description: unique Uptycs query id
+ type: string
+ - contextPath: Uptycs.SavedQueries.name
+ description: name of query
+ type: string
+ - arguments:
+ - default: false
+ description: The name of the query you want to run
+ isArray: false
+ name: name
+ required: false
+ secret: false
+ - default: false
+ description: The unique id for the query you want to run
+ isArray: false
+ name: query_id
+ required: false
+ secret: false
+ - default: false
+    description: '*realtime queries only* Use this argument to run a realtime
+      query on a particular asset.'
+ isArray: false
+ name: asset_id
+ required: false
+ secret: false
+ - default: false
+ description: '*realtime queries only* Only return assets with this hostname'
+ isArray: false
+ name: host_name_is
+ required: false
+ secret: false
+ - default: false
+    description: '*realtime queries only* Only return assets with this string
+      in the hostname.'
+ isArray: false
+ name: host_name_like
+ required: false
+ secret: false
+ - default: false
+    description: If your saved query has variable arguments, supply them here as
+      JSON, where each key is the name of a variable argument and its value is the
+      value to use for this particular query.
+ isArray: false
+ name: variable_arguments
+ required: false
+ secret: false
+ deprecated: false
+ description: Run a saved query
+ execution: false
+ name: uptycs-run-saved-query
+ outputs:
+ - contextPath: Uptycs.RunQuery
+ description: Results of executed query
+ type: Unknown
+ - arguments:
+ - default: false
+ defaultValue: '""'
+ description: A short description for the query
+ isArray: false
+ name: description
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: The type of query (global or realtime).
+ isArray: false
+ name: execution_type
+ predefined:
+ - global
+ - realtime
+ required: true
+ secret: false
+ - default: false
+ description: The name for the query. This should be unique to this query.
+ isArray: false
+ name: name
+ required: true
+ secret: false
+ - default: false
+ description: The query which will be saved
+ isArray: false
+ name: query
+ required: true
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ defaultValue: default
+ description: Type of issue the query addresses.
+ isArray: false
+ name: type
+ predefined:
+ - default
+ - compliance
+ - hardware
+ - incident
+ - system
+ - vulnerability
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '""'
+ description: Add the query to a group of queries.
+ isArray: false
+ name: grouping
+ required: false
+ secret: false
+ deprecated: false
+ description: Save a query to the Uptycs DB
+ execution: false
+ name: uptycs-post-saved-query
+ outputs:
+ - contextPath: Uptycs.PostedQuery.name
+ description: name of query
+ type: string
+ dockerimage: demisto/uptycs:1.0.0.1126
+ isfetch: true
+ runonce: true
+ script: '-'
+ type: python
diff --git a/Integrations/Uptycs/Uptycs_description.md b/Integrations/Uptycs/Uptycs_description.md
new file mode 100644
index 000000000000..979e08dc2469
--- /dev/null
+++ b/Integrations/Uptycs/Uptycs_description.md
@@ -0,0 +1,7 @@
+## How to get an API Key and API Secret
+In order to create an instance of the integration, you need to download a user API key and secret from your Uptycs account.
+
+1. Go to your Uptycs environment.
+2. Navigate to **Configuration > Users**.
+3. In the **User API key** section, click **Download**.
+ The downloaded file will have all the information necessary to create the instance.
diff --git a/Integrations/Uptycs/Uptycs_image.png b/Integrations/Uptycs/Uptycs_image.png
new file mode 100644
index 000000000000..2cc3e36412a6
Binary files /dev/null and b/Integrations/Uptycs/Uptycs_image.png differ
diff --git a/Integrations/UrlScan/URLScan.py b/Integrations/UrlScan/URLScan.py
new file mode 100644
index 000000000000..38c5da8d4d16
--- /dev/null
+++ b/Integrations/UrlScan/URLScan.py
@@ -0,0 +1,567 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+'''IMPORTS'''
+import requests
+import collections
+from urlparse import urlparse
+from requests.utils import quote # type: ignore
+import time
+
+""" POLLING FUNCTIONS"""
+try:
+ from Queue import Queue
+except ImportError:
+ from queue import Queue # type: ignore
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+
+'''GLOBAL VARS'''
+BASE_URL = 'https://urlscan.io/api/v1/'
+APIKEY = demisto.params().get('apikey')
+THRESHOLD = int(demisto.params().get('url_threshold', '1'))
+USE_SSL = not demisto.params().get('insecure', False)
+
+
+'''HELPER FUNCTIONS'''
+
+
+def http_request(method, url_suffix, json=None, wait=0, retries=0):
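+    # On HTTP 429 the call sleeps `wait` seconds and retries itself up to `retries` times
+    # before reporting a rate-limit error; other non-200 responses error out immediately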
+ if method == 'GET':
+ headers = {} # type: Dict[str, str]
+ elif method == 'POST':
+ headers = {
+ 'API-Key': APIKEY,
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json'
+ }
+ r = requests.request(
+ method,
+ BASE_URL + url_suffix,
+ data=json,
+ headers=headers,
+ verify=USE_SSL
+ )
+ if r.status_code != 200:
+ if r.status_code == 429:
+ if retries <= 0:
+ # Error in API call to URLScan.io [429] - Too Many Requests
+                return_error('API rate limit reached. Use the retries and wait arguments when submitting multiple URLs')
+ else:
+ time.sleep(wait)
+ return http_request(method, url_suffix, json, wait, retries - 1)
+ return_error('Error in API call to URLScan.io [%d] - %s' % (r.status_code, r.reason))
+
+ return r.json()
+
+
+# Allows nested keys to be accessible
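+# e.g. h = makehash(); h['URL']['Malicious']['Vendor'] = 'x' creates each nesting level on demand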
+def makehash():
+ return collections.defaultdict(makehash)
+
+
+def is_valid_ip(s):
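+    # Dotted-quad IPv4 check only; anything else (including IPv6) is treated as a URL or hash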
+ a = s.split('.')
+ if len(a) != 4:
+ return False
+ for x in a:
+ if not x.isdigit():
+ return False
+ i = int(x)
+ if i < 0 or i > 255:
+ return False
+ return True
+
+
+def get_result_page():
+ uuid = demisto.args().get('uuid')
+ uri = BASE_URL + 'result/{}'.format(uuid)
+ return uri
+
+
+def polling(uuid):
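+    # Polls the result endpoint every 5 seconds until it responds with HTTP 200
+    # or the user-supplied timeout (default 60 seconds) elapses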
+ if demisto.args().get('timeout') is None:
+ TIMEOUT = 60
+ else:
+ TIMEOUT = demisto.args().get('timeout')
+
+ uri = BASE_URL + 'result/{}'.format(uuid)
+
+ ready = poll(
+ lambda: requests.get(uri, verify=USE_SSL).status_code == 200,
+ step=5,
+ ignore_exceptions=(requests.exceptions.ConnectionError),
+ timeout=int(TIMEOUT)
+ )
+ return ready
+
+
+def poll_uri():
+ uri = demisto.args().get('uri')
+ demisto.results(requests.get(uri, verify=USE_SSL).status_code)
+
+
+def step_constant(step):
+ return step
+
+
+def is_truthy(val):
+ return bool(val)
+
+
+def poll(target, step, args=(), kwargs=None, timeout=None, max_tries=None, check_success=is_truthy,
+ step_function=step_constant, ignore_exceptions=(), poll_forever=False, collect_values=None, *a, **k):
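+    # Generic polling loop: call `target` until check_success(result) is truthy, sleeping
+    # `step` seconds between tries (step_function may adjust the interval); exceptions
+    # listed in ignore_exceptions are swallowed and treated as "not ready yet"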
+
+ kwargs = kwargs or dict()
+ values = collect_values or Queue()
+
+ max_time = time.time() + timeout if timeout else None
+ tries = 0
+
+ last_item = None
+ while True:
+
+ try:
+ val = target(*args, **kwargs)
+ last_item = val
+ except ignore_exceptions as e:
+ last_item = e
+ else:
+ if check_success(val):
+ return val
+
+ values.put(last_item)
+ tries += 1
+        if max_time is not None and time.time() >= max_time:
+            demisto.results('The operation timed out. Please try again with a longer timeout period.')
+            # stop polling once the deadline has passed; callers treat a non-True result as failure
+            return False
+        time.sleep(step)
+ step = step_function(step)
+
+
+'''MAIN FUNCTIONS'''
+
+
+def urlscan_submit_url():
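+    # Builds the submission payload (URL plus public/private visibility), POSTs it to
+    # scan/ and returns the uuid of the newly created scan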
+ submission_dict = {}
+ if demisto.args().get('public'):
+ if demisto.args().get('public') == 'public':
+ submission_dict['public'] = 'on'
+ else:
+ if demisto.params().get('is_public') is True:
+ submission_dict['public'] = 'on'
+
+ submission_dict['url'] = demisto.args().get('url')
+ sub_json = json.dumps(submission_dict)
+ wait = int(demisto.args().get('wait', 5))
+ retries = int(demisto.args().get('retries', 0))
+ r = http_request('POST', 'scan/', sub_json, wait, retries)
+ uuid = r['uuid']
+ return uuid
+
+
+def format_results(uuid):
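+    # Fetches the scan report (retrying until its lists section is populated) and fans it
+    # out into human-readable tables, entry context (URLScan, URL, DBotScore, File) and an
+    # optional screenshot entry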
+ # Scan Lists sometimes returns empty
+ scan_lists = None
+ while scan_lists is None:
+ try:
+ response = urlscan_submit_request(uuid)
+ scan_data = response['data']
+ scan_lists = response['lists']
+ scan_tasks = response['task']
+ scan_page = response['page']
+ scan_stats = response['stats']
+ scan_meta = response['meta']
+ url_query = scan_tasks['url']
+ scan_verdicts = response.get('verdicts')
+ except Exception:
+ pass
+
+ ec = makehash()
+ dbot_score = makehash()
+ human_readable = makehash()
+ cont = makehash()
+ file_context = makehash()
+ url_cont = makehash()
+
+    LIMIT = int(demisto.args().get('limit'))
+    # initialized unconditionally so the certificate table check further down is safe
+    # even when the report contains no certificates
+    cert_md = []
+    cert_ec = []
+    CERT_HEADERS = ['Subject Name', 'Issuer', 'Validity']
+    if 'certificates' in scan_lists:
+        certs = scan_lists['certificates']
+        for x in certs[:LIMIT]:
+            info, ec_info = cert_format(x)
+            cert_md.append(info)
+            cert_ec.append(ec_info)
+ cont['Certificates'] = cert_ec
+ url_cont['Data'] = url_query
+ if 'urls' in scan_lists:
+ url_cont['Data'] = demisto.args().get('url')
+ cont['URL'] = demisto.args().get('url')
+ # effective url of the submitted url
+ human_readable['Effective URL'] = response['page']['url']
+ cont['EffectiveURL'] = response['page']['url']
+ if 'uuid' in scan_tasks:
+ ec['URLScan']['UUID'] = scan_tasks['uuid']
+ if 'ips' in scan_lists:
+ ip_asn_MD = []
+ ip_ec_info = makehash()
+ ip_list = scan_lists['ips']
+ asn_list = scan_lists['asns']
+
+ ip_asn_dict = dict(zip(ip_list, asn_list))
+ i = 1
+ for k in ip_asn_dict:
+ if i - 1 == LIMIT:
+ break
+ v = ip_asn_dict[k]
+ ip_info = {
+ 'Count': i,
+ 'IP': k,
+ 'ASN': v
+ }
+ ip_ec_info[i]['IP'] = k
+ ip_ec_info[i]['ASN'] = v
+ ip_asn_MD.append(ip_info)
+ i = i + 1
+ cont['RelatedIPs'] = ip_ec_info
+ IP_HEADERS = ['Count', 'IP', 'ASN']
+ # add redirected URLs
+ if 'requests' in scan_data:
+ redirected_urls = []
+ for o in scan_data['requests']:
+ if 'redirectResponse' in o['request']:
+ if 'url' in o['request']['redirectResponse']:
+ url = o['request']['redirectResponse']['url']
+ redirected_urls.append(url)
+ cont['RedirectedURLs'] = redirected_urls
+ if 'countries' in scan_lists:
+ countries = scan_lists['countries']
+ human_readable['Associated Countries'] = countries
+ cont['Country'] = countries
+    if 'hashes' in scan_lists and None not in scan_lists['hashes']:
+ hashes = scan_lists['hashes']
+ cont['RelatedHash'] = hashes
+ human_readable['Related Hashes'] = hashes
+ if 'domains' in scan_lists:
+ subdomains = scan_lists['domains']
+ cont['Subdomains'] = subdomains
+ human_readable['Subdomains'] = subdomains
+ if 'asn' in scan_page:
+ cont['ASN'] = scan_page['asn']
+ if 'overall' in scan_verdicts:
+ human_readable['Malicious URLs Found'] = scan_stats['malicious']
+ if scan_verdicts['overall'].get('malicious'):
+ human_readable['Malicious'] = 'Malicious'
+ url_cont['Data'] = demisto.args().get('url')
+ cont['Data'] = demisto.args().get('url')
+ dbot_score['Indicator'] = demisto.args().get('url')
+ url_cont['Malicious']['Vendor'] = 'urlscan.io'
+ cont['Malicious']['Vendor'] = 'urlscan.io'
+ dbot_score['Vendor'] = 'urlscan.io'
+ url_cont['Malicious']['Description'] = 'Match found in Urlscan.io database'
+ cont['Malicious']['Description'] = 'Match found in Urlscan.io database'
+ dbot_score['Score'] = 3
+ dbot_score['Type'] = 'url'
+ else:
+ dbot_score['Vendor'] = 'urlscan.io'
+ dbot_score['Indicator'] = demisto.args().get('url')
+ dbot_score['Score'] = 0
+ dbot_score['Type'] = 'url'
+ human_readable['Malicious'] = 'Benign'
+    if scan_meta['processors']['gsb']['data'].get('matches'):
+ mal_url_list = []
+ matches = scan_meta['processors']['gsb']['data']['matches']
+ for match in matches:
+ mal_url = match['threat']['url']
+ mal_url_list.append(mal_url)
+ human_readable['Related Malicious URLs'] = mal_url_list
+ if len(scan_meta['processors']['download']['data']) > 0:
+ meta_data = scan_meta['processors']['download']['data'][0]
+ sha256 = meta_data['sha256']
+ filename = meta_data['filename']
+ filesize = meta_data['filesize']
+ filetype = meta_data['mimeType']
+ human_readable['File']['Hash'] = sha256
+ cont['File']['Hash'] = sha256
+ file_context['SHA256'] = sha256
+ human_readable['File']['Name'] = filename
+ cont['File']['FileName'] = filename
+ file_context['Name'] = filename
+ human_readable['File']['Size'] = filesize
+ cont['File']['FileSize'] = filesize
+ file_context['Size'] = filesize
+ human_readable['File']['Type'] = filetype
+ cont['File']['FileType'] = filetype
+ file_context['Type'] = filetype
+ file_context['Hostname'] = demisto.args().get('url')
+
+ ec = {
+ 'URLScan(val.URL && val.URL == obj.URL)': cont,
+ 'DBotScore': dbot_score,
+ 'URL': url_cont,
+ outputPaths['file']: file_context
+ }
+
+ if 'screenshotURL' in scan_tasks:
+ human_readable['Screenshot'] = scan_tasks['screenshotURL']
+ screen_path = scan_tasks['screenshotURL']
+ response_img = requests.request("GET", screen_path, verify=USE_SSL)
+ stored_img = fileResult('screenshot.png', response_img.content)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': response,
+ 'HumanReadable': tableToMarkdown('{} - Scan Results'.format(url_query), human_readable),
+ 'EntryContext': ec
+ })
+
+ if len(cert_md) > 0:
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': tableToMarkdown('Certificates', cert_md, CERT_HEADERS),
+ 'HumanReadable': tableToMarkdown('Certificates', cert_md, CERT_HEADERS)
+ })
+ if 'ips' in scan_lists:
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': tableToMarkdown('Related IPs and ASNs', ip_asn_MD, IP_HEADERS),
+ 'HumanReadable': tableToMarkdown('Related IPs and ASNs', ip_asn_MD, IP_HEADERS)
+ })
+
+ if 'screenshotURL' in scan_tasks:
+ demisto.results({
+ 'Type': entryTypes['image'],
+ 'ContentsFormat': formats['text'],
+ 'File': stored_img['File'],
+ 'FileID': stored_img['FileID'],
+ 'Contents': ''
+ })
+
+
+def urlscan_submit_request(uuid):
+ response = http_request('GET', 'result/{}'.format(uuid))
+ return response
+
+
+def get_urlscan_submit_results_polling(uuid):
+ ready = polling(uuid)
+ if ready is True:
+ format_results(uuid)
+
+
+def urlscan_submit_command():
+ get_urlscan_submit_results_polling(urlscan_submit_url())
+
+
+def urlscan_search(search_type, query):
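+    # Queries the search endpoint as search/?q=<search_type>:"<query>",
+    # e.g. search/?q=page.url:"example.com"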
+ r = http_request('GET', 'search/?q=' + search_type + ':"' + query + '"')
+ return r
+
+
+def cert_format(x):
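+    # Converts the certificate's epoch validity timestamps to readable dates; returns one
+    # dict for the markdown table and one for the entry context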
+ valid_to = datetime.fromtimestamp(x['validTo']).strftime('%Y-%m-%d %H:%M:%S')
+ valid_from = datetime.fromtimestamp(x['validFrom']).strftime('%Y-%m-%d %H:%M:%S')
+ info = {
+ 'Subject Name': x['subjectName'],
+ 'Issuer': x['issuer'],
+        'Validity': "{} - {}".format(valid_from, valid_to)
+ }
+ ec_info = {
+ 'SubjectName': x['subjectName'],
+ 'Issuer': x['issuer'],
+ 'ValidFrom': valid_from,
+ 'ValidTo': valid_to
+ }
+ return info, ec_info
+
+
+def urlscan_search_command():
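+    # Infers the search type from the raw query (IPv4 address, SHA256 hash, or page.url),
+    # URL-encodes it for Elasticsearch and renders up to `limit` results, skipping
+    # consecutive duplicate URLs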
+ LIMIT = int(demisto.args().get('limit'))
+    HUMAN_READABLE_HEADERS = ['URL', 'Domain', 'IP', 'ASN', 'Scan ID', 'Scan Date']
+ raw_query = demisto.args().get('searchParameter', '')
+    if is_valid_ip(raw_query):
+        search_type = 'ip'
+    else:
+        # Parsing query to see if it's a url
+        parsed = urlparse(raw_query)
+        # Checks to see if Netloc is present. If it's not a url, Netloc will not exist
+        if parsed[1] == '' and len(raw_query) == 64:
+            search_type = 'hash'
+        else:
+            search_type = 'page.url'
+
+ # Making the query string safe for Elastic Search
+ query = quote(raw_query, safe='')
+
+ r = urlscan_search(search_type, query)
+
+ if r['total'] == 0:
+ demisto.results('No results found for {}'.format(raw_query))
+ return
+ if r['total'] > 0:
+ demisto.results('{} results found for {}'.format(r['total'], raw_query))
+
+ # Opening empty string for url comparison
+ last_url = ''
+ hr_md = []
+ cont_array = []
+ ip_array = []
+ dom_array = []
+ url_array = []
+
+ for res in r['results'][:LIMIT]:
+ ec = makehash()
+ cont = makehash()
+ url_cont = makehash()
+ ip_cont = makehash()
+ dom_cont = makehash()
+ file_context = makehash()
+ res_dict = res
+ res_tasks = res_dict['task']
+ res_page = res_dict['page']
+
+ if last_url == res_tasks['url']:
+ continue
+
+ human_readable = makehash()
+
+ if 'url' in res_tasks:
+ url = res_tasks['url']
+ human_readable['URL'] = url
+ cont['URL'] = url
+ url_cont['Data'] = url
+ if 'domain' in res_page:
+ domain = res_page['domain']
+ human_readable['Domain'] = domain
+ cont['Domain'] = domain
+ dom_cont['Name'] = domain
+ if 'asn' in res_page:
+ asn = res_page['asn']
+ cont['ASN'] = asn
+ ip_cont['ASN'] = asn
+ human_readable['ASN'] = asn
+ if 'ip' in res_page:
+ ip = res_page['ip']
+ cont['IP'] = ip
+ ip_cont['Address'] = ip
+ human_readable['IP'] = ip
+ if '_id' in res_dict:
+ scanID = res_dict['_id']
+ cont['ScanID'] = scanID
+ human_readable['Scan ID'] = scanID
+ if 'time' in res_tasks:
+ scanDate = res_tasks['time']
+ cont['ScanDate'] = scanDate
+ human_readable['Scan Date'] = scanDate
+ if 'files' in res_dict:
+            HUMAN_READABLE_HEADERS = ['URL', 'Domain', 'IP', 'ASN', 'Scan ID', 'Scan Date', 'File']
+ files = res_dict['files'][0]
+ sha256 = files['sha256']
+ filename = files['filename']
+ filesize = files['filesize']
+ filetype = files['mimeType']
+ url = res_tasks['url']
+ human_readable['File']['Hash'] = sha256
+ cont['Hash'] = sha256
+ file_context['SHA256'] = sha256
+ human_readable['File']['Name'] = filename
+ cont['FileName'] = filename
+            file_context['Name'] = filename
+ human_readable['File']['Size'] = filesize
+ cont['FileSize'] = filesize
+ file_context['Size'] = filesize
+ human_readable['File']['Type'] = filetype
+ cont['FileType'] = filetype
+            file_context['Type'] = filetype
+            file_context['Hostname'] = url
+
+ ec[outputPaths['file']] = file_context
+ hr_md.append(human_readable)
+ cont_array.append(cont)
+ ip_array.append(ip_cont)
+ url_array.append(url_cont)
+ dom_array.append(dom_cont)
+
+ # Storing last url in memory for comparison on next loop
+ last_url = url
+
+ ec = ({
+ 'URLScan(val.URL && val.URL == obj.URL)': cont_array,
+ 'URL': url_array,
+ 'IP': ip_array,
+ 'Domain': dom_array
+ })
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': r,
+        'HumanReadable': tableToMarkdown('URLScan.io query results for {}'.format(raw_query), hr_md,
+                                         HUMAN_READABLE_HEADERS, removeNull=True),
+ 'EntryContext': ec
+ })
+
+
+def format_http_transaction_list():
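+    # Re-polls the result until the lists section is populated, then returns up to
+    # `limit` (capped at 100) HTTP transaction URLs for the scanned page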
+ url = demisto.args().get('url')
+ uuid = demisto.args().get('uuid')
+
+ # Scan Lists sometimes returns empty
+ scan_lists = {} # type: dict
+ while not scan_lists:
+ response = urlscan_submit_request(uuid)
+ scan_lists = response.get('lists', {})
+
+ limit = int(demisto.args().get('limit'))
+ metadata = None
+ if limit > 100:
+ limit = 100
+ metadata = "Limited the data to the first 100 http transactions"
+
+ url_list = scan_lists.get('urls', [])[:limit]
+
+ context = {
+ 'URL': url,
+ 'httpTransaction': url_list
+ }
+
+ ec = {
+ 'URLScan(val.URL && val.URL == obj.URL)': context,
+ }
+
+ human_readable = tableToMarkdown('{} - http transaction list'.format(url), url_list, ['URLs'], metadata=metadata)
+ return_outputs(human_readable, ec, response)
+
+
+"""COMMAND FUNCTIONS"""
+try:
+ handle_proxy()
+ if demisto.command() == 'test-module':
+ search_type = 'ip'
+ query = '8.8.8.8'
+ urlscan_search(search_type, query)
+ demisto.results('ok')
+ if demisto.command() in {'urlscan-submit', 'url'}:
+ urlscan_submit_command()
+ if demisto.command() == 'urlscan-search':
+ urlscan_search_command()
+ if demisto.command() == 'urlscan-submit-url-command':
+ demisto.results(urlscan_submit_url())
+ if demisto.command() == 'urlscan-get-http-transaction-list':
+ format_http_transaction_list()
+ if demisto.command() == 'urlscan-get-result-page':
+ demisto.results(get_result_page())
+ if demisto.command() == 'urlscan-poll-uri':
+ poll_uri()
+
+
+except Exception as e:
+ LOG(e)
+ LOG.print_log(False)
+ return_error(e.message)
diff --git a/Integrations/UrlScan/URLScan.yml b/Integrations/UrlScan/URLScan.yml
new file mode 100644
index 000000000000..688f720c771d
--- /dev/null
+++ b/Integrations/UrlScan/URLScan.yml
@@ -0,0 +1,314 @@
+commonfields:
+ id: urlscan.io
+ version: -1
+name: urlscan.io
+display: urlscan.io
+category: Data Enrichment & Threat Intelligence
+description: Urlscan.io reputation
+defaultEnabled: false
+configuration:
+- display: API Key (only required for scanning URLs)
+ name: apikey
+ defaultvalue: ""
+ type: 4
+ required: false
+- display: Trust any certificate (not secure)
+ name: insecure
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: URL Threshold. Minimum number of positive results from urlscan.io to consider
+ the URL malicious.
+ name: url_threshold
+ defaultvalue: "1"
+ type: 0
+ required: false
+- display: Enable public submissions by default.
+ name: is_public
+ defaultvalue: true
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ subtype: python2
+ commands:
+ - name: urlscan-search
+ arguments:
+ - name: searchParameter
+ required: true
+ default: true
+ description: A parameter for which to search (as a string), for example an IP address, file name, SHA256 hash,
+ URL, domain, and so on.
+ - name: limit
+ description: The maximum number of results to return. Default is 20.
+ defaultValue: "20"
+ outputs:
+ - contextPath: URLScan.URL
+ description: The URL.
+ type: string
+ - contextPath: URLScan.Domain
+ description: The domain of the scanned URL.
+ type: string
+ - contextPath: URLScan.ASN
+ description: The ASN of the scanned URL.
+ type: string
+ - contextPath: URLScan.IP
+ description: The IP address of the scanned URL.
+ type: string
+ - contextPath: URLScan.ScanID
+ description: The scan ID of the scanned URL.
+ type: string
+ - contextPath: URLScan.ScanDate
+ description: The date that the URL was last scanned.
+ type: string
+ - contextPath: URLScan.Hash
+ description: The SHA256 hash of the scanned file.
+ type: string
+ - contextPath: URLScan.FileName
+ description: The file name of the scanned file.
+ type: string
+ - contextPath: URLScan.FileSize
+ description: The size of the scanned file.
+ type: number
+ - contextPath: URLScan.FileType
+ description: File type of the file scanned
+ type: string
+ description: Search for an indicator that is related to former urlscan.io scans.
+ - name: urlscan-submit
+ deprecated: true
+ arguments:
+ - name: url
+ required: true
+ default: true
+ description: The URL to scan.
+ - name: timeout
+ description: The amount of time (in seconds) to wait for the scan ID result before timeout. Default is 60.
+ defaultValue: "60"
+ - name: public
+ description: The submission type. Can be "public" or "private".
+ - name: limit
+      description: The maximum number of certificates, IPs, and ASNs to return.
+ defaultValue: "20"
+ outputs:
+ - contextPath: URL.Data
+ description: The URL submitted for scanning.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the reason that the vendor made the decision.
+ type: string
+ - contextPath: URLScan.RelatedIPs
+      description: IP addresses related to the scanned URL.
+ type: string
+ - contextPath: URLScan.RelatedASNs
+ description: ASNs related to the scanned URL.
+ type: string
+ - contextPath: URLScan.Countries
+ description: Countries associated with the scanned URL.
+ type: string
+ - contextPath: URLScan.RelatedHash
+ description: File hashes related to the scanned URL.
+ type: string
+ - contextPath: URLScan.Subdomains
+ description: Subdomains related to the scanned URL.
+ type: string
+ - contextPath: URLScan.ASN
+ description: ASN of the scanned URL.
+ type: string
+ - contextPath: URLScan.Data
+ description: URL of the file.
+ type: string
+ - contextPath: URLScan.Malicious.Vendor
+ description: The vendor reporting the malicious indicator for the file
+ type: string
+ - contextPath: URLScan.Malicious.Description
+ description: A description of the malicious indicator.
+ type: string
+ - contextPath: URLScan.File.Hash
+ description: SHA256 of file found
+ type: string
+ - contextPath: URLScan.File.FileName
+ description: File name of file found
+ type: string
+ - contextPath: URLScan.File.FileType
+ description: File type of the file found
+ type: string
+ - contextPath: URLScan.File.Hostname
+ description: URL where the file was found
+ type: string
+ - contextPath: URLScan.Certificates
+ description: Certificates found for the URL scanned
+ type: string
+ - contextPath: DBotScore.Score
+ description: Score retrieved for Dbot
+ type: number
+ - contextPath: DBotScore.Type
+ description: Type of indicator tested for
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor who provided DBot Score
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: Indicator URLScan tested for
+ type: string
+ - contextPath: URLScan.RedirectedURLs
+ description: Redirected URLs from the URL scanned
+ type: string
+ - contextPath: URLScan.EffectiveURL
+ description: Effective URL of the original URL
+ type: string
+ description: 'Deprecated. Use the url command instead.'
+ - name: url
+ arguments:
+ - name: url
+ required: true
+      description: The URL to scan.
+ - name: timeout
+ description: The amount of time (in seconds) to wait for the scan ID result before timeout. Default is 60.
+ defaultValue: "60"
+ - name: public
+ description: The submission type. Can be "public" or "private".
+ - name: limit
+ description: The maximum number of results to return.
+ defaultValue: "20"
+ - name: wait
+ description: The amount of time (in seconds) to wait between tries if the API rate limit is exceeded.
+ defaultValue: "5"
+ - name: retries
+ description: Number of retries for the API rate limit. Default is 0.
+ defaultValue: "0"
+ outputs:
+ - contextPath: URL.Data
+ description: The URL submitted for scanning.
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision.
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the reason that the vendor made the decision.
+ type: string
+ - contextPath: URLScan.RelatedIPs
+ description: The IP addresses related to the scanned URL.
+ type: string
+ - contextPath: URLScan.RelatedASNs
+ description: The ASNs related to the scanned URL.
+ type: string
+ - contextPath: URLScan.Countries
+ description: The countries associated with the scanned URL.
+ type: string
+ - contextPath: URLScan.RelatedHash
+ description: File hashes related to the scanned URL.
+ type: string
+ - contextPath: URLScan.Subdomains
+ description: Subdomains associated with the scanned URL.
+ type: string
+ - contextPath: URLScan.ASN
+ description: The ASN of the scanned URL.
+ type: string
+ - contextPath: URLScan.Data
+ description: The URL of the file.
+ type: string
+ - contextPath: URLScan.Malicious.Vendor
+ description: The vendor that reported the malicious indicator for the file.
+ type: string
+ - contextPath: URLScan.Malicious.Description
+ description: A description of the malicious indicator.
+ type: string
+ - contextPath: URLScan.File.Hash
+ description: The SHA256 hash of file.
+ type: string
+ - contextPath: URLScan.File.FileName
+ description: The name of the file.
+ type: string
+ - contextPath: URLScan.File.FileType
+ description: The file type.
+ type: string
+ - contextPath: URLScan.File.Hostname
+ description: The URL of the file.
+ type: string
+ - contextPath: File.SHA256
+ description: The SHA256 hash of the file.
+ type: string
+ - contextPath: File.Name
+ description: The name of the file.
+ type: string
+ - contextPath: File.Type
+ description: The file type.
+ type: string
+ - contextPath: File.Hostname
+ description: The URL of the file.
+ type: string
+ - contextPath: URLScan.Certificates
+ description: The certificates found for the scanned URL.
+ type: string
+ - contextPath: DBotScore.Score
+ description: The DBot score.
+ type: string
+ - contextPath: DBotScore.Type
+ description: The indicator type.
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: The vendor that provided the DBot Score.
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator for which urlscan tested.
+ type: string
+ - contextPath: URLScan.RedirectedURLs
+ description: Redirected URLs from the scanned URL.
+ type: string
+ - contextPath: URLScan.EffectiveURL
+ description: Effective URL of the original URL.
+ type: string
+ description: Submits a URL to scan.
+ - name: urlscan-get-http-transaction-list
+ deprecated: true
+ arguments:
+ - name: uuid
+ required: true
+ description: The UUID of the URL for which to search the transaction list.
+ - name: limit
+ description: The maximum number of results to return to the War Room. Maximum is 100. Default is 20.
+ defaultValue: "20"
+ - name: url
+ required: true
+ description: The URL for which to search the transaction list.
+ outputs:
+ - contextPath: URLScan.URL
+ description: The URL address that was scanned.
+ type: string
+ - contextPath: URLScan.httpTransaction
+ description: A link to the HTTP transaction made during the search for the specified
+ URL.
+ type: string
+ description: Returns the HTTP transaction list for the specified URL. Do not use this command in conjunction with the
+ urlscan-get-http-transactions script.
+ - name: urlscan-submit-url-command
+ deprecated: true
+ arguments:
+ - name: url
+ required: true
+      description: The URL to submit for scanning.
+ description: Submits a URL to retrieve its UUID.
+ - name: urlscan-poll-uri
+ deprecated: true
+ arguments:
+ - name: uri
+ required: true
+ description: The URI for which to get the results.
+ description: Polls the urlscan service regarding the results of the specified URI.
+ - name: urlscan-get-result-page
+ deprecated: true
+ arguments:
+ - name: uuid
+ required: true
+ description: The UUID of the URL for which to search.
+ description: Returns the results page for the specified UUID.
+ runonce: false
diff --git a/Integrations/UrlScan/URLScan_CHANGELOG.md b/Integrations/UrlScan/URLScan_CHANGELOG.md
new file mode 100644
index 000000000000..7c00ede37cfa
--- /dev/null
+++ b/Integrations/UrlScan/URLScan_CHANGELOG.md
@@ -0,0 +1,6 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+ - Added support for the _Verdict_ result from the urlscan.io API.
+ - Default privacy setting is now customizable, which enables submissions to be public or private (globally).
diff --git a/Integrations/UrlScan/URLScan_Image.png b/Integrations/UrlScan/URLScan_Image.png
new file mode 100644
index 000000000000..6db2a7ee4f69
Binary files /dev/null and b/Integrations/UrlScan/URLScan_Image.png differ
diff --git a/Integrations/UrlScan/URLScan_description.md b/Integrations/UrlScan/URLScan_description.md
new file mode 100644
index 000000000000..b3d399d9c8ce
--- /dev/null
+++ b/Integrations/UrlScan/URLScan_description.md
@@ -0,0 +1,2 @@
+This integration checks domain information from the urlscan.io database.
+This is a free service, but an API key is required. Contact urlscan.io to obtain one.
\ No newline at end of file
diff --git a/Integrations/VMRay/Pipfile b/Integrations/VMRay/Pipfile
new file mode 100644
index 000000000000..41c7519a7a9f
--- /dev/null
+++ b/Integrations/VMRay/Pipfile
@@ -0,0 +1,14 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/VMRay/Pipfile.lock b/Integrations/VMRay/Pipfile.lock
new file mode 100644
index 000000000000..643d94a1edbe
--- /dev/null
+++ b/Integrations/VMRay/Pipfile.lock
@@ -0,0 +1,174 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "030517bfcc68d7e2f82fb5831e88abe2f6540ec99eefed71048ae95c58697218"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:18c796c2cd35eb1a1d3f012a214a542790a1aed95e29768bdcb9f2197eccbd0b",
+ "sha256:96151fca2c6e736503981896495d344781b60d18bfda78dc11b290c6125ebdb6"
+ ],
+ "version": "==4.3.15"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33",
+ "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39",
+ "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019",
+ "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088",
+ "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b",
+ "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e",
+ "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6",
+ "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b",
+ "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5",
+ "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff",
+ "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd",
+ "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7",
+ "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff",
+ "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d",
+ "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2",
+ "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35",
+ "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4",
+ "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514",
+ "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252",
+ "sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109",
+ "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f",
+ "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c",
+ "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92",
+ "sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577",
+ "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d",
+ "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d",
+ "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f",
+ "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a",
+ "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"
+ ],
+ "version": "==1.3.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:0125e8f60e9e031347105eb1682cef932f5e97d7b9a1a28d9bf00c22a5daef40",
+ "sha256:590044e3942351a1bdb1de960b739ff4ce277960f2425ad4509446dbace8d9d1"
+ ],
+ "markers": "python_version > '2.7'",
+ "version": "==6.0.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f",
+ "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746"
+ ],
+ "version": "==0.9.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:592eaa2c33fae68c7d75aacf042efc9f77b27c08a6224a4f59beab8d9a420523",
+ "sha256:ad3ad5c450284819ecde191a654c09b0ec72257a2c711b9633d677c71c9850c4"
+ ],
+ "index": "pypi",
+ "version": "==4.3.1"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:4d0d06d173eecf172703219a71dbd4ade0e13904e6bbce1ce660e2e0dc78b5c4",
+ "sha256:bfdf02789e3d197bd682a758cae0a4a18706566395fbe2803badcd1335e0173e"
+ ],
+ "index": "pypi",
+ "version": "==1.10.1"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:035a54ede6ce1380599b2ce57844c6554666522e376bd111eb940fbc7c3dad23",
+ "sha256:037c35f2741ce3a9ac0d55abfcd119133cbd821fffa4461397718287092d9d15",
+ "sha256:049feae7e9f180b64efacbdc36b3af64a00393a47be22fa9cb6794e68d4e73d3",
+ "sha256:19228f7940beafc1ba21a6e8e070e0b0bfd1457902a3a81709762b8b9039b88d",
+ "sha256:2ea681e91e3550a30c2265d2916f40a5f5d89b59469a20f3bad7d07adee0f7a6",
+ "sha256:3a6b0a78af298d82323660df5497bcea0f0a4a25a0b003afd0ce5af049bd1f60",
+ "sha256:5385da8f3b801014504df0852bf83524599df890387a3c2b17b7caa3d78b1773",
+ "sha256:606d8afa07eef77280c2bf84335e24390055b478392e1975f96286d99d0cb424",
+ "sha256:69245b5b23bbf7fb242c9f8f08493e9ecd7711f063259aefffaeb90595d62287",
+ "sha256:6f6d839ab09830d59b7fa8fb6917023d8cb5498ee1f1dbd82d37db78eb76bc99",
+ "sha256:730888475f5ac0e37c1de4bd05eeb799fdb742697867f524dc8a4cd74bcecc23",
+ "sha256:9819b5162ffc121b9e334923c685b0d0826154e41dfe70b2ede2ce29034c71d8",
+ "sha256:9e60ef9426efab601dd9aa120e4ff560f4461cf8442e9c0a2b92548d52800699",
+ "sha256:af5fbdde0690c7da68e841d7fc2632345d570768ea7406a9434446d7b33b0ee1",
+ "sha256:b64efdbdf3bbb1377562c179f167f3bf301251411eb5ac77dec6b7d32bcda463",
+ "sha256:bac5f444c118aeb456fac1b0b5d14c6a71ea2a42069b09c176f75e9bd4c186f6",
+ "sha256:bda9068aafb73859491e13b99b682bd299c1b5fd50644d697533775828a28ee0",
+ "sha256:d659517ca116e6750101a1326107d3479028c5191f0ecee3c7203c50f5b915b0",
+ "sha256:eddd3fb1f3e0f82e5915a899285a39ee34ce18fd25d89582bc89fc9fb16cd2c6"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.3.1"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"
+ ],
+ "version": "==1.11.1"
+ }
+ }
+}
diff --git a/Integrations/VMRay/VMRay.py b/Integrations/VMRay/VMRay.py
new file mode 100644
index 000000000000..86e1d472e63f
--- /dev/null
+++ b/Integrations/VMRay/VMRay.py
@@ -0,0 +1,912 @@
+import requests
+from CommonServerPython import *
+
+''' GLOBAL PARAMS '''
+API_KEY = demisto.params()['api_key']
+SERVER = (
+ demisto.params()['server'][:-1]
+ if (demisto.params()['server'] and demisto.params()['server'].endswith('/'))
+ else demisto.params()['server']
+)
+SERVER += '/rest/'
+USE_SSL = not demisto.params().get('insecure', False)
+HEADERS = {'Authorization': 'api_key ' + API_KEY}
+ERROR_FORMAT = 'Error in API call to VMRay [{}] - {}'
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+# Remove proxy
+PROXIES = handle_proxy()
+
+''' HELPER DICTS '''
+SEVERITY_DICT = {
+ 'malicious': 'Malicious',
+ 'suspicious': 'Suspicious',
+ 'not_suspicious': 'Good',
+ 'blacklisted': 'Blacklisted',
+ 'whitelisted': 'Whitelisted',
+ 'unknown': 'Unknown',
+ None: 'Unknown',
+}
+
+DBOTSCORE = {
+ 'Malicious': 3,
+ 'Suspicious': 2,
+ 'Good': 1,
+ 'Blacklisted': 3,
+ 'Whitelisted': 1,
+ 'Unknown': 0,
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def is_json(response):
+ """Checks if response is jsonable
+
+ Args:
+ response (requests.Response):
+
+ Returns:
+ bool: true if object is jsonable
+ """
+ try:
+ response.json()
+ except ValueError:
+ return False
+ return True
+
+
+def check_id(id_to_check):
+ """Checks if parameter id_to_check is a number
+
+ Args:
+ id_to_check (int or str or unicode):
+
+ Returns:
+ bool: True if is a number, else returns error
+ """
+ if isinstance(id_to_check, int) or isinstance(id_to_check, (str, unicode)) and id_to_check.isdigit():
+ return True
+ return_error(ERROR_FORMAT.format(404, 'No such element'))
+
+
+def build_errors_string(errors):
+ """
+
+ Args:
+ errors (list or dict):
+
+ Returns:
+ str: error message
+ """
+ if isinstance(errors, list):
+ err_str = str()
+ for error in errors:
+ err_str += error.get('error_msg') + '.\n'
+ else:
+ err_str = errors.get('error_msg')
+ return err_str
+
+
+def http_request(method, url_suffix, params=None, files=None, ignore_errors=False):
+ """ General HTTP request.
+ Args:
+ ignore_errors (bool):
+ method: (str) 'GET', 'POST', 'DELETE' 'PUT'
+ url_suffix: (str)
+ params: (dict)
+ files: (tuple, dict)
+
+ Returns:
+ dict: response json
+ """
+
+ def find_error(may_be_error_inside):
+ """Function will search for dict with 'errors' or 'error_msg' key
+
+ Args:
+ may_be_error_inside: object, any object
+
+ Returns:
+ None if no error presents
+ Errors list/string if errors inside.
+ """
+ if isinstance(may_be_error_inside, list):
+ for obj in may_be_error_inside:
+ ans = find_error(obj)
+ if ans:
+ return ans
+ return None
+ if isinstance(may_be_error_inside, dict):
+ if 'error_msg' in may_be_error_inside:
+ return may_be_error_inside['error_msg']
+ if 'errors' in may_be_error_inside and may_be_error_inside.get('errors'):
+ return may_be_error_inside['errors']
+ for value in may_be_error_inside.values():
+ err_r = find_error(value)
+ if err_r:
+ return err_r
+ return None
+
+ url = SERVER + url_suffix
+ r = requests.request(
+ method, url, params=params, headers=HEADERS, files=files, verify=USE_SSL, proxies=PROXIES
+ )
+ # Handle errors
+ try:
+ if r.status_code in {405, 401}:
+ return_error(ERROR_FORMAT.format(r.status_code, 'Token may be invalid'))
+ elif not is_json(r):
+ raise ValueError
+ response = r.json()
+ if r.status_code not in {200, 201, 202, 204} and not ignore_errors:
+ err = find_error(response)
+ if not err:
+ err = r.text
+ return_error(ERROR_FORMAT.format(r.status_code, err))
+
+ err = find_error(response)
+ if err:
+ return_error(ERROR_FORMAT.format(r.status_code, err))
+ return response
+ except ValueError:
+ # If no JSON is present, must be an error that can't be ignored
+ return_error(ERROR_FORMAT.format(r.status_code, r.text))
+
+
+def dbot_score_by_hash(analysis):
+    """Builds a DBot score entry for each MD5/SHA1/SHA256/SSDeep hash in the dict
+
+    Args:
+        analysis: (dict)
+
+    Returns:
+        list: DBot score entries, one per available hash
+    """
+ hashes = ['MD5', 'SHA256', 'SHA1', 'SSDeep']
+ scores = list()
+ for hash_type in hashes:
+ if hash_type in analysis:
+ scores.append(
+ {
+ 'Indicator': analysis.get(hash_type),
+ 'Type': 'hash',
+ 'Vendor': 'VMRay',
+                    'Score': DBOTSCORE.get(analysis.get('Severity'), 0),
+ }
+ )
+ return scores
+
+
+def build_job_data(data):
+ """
+
+ Args:
+ data: any kind of object.
+
+    Returns:
+        list or dict: job entries (a single entry dict when the input is a dict)
+    """
+
+ def build_entry(entry_data):
+ entry = dict()
+ entry['JobID'] = entry_data.get('job_id')
+ entry['SampleID'] = entry_data.get('job_sample_id')
+ entry['SubmissionID'] = entry_data.get('job_submission_id')
+ entry['MD5'] = entry_data.get('job_sample_md5')
+ entry['SHA1'] = entry_data.get('job_sample_sha1')
+ entry['SHA256'] = entry_data.get('job_sample_sha256')
+ entry['SSDeep'] = entry_data.get('job_sample_ssdeep')
+ entry['VMName'] = entry_data.get('job_vm_name')
+ entry['VMID'] = entry_data.get('job_vm_id')
+ entry['Status'] = entry_data.get('job_status')
+ return entry
+
+ jobs_list = list()
+ if isinstance(data, list):
+ for item in data:
+ jobs_list.append(build_entry(item))
+ elif isinstance(data, dict):
+ jobs_list = build_entry(data)
+ return jobs_list
+
+
+def build_finished_job(job_id, sample_id):
+ entry = dict()
+ entry['JobID'] = job_id
+ entry['SampleID'] = sample_id
+ entry['Status'] = 'Finished/NotExists'
+ return entry
+
+
+def build_analysis_data(analyses):
+ """
+
+ Args:
+ analyses: (dict) of analysis
+
+ Returns:
+ dict: formatted entry context
+ """
+ entry_context = dict()
+ entry_context['VMRay.Analysis(val.AnalysisID === obj.AnalysisID)'] = [
+ {
+ 'AnalysisID': analysis.get('analysis_id'),
+ 'SampleID': analysis.get('analysis_sample_id'),
+ 'Severity': SEVERITY_DICT.get(analysis.get('analysis_severity')),
+ 'JobCreated': analysis.get('analysis_job_started'),
+ 'SHA1': analysis.get('analysis_sample_sha1'),
+ 'MD5': analysis.get('analysis_sample_md5'),
+ 'SHA256': analysis.get('analysis_sample_sha256'),
+ }
+ for analysis in analyses
+ ]
+
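+    # Collect DBot scores from the hashes of each analysis entry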
+    scores = list()  # type: list
+    for analysis in entry_context['VMRay.Analysis(val.AnalysisID === obj.AnalysisID)']:
+        scores.extend(dbot_score_by_hash(analysis))
+    entry_context[outputPaths['dbotscore']] = scores
+
+ return entry_context
+
+
+def build_upload_params():
+ """Builds params for upload_file
+
+ Returns:
+ dict: params
+ """
+ # additional params
+ doc_pass = demisto.args().get('document_password')
+ arch_pass = demisto.args().get('archive_password')
+ sample_type = demisto.args().get('sample_type')
+ shareable = demisto.args().get('shareable')
+ reanalyze = demisto.args().get('reanalyze')
+ max_jobs = demisto.args().get('max_jobs')
+ tags = demisto.args().get('tags')
+
+ params = dict()
+ if doc_pass:
+ params['document_password'] = doc_pass
+ if arch_pass:
+ params['archive_password'] = arch_pass
+ if sample_type:
+ params['sample_type'] = sample_type
+
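+    # Predefined boolean arguments arrive from Demisto as the strings 'true'/'false'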
+ params['shareable'] = shareable == 'true'
+ params['reanalyze'] = reanalyze == 'true'
+
+ if max_jobs:
+        if isinstance(max_jobs, int) or (isinstance(max_jobs, (str, unicode)) and max_jobs.isdigit()):
+ params['max_jobs'] = int(max_jobs)
+ else:
+            return_error('max_jobs argument is not a number')
+ if tags:
+ params['tags'] = tags
+ return params
+
+
+def test_module():
+    """Simple GET request to verify connectivity and authentication
+    """
+    response = http_request('GET', 'analysis?_limit=1')
+    if response.get('result') == 'ok':
+        demisto.results('ok')
+    else:
+        return_error('Can\'t authenticate: {}'.format(response))
+
+
+def upload_sample(file_id, params):
+ """Uploading sample to VMRay
+
+ Args:
+ file_id (str): entry_id
+ params (dict): dict of params
+
+ Returns:
+ dict: response
+ """
+ suffix = 'sample/submit'
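+    # demisto.getFilePath returns a dict with the war-room file's 'name' and 'path'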
+ file_obj = demisto.getFilePath(file_id)
+    # Ignore non-ASCII characters in the file name
+ file_name = file_obj['name'].encode('ascii', 'ignore')
+ file_path = file_obj['path']
+ with open(file_path, 'rb') as f:
+ files = {'sample_file': (file_name, f)}
+ results = http_request('POST', url_suffix=suffix, params=params, files=files)
+ return results
+
+
+def upload_sample_command():
+    """Uploads a file to VMRay
+    """
+    # Preserve backward compatibility: 'file_id' was the argument name in older versions
+    file_id = demisto.args().get('entry_id') or demisto.args().get('file_id')
+ params = build_upload_params()
+
+ # Request call
+ raw_response = upload_sample(file_id, params=params)
+ data = raw_response.get('data')
+ jobs_list = list()
+ jobs = data.get('jobs')
+ if jobs:
+ for job in jobs:
+ if isinstance(job, dict):
+ job_entry = dict()
+ job_entry['JobID'] = job.get('job_id')
+ job_entry['Created'] = job.get('job_created')
+ job_entry['SampleID'] = job.get('job_sample_id')
+ job_entry['VMName'] = job.get('job_vm_name')
+ job_entry['VMID'] = job.get('job_vm_id')
+ job_entry['JobRuleSampleType'] = job.get('job_jobrule_sampletype')
+ jobs_list.append(job_entry)
+
+ samples_list = list()
+ samples = data.get('samples')
+ if samples:
+ for sample in samples:
+ if isinstance(sample, dict):
+ sample_entry = dict()
+ sample_entry['SampleID'] = sample.get('sample_id')
+ sample_entry['Created'] = sample.get('sample_created')
+ sample_entry['FileName'] = sample.get('submission_filename')
+ sample_entry['FileSize'] = sample.get('sample_filesize')
+ sample_entry['SSDeep'] = sample.get('sample_ssdeephash')
+ sample_entry['SHA1'] = sample.get('sample_sha1hash')
+ samples_list.append(sample_entry)
+
+ submissions_list = list()
+ submissions = data.get('submissions')
+ if submissions:
+ for submission in submissions:
+ if isinstance(submission, dict):
+ submission_entry = dict()
+ submission_entry['SubmissionID'] = submission.get('submission_id')
+ submission_entry['SampleID'] = submission.get('submission_sample_id')
+ submissions_list.append(submission_entry)
+
+ entry_context = dict()
+ entry_context['VMRay.Job(val.JobID === obj.JobID)'] = jobs_list
+ entry_context['VMRay.Sample(val.SampleID === obj.SampleID)'] = samples_list
+ entry_context[
+ 'VMRay.Submission(val.SubmissionID === obj.SubmissionID)'
+ ] = submissions_list
+
+ table = {
+ 'Jobs ID': [job.get('JobID') for job in jobs_list],
+ 'Samples ID': [sample.get('SampleID') for sample in samples_list],
+ 'Submissions ID': [
+ submission.get('SubmissionID') for submission in submissions_list
+ ],
+ }
+ human_readable = tableToMarkdown(
+ 'File submitted to VMRay',
+ t=table,
+ headers=['Jobs ID', 'Samples ID', 'Submissions ID'],
+ )
+
+ return_outputs(
+ readable_output=human_readable, outputs=entry_context, raw_response=raw_response
+ )
+
+
+def get_analysis_command():
+ sample_id = demisto.args().get('sample_id')
+ check_id(sample_id)
+ limit = demisto.args().get('limit')
+ params = {'_limit': limit}
+ raw_response = get_analysis(sample_id, params)
+ data = raw_response.get('data')
+ if data:
+ entry_context = build_analysis_data(data)
+ human_readable = tableToMarkdown(
+ 'Analysis results from VMRay for ID {}:'.format(sample_id),
+ entry_context.get('VMRay.Analysis(val.AnalysisID === obj.AnalysisID)'),
+ headers=['AnalysisID', 'SampleID', 'Severity']
+ )
+ return_outputs(human_readable, entry_context, raw_response=raw_response)
+ else:
+ return_outputs('#### No analysis found for sample id {}'.format(sample_id), None)
+
+
+def get_analysis(sample, params=None):
+    """Fetches analysis details for a sample from VMRay
+
+ Args:
+ sample (str): sample id
+ params (dict): dict of params
+
+ Returns:
+ dict: response
+ """
+ suffix = 'analysis/sample/{}'.format(sample)
+ response = http_request('GET', suffix, params=params)
+ return response
+
+
+def get_submission_command():
+ submission_id = demisto.args().get('submission_id')
+ check_id(submission_id)
+ raw_response = get_submission(submission_id)
+ data = raw_response.get('data')
+ if data:
+ # Build entry
+ entry = dict()
+ entry['IsFinished'] = data.get('submission_finished')
+ entry['HasErrors'] = data.get('submission_has_errors')
+ entry['SubmissionID'] = data.get('submission_id')
+ entry['MD5'] = data.get('submission_sample_md5')
+ entry['SHA1'] = data.get('submission_sample_sha1')
+ entry['SHA256'] = data.get('submission_sample_sha256')
+ entry['SSDeep'] = data.get('submission_sample_ssdeep')
+ entry['Severity'] = SEVERITY_DICT.get(data.get('submission_severity'))
+ entry['SampleID'] = data.get('submission_sample_id')
+ scores = dbot_score_by_hash(entry)
+
+ entry_context = {
+ 'VMRay.Submission(val.SubmissionID === obj.SubmissionID)': entry,
+ outputPaths.get('dbotscore'): scores,
+ }
+
+ human_readable = tableToMarkdown(
+ 'Submission results from VMRay for ID {} with severity of {}'.format(
+ submission_id, entry.get('Severity', 'Unknown')
+ ),
+ entry,
+ headers=[
+ 'IsFinished',
+ 'Severity',
+ 'HasErrors',
+ 'MD5',
+ 'SHA1',
+ 'SHA256',
+ 'SSDeep',
+ ],
+ )
+
+ return_outputs(human_readable, entry_context, raw_response=raw_response)
+ else:
+ return_outputs(
+ 'No submission found in VMRay for submission id: {}'.format(submission_id),
+ {},
+ )
+
+
+def get_submission(submission_id):
+ """
+
+ Args:
+        submission_id (str): ID of the submission
+
+ Returns:
+ dict: response
+ """
+ suffix = 'submission/{}'.format(submission_id)
+ response = http_request('GET', url_suffix=suffix)
+ return response
+
+
+def get_sample_command():
+ sample_id = demisto.args().get('sample_id')
+ check_id(sample_id)
+ raw_response = get_sample(sample_id)
+ data = raw_response.get('data')
+
+ entry = dict()
+ entry['SampleID'] = data.get('sample_id')
+ entry['FileName'] = data.get('sample_filename')
+ entry['MD5'] = data.get('sample_md5hash')
+ entry['SHA1'] = data.get('sample_sha1hash')
+ entry['SHA256'] = data.get('sample_sha256hash')
+ entry['SSDeep'] = data.get('sample_ssdeephash')
+ entry['Severity'] = SEVERITY_DICT.get(data.get('sample_severity'))
+ entry['Type'] = data.get('sample_type')
+ entry['Created'] = data.get('sample_created')
+ entry['Classification'] = data.get('sample_classifications')
+ scores = dbot_score_by_hash(entry)
+
+ entry_context = {
+        'VMRay.Sample(val.SampleID === obj.SampleID)': entry,
+ outputPaths.get('dbotscore'): scores,
+ }
+
+ human_readable = tableToMarkdown(
+ 'Results for sample id: {} with severity {}'.format(
+ entry.get('SampleID'), entry.get('Severity')
+ ),
+ entry,
+ headers=['Type', 'MD5', 'SHA1', 'SHA256', 'SSDeep'],
+ )
+ return_outputs(human_readable, entry_context, raw_response=raw_response)
+
+
+def get_sample(sample_id):
+    """Builds and sends the HTTP request for get_sample_command
+
+ Args:
+ sample_id (str, int):
+
+ Returns:
+ dict: data from response
+ """
+ suffix = 'sample/{}'.format(sample_id)
+ response = http_request('GET', suffix)
+ return response
+
+
+def get_job(job_id, sample_id):
+ """
+ Args:
+ sample_id (str):
+ job_id (str):
+ Returns:
+ dict of response, if not exists returns:
+ {
+ 'error_msg': 'No such element'
+ 'result': 'error'
+ }
+ """
+ suffix = (
+ 'job/{}'.format(job_id)
+ if job_id
+ else 'job/sample/{}'.format(sample_id)
+ )
+ response = http_request('GET', suffix, ignore_errors=True)
+ return response
+
+
+def get_job_command():
+ job_id = demisto.args().get('job_id')
+ sample_id = demisto.args().get('sample_id')
+ if sample_id:
+ check_id(sample_id)
+ else:
+ check_id(job_id)
+
+ vmray_id = job_id if job_id else sample_id
+ title = 'job' if job_id else 'sample'
+
+ raw_response = get_job(job_id=job_id, sample_id=sample_id)
+ data = raw_response.get('data')
+ if raw_response.get('result') == 'error' or not data:
+ entry = build_finished_job(job_id=job_id, sample_id=sample_id)
+        human_readable = '#### Couldn\'t find a job for the {}: {}. Either the job has completed or it does not exist.' \
+            .format(title, vmray_id)
+ else:
+ entry = build_job_data(data)
+ sample = entry[0] if isinstance(entry, list) else entry
+ human_readable = tableToMarkdown(
+ 'Job results for {} id: {}'.format(title, vmray_id),
+ sample,
+ headers=['JobID', 'SampleID', 'VMName', 'VMID'],
+ )
+
+ entry_context = {
+ 'VMRay.Job(val.JobID === obj.JobID && val.SampleID === obj.SampleID)': entry
+ }
+ return_outputs(human_readable, entry_context, raw_response=raw_response)
+
+
+def get_threat_indicators(sample_id):
+ """
+
+ Args:
+ sample_id (str):
+
+ Returns:
+        dict: response data
+ """
+ suffix = 'sample/{}/threat_indicators'.format(sample_id)
+ response = http_request('GET', suffix).get('data')
+ return response
+
+
+def get_threat_indicators_command():
+ sample_id = demisto.args().get('sample_id')
+ check_id(sample_id)
+ raw_response = get_threat_indicators(sample_id)
+ data = raw_response.get('threat_indicators')
+
+ # Build Entry Context
+ if data and isinstance(data, list):
+ entry_context_list = list()
+ for indicator in data:
+ entry = dict()
+ entry['AnalysisID'] = indicator.get('analysis_ids')
+ entry['Category'] = indicator.get('category')
+ entry['Classification'] = indicator.get('classifications')
+ entry['ID'] = indicator.get('id')
+ entry['Operation'] = indicator.get('operation')
+ entry_context_list.append(entry)
+
+ human_readable = tableToMarkdown(
+ 'Threat indicators for sample ID: {}. Showing first indicator:'.format(
+ sample_id
+ ),
+ entry_context_list[0],
+ headers=['AnalysisID', 'Category', 'Classification', 'Operation'],
+ )
+
+ entry_context = {'VMRay.ThreatIndicator(obj.ID === val.ID)': entry_context_list}
+ return_outputs(
+ human_readable, entry_context, raw_response={'threat_indicators': data}
+ )
+ else:
+ return_outputs(
+ 'No threat indicators for sample ID: {}'.format(sample_id),
+ {},
+ raw_response=raw_response,
+ )
+
+
+def post_tags_to_analysis(analysis_id, tag):
+ """
+
+ Args:
+ analysis_id (str):
+ tag (str):
+
+ Returns:
+ dict:
+ """
+ suffix = 'analysis/{}/tag/{}'.format(analysis_id, tag)
+ response = http_request('POST', suffix)
+ return response
+
+
+def post_tags_to_submission(submission_id, tag):
+ """
+
+ Args:
+ submission_id (str):
+ tag (str):
+
+ Returns:
+ dict:
+
+ """
+ suffix = 'submission/{}/tag/{}'.format(submission_id, tag)
+ response = http_request('POST', suffix)
+ return response
+
+
+def post_tags():
+ analysis_id = demisto.args().get('analysis_id')
+ submission_id = demisto.args().get('submission_id')
+ tag = demisto.args().get('tag')
+ if not submission_id and not analysis_id:
+ return_error('No submission ID or analysis ID has been provided')
+ if analysis_id:
+ analysis_status = post_tags_to_analysis(analysis_id, tag)
+ if analysis_status.get('result') == 'ok':
+ return_outputs(
+                'Tag {} has been added to analysis {}'.format(tag, analysis_id),
+ {},
+ raw_response=analysis_status,
+ )
+ if submission_id:
+ submission_status = post_tags_to_submission(submission_id, tag)
+ if submission_status.get('result') == 'ok':
+ return_outputs(
+                'Tag {} has been added to submission {}'.format(tag, submission_id),
+ {},
+ raw_response=submission_status,
+ )
+
+
+def delete_tags_from_analysis(analysis_id, tag):
+ suffix = 'analysis/{}/tag/{}'.format(analysis_id, tag)
+ response = http_request('DELETE', suffix)
+ return response
+
+
+def delete_tags_from_submission(submission_id, tag):
+ suffix = 'submission/{}/tag/{}'.format(submission_id, tag)
+ response = http_request('DELETE', suffix)
+ return response
+
+
+def delete_tags():
+ analysis_id = demisto.args().get('analysis_id')
+ submission_id = demisto.args().get('submission_id')
+ tag = demisto.args().get('tag')
+ if not submission_id and not analysis_id:
+ return_error('No submission ID or analysis ID has been provided')
+ if submission_id:
+ submission_status = delete_tags_from_submission(submission_id, tag)
+ if submission_status.get('result') == 'ok':
+ return_outputs(
+                'Tag {} has been removed from submission {}'.format(tag, submission_id),
+ {},
+ raw_response=submission_status,
+ )
+ if analysis_id:
+ analysis_status = delete_tags_from_analysis(analysis_id, tag)
+ if analysis_status.get('result') == 'ok':
+ return_outputs(
+                'Tag {} has been removed from analysis {}'.format(tag, analysis_id),
+ {},
+ raw_response=analysis_status,
+ )
+
+
+def get_iocs(sample_id):
+ """
+
+ Args:
+ sample_id (str):
+
+ Returns:
+ dict: response
+ """
+ suffix = 'sample/{}/iocs'.format(sample_id)
+ response = http_request('GET', suffix)
+ return response
+
+
+def get_iocs_command():
+ def get_hashed(lst):
+        """Normalizes VMRay hash attributes into Demisto's standard hash keys
+
+        Args:
+            lst (List[dict]): list of hash attributes
+
+        Returns:
+            List[dict]: list of hash attributes keyed by Demisto's hash names
+        """
+ hashes_dict = {
+ 'MD5': 'md5_hash',
+ 'SHA1': 'sha1_hash',
+ 'SHA256': 'sha256_hash',
+ 'SSDeep': 'ssdeep_hash'
+ }
+ return [
+ {k: hashes.get(v) for k, v in hashes_dict.items()}
+ for hashes in lst
+ ]
+
+ sample_id = demisto.args().get('sample_id')
+ check_id(sample_id)
+ raw_response = get_iocs(sample_id)
+ data = raw_response.get('data', {}).get('iocs', {})
+
+ # Initialize counters
+ iocs_size = 0
+ iocs_size_table = dict()
+ iocs = dict()
+
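+    # Each IOC type below follows the same pattern: tally the count, then map VMRay keys to context keys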
+ domains = data.get('domains')
+ if domains:
+ size = len(domains)
+ iocs_size_table['Domain'] = size
+ iocs_size += size
+ iocs['Domain'] = [
+ {
+ 'AnalysisID': domain.get('analysis_ids'),
+ 'Domain': domain.get('domain'),
+ 'ID': domain.get('id'),
+ 'Type': domain.get('type'),
+ } for domain in domains
+ ]
+
+ ips = data.get('ips')
+ if ips:
+ size = len(ips)
+ iocs_size_table['IP'] = size
+ iocs_size += size
+ iocs['IP'] = [
+ {
+ 'AnalysisID': ip.get('analysis_ids'),
+ 'IP': ip.get('ip_address'),
+ 'ID': ip.get('id'),
+ 'Type': ip.get('type')
+ } for ip in ips
+ ]
+
+ mutexes = data.get('mutexes')
+ if mutexes:
+ size = len(mutexes)
+ iocs_size_table['Mutex'] = size
+ iocs_size += size
+ iocs['Mutex'] = [{
+ 'AnalysisID': mutex.get('analysis_ids'),
+ 'Name': mutex.get('mutex_name'),
+ 'Operation': mutex.get('operations'),
+ 'ID': mutex.get('id'),
+ 'Type': mutex.get('type')
+ } for mutex in mutexes
+ ]
+
+ registry = data.get('registry')
+ if registry:
+ size = len(registry)
+ iocs_size_table['Registry'] = size
+ iocs_size += size
+ iocs['Registry'] = [
+ {
+ 'AnalysisID': reg.get('analysis_ids'),
+ 'Name': reg.get('reg_key_name'),
+ 'Operation': reg.get('operations'),
+ 'ID': reg.get('id'),
+ 'Type': reg.get('type'),
+ } for reg in registry
+ ]
+
+ urls = data.get('urls')
+ if urls:
+ size = len(urls)
+ iocs_size_table['URL'] = size
+ iocs_size += size
+ iocs['URL'] = [
+ {
+ 'AnalysisID': url.get('analysis_ids'),
+ 'URL': url.get('url'),
+ 'Operation': url.get('operations'),
+ 'ID': url.get('id'),
+ 'Type': url.get('type'),
+ } for url in urls
+ ]
+
+ files = data.get('files')
+ if files:
+ size = len(files)
+ iocs_size_table['File'] = size
+ iocs_size += size
+ iocs['File'] = [
+ {
+ 'AnalysisID': file_entry.get('analysis_ids'),
+ 'Filename': file_entry.get('filename'),
+ 'Operation': file_entry.get('operations'),
+ 'ID': file_entry.get('id'),
+ 'Type': file_entry.get('type'),
+ 'Hashes': get_hashed(file_entry.get('hashes'))
+ } for file_entry in files
+ ]
+
+ entry_context = {'VMRay.Sample(val.SampleID === {}).IOC'.format(sample_id): iocs}
+ if iocs_size:
+ human_readable = tableToMarkdown(
+ 'Total of {} IOCs found in VMRay by sample {}'.format(iocs_size, sample_id),
+ iocs_size_table,
+            headers=['URL', 'IP', 'Domain', 'Mutex', 'Registry', 'File'],
+ removeNull=True
+ )
+ else:
+ human_readable = '### No IOCs found in sample {}'.format(sample_id)
+ return_outputs(human_readable, entry_context, raw_response=raw_response)
+
+
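+# Command dispatch: route the invoked Demisto command to its implementation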
+try:
+ COMMAND = demisto.command()
+ if COMMAND == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+ elif COMMAND in ('upload_sample', 'vmray-upload-sample', 'file'):
+ upload_sample_command()
+ elif COMMAND == 'vmray-get-submission':
+ get_submission_command()
+ elif COMMAND in ('get_results', 'vmray-get-analysis-by-sample'):
+ get_analysis_command()
+ elif COMMAND == 'vmray-get-sample':
+ get_sample_command()
+ elif COMMAND in (
+ 'vmray-get-job-by-sample',
+ 'get_job_sample',
+ 'vmray-get-job-by-id',
+ ):
+ get_job_command()
+ elif COMMAND == 'vmray-get-threat-indicators':
+ get_threat_indicators_command()
+ elif COMMAND == 'vmray-add-tag':
+ post_tags()
+ elif COMMAND == 'vmray-delete-tag':
+ delete_tags()
+ elif COMMAND == 'vmray-get-iocs':
+ get_iocs_command()
+except Exception as exc:
+ return_error(str(exc))
diff --git a/Integrations/VMRay/VMRay.yml b/Integrations/VMRay/VMRay.yml
new file mode 100644
index 000000000000..222b2ff79c55
--- /dev/null
+++ b/Integrations/VMRay/VMRay.yml
@@ -0,0 +1,527 @@
+category: Forensics & Malware Analysis
+commonfields:
+ id: vmray
+ version: -1
+configuration:
+- defaultvalue: https://cloud.vmray.com
+ display: Server URL (e.g., https://cloud.vmray.com)
+ name: server
+ required: true
+ type: 0
+- display: API Key
+ name: api_key
+ required: true
+ type: 4
+- display: Use system proxy
+ name: proxy
+ required: false
+ type: 8
+- display: Trust any certificate (not secure)
+ name: insecure
+ required: false
+ type: 8
+description: Ransomware analysis sandboxing.
+display: VMRay
+name: vmray
+script:
+ commands:
+ - arguments:
+ - default: false
+ description: Entry ID of the file to submit.
+ isArray: false
+ name: entry_id
+ required: true
+ secret: false
+ - default: false
+ description: Password of the document.
+ isArray: false
+ name: document_password
+ required: false
+ secret: false
+ - default: false
+ description: Password of an archive.
+ isArray: false
+ name: archive_password
+ required: false
+ secret: false
+ - default: false
+ description: Force type of the file.
+ isArray: false
+ name: sample_type
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Whether the file is shareable.
+ isArray: false
+ name: shareable
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - auto: PREDEFINED
+ default: false
+ description: Analyze even if analyses already exist.
+ isArray: false
+ name: reanalyze
+ predefined:
+ - 'true'
+ - 'false'
+ required: false
+ secret: false
+ - default: true
+ defaultValue: '1'
+ description: Maximum number of jobs to create (number).
+ isArray: false
+ name: max_jobs
+ required: false
+ secret: false
+ - default: false
+ description: A CSV list of tags to add to the sample.
+ isArray: false
+ name: tags
+ required: false
+ secret: false
+ deprecated: false
+ description: Submits a sample to VMRay for analysis.
+ execution: false
+ name: vmray-upload-sample
+ outputs:
+ - contextPath: VMRay.Job.JobID
+    description: ID of a new job.
+ type: Number
+ - contextPath: VMRay.Job.Created
+ description: Timestamp of job creation.
+ type: Date
+ - contextPath: VMRay.Job.SampleID
+ description: ID of the sample.
+ type: Number
+ - contextPath: VMRay.Job.VMName
+ description: Name of the virtual machine.
+ type: String
+ - contextPath: VMRay.Job.VMID
+ description: ID of the virtual machine.
+ type: Number
+ - contextPath: VMRay.Sample.SampleID
+ description: ID of the sample.
+ type: Number
+ - contextPath: VMRay.Sample.Created
+ description: Timestamp of sample creation.
+ type: Date
+ - contextPath: VMRay.Submission.SubmissionID
+ description: Submission ID.
+ type: Number
+ - arguments:
+ - default: false
+ description: Analysis sample ID.
+ isArray: false
+ name: sample_id
+ required: true
+ secret: false
+ - default: false
+ description: Maximum number of results to return (number).
+ isArray: false
+ name: limit
+ required: false
+ secret: false
+ deprecated: false
+ description: Retrieves all analysis details for a specified sample.
+ execution: false
+ name: vmray-get-analysis-by-sample
+ outputs:
+ - contextPath: VMRay.Analysis.AnalysisID
+ description: Analysis ID.
+ type: Number
+ - contextPath: VMRay.Analysis.SampleID
+ description: Sample ID in the analysis.
+ type: Number
+ - contextPath: VMRay.Analysis.Severity
+ description: Severity of the sample (Malicious, Suspicious, Good, Blacklisted,
+ Whitelisted, Unknown).
+ type: String
+ - contextPath: VMRay.Analysis.JobCreated
+ description: Date when the analysis job started.
+ type: Date
+ - contextPath: VMRay.Analysis.MD5
+ description: MD5 hash of the sample.
+ type: String
+ - contextPath: VMRay.Analysis.SHA1
+ description: SHA1 hash of the sample.
+ type: String
+ - contextPath: VMRay.Analysis.SHA256
+ description: SHA256 hash of the sample.
+ type: String
+ - contextPath: VMRay.Analysis.SSDeep
+ description: ssdeep hash of the sample.
+ type: String
+ - arguments:
+ - default: false
+ description: Job sample ID.
+ isArray: false
+ name: sample_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves details for all jobs for a specified sample.
+ execution: false
+ name: vmray-get-job-by-sample
+ outputs:
+ - contextPath: VMRay.Job.JobID
+ description: ID of the job.
+ type: Number
+ - contextPath: VMRay.Job.SampleID
+ description: Sample ID of the job.
+ type: Number
+ - contextPath: VMRay.Job.SubmissionID
+ description: ID of the submission.
+ type: Number
+ - contextPath: VMRay.Job.MD5
+ description: MD5 hash of the sample in the job.
+ type: String
+ - contextPath: VMRay.Job.SHA1
+ description: SHA1 hash of the sample in the job.
+ type: String
+ - contextPath: VMRay.Job.SHA256
+ description: SHA256 hash of the sample in the job.
+ type: String
+ - contextPath: VMRay.Job.SSDeep
+ description: ssdeep hash of the sample in the job.
+ type: String
+ - contextPath: VMRay.Job.VMName
+ description: Name of the virtual machine.
+ type: String
+ - contextPath: VMRay.Job.VMID
+ description: ID of the virtual machine.
+ type: Number
+ - contextPath: VMRay.Job.Status
+    description: Status of the job.
+ type: String
+ - arguments:
+ - default: false
+ description: ID of the submission. Can be obtained by running the 'vmray-upload-sample'
+ command.
+ isArray: false
+ name: submission_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves the results of a submission.
+ execution: false
+ name: vmray-get-submission
+ outputs:
+ - contextPath: VMRay.Submission.IsFinished
+ description: Whether the submission is finished (true or false).
+ type: Boolean
+ - contextPath: VMRay.Submission.HasErrors
+ description: Whether there are any errors in the submission (true or false).
+ type: Boolean
+ - contextPath: VMRay.Submission.SubmissionID
+ description: ID of the sample in the submission.
+ type: Number
+ - contextPath: VMRay.Submission.MD5
+ description: MD5 hash of the sample in the submission.
+ type: String
+ - contextPath: VMRay.Submission.SHA1
+ description: SHA1 hash of the sample in the submission.
+ type: String
+ - contextPath: VMRay.Submission.SHA256
+ description: SHA256 hash of the sample in the submission.
+ type: String
+ - contextPath: VMRay.Submission.SSDeep
+ description: ssdeep hash of the sample in the submission.
+ type: String
+ - contextPath: VMRay.Submission.Severity
+ description: Severity of the sample in the submission (Malicious, Suspicious,
+ Good, Blacklisted, Whitelisted, Unknown).
+ type: String
+ - contextPath: VMRay.Submission.SampleID
+ description: ID of the sample in the submission.
+ type: Number
+ - arguments:
+ - default: false
+ description: ID of the sample.
+ isArray: false
+ name: sample_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves a sample using the sample ID.
+ execution: false
+ name: vmray-get-sample
+ outputs:
+ - contextPath: VMRay.Sample.SampleID
+ description: ID of the sample.
+ type: Number
+ - contextPath: VMRay.Sample.FileName
+ description: File name of the sample.
+ type: String
+ - contextPath: VMRay.Sample.MD5
+ description: MD5 hash of the sample.
+ type: String
+ - contextPath: VMRay.Sample.SHA1
+ description: SHA1 hash of the sample.
+ type: String
+ - contextPath: VMRay.Sample.SHA256
+ description: SHA256 hash of the sample.
+ type: String
+ - contextPath: VMRay.Sample.SSDeep
+ description: ssdeep hash of the sample.
+ type: String
+ - contextPath: VMRay.Sample.Severity
+    description: Severity of the sample (Malicious, Suspicious, Good, Blacklisted,
+      Whitelisted, Unknown).
+ type: String
+ - contextPath: VMRay.Sample.Type
+ description: File type.
+ type: String
+ - contextPath: VMRay.Sample.Created
+ description: Timestamp of sample creation.
+ type: Date
+  - contextPath: VMRay.Sample.Classification
+ description: Classifications of the sample.
+ type: String
+ - arguments:
+ - default: false
+      description: ID of the sample. Can be obtained from the 'VMRay.Sample.SampleID' output.
+ isArray: false
+ name: sample_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves threat indicators (VTI).
+ execution: false
+ name: vmray-get-threat-indicators
+ outputs:
+ - contextPath: VMRay.ThreatIndicator.AnalysisID
+ description: List of connected analysis IDs.
+ type: Number
+ - contextPath: VMRay.ThreatIndicator.Category
+ description: Category of threat indicators.
+ type: String
+ - contextPath: VMRay.ThreatIndicator.Classification
+ description: Classifications of threat indicators.
+ type: String
+ - contextPath: VMRay.ThreatIndicator.ID
+ description: ID of a threat indicator.
+ type: Number
+ - contextPath: VMRay.ThreatIndicator.Operation
+ description: Operation the indicators caused.
+ type: String
+ - arguments:
+ - default: false
+ description: ID of the submission to which to add tags.
+ isArray: false
+ name: submission_id
+ required: false
+ secret: false
+ - default: false
+      description: ID of the analysis to which to add tags.
+ isArray: false
+ name: analysis_id
+ required: false
+ secret: false
+ - default: false
+ description: Tag to add.
+ isArray: false
+ name: tag
+ required: false
+ secret: false
+ deprecated: false
+ description: Adds a tag to an analysis and/or a submission.
+ execution: false
+ name: vmray-add-tag
+ - arguments:
+ - default: false
+ description: ID of the analysis from which to delete a tag.
+ isArray: false
+ name: analysis_id
+ required: false
+ secret: false
+ - default: false
+ description: ID of the submission from which to delete a tag.
+ isArray: false
+ name: submission_id
+ required: false
+ secret: false
+ - default: false
+ description: Tag to delete.
+ isArray: false
+ name: tag
+ required: false
+ secret: false
+ deprecated: false
+ description: Deletes tags from an analysis and/or a submission.
+ execution: false
+ name: vmray-delete-tag
+ - arguments:
+ - default: false
+ description: ID of the sample.
+ isArray: false
+ name: sample_id
+ required: true
+ secret: false
+ deprecated: false
+    description: Retrieves indicators of compromise for a specified sample.
+ execution: false
+ name: vmray-get-iocs
+ outputs:
+ - contextPath: VMRay.Sample.IOC.URL.AnalysisID
+ description: IDs of other analyses that contain the given URL.
+ type: Number
+ - contextPath: VMRay.Sample.IOC.URL.URL
+ description: URL.
+ type: String
+ - contextPath: VMRay.Sample.IOC.URL.Operation
+ description: Operation of the specified URL.
+ type: String
+ - contextPath: VMRay.Sample.IOC.URL.ID
+ description: ID of the URL.
+ type: Number
+ - contextPath: VMRay.Sample.IOC.URL.Type
+ description: Type of URL.
+ type: String
+ - contextPath: VMRay.Sample.IOC.Domain.AnalysisID
+ description: IDs of other analyses that contain the given domain.
+ type: Number
+ - contextPath: VMRay.Sample.IOC.Domain.Domain
+ description: Domain.
+ type: String
+ - contextPath: VMRay.Sample.IOC.Domain.ID
+ description: ID of the domain.
+ type: Number
+ - contextPath: VMRay.Sample.IOC.Domain.Type
+ description: Type of domain.
+ type: String
+ - contextPath: VMRay.Sample.IOC.IP.AnalysisID
+ description: IDs of other analyses that contain the given IP address.
+ type: Number
+ - contextPath: VMRay.Sample.IOC.IP.IP
+ description: IP address.
+ type: String
+ - contextPath: VMRay.Sample.IOC.IP.Operation
+ description: Operation of the given IP.
+ type: String
+ - contextPath: VMRay.Sample.IOC.IP.ID
+ description: ID of the IP address.
+ type: Number
+ - contextPath: VMRay.Sample.IOC.IP.Type
+ description: Type of IP address.
+ type: String
+ - contextPath: VMRay.Sample.IOC.Mutex.AnalysisID
+    description: IDs of other analyses that contain the given mutex.
+ type: Number
+ - contextPath: VMRay.Sample.IOC.Mutex.Name
+ description: Name of the mutex.
+ type: String
+ - contextPath: VMRay.Sample.IOC.Mutex.Operation
+    description: Operation of the given mutex.
+ type: String
+ - contextPath: VMRay.Sample.IOC.Mutex.ID
+ description: ID of the mutex.
+ type: Number
+ - contextPath: VMRay.Sample.IOC.Mutex.Type
+ description: Type of mutex.
+ type: String
+ - contextPath: VMRay.Sample.IOC.File.AnalysisID
+    description: IDs of other analyses that contain the given file.
+ type: Number
+ - contextPath: VMRay.Sample.IOC.File.Name
+ description: Name of the file.
+ type: String
+ - contextPath: VMRay.Sample.IOC.File.Operation
+    description: Operation of the given file.
+ type: String
+ - contextPath: VMRay.Sample.IOC.File.ID
+ description: ID of the file.
+ type: Number
+ - contextPath: VMRay.Sample.IOC.File.Type
+ description: Type of file.
+ type: String
+  - contextPath: VMRay.Sample.IOC.File.Hashes.MD5
+    description: MD5 hash of the given file.
+    type: String
+  - contextPath: VMRay.Sample.IOC.File.Hashes.SSDeep
+    description: ssdeep hash of the given file.
+    type: String
+  - contextPath: VMRay.Sample.IOC.File.Hashes.SHA256
+    description: SHA256 hash of the given file.
+    type: String
+  - contextPath: VMRay.Sample.IOC.File.Hashes.SHA1
+    description: SHA1 hash of the given file.
+    type: String
+ - arguments:
+ - default: false
+ description: ID of a job.
+ isArray: false
+ name: job_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Retrieves a job by job ID.
+ execution: false
+ name: vmray-get-job-by-id
+ outputs:
+ - contextPath: VMRay.Job.JobID
+ description: ID of the job.
+ type: Number
+ - contextPath: VMRay.Job.SampleID
+ description: Sample ID of the job.
+ type: Number
+ - contextPath: VMRay.Job.SubmissionID
+ description: ID of the submission.
+ type: Number
+ - contextPath: VMRay.Job.MD5
+ description: MD5 hash of the sample in the job.
+ type: String
+ - contextPath: VMRay.Job.SHA1
+ description: SHA1 hash of the sample in the job.
+ type: String
+ - contextPath: VMRay.Job.SHA256
+ description: SHA256 hash of the sample in the job.
+ type: String
+ - contextPath: VMRay.Job.SSDeep
+ description: ssdeep hash of the sample in the job.
+ type: String
+ - contextPath: VMRay.Job.VMName
+ description: Name of the virtual machine.
+ type: String
+ - contextPath: VMRay.Job.VMID
+ description: ID of the virtual machine.
+ type: Number
+ - contextPath: VMRay.Job.Status
+ description: Status of the job.
+ type: String
+ - arguments:
+ - default: false
+ description: Sample ID.
+ isArray: false
+ name: sample_id
+ required: true
+ secret: false
+ deprecated: true
+  description: This command performs a function similar to the 'vmray-get-job-by-sample'
+    command.
+ execution: false
+ name: get_job_sample
+ - arguments:
+ - default: false
+ description: ID of the sample.
+ isArray: false
+ name: sample_id
+ required: true
+ secret: false
+ deprecated: true
+  description: This command performs a function similar to the 'vmray-get-analysis-by-sample' command.
+ execution: false
+ name: get_results
+ - deprecated: true
+  description: Submits a sample to VMRay for analysis. (Deprecated)
+ execution: false
+ name: upload_sample
+ dockerimage: demisto/python
+ isfetch: false
+ runonce: false
+ script: ''
+ type: python
+fromversion: 4.0.0
diff --git a/Integrations/VMRay/VMRay_description.md b/Integrations/VMRay/VMRay_description.md
new file mode 100644
index 000000000000..ee7abb31f662
--- /dev/null
+++ b/Integrations/VMRay/VMRay_description.md
@@ -0,0 +1,3 @@
+# How to Get Your VMRay API Key
+1. Access your VMRay environment.
+2. Navigate to **VMRay -> Profile -> API KEYS**.
diff --git a/Integrations/VMRay/VMRay_image.png b/Integrations/VMRay/VMRay_image.png
new file mode 100644
index 000000000000..5c7c03e749bf
Binary files /dev/null and b/Integrations/VMRay/VMRay_image.png differ
diff --git a/Integrations/Vertica/CHANGELOG.md b/Integrations/Vertica/CHANGELOG.md
new file mode 100644
index 000000000000..2ef152a2ed46
--- /dev/null
+++ b/Integrations/Vertica/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.8.2] - 2019-08-22
+-
\ No newline at end of file
diff --git a/Integrations/Vertica/Pipfile b/Integrations/Vertica/Pipfile
new file mode 100644
index 000000000000..6572741a094a
--- /dev/null
+++ b/Integrations/Vertica/Pipfile
@@ -0,0 +1,30 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+flake8 = "*"
+
+[packages]
+certifi = "==2018.11.29"
+chardet = "==3.0.4"
+dateparser = "==0.7.0"
+future = "==0.17.1"
+idna = "==2.8"
+olefile = "==0.46"
+python-dateutil = "==2.8.0"
+pytz = "==2018.9"
+regex = "==2019.2.7"
+requests = "==2.21.0"
+six = "==1.12.0"
+tzlocal = "==1.5.1"
+urllib3 = "==1.24.1"
+vertica-python = "==0.8.2"
+PyYAML = "==3.13"
+
+[requires]
+python_version = "3.7"
diff --git a/Integrations/Vertica/Pipfile.lock b/Integrations/Vertica/Pipfile.lock
new file mode 100644
index 000000000000..7672e9313e27
--- /dev/null
+++ b/Integrations/Vertica/Pipfile.lock
@@ -0,0 +1,357 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "0780434c8ff731ee1f19f6a674744ff8cfbd0b3df3285444a13891956fc07201"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7",
+ "sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033"
+ ],
+ "index": "pypi",
+ "version": "==2018.11.29"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "index": "pypi",
+ "version": "==3.0.4"
+ },
+ "dateparser": {
+ "hashes": [
+ "sha256:940828183c937bcec530753211b70f673c0a9aab831e43273489b310538dff86",
+ "sha256:b452ef8b36cd78ae86a50721794bc674aa3994e19b570f7ba92810f4e0a2ae03"
+ ],
+ "index": "pypi",
+ "version": "==0.7.0"
+ },
+ "future": {
+ "hashes": [
+ "sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8"
+ ],
+ "index": "pypi",
+ "version": "==0.17.1"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "index": "pypi",
+ "version": "==2.8"
+ },
+ "olefile": {
+ "hashes": [
+ "sha256:133b031eaf8fd2c9399b78b8bc5b8fcbe4c31e85295749bb17a87cba8f3c3964"
+ ],
+ "index": "pypi",
+ "version": "==0.46"
+ },
+ "python-dateutil": {
+ "hashes": [
+ "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb",
+ "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"
+ ],
+ "index": "pypi",
+ "version": "==2.8.0"
+ },
+ "pytz": {
+ "hashes": [
+ "sha256:32b0891edff07e28efe91284ed9c31e123d84bea3fd98e1f72be2508f43ef8d9",
+ "sha256:d5f05e487007e29e03409f9398d074e158d920d36eb82eaf66fb1136b0c5374c"
+ ],
+ "index": "pypi",
+ "version": "==2018.9"
+ },
+ "pyyaml": {
+ "hashes": [
+ "sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b",
+ "sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf",
+ "sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a",
+ "sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3",
+ "sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1",
+ "sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1",
+ "sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613",
+ "sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04",
+ "sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f",
+ "sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537",
+ "sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531"
+ ],
+ "index": "pypi",
+ "version": "==3.13"
+ },
+ "regex": {
+ "hashes": [
+ "sha256:017bf6f893db109dc5f82b902019f6fe089e605af5e1f0f6f7271f936b411eb4",
+ "sha256:0969fdb610435d7f49dc5518f7642d7b1319ef19f0c3f1bd4d972afbb9877aa7",
+ "sha256:3679f269790c87bd04e003e60e098b1be5392f17c48d28c2a3b9d16b3dcbca2a",
+ "sha256:37150aee3411f38d08733edb5f3faa656f96ddae00ee7713e01d7423f0f72815",
+ "sha256:4a1a1d963f462c13722b34ef1f82c4707091b0a3fb9b5fd79b6670c38b734095",
+ "sha256:5da76d468d048fb163bcaedd5c0832a3ab95da1034598a6c673bf999ae61f259",
+ "sha256:72dda5123ee45cde10031576710ca0c4972757c94a60b75023a45d8069da34ca",
+ "sha256:7f40b720b81f6614a34a8857d2417fbe619734629f9d0627e2cc9e493979401d",
+ "sha256:a22a11e9dd6e46529dc4409bd6c449f3e7525aa4b0d5e9b23363302cfe4db8e4"
+ ],
+ "index": "pypi",
+ "version": "==2019.2.7"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
+ "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
+ ],
+ "index": "pypi",
+ "version": "==2.21.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "index": "pypi",
+ "version": "==1.12.0"
+ },
+ "tzlocal": {
+ "hashes": [
+ "sha256:4ebeb848845ac898da6519b9b31879cf13b6626f7184c496037b818e238f2c4e"
+ ],
+ "index": "pypi",
+ "version": "==1.5.1"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
+ "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
+ ],
+ "index": "pypi",
+ "version": "==1.24.1"
+ },
+ "vertica-python": {
+ "hashes": [
+ "sha256:5e3ac303d2859135999610fbfb2f1d56c2e98e72ceade215c1f163d4b3fb31e8",
+ "sha256:edbbf977014e639e5c87a5f7592a04eeeb52c748b9dca119ade078035cc15f39"
+ ],
+ "index": "pypi",
+ "version": "==0.8.2"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:6560e1e1749f68c64a4b5dee4e091fce798d2f0d84ebe638cf0e0585a343acf4",
+ "sha256:b65db1bbaac9f9f4d190199bb8680af6f6f84fd3769a5ea883df8a91fe68b4c4"
+ ],
+ "version": "==2.2.5"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548",
+ "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696"
+ ],
+ "index": "pypi",
+ "version": "==3.7.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7",
+ "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db"
+ ],
+ "version": "==0.18"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:3ad685ff8512bf6dc5a8b82ebf73543999b657eded8c11803d9ba6b648986f4d",
+ "sha256:8bb43d1f51ecef60d81854af61a3a880555a14643691cc4b64a6ee269c78f09a"
+ ],
+ "version": "==7.1.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
+ "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
+ ],
+ "version": "==19.0"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:5d77031694a5fb97ea95e828c8d10fc770a1df6eb3906067aaed42201a8a6a09",
+ "sha256:723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1"
+ ],
+ "index": "pypi",
+ "version": "==2.3.1"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a",
+ "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03"
+ ],
+ "version": "==2.4.0"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6ef6d06de77ce2961156013e9dff62f1b2688aa04d0dc244299fe7d67e09370d",
+ "sha256:a736fed91c12681a7b34617c8fcefe39ea04599ca72c608751c31d89579a3f77"
+ ],
+ "index": "pypi",
+ "version": "==5.0.1"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "index": "pypi",
+ "version": "==1.12.0"
+ },
+ "typed-ast": {
+ "hashes": [
+ "sha256:18511a0b3e7922276346bcb47e2ef9f38fb90fd31cb9223eed42c85d1312344e",
+ "sha256:262c247a82d005e43b5b7f69aff746370538e176131c32dda9cb0f324d27141e",
+ "sha256:2b907eb046d049bcd9892e3076c7a6456c93a25bebfe554e931620c90e6a25b0",
+ "sha256:354c16e5babd09f5cb0ee000d54cfa38401d8b8891eefa878ac772f827181a3c",
+ "sha256:4e0b70c6fc4d010f8107726af5fd37921b666f5b31d9331f0bd24ad9a088e631",
+ "sha256:630968c5cdee51a11c05a30453f8cd65e0cc1d2ad0d9192819df9978984529f4",
+ "sha256:66480f95b8167c9c5c5c87f32cf437d585937970f3fc24386f313a4c97b44e34",
+ "sha256:71211d26ffd12d63a83e079ff258ac9d56a1376a25bc80b1cdcdf601b855b90b",
+ "sha256:95bd11af7eafc16e829af2d3df510cecfd4387f6453355188342c3e79a2ec87a",
+ "sha256:bc6c7d3fa1325a0c6613512a093bc2a2a15aeec350451cbdf9e1d4bffe3e3233",
+ "sha256:cc34a6f5b426748a507dd5d1de4c1978f2eb5626d51326e43280941206c209e1",
+ "sha256:d755f03c1e4a51e9b24d899561fec4ccaf51f210d52abdf8c07ee2849b212a36",
+ "sha256:d7c45933b1bdfaf9f36c579671fec15d25b06c8398f113dab64c18ed1adda01d",
+ "sha256:d896919306dd0aa22d0132f62a1b78d11aaf4c9fc5b3410d3c666b818191630a",
+ "sha256:ffde2fbfad571af120fcbfbbc61c72469e72f550d676c3342492a9dfdefb8f12"
+ ],
+ "markers": "implementation_name == 'cpython'",
+ "version": "==1.4.0"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/Vertica/Vertica.py b/Integrations/Vertica/Vertica.py
new file mode 100644
index 000000000000..ed3012000b3d
--- /dev/null
+++ b/Integrations/Vertica/Vertica.py
@@ -0,0 +1,167 @@
+import demistomock as demisto
+from CommonServerPython import *
+from datetime import datetime
+
+# fix for: https://github.com/vertica/vertica-python/issues/296
+# (we need this for running in non-root where getpass will fail as uid doesn't map to a user name)
+import getpass
+
+
+class FixGetPass():
+ def __init__(self):
+ self.getpass_getuser_org = getpass.getuser
+
+ def getuser_no_fail():
+ # getuser() fails on some systems. Provide a sane default.
+ user = 'vertica'
+ try:
+ if self.getpass_getuser_org:
+ user = self.getpass_getuser_org()
+ except (NameError, KeyError):
+ pass
+ return user
+ getpass.getuser = getuser_no_fail
+
+ def __del__(self):
+ if self.getpass_getuser_org and getpass:
+ getpass.getuser = self.getpass_getuser_org
+
+
+_fix_getpass = FixGetPass()
+
+''' IMPORTS '''
+
+import vertica_python # noqa: E402
+
+''' HELPER FUNCTIONS '''
+
+
+def convert_datetime_to_string(v):
+ """
+ Parses datetime object into string
+ """
+ if isinstance(v, datetime):
+ return v.strftime('%Y-%m-%dT%H:%M:%S')
+ return v
+
+
+def connect_db():
+ USERNAME = demisto.params().get('credentials').get('identifier')
+ PASSWORD = demisto.params().get('credentials').get('password')
+ DATABASE = demisto.params().get('database')
+ PORT = int(demisto.params().get('port', 5433))
+    url = demisto.params()['url']
+    SERVER = url[:-1] if (url and url.endswith('/')) else url
+ DB_PARAMS = {
+ 'host': SERVER,
+ 'port': PORT,
+ 'user': USERNAME,
+ 'password': PASSWORD,
+ 'database': DATABASE,
+ 'connection_timeout': 5
+ }
+ try:
+ connection = vertica_python.connect(**DB_PARAMS)
+ return connection
+ except vertica_python.errors.ConnectionError as err:
+ return_error('Could not connect to DB, re-check DB params. Error: {}'.format(err))
+
+
+''' COMMANDS + QUERY FUNCTIONS '''
+
+
+def test_module(cursor):
+ """
+ Performs basic query on default system tables
+ """
+ cursor.execute('SELECT * FROM system_tables ORDER BY table_schema, table_name LIMIT 2;')
+ cursor.fetchall()
+ if cursor.rowcount == 0:
+ return_error('No results were returned from the DB.')
+ demisto.results('ok')
+
+
+def query_command(cursor):
+ """
+ Execute a query against the DB
+ """
+ # Init main vars
+ contents = [] # type: list
+ context = {}
+ title = ''
+ human_readable = 'No results found'
+ # Get arguments from user
+ query = demisto.args().get('query')
+ limit = int(demisto.args().get('limit', 50))
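+    # a limit of 0 is falsy, so the slice below is skipped and all rows are returned (matches the command's help text)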
+ # Query and get raw response (list of ordered dicts)
+ rows = query_request(query, cursor)
+
+ # Parse response into context & content entries
+ if rows:
+ if limit:
+ rows = rows[:limit]
+
+ for i, row in enumerate(rows):
+ rows[i] = {underscoreToCamelCase(k): convert_datetime_to_string(v) for k, v in row.items()}
+
+ contents = rows
+ context['Vertica(val.Query && val.Query === obj.Query)'] = {
+ 'Query': query,
+ 'Row': rows
+ }
+
+ title = 'Vertica Query Results'
+ human_readable = tableToMarkdown(title, contents, removeNull=True)
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['json'],
+ 'Contents': contents,
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': human_readable,
+ 'EntryContext': context
+ })
+
+
+def query_request(query, cursor):
+ try:
+ cursor.execute(query)
+    except vertica_python.errors.MissingRelation as err:
+        return_error('Error while executing query: {}'.format(err))
+ rows = cursor.fetchall()
+    # rowcount of 0 means no results; -1 means the count is unknown, so avoid returning unexpected data
+    if cursor.rowcount in {0, -1}:
+        return False
+    return rows
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+
+def main():
+ LOG('Command being called is %s' % (demisto.command()))
+ connection = None
+ try:
+ connection = connect_db()
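+        # a 'dict' cursor returns each row as a dict keyed by column name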
+ cursor = connection.cursor('dict')
+ if demisto.command() == 'test-module':
+ test_module(cursor)
+ elif demisto.command() == 'vertica-query':
+ query_command(cursor)
+ # Log exceptions
+ except Exception as e:
+ LOG(e)
+ LOG.print_log()
+ raise
+ finally:
+ if connection is not None:
+ try:
+ connection.close()
+ except Exception as ex:
+ demisto.error("Vertica failed connection.close(): {}".format(ex))
+
+
+# python2 uses __builtin__ python3 uses builtins
+if __name__ == "__builtin__" or __name__ == "builtins":
+ main()
diff --git a/Integrations/Vertica/Vertica.yml b/Integrations/Vertica/Vertica.yml
new file mode 100644
index 000000000000..d18a8dd8777a
--- /dev/null
+++ b/Integrations/Vertica/Vertica.yml
@@ -0,0 +1,54 @@
+commonfields:
+ id: Vertica
+ version: -1
+name: Vertica
+display: Vertica
+category: Database
+description: Analytic database management software
+configuration:
+- display: Host (myhost.example.com)
+ name: url
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Database (mydb)
+  name: database
+ defaultvalue: ""
+ type: 0
+ required: true
+- display: Username
+ name: credentials
+ defaultvalue: ""
+ type: 9
+ required: true
+- display: Port
+ name: port
+ defaultvalue: "5433"
+ type: 0
+ required: false
+script:
+ script: ''
+ type: python
+ commands:
+ - name: vertica-query
+ arguments:
+ - name: query
+ required: true
+ description: A SQL query to perform on the Vertica database.
+ - name: limit
+ description: The maximum number of results to be returned from the query. (Use
+ 0 for all results)
+ defaultValue: "50"
+ outputs:
+ - contextPath: Vertica.Query
+ description: The original query.
+ type: string
+ - contextPath: Vertica.Row
+ description: The content of rows.
+ type: string
+ description: Executes a query on the Vertica database.
+ dockerimage: demisto/vertica:1.0.0.150
+ runonce: false
+ subtype: python3
+tests:
+- Vertica Test
diff --git a/Integrations/Vertica/Vertica_image.png b/Integrations/Vertica/Vertica_image.png
new file mode 100644
index 000000000000..4d4d6c3b8764
Binary files /dev/null and b/Integrations/Vertica/Vertica_image.png differ
diff --git a/Integrations/Vertica/Vertica_test.py b/Integrations/Vertica/Vertica_test.py
new file mode 100644
index 000000000000..364e1b7799f9
--- /dev/null
+++ b/Integrations/Vertica/Vertica_test.py
@@ -0,0 +1,26 @@
+import demistomock as demisto
+from Vertica import connect_db
+
+RETURN_ERROR_TARGET = 'Vertica.return_error'
+
+
+def test_connect_db_fail(mocker):
+ bad_connection_params = {
+ "credentials": {
+ "identifier": "stam",
+ "password": "stam"
+ },
+ "database": "bad",
+ "url": "127.0.0.1"
+ }
+ mocker.patch.object(demisto, 'params',
+ return_value=bad_connection_params)
+ return_error_mock = mocker.patch(RETURN_ERROR_TARGET)
+ # validate our mock of params
+ assert demisto.params().get('url') == '127.0.0.1'
+ connect_db()
+ assert return_error_mock.call_count == 1
+ # call_args last call with a tuple of args list and kwargs
+ err_msg = return_error_mock.call_args[0][0]
+ assert len(err_msg) < 150
+ assert 'Could not connect to DB' in err_msg
diff --git a/Integrations/VirusTotal-Private_API/CHANGELOG.md b/Integrations/VirusTotal-Private_API/CHANGELOG.md
new file mode 100644
index 000000000000..a8fe36f4ba77
--- /dev/null
+++ b/Integrations/VirusTotal-Private_API/CHANGELOG.md
@@ -0,0 +1,8 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+ - Added context outputs to match context standards, enabling outputs to be used for field mapping.
+
+## [19.8.2] - 2019-08-22
+ -
\ No newline at end of file
diff --git a/Integrations/VirusTotal-Private_API/VirusTotal-Private_API.py b/Integrations/VirusTotal-Private_API/VirusTotal-Private_API.py
new file mode 100644
index 000000000000..12108c9f065f
--- /dev/null
+++ b/Integrations/VirusTotal-Private_API/VirusTotal-Private_API.py
@@ -0,0 +1,1149 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+import requests
+import json
+import time
+import sys
+
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+# Define utf8 as default encoding
+reload(sys)
+sys.setdefaultencoding('utf8') # pylint: disable=E1101
+
+''' GLOBAL VARS '''
+SERVER_URL = 'https://www.virustotal.com/vtapi/v2/'
+API_KEY = demisto.params()['APIKey']
+
+USE_SSL = False if demisto.params().get('insecure') else True
+PREFERRED_VENDORS = demisto.params().get("preferredVendors", None)
+PREFERRED_VENDORS_THRESHOLD = demisto.params().get("preferredVendorsThreshold", None)
+
+FULL_RESPONSE = demisto.params().get("fullResponseGlobal", False)
+
+DEFAULT_HEADERS = {
+ "Accept-Encoding": "gzip, deflate",
+ "User-Agent": "gzip, My Python requests library example client or username"
+}
+
+''' HELPER FUNCTIONS '''
+
+
+def is_enough_preferred_vendors(scan_results):
+ # Flag the misconfiguration first; otherwise the early return below would mask it
+ if PREFERRED_VENDORS and not PREFERRED_VENDORS_THRESHOLD:
+ return_error("Error: If you entered Preferred Vendors you must also enter Preferred Vendors Threshold")
+ if not (PREFERRED_VENDORS and PREFERRED_VENDORS_THRESHOLD):
+ return False
+ if "scans" not in scan_results:
+ return False
+
+ counter_of_malicious_detections = 0
+
+ vendors_scans_dict = scan_results["scans"]
+ list_of_preferred_vendors = PREFERRED_VENDORS.split(',')
+
+ for i in range(len(list_of_preferred_vendors)):
+ list_of_preferred_vendors[i] = list_of_preferred_vendors[i].lower().strip()
+
+ for vendor_name in vendors_scans_dict:
+ cur_vendor_scan = vendors_scans_dict[vendor_name]
+ vendor_name_in_lowercase = vendor_name.lower()
+
+ if vendor_name_in_lowercase in list_of_preferred_vendors:
+ if cur_vendor_scan.get("detected"):
+ counter_of_malicious_detections += 1
+
+ return int(PREFERRED_VENDORS_THRESHOLD) <= counter_of_malicious_detections
+
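+# Example (hypothetical vendors): with PREFERRED_VENDORS "VendorA,VendorB" and
+# PREFERRED_VENDORS_THRESHOLD "1", either of those vendors reporting "detected": true
+# in scan_results["scans"] is enough for this check to return True.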
+
+def http_request(method, url_suffix, params_dict, headers):
+ req_params = {
+ 'apikey': API_KEY
+ }
+ if params_dict is not None:
+ req_params.update(params_dict)
+
+ url = SERVER_URL + url_suffix
+
+ LOG('running %s request with url=%s\theaders=%s\nparams=%s' % (method, url, headers, json.dumps(req_params)))
+
+ try:
+ res = requests.request(method,
+ url,
+ verify=USE_SSL,
+ params=req_params,
+ headers=headers
+ )
+ res.raise_for_status()
+
+ if res.status_code == 200:
+ return res.json()
+ # A 204 HTTP status code is returned when the API rate limit has been exceeded
+ elif res.status_code == 204:
+ return_error("You've reached your API call quota. Contact your VirusTotal representative.")
+
+ except Exception as e:
+ error_message = str(e)
+ error_message = re.sub('apikey=[a-zA-Z0-9]+', 'apikey=*apikey*', error_message)
+ LOG(error_message)
+ raise type(e)(error_message)
+
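+# Note: the re.sub above scrubs the API key from exception text before logging, e.g.
+# a hypothetical '...?apikey=abc123' is recorded as '...?apikey=*apikey*'.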
+
+def create_scans_table(scans):
+ """
+ Returns a table with the scan result for each vendor
+ """
+
+ scans_table = [] # type:ignore
+ positives_scans_table = []
+ negative_scans_table = []
+ for scan in scans:
+ dict_for_table = {
+ "Source": scan,
+ "Detected": scans.get(scan).get('detected', None),
+ "Result": scans.get(scan).get('result', None),
+ "Update": scans.get(scan).get('update', None),
+ "Details": scans.get(scan).get('detail', None)
+ }
+ if (dict_for_table['Detected'] is not None and dict_for_table['Detected']):
+ positives_scans_table.append(dict_for_table)
+ else:
+ negative_scans_table.append(dict_for_table)
+
+ positives_scans_table = sorted(positives_scans_table, key=lambda scan: scan['Source'])
+ negative_scans_table = sorted(negative_scans_table, key=lambda scan: scan['Source'])
+
+ scans_table = positives_scans_table + negative_scans_table
+ return scans_table
+
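+# Example (hypothetical input): {'VendorA': {'detected': True, 'result': 'Trojan.Gen'}}
+# becomes [{'Source': 'VendorA', 'Detected': True, 'Result': 'Trojan.Gen', ...}], with
+# detecting vendors sorted ahead of non-detecting ones.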
+
+def create_file_output(file_hash, threshold, vt_response, short_format):
+ ec = {} # type: dict
+ md = ''
+
+ positives = demisto.get(vt_response, 'positives')
+ ec['DBotScore'] = []
+
+ md += '## VirusTotal Hash Reputation for: ' + str(vt_response.get('resource')) + '\n'
+ md += 'Scan ID: **' + str(vt_response.get('scan_id')) + '**\n'
+ md += 'Scan date: **' + str(vt_response.get('scan_date')) + '**\n'
+ md += 'Detections / Total: **' + str(positives) + '/' + str(vt_response.get('total')) + '**\n'
+ md += 'Resource: [' + str(vt_response.get('resource')) + '](' + str(vt_response.get('resource')) + ')\n'
+ md += 'VT Link: [' + str(vt_response.get('permalink')) + '](' + str(vt_response.get('permalink')) + ')\n'
+ dbotScore = 0
+
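+ # Score mapping used below: positives >= threshold (or enough preferred vendors)
+ # -> 3 (malicious); positives >= threshold / 2 -> 2 (suspicious); otherwise 1 (benign).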
+ if (positives >= threshold or is_enough_preferred_vendors(vt_response)):
+ ec.update({
+ outputPaths['file']: {
+ 'MD5': vt_response.get('md5'),
+ 'SHA1': vt_response.get('sha1'),
+ 'SHA256': vt_response.get('sha256'),
+ 'Malicious': {
+ 'Vendor': 'VirusTotal - Private API',
+ 'Detections': positives,
+ 'TotalEngines': demisto.get(vt_response, 'total')
+ },
+ }
+ })
+ if vt_response.get('ssdeep', False):
+ ec[outputPaths['file']].update({'SSDeep': vt_response.get('ssdeep')})
+ if vt_response.get('type', False):
+ ec[outputPaths['file']].update({'Type': vt_response.get('type')})
+ if vt_response.get('size', False):
+ ec[outputPaths['file']].update({'Size': vt_response.get('size')})
+ dbotScore = 3
+ elif (positives >= threshold / 2):
+ dbotScore = 2
+ else:
+ dbotScore = 1
+
+ ec['DBotScore'].append( # type:ignore
+ {'Indicator': file_hash, 'Type': 'hash', 'Vendor': 'VirusTotal - Private API', 'Score': dbotScore})
+ md += 'MD5: **' + vt_response.get('md5') + '**\n'
+ md += 'SHA1: **' + vt_response.get('sha1') + '**\n'
+ md += 'SHA256: **' + vt_response.get('sha256') + '**\n'
+
+ if (vt_response.get('scans', False) and not short_format):
+ scans = vt_response.pop('scans')
+ scans_table = create_scans_table(scans)
+ scans_table_md = tableToMarkdown('Scans', scans_table)
+ md += scans_table_md
+ md += '\n'
+ if (ec.get(outputPaths['file'], False)):
+ ec[outputPaths['file']]['VirusTotal'] = {
+ 'Scans': scans_table
+ }
+ else:
+ ec.update({
+ outputPaths['file']: {
+ 'MD5': vt_response.get('md5'),
+ 'VirusTotal': {
+ 'Scans': scans_table
+ },
+ }
+ })
+
+ if vt_response.get('tags', False):
+ ec[outputPaths['file']]['VirusTotal'].update({'Tags': vt_response.get('tags')})
+ ec[outputPaths['file']].update({'Tags': vt_response.get('tags')})
+ if vt_response.get('magic', False):
+ ec[outputPaths['file']]['VirusTotal'].update({'MagicLiteral': vt_response.get('magic')})
+ if vt_response.get('first_seen', False):
+ ec[outputPaths['file']]['VirusTotal'].update({'FirstSeen': vt_response.get('first_seen')})
+ if vt_response.get('community_reputation', False):
+ ec[outputPaths['file']]['VirusTotal'].update({'CommunityReputation': vt_response.get('community_reputation')})
+ if vt_response.get('community_comments', False):
+ ec[outputPaths['file']]['VirusTotal'].update({'CommunityComments': vt_response.get('community_comments')})
+ if vt_response.get('authentihash', False):
+ ec[outputPaths['file']]['VirusTotal'].update({'AuthentiHash': vt_response.get('authentihash')})
+ ec[outputPaths['file']].setdefault('Signature', {}).update(
+ {'Authentihash': vt_response.get('authentihash')})
+ if vt_response.get('imphash', False):
+ ec[outputPaths['file']]['VirusTotal'].update({'ImpHash': vt_response.get('imphash')})
+
+ entry = {
+ 'Type': entryTypes['note'],
+ 'Contents': vt_response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': ec
+ }
+ return entry
+
+
+''' COMMANDS FUNCTIONS '''
+
+
+def check_file_behaviour(file_hash):
+ """
+ Returns the file execution report.
+ """
+
+ params = {
+ 'hash': file_hash
+ }
+
+ api_endpoint = 'file/behaviour'
+ return http_request('GET', api_endpoint, params, DEFAULT_HEADERS)
+
+
+def check_file_behaviour_command():
+ """
+ Corresponds to the 'vt-private-check-file-behaviour' command. Retrieves a report about the execution of a file.
+ """
+ # variables
+ args = demisto.args()
+ file_hash = args.get('resource')
+ full_response = FULL_RESPONSE or args.get('fullResponse', None) == 'true'
+ if (full_response):
+ max_len = 1000
+ else:
+ max_len = 50
+ md = 'We found the following data about hash ' + file_hash + ':\n'
+ # VT response
+ response = check_file_behaviour(file_hash)
+
+ if (response.get('response_code', None) == 0):
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'EntryContext': {
+ "DBotScore": {
+ 'Indicator': file_hash,
+ 'Type': 'hash',
+ 'Vendor': 'VirusTotal - Private API',
+ 'Score': 0
+ }
+ },
+ 'HumanReadable': "A report wasn't found for file "
+ + file_hash + ". Virus Total returned the following response: " + json.dumps(
+ response.get('verbose_msg'))
+ }
+
+ # data processing
+
+ # network data contains all the communication data
+ network_data = response.get('network', None)
+
+ hosts = network_data.get('hosts', None)
+ if (hosts is not None):
+ hosts = list(set(hosts))[:max_len]
+ md += tableToMarkdown('Hosts that the hash communicates with are:', [{'Host': host} for host in hosts])
+
+ ips_list = []
+ domains_list = []
+ urls_list = []
+
+ udp_communication = network_data.get('udp', None)
+ if (udp_communication is not None):
+ for entry in udp_communication:
+ ips_list.append(entry.get('dst', None))
+
+ http_communication = network_data.get('http', None)
+ if (http_communication is not None):
+ for entry in http_communication:
+ urls_list.append(entry.get('uri', None))
+ domains_list.append(entry.get('host', None))
+
+ tcp_communication = network_data.get('tcp', None)
+ if (tcp_communication is not None):
+ for entry in tcp_communication:
+ ips_list.append(entry.get('dst', None))
+
+ dns_communication = network_data.get('dns', None)
+ if (dns_communication is not None):
+ for entry in dns_communication:
+ ips_list.append(entry.get('ip', None))
+ domains_list.append(entry.get('hostname', None))
+
+ if (len(ips_list) > 0):
+ ips_list = list(set(ips_list))[:max_len]
+ md += tableToMarkdown('IPs that the hash communicates with are:', [{'IP': ip} for ip in ips_list])
+
+ if (len(domains_list) > 0):
+ domains_list = list(set(domains_list))[:max_len]
+ md += tableToMarkdown('Domains that the hash communicates with are:',
+ [{'Domain': domain} for domain in domains_list])
+
+ if (len(urls_list) > 0):
+ urls_list = list(set(urls_list))[:max_len]
+ md += tableToMarkdown('URLs that the hash communicates with are:', [{'URL': url} for url in urls_list])
+
+ files_data, keys_data, mutex_data = None, None, None
+
+ behavior_data = response.get('behavior', None)
+ if behavior_data is not None:
+ summary_data = behavior_data.get('summary', None)
+ if summary_data is not None:
+ files_data = summary_data.get('files', None)
+ keys_data = summary_data.get('keys', None)
+ mutex_data = summary_data.get('mutexes', None)
+
+ if (files_data is not None):
+ files_data = list(set(files_data))[:max_len]
+ md += tableToMarkdown('Files that are related the hash', [{'File': file} for file in files_data])
+
+ if (keys_data is not None):
+ keys_data = list(set(keys_data))[:max_len]
+ md += tableToMarkdown('Registry Keys that are related to the hash', [{'Key': k} for k in keys_data])
+
+ if (mutex_data is not None):
+ mutex_data = list(set(mutex_data))[:max_len]
+ md += tableToMarkdown('Opened mutexes that are related to the hash', [{'Mutex': m} for m in mutex_data])
+
+ hash_length = len(file_hash)
+ hashtype_dic = None
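+ # The hash type is inferred from the hex-digest length: 32 -> MD5, 40 -> SHA1,
+ # anything else (typically 64) -> SHA256.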
+ if (hash_length == 32):
+ hashtype_dic = {
+ "MD5": file_hash
+ }
+ elif (hash_length == 40):
+ hashtype_dic = {
+ "SHA1": file_hash
+ }
+ else:
+ hashtype_dic = {
+ "SHA256": file_hash
+ }
+
+ hash_ec = {
+ "VirusTotal": {
+ 'RelatedDomains': domains_list,
+ 'RelatedURLs': urls_list,
+ 'RelatedIPs': ips_list,
+ 'RelatedHosts': hosts,
+ 'RelatedFiles': files_data,
+ 'RelatedRegistryKeys': keys_data,
+ 'RelatedMutexes': mutex_data
+ }
+ }
+
+ hash_ec.update(hashtype_dic)
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': {
+ outputPaths['file']: hash_ec,
+ }
+ }
+
+
+def get_domain_report(domain):
+ """
+ Returns the domain report.
+ """
+
+ params = {
+ 'domain': domain
+ }
+
+ api_endpoint = 'domain/report'
+ return http_request('GET', api_endpoint, params, DEFAULT_HEADERS)
+
+
+def get_domain_report_command():
+ """
+ Corresponds to the 'vt-private-get-domain-report' command. Retrieves a report about a domain.
+ """
+
+ # variables
+ args = demisto.args()
+ domain = args['domain']
+ threshold = int(args.get('threshold', None) or demisto.params().get('domainThreshold', None) or 10)
+ full_response = FULL_RESPONSE or args.get('fullResponse', None) == 'true'
+ if (full_response):
+ max_len = 1000
+ else:
+ max_len = 50
+ md = ''
+
+ # VT Response
+ response = get_domain_report(domain)
+ if (response.get('response_code') == -1):
+ return "Invalid domain"
+ elif (response.get('response_code') == 0):
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'EntryContext': {
+ "DBotScore": {
+ 'Indicator': domain,
+ 'Type': 'domain',
+ 'Vendor': 'VirusTotal - Private API',
+ 'Score': 0
+ }
+ },
+ 'HumanReadable': "Domain "
+ + domain
+ + " not in Virus Total's dataset. Virus Total returned the following response: "
+ + json.dumps(response.get('verbose_msg'))
+ }
+
+ communicating_hashes = response.get('detected_communicating_samples', None)
+ communicating_malware_hashes = []
+ if communicating_hashes:
+ for d_hash in communicating_hashes:
+ positives = d_hash.get('positives')
+ if (positives >= threshold):
+ communicating_malware_hashes.append(d_hash)
+
+ communicating_malware_hashes = communicating_malware_hashes[:max_len]
+ md += tableToMarkdown("Latest detected files that communicated with " + domain, communicating_malware_hashes)
+
+ downloaded_hashes = response.get('detected_downloaded_samples', None)
+ downloaded_malware_hashes = []
+ if downloaded_hashes:
+ for d_hash in downloaded_hashes:
+ positives = d_hash.get('positives')
+ if (positives >= threshold):
+ downloaded_malware_hashes.append(d_hash)
+ downloaded_malware_hashes = downloaded_malware_hashes[:max_len]
+ md += tableToMarkdown("Latest detected files that were downloaded from " + domain, downloaded_malware_hashes)
+
+ resolutions = response.get('resolutions', None)
+ resolutions_list = []
+ if resolutions:
+ for res in resolutions:
+ resolutions_list.append(res)
+ resolutions_list = resolutions_list[:max_len]
+ md += tableToMarkdown(domain + " has been resolved to the following IP addresses:", resolutions_list)
+
+ whois = response.get('whois', None)
+ if whois is not None:
+ md += "## Whois analysis: \n"
+ md += whois + '\n'
+
+ subdomains = response.get('subdomains', None)
+ if subdomains is not None:
+ subdomains = list(set(subdomains))[:max_len]
+ md += tableToMarkdown("Observed subdomains", [{'Domain': d} for d in subdomains])
+
+ categories = response.get('categories', None)
+ if categories is not None:
+ categories = list(set(categories))[:max_len]
+
+ domain_ec = {
+ 'DownloadedHashes': downloaded_malware_hashes,
+ 'CommunicatingHashes': communicating_malware_hashes,
+ 'Resolutions': resolutions_list,
+ 'Whois': whois,
+ 'Subdomains': subdomains,
+ 'Categories': categories
+ }
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': {
+ outputPaths['domain']: {
+ "Name": domain,
+ "VirusTotal": domain_ec
+ }
+ }
+ }
+
+
+def get_file_report(file_hash, all_info):
+ """
+ Returns the file execution report.
+ """
+
+ params = {
+ 'resource': file_hash,
+ 'allinfo': all_info
+ }
+
+ api_endpoint = 'file/report'
+ return http_request('GET', api_endpoint, params, DEFAULT_HEADERS)
+
+
+def get_file_report_command():
+ """
+ Corresponds to the 'vt-private-get-file-report' command. Retrieves a scan report for a given file.
+ """
+
+ args = demisto.args()
+ file_hash = args.get('resource')
+ short_format = args.get('shortFormat', None) == 'true'
+ all_info = args.get('allInfo', None)
+ all_info = 1 if all_info == 'true' else 0
+ threshold = int(args.get('threshold', None) or demisto.params().get('fileThreshold', None) or 10)
+
+ response = get_file_report(file_hash, all_info)
+
+ if (response.get('response_code', None) == 0):
+ return "A report wasn't found. Virus Total returned the following response: " + json.dumps(
+ response.get('verbose_msg'))
+
+ del response['response_code']
+
+ output = create_file_output(file_hash, threshold, response, short_format)
+
+ return output
+
+
+def get_url_report(url, all_info):
+ """
+ Returns a report about a URL.
+ """
+
+ params = {
+ 'resource': url,
+ 'allinfo': all_info,
+ 'scan': 1
+ }
+
+ api_endpoint = 'url/report'
+ return http_request('GET', api_endpoint, params, DEFAULT_HEADERS)
+
+
+def get_url_report_command():
+ """
+ Corresponds to the 'vt-private-get-url-report' command. Retrieves a report about a URL.
+ """
+ args = demisto.args()
+ urls = argToList(args.get('resource'))
+ all_info = 1 if args.get('allInfo', None) == 'true' else 0
+ short_format = args.get('shortFormat', None) == 'true'
+ retries = int(args.get('retries', 2))
+ full_response = FULL_RESPONSE or args.get('fullResponse', None) == 'true'
+ threshold = int(args.get('threshold', None) or demisto.params().get('urlThreshold', None) or 10)
+ scan_finish_time_in_seconds = int(args.get('retry_time', 6))
+ if (full_response):
+ max_len = 1000
+ else:
+ max_len = 50
+
+ responses_dict = get_url_reports_with_retries(urls, all_info, retries, scan_finish_time_in_seconds)
+ md = ''
+ ec = { # type:ignore
+ 'DBotScore': [],
+ outputPaths['url']: [],
+ }
+ for url, res in responses_dict.iteritems():
+ url_md, url_ec, dbot_score = create_url_report_output(url, res, threshold, max_len, short_format)
+ md += url_md
+ ec['DBotScore'].append(dbot_score)
+ ec[outputPaths['url']].append(url_ec)
+ if not md:
+ md = "No scans were completed in the elapsed time. Please run the command again in a few seconds."
+
+ completed_responses = responses_dict.values()
+ if len(completed_responses) == 1:
+ # This is done for backward compatibility
+ completed_responses = completed_responses[0]
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': completed_responses,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': ec
+ }
+
+
+def get_url_reports_with_retries(urls, all_info, retries_left, scan_finish_time_in_seconds):
+ """
+ Returns a dict of responses keyed by the URL each response relates to.
+ """
+ requests_responses_dict = {}
+
+ for url in urls:
+ response = get_url_report(url, all_info)
+ if (response.get('response_code', None) == -1):
+ return_error("Invalid url provided: {}.".format(url))
+
+ if is_url_response_complete(response):
+ requests_responses_dict[url] = response
+
+ urls_scanned_count = len(requests_responses_dict)
+ urls_count = len(urls)
+ while urls_count > urls_scanned_count and retries_left > 0:
+ retries_left -= 1
+ # Some URL scans have not finished; wait long enough for them to complete, then try again
+ time.sleep(scan_finish_time_in_seconds)
+ for url in urls:
+ if url not in requests_responses_dict:
+ response = get_url_report(url, all_info)
+ if is_url_response_complete(response):
+ requests_responses_dict[url] = response
+ urls_scanned_count = len(requests_responses_dict)
+
+ return requests_responses_dict
+
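+# Sketch (hypothetical values): get_url_reports_with_retries(['https://example.com'], 0, 2, 6)
+# polls each still-pending report up to 2 more times, sleeping 6 seconds between rounds.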
+
+def is_url_response_complete(res):
+ return bool(res.get('total'))
+
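+# A report is treated as complete once 'total' (the number of scanners) is present;
+# still-queued scans come back without it.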
+
+def create_url_report_output(url, response, threshold, max_len, short_format):
+ """
+ Returns 3 results:
+ 1. human readable string.
+ 2. url entry context.
+ 3. dbot entry context.
+ """
+ positives = demisto.get(response, 'positives')
+ md = ''
+ md += '## VirusTotal URL report for: ' + url + '\n'
+ md += 'Scan ID: **' + str(response.get('scan_id', '')) + '**\n'
+ md += 'Scan date: **' + str(response.get('scan_date', '')) + '**\n'
+ md += 'Detections / Total: **' + str(positives) + '/' + str(response.get('total', '')) + '**\n'
+ md += 'Resource: [' + str(response.get('resource')) + '](' + str(response.get('resource')) + ')\n'
+ md += 'VT Link: [' + str(response.get('permalink')) + '](' + str(response.get('permalink')) + ')\n'
+ dbotScore = 0
+ ec_url = {}
+ if (positives >= threshold or is_enough_preferred_vendors(response)):
+ ec_url.update({
+ 'Data': url,
+ 'Malicious': {
+ 'Description': 'Detections / Total: ' + str(positives) + ' / ' + str(response.get('total', '')),
+ 'Vendor': 'VirusTotal - Private API'
+ },
+ })
+ dbotScore = 3
+ elif (positives >= threshold / 2):
+ dbotScore = 2
+ else:
+ dbotScore = 1
+
+ ec_dbot = {
+ 'Indicator': url,
+ 'Type': 'url',
+ 'Vendor': 'VirusTotal - Private API',
+ 'Score': dbotScore
+ }
+ if (dbotScore < 3):
+ ec_url.update({'Data': url})
+
+ additional_info = response.get('additional_info', None)
+ if additional_info is not None:
+ resolution = additional_info.get('resolution', None)
+ if resolution is not None:
+ md += 'IP address resolution for this domain is: ' + resolution + '\n'
+ if (ec_url):
+ ec_url['VirusTotal'] = {
+ 'Resolutions': resolution[:max_len]
+ }
+ else:
+ ec_url.update({
+ 'VirusTotal': {'Resolutions': resolution[:max_len]},
+ 'Data': url
+ })
+
+ scans = response.get('scans', None)
+
+ if scans is not None and not short_format:
+ scans_table = create_scans_table(scans)
+ scans_table_md = tableToMarkdown('Scans', scans_table)
+ if (ec_url.get('VirusTotal', False)):
+ ec_url['VirusTotal']['Scans'] = scans_table
+ else:
+ ec_url['VirusTotal'] = {
+ 'Scans': scans_table
+ }
+ md += scans_table_md
+
+ dropped_files = response.get('filescan_id', None)
+
+ if dropped_files is not None:
+ if (ec_url.get('VirusTotal', False)):
+ ec_url['VirusTotal']['DroppedFiles'] = dropped_files
+ else:
+ ec_url['VirusTotal'] = {
+ 'DroppedFiles': dropped_files
+ }
+
+ return md, ec_url, ec_dbot
+
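+# Callers aggregate the (markdown, url_context, dbot_context) tuple per URL into a
+# single war-room entry (see get_url_report_command above).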
+
+def get_ip_report(ip):
+ """
+ Returns an ip report.
+ """
+
+ params = {
+ 'ip': ip
+ }
+
+ api_endpoint = 'ip-address/report'
+ return http_request('GET', api_endpoint, params, DEFAULT_HEADERS)
+
+
+def check_detected_urls_threshold(detected_urls, threshold):
+ for url in detected_urls:
+ if url.get("positives") >= threshold:
+ return True
+ return False
+
+
+def get_ip_report_command():
+ """
+ Corresponds to the 'vt-private-get-ip-report' command. Retrieves a report about an IP address.
+ """
+
+ args = demisto.args()
+ ip = args['ip']
+ threshold = int(args.get('threshold', None) or demisto.params().get('ipThreshold', None) or 10)
+ full_response = FULL_RESPONSE or args.get('fullResponse', None) == 'true'
+ if (full_response):
+ max_len = 1000
+ else:
+ max_len = 50
+
+ response = get_ip_report(ip)
+
+ if (response.get('response_code') == -1):
+ return "Invalid IP address "
+ elif (response.get('response_code') == 0):
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'EntryContext': {
+ "DBotScore": {
+ 'Indicator': ip,
+ 'Type': 'ip',
+ 'Vendor': 'VirusTotal - Private API',
+ 'Score': 0
+ }
+ },
+ 'HumanReadable': "IP "
+ + ip
+ + "not in Virus Total's dataset. Virus Total returned the following response: "
+ + json.dumps(response.get('verbose_msg'))
+ }
+
+ ec = {} # type: dict
+ md = '## VirusTotal IP report for: ' + ip + '\n'
+ asn = str(response.get('asn', None)) if response.get('asn', None) else None
+ if asn is not None:
+ md += 'ASN: **' + asn + ' (' + str(response.get('as_owner', '')) + ')**\n'
+ md += 'Country: **' + response.get('country', '') + '**\n'
+
+ resolutions = response.get('resolutions', None)
+
+ if resolutions:
+ resolutions = resolutions[:max_len]
+ md += tableToMarkdown("The following domains resolved to the given IP address:", resolutions)
+
+ detected_urls = response.get('detected_urls', None)
+
+ if detected_urls:
+ detected_urls = detected_urls[:max_len]
+ md += tableToMarkdown(
+ "Latest URLs hosted in this IP address detected by at least one URL scanner or malicious URL dataset:",
+ detected_urls)
+
+ detected_downloaded_samples = response.get('detected_downloaded_samples', None)
+
+ if detected_downloaded_samples:
+ detected_downloaded_samples = detected_downloaded_samples[:max_len]
+ md += tableToMarkdown(
+ "Latest files that are detected by at least one antivirus solution and were downloaded by VirusTotal from"
+ " the IP address provided",
+ detected_downloaded_samples)
+
+ undetected_downloaded_samples = response.get('undetected_downloaded_samples', None)
+
+ if undetected_downloaded_samples:
+ undetected_downloaded_samples = undetected_downloaded_samples[:max_len]
+ md += tableToMarkdown(
+ "Latest files that are not detected by any antivirus solution and were downloaded by VirusTotal from the "
+ "IP address provided",
+ undetected_downloaded_samples)
+
+ detected_communicating_samples = response.get('detected_communicating_samples', None)
+
+ if detected_communicating_samples:
+ detected_communicating_samples = detected_communicating_samples[:max_len]
+ md += tableToMarkdown("Latest detected files that communicate with this IP address",
+ detected_communicating_samples)
+
+ undetected_communicating_samples = response.get('undetected_communicating_samples', None)
+
+ if undetected_communicating_samples:
+ undetected_communicating_samples = undetected_communicating_samples[:max_len]
+ md += tableToMarkdown("Latest undetected files that communicate with this IP address",
+ undetected_communicating_samples)
+
+ detected_referrer_samples = response.get('detected_referrer_samples', None)
+
+ if detected_referrer_samples:
+ detected_referrer_samples = detected_referrer_samples[:max_len]
+ md += tableToMarkdown("Latest detected files that embed this IP address in their strings",
+ detected_referrer_samples)
+
+ undetected_referrer_samples = response.get('undetected_referrer_samples', None)
+
+ if undetected_referrer_samples:
+ undetected_referrer_samples = undetected_referrer_samples[:max_len]
+ md += tableToMarkdown("Latest undetected files that embed this IP address in their strings",
+ undetected_referrer_samples)
+
+ ec['DBotScore'] = []
+ dbotScore = 0
+ bad_downloads_amount = len(detected_communicating_samples) if detected_communicating_samples else 0
+ detected_url_is_above_threshold = check_detected_urls_threshold(detected_urls or [],
+ demisto.params().get('urlThreshold', None) or 10)
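+ # IP scoring: 3 (malicious) when detected communicating samples reach the threshold
+ # or a detected URL crosses the URL threshold; 2 at half the threshold; otherwise 1.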
+ if (bad_downloads_amount >= threshold or detected_url_is_above_threshold):
+ ec.update({
+ outputPaths['ip']: {
+ 'Address': ip,
+ 'ASN': asn,
+ 'Geo': {
+ 'Country': response.get('country', '')
+ },
+ 'Malicious': {
+ 'Description': 'Recent malicious downloads: ' + str(bad_downloads_amount),
+ 'Vendor': 'VirusTotal - Private API'
+ }
+ }
+ })
+ dbotScore = 3
+ elif (bad_downloads_amount >= threshold / 2 or len(detected_urls or []) >= threshold / 2):
+ dbotScore = 2
+ else:
+ dbotScore = 1
+
+ ec['DBotScore'] = {'Indicator': ip, 'Type': 'ip', 'Vendor': 'VirusTotal - Private API', 'Score': dbotScore}
+ if (dbotScore < 3):
+ ec.update({
+ outputPaths['ip']: {
+ 'Address': ip,
+ 'ASN': asn,
+ 'Geo': {
+ 'Country': response.get('country', '')
+ }
+ }
+ })
+
+ ip_ec = {
+ 'Address': ip,
+ 'VirusTotal': {
+ 'DownloadedHashes': detected_downloaded_samples,
+ 'UnAVDetectedDownloadedHashes': undetected_downloaded_samples,
+ "DetectedURLs": detected_urls,
+ 'CommunicatingHashes': detected_communicating_samples,
+ 'UnAVDetectedCommunicatingHashes': undetected_communicating_samples,
+ 'Resolutions': resolutions,
+ 'ReferrerHashes': detected_referrer_samples,
+ 'UnAVDetectedReferrerHashes': undetected_referrer_samples
+ }
+ }
+
+ if (ec.get(outputPaths['ip'], False)):
+ ec[outputPaths['ip']]['VirusTotal'] = {
+ 'DownloadedHashes': detected_downloaded_samples,
+ 'UnAVDetectedDownloadedHashes': undetected_downloaded_samples,
+ "DetectedURLs": detected_urls,
+ 'CommunicatingHashes': detected_communicating_samples,
+ 'UnAVDetectedCommunicatingHashes': undetected_communicating_samples,
+ 'Resolutions': resolutions,
+ 'ReferrerHashes': detected_referrer_samples,
+ 'UnAVDetectedReferrerHashes': undetected_referrer_samples
+ }
+ else:
+ ec[outputPaths['ip']].update(ip_ec)
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': ec
+ }
+
+
+def search_file(query):
+ """
+ Returns the hashes of files that fit the query.
+ """
+
+ params = {
+ 'query': query
+ }
+
+ api_endpoint = 'file/search'
+ return http_request('POST', api_endpoint, params, DEFAULT_HEADERS)
+
+
+def search_file_command():
+ """
+ Corresponds to the 'vt-private-search-file' command. Returns the hashes of files that fit the query.
+ """
+
+ args = demisto.args()
+ query = args['query']
+
+ full_response = FULL_RESPONSE or args.get('fullResponse', None) == 'true'
+ if (full_response):
+ max_len = 1000
+ else:
+ max_len = 50
+ response = search_file(query)
+
+ if (response.get('response_code') == -1):
+ return "There was some sort of error with your query. Virus Total returned the following response: " + \
+ json.dumps(response.get('verbose_msg'))
+ elif (response.get('response_code') == 0):
+ return "No files matched your query"
+
+ del response['response_code']
+ hashes = response.get('hashes', None)[:max_len]
+
+ md = '## Found the following hashes for the query: ' + query + '\n'
+ md += tableToMarkdown('Hashes', [{'Hash': h} for h in hashes])
+
+ ec = {
+ "Query": query,
+ "SearchResult": hashes[:max_len]
+ }
+
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': ec,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': {
+ 'VirusTotal.SearchFile(val.Query==obj.Query)': ec
+ }
+ }
+
+
+def hash_communication_command():
+ # variables
+ args = demisto.args()
+ file_hash = args.get('hash')
+ full_response = FULL_RESPONSE or args.get('fullResponse', None) == 'true'
+ if (full_response):
+ max_len = 1000
+ else:
+ max_len = 50
+ md = 'Communication result for hash ' + file_hash + '\n'
+ # VT response
+ response = check_file_behaviour(file_hash)
+
+ if (response.get('response_code') == 0):
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': response,
+ 'ContentsFormat': formats['json'],
+ 'EntryContext': {
+ "DBotScore": {
+ 'Indicator': file_hash,
+ 'Type': 'hash',
+ 'Vendor': 'VirusTotal - Private API',
+ 'Score': 0
+ }
+ },
+ 'HumanReadable': "A report wasn't found for file " + file_hash + ". Virus Total returned the following "
+ "response: " + json.dumps(
+ response.get('verbose_msg'))
+ }
+
+ # network data contains all the communication data
+ network_data = response.get('network', None)
+
+ hosts = network_data.get('hosts', None)
+ if (hosts is not None):
+ hosts = list(set(hosts))[:max_len]
+ md += tableToMarkdown('Hosts that the hash communicates with are:', [{'Host': host} for host in hosts])
+
+ ips_list = []
+ domains_list = []
+ urls_list = []
+
+ udp_communication = network_data.get('udp', None)
+ if (udp_communication is not None):
+ for entry in udp_communication:
+ ips_list.append(entry.get('dst', None))
+
+ http_communication = network_data.get('http', None)
+ if (http_communication is not None):
+ for entry in http_communication:
+ urls_list.append(entry.get('uri', None))
+ domains_list.append(entry.get('host', None))
+
+ tcp_communication = network_data.get('tcp', None)
+ if (tcp_communication is not None):
+ for entry in tcp_communication:
+ ips_list.append(entry.get('dst', None))
+
+ dns_communication = network_data.get('dns', None)
+ if (dns_communication is not None):
+ for entry in dns_communication:
+ ips_list.append(entry.get('ip', None))
+ domains_list.append(entry.get('hostname', None))
+
+ if (len(ips_list) > 0):
+ ips_list = list(set(ips_list))[:max_len]
+ md += tableToMarkdown('IPs that the hash communicates with are:', [{'IP': ip} for ip in ips_list])
+
+ if (len(domains_list) > 0):
+ domains_list = list(set(domains_list))[:max_len]
+ md += tableToMarkdown('Domains that the hash communicates with are:',
+ [{'Domain': domain} for domain in domains_list])
+
+ if (len(urls_list) > 0):
+ urls_list = list(set(urls_list))[:max_len]
+ md += tableToMarkdown('URLs that the hash communicates with are:', [{'URL': url} for url in urls_list])
+
+ hash_length = len(file_hash)
+ hashtype_dic = None
+ if (hash_length == 32):
+ hashtype_dic = {
+ "MD5": file_hash
+ }
+ elif (hash_length == 40):
+ hashtype_dic = {
+ "SHA1": file_hash
+ }
+ else:
+ hashtype_dic = {
+ "SHA256": file_hash
+ }
+
+ hash_ec = {
+ "VirusTotal": {
+ "CommunicatedDomains": domains_list,
+ "CommunicatedURLs": urls_list,
+ "CommunicatedIPs": ips_list,
+ "CommunicatedHosts": hosts
+ }
+ }
+
+ hash_ec.update(hashtype_dic)
+ return {
+ 'Type': entryTypes['note'],
+ 'Contents': network_data,
+ 'ContentsFormat': formats['json'],
+ 'ReadableContentsFormat': formats['markdown'],
+ 'HumanReadable': md,
+ 'EntryContext': {
+ outputPaths['file']: hash_ec
+ }
+ }
+
+
+def download_file(file_hash):
+ params = {
+ 'hash': file_hash,
+ 'apikey': API_KEY
+ }
+
+ response = requests.get('https://www.virustotal.com/vtapi/v2/file/download', params=params)
+
+ return response
+
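+# Note: this endpoint returns the raw file bytes; response.content is handed to
+# fileResult() below to create a War Room file entry.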
+
+def download_file_command():
+ args = demisto.args()
+ file_hash = args['hash']
+
+ response = download_file(file_hash)
+
+ if (response.status_code == 404):
+ return "File was not found in Virus Total's store"
+
+ file_name = file_hash + "-vt-file"
+ file_json = fileResult(file_name, response.content)
+
+ return {
+ 'Contents': 'File downloaded successfully',
+ 'ContentsFormat': formats['text'],
+ 'Type': entryTypes['file'],
+ 'File': file_name,
+ 'FileID': file_json['FileID']
+ }
+
+
+''' EXECUTION CODE '''
+LOG('command is %s' % (demisto.command(),))
+try:
+ handle_proxy(proxy_param_name='useProxy')
+ if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ if check_file_behaviour(
+ '10676cf66244cfa91567fbc1a937f4cb19438338b35b69d4bcc2cf0d3a44af5e'): # guardrails-disable-line
+ demisto.results('ok')
+ else:
+ demisto.results('test failed')
+ elif demisto.command() == 'vt-private-check-file-behaviour':
+ demisto.results(check_file_behaviour_command())
+ elif demisto.command() == 'vt-private-get-domain-report':
+ demisto.results(get_domain_report_command())
+ elif demisto.command() == 'vt-private-get-file-report':
+ demisto.results(get_file_report_command())
+ elif demisto.command() == 'vt-private-get-url-report':
+ demisto.results(get_url_report_command())
+ elif demisto.command() == 'vt-private-get-ip-report':
+ demisto.results(get_ip_report_command())
+ elif demisto.command() == 'vt-private-search-file':
+ demisto.results(search_file_command())
+ elif demisto.command() == 'vt-private-hash-communication':
+ demisto.results(hash_communication_command())
+ elif demisto.command() == 'vt-private-download-file':
+ demisto.results(download_file_command())
+
+except Exception as e:
+ LOG(e.message)
+ LOG.print_log()
+ raise
diff --git a/Integrations/VirusTotal-Private_API/VirusTotal-Private_API.yml b/Integrations/VirusTotal-Private_API/VirusTotal-Private_API.yml
new file mode 100644
index 000000000000..3567b7b951a0
--- /dev/null
+++ b/Integrations/VirusTotal-Private_API/VirusTotal-Private_API.yml
@@ -0,0 +1,507 @@
+commonfields:
+ id: VirusTotal - Private API
+ version: -1
+name: VirusTotal - Private API
+display: VirusTotal - Private API
+category: Data Enrichment & Threat Intelligence
+description: Analyzes suspicious hashes, URLs, domains, and IP addresses
+configuration:
+- display: Virus Total private API key
+ name: APIKey
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Use system proxy settings
+ name: useProxy
+ defaultvalue: "true"
+ type: 8
+ required: false
+- display: Trust any certificate (insecure)
+ name: insecure
+ defaultvalue: "false"
+ type: 8
+ required: false
+- display: File Threshold. Minimum number of positive results from VT scanners to
+ consider the file malicious.
+ name: fileThreshold
+ defaultvalue: "10"
+ type: 0
+ required: false
+- display: IP Threshold. Minimum number of positive results from VT scanners to consider
+ the IP malicious.
+ name: ipThreshold
+ defaultvalue: "20"
+ type: 0
+ required: false
+- display: URL Threshold. Minimum number of positive results from VT scanners to consider
+ the URL malicious.
+ name: urlThreshold
+ defaultvalue: "10"
+ type: 0
+ required: false
+- display: Domain Threshold. Minimum number of positive results from VT scanners to
+ consider the domain malicious.
+ name: domainThreshold
+ defaultvalue: "10"
+ type: 0
+ required: false
+- display: Preferred Vendors List. List of vendors which are considered more trustworthy,
+ comma separated.
+ name: preferredVendors
+ defaultvalue: ""
+ type: 12
+ required: false
+- display: Preferred Vendor Threshold. The minimal number of highly trusted vendors
+ needed to consider a domain, IP, URL or file as malicious.
+ name: preferredVendorsThreshold
+ defaultvalue: ""
+ type: 0
+ required: false
+- display: Determines whether to return all results, which can number in the thousands.
+ If "true", returns all results and overrides the _fullResponse_ argument (if set
+ to "false") in a command. If "false", the fullResponse argument in the command
+ determines how results are returned.
+ name: fullResponseGlobal
+ defaultvalue: "false"
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ subtype: python2
+ commands:
+ - name: vt-private-check-file-behaviour
+ arguments:
+ - name: resource
+ required: true
+ description: The md5/sha1/sha256 hash of the file whose dynamic behavioural
+ report you want to retrieve.
+ - name: threshold
+ description: If the number of positives is greater than the threshold, the file
+ will be considered malicious. If no threshold is specified, the default file
+ threshold, as configured in the instance settings, is used.
+ - name: fullResponse
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Returns all results, which can number in the thousands. Not recommended
+ for use in playbooks. The default value is "false".
+ defaultValue: "false"
+ outputs:
+ - contextPath: File.MD5
+ description: File's MD5
+ type: string
+ - contextPath: File.SHA1
+ description: File's SHA1
+ type: string
+ - contextPath: File.SHA256
+ description: File's SHA256
+ type: string
+ - contextPath: File.VirusTotal.RelatedDomains
+ description: Domains that the hash communicates with
+ type: Unknown
+ - contextPath: File.VirusTotal.RelatedURLs
+ description: URLs that the hash communicates with
+ type: Unknown
+ - contextPath: File.VirusTotal.RelatedIPs
+ description: IPs that the hash communicates with
+ type: Unknown
+ - contextPath: File.VirusTotal.RelatedHosts
+ description: Hosts that the hash communicates with
+ type: Unknown
+ - contextPath: File.VirusTotal.RelatedFiles
+ description: Files that are related to this hash
+ type: Unknown
+ - contextPath: File.VirusTotal.RelatedRegistryKeys
+ description: Keys which are related to this hash
+ type: Unknown
+ - contextPath: File.VirusTotal.RelatedMutexes
+ description: Mutexes which are related to this hash
+ type: Unknown
+ description: VirusTotal runs a distributed setup of Cuckoo sandbox machines that
+ execute the files we receive. This API allows you to retrieve the full JSON
+ report of the file's execution as returned by the Cuckoo JSON report encoder.
+ - name: vt-private-get-domain-report
+ arguments:
+ - name: domain
+ required: true
+ description: A domain name.
+ - name: threshold
+ description: If the number of positives is greater than the threshold, the domain
+ will be considered malicious. If no threshold is specified, the default domain
+ threshold, as configured in the instance settings, is used.
+ - name: fullResponse
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Returns all results, which can number in the thousands. Not recommended
+ for use in playbooks. The default value is "false".
+ defaultValue: "false"
+ outputs:
+ - contextPath: Domain.Name
+ description: Domain name
+ type: string
+ - contextPath: Domain.VirusTotal.DownloadedHashes
+ description: Hashes of files that were downloaded from this domain
+ type: string
+ - contextPath: Domain.VirusTotal.CommunicatingHashes
+ description: Hashes of files that communicated with this domain in a sandbox
+ type: string
+ - contextPath: Domain.VirusTotal.Resolutions.ip_address
+ description: IPs that resolved to this domain
+ type: string
+ - contextPath: Domain.VirusTotal.Whois
+ description: Whois report
+ type: string
+ - contextPath: Domain.VirusTotal.Subdomains
+ description: Subdomains
+ type: string
+ - contextPath: Domain.VirusTotal.Resolutions.last_resolved
+ description: Resolution date of IPs that resolved to this domain
+ type: string
+ - contextPath: Domain.VirusTotal.Categories
+ description: Categories
+ type: string
+ description: Retrieves a report on a given domain (including the information recorded
+ by VirusTotal's passive DNS infrastructure). If the domain doesn't exist in
+ VT's dataset, use the url-scan command of the VirusTotal (Public API) integration
+ instead.
+ - name: vt-private-get-file-report
+ arguments:
+ - name: resource
+ required: true
+ description: An md5/sha1/sha256 hash of a file for which you want to retrieve
+ the most recent antivirus report. You may also specify a scan_id (sha256-timestamp
+ as returned by the scan API) to access a specific report.
+ - name: allInfo
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: 'VirusTotal metadata, signature information, structural information,
+ etc. Can be viewed with raw-response=true. '
+ - name: threshold
+ description: If the number of positives is greater than the threshold, the file
+ will be considered malicious. If no threshold is specified, the default file
+ threshold, as configured in the instance settings, is used.
+ - name: shortFormat
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Set to "true" to omit the VT scans tables from the output.
+ outputs:
+ - contextPath: File.MD5
+ description: File's MD5
+ type: string
+ - contextPath: File.SHA1
+ description: File's SHA1
+ type: string
+ - contextPath: File.SHA256
+ description: File's SHA256
+ type: string
+ - contextPath: File.Malicious.Vendor
+ description: For malicious files, the vendor that made the decision
+ type: string
+ - contextPath: File.Malicious.Detections
+ description: For malicious files. Total detections.
+ type: number
+ - contextPath: File.Malicious.TotalEngines
+ description: For malicious files. Total engines
+ type: number
+ - contextPath: DBotScore.Indicator
+ description: The indicator we tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type of the indicator
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: File.VirusTotal.Scans.Source
+ description: Scan vendor for this hash
+ type: string
+ - contextPath: File.VirusTotal.Scans.Detected
+ description: Scan detection for this hash (True/False)
+ type: boolean
+ - contextPath: File.VirusTotal.Scans.Result
+ description: Scan result for this hash - signature, etc.
+ type: string
+ - contextPath: File.SSDeep
+ description: The ssdeep value for the file
+ type: string
+ - contextPath: File.Type
+ description: File type as determined by Virus Total
+ type: string
+ - contextPath: File.Size
+ description: Size of File in bytes
+ type: number
+ - contextPath: File.VirusTotal.Tags
+ description: File tags
+ type: string
+ - contextPath: File.Tags
+ description: File tags
+ type: string
+ - contextPath: File.VirusTotal.MagicLiteral
+ description: File magic literals
+ type: string
+ - contextPath: File.VirusTotal.FirstSeen
+ description: Time when the file was first seen
+ type: string
+ - contextPath: File.VirusTotal.CommunityReputation
+ description: Community Reputation of the File
+ type: number
+ - contextPath: File.VirusTotal.CommunityComments
+ description: Community Comments about the File
+ type: string
+ - contextPath: File.VirusTotal.AuthentiHash
+ description: AuthentiHash of the File
+ type: string
+ - contextPath: File.Authentihash
+ description: AuthentiHash of the File
+ type: string
+ - contextPath: File.VirusTotal.ImpHash
+ description: ImpHash of the File
+ type: string
+ description: Retrieves a concluded file scan report for a given file.
+ - name: vt-private-get-url-report
+ arguments:
+ - name: resource
+ required: true
+ description: A CSV list of one or more URLs to retrieve the most recent report
+ for. You can also specify a scan_id (sha-256 timestamp returned by the URL
+ submission API) to access a specific report.
+ isArray: true
+ - name: retries
+ description: The number of times the command will try to get the URL report,
+ if the report was not ready on the first attempt.
+ defaultValue: "2"
+ - name: allInfo
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: This additional info includes VirusTotal related metadata (first
+ seen date, last seen date, files downloaded from the given URL, etc.) and
+ the output of other tools and datasets when fed with the URL.
+ - name: shortFormat
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Set to "true" to omit the VT scans tables from the output.
+ - name: threshold
+ description: If the number of positives is greater than the threshold, the URL
+ will be considered malicious. If no threshold is specified, the default URL
+ threshold, as configured in the instance settings, is used.
+ - name: fullResponse
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Returns all results, which can number in the thousands. Not recommended
+ for use in playbooks. The default value is "false".
+ defaultValue: "false"
+ - name: retry_time
+ description: The amount of time (in seconds) that the integration will wait
+ before trying to get a URL report for URLs whose scans have not completed.
+ outputs:
+ - contextPath: URL.Data
+ description: Url address
+ type: string
+ - contextPath: URL.Malicious.Vendor
+ description: For malicious URLs, the vendor that made the decision
+ type: string
+ - contextPath: URL.Malicious.Description
+ description: For malicious URLs, the reason for the vendor to make the decision
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator we tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type of the indicator
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: URL.VirusTotal.Resolutions.ip_address
+ description: IPs that resolved to this URL
+ type: string
+ - contextPath: URL.VirusTotal.Resolutions.last_resolved
+ description: Resolution date of IPs that resolved to this URL
+ type: string
+ - contextPath: URL.VirusTotal.Scans.Source
+ description: Scan vendor for this URL
+ type: string
+ - contextPath: URL.VirusTotal.Scans.Detected
+ description: Scan detection for this URL (True/False)
+ type: boolean
+ - contextPath: URL.VirusTotal.Scans.Result
+ description: Scan result for this URL - signature, etc.
+ type: string
+ - contextPath: URL.VirusTotal.DroppedFiles
+ description: Downloaded file hashes from this URL
+ type: string
+ description: Retrieves a concluded url scan report for a given url.
+ - name: vt-private-get-ip-report
+ arguments:
+ - name: ip
+ required: true
+ description: A valid IPv4 address in dotted quad notation. For the time being,
+ only IPv4 addresses are supported.
+ - name: threshold
+ description: If the number of positives is greater than the threshold, the IP
+ will be considered malicious. If no threshold is specified, the default IP
+ threshold, as configured in the instance settings, is used.
+ - name: fullResponse
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Returns all results, which can number in the thousands. Not recommended
+ for use in playbooks. The default value is "false".
+ defaultValue: "false"
+ outputs:
+ - contextPath: IP.Address
+ description: Bad IP Address found
+ type: string
+ - contextPath: IP.ASN
+ description: Bad IP ASN
+ type: string
+ - contextPath: IP.Geo.Country
+ description: Bad IP Country
+ type: string
+ - contextPath: IP.Malicious.Vendor
+ description: For malicious IPs, the vendor that made the decision
+ type: string
+ - contextPath: IP.Malicious.Description
+ description: For malicious IPs, the reason for the vendor to make the decision
+ type: string
+ - contextPath: DBotScore.Indicator
+ description: The indicator we tested
+ type: string
+ - contextPath: DBotScore.Type
+ description: The type of the indicator
+ type: string
+ - contextPath: DBotScore.Vendor
+ description: Vendor used to calculate the score
+ type: string
+ - contextPath: DBotScore.Score
+ description: The actual score
+ type: number
+ - contextPath: IP.VirusTotal.DownloadedHashes
+ description: Latest files that are detected by at least one antivirus solution
+ and were downloaded by VirusTotal from the IP address
+ type: string
+ - contextPath: IP.VirusTotal.UnAVDetectedDownloadedHashes
+ description: Latest files that are not detected by any antivirus solution and
+ were downloaded by VirusTotal from the IP address provided
+ type: string
+ - contextPath: IP.VirusTotal.DetectedURLs
+ description: Latest URLs hosted in this IP address detected by at least one
+ URL scanner
+ type: string
+ - contextPath: IP.VirusTotal.CommunicatingHashes
+ description: Latest detected files that communicate with this IP address
+ type: string
+ - contextPath: IP.VirusTotal.UnAVDetectedCommunicatingHashes
+ description: Latest undetected files that communicate with this IP address
+ type: Unknown
+ - contextPath: IP.VirusTotal.Resolutions.hostname
+ description: Domains that resolved to the given IP
+ type: string
+ - contextPath: IP.VirusTotal.ReferrerHashes
+ description: Latest detected files that embed this IP address in their strings
+ type: string
+ - contextPath: IP.VirusTotal.UnAVDetectedReferrerHashes
+ description: Latest undetected files that embed this IP address in their strings
+ type: string
+ - contextPath: IP.VirusTotal.Resolutions.last_resolved
+ description: The last time the following domains resolved to the given IP
+ type: string
+ description: Retrieves a report on a given IP address (including the information
+ recorded by VirusTotal's Passive DNS infrastructure).
+ - name: vt-private-search-file
+ arguments:
+ - name: query
+ required: true
+ description: File search query. For example, query="type:peexe size:90kb+ positives:5+
+ behaviour:'taskkill'"
+ - name: fullResponse
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Returns all results, which can number in the thousands. Not recommended
+ for use in playbooks. The default value is "false".
+ defaultValue: "false"
+ outputs:
+ - contextPath: VirusTotal.SearchResult
+ description: The hashes of files that fit the query
+ type: string
+ - contextPath: VirusTotal.Query
+ description: Original search query
+ type: string
+ description: 'This command is equivalent to VirusTotal Intelligence advanced searches.
+ A very wide variety of search modifiers are available, including: file size,
+ file type, first submission date to VirusTotal, last submission date to VirusTotal,
+ number of positives, dynamic behavioural properties, binary content, submission
+ file name, and many more. The full list of search modifiers allowed
+ for file search queries is documented at: https://www.virustotal.com/intelligence/help/file-search/#search-modifiers'
+ - name: vt-private-hash-communication
+ arguments:
+ - name: hash
+ required: true
+ description: File Hash
+ - name: fullResponse
+ auto: PREDEFINED
+ predefined:
+ - "true"
+ - "false"
+ description: Returns all results, which can number in the thousands. Not recommended
+ for use in playbooks. The default value is "false".
+ defaultValue: "false"
+ outputs:
+ - contextPath: File.VirusTotal.CommunicatedDomains
+ description: Domains that the hash communicates with
+ type: string
+ - contextPath: File.VirusTotal.CommunicatedURLs
+ description: URLs that the hash communicates with
+ type: string
+ - contextPath: File.VirusTotal.CommunicatedIPs
+ description: IPs that the hash communicates with
+ type: string
+ - contextPath: File.VirusTotal.CommunicatedHosts
+ description: Hosts that the hash communicates with
+ type: string
+ - contextPath: File.MD5
+ description: File's MD5
+ type: string
+ - contextPath: File.SHA1
+ description: File's SHA1
+ type: string
+ - contextPath: File.SHA256
+ description: File's SHA256
+ type: string
+ description: Returns all domains, IPs, and URLs that a given malware hash communicates
+ with.
+ - name: vt-private-download-file
+ arguments:
+ - name: hash
+ required: true
+ description: "\tThe md5/sha1/sha256 hash of the file you want to download."
+ description: Downloads a file from VirusTotal's store given one of its hashes.
+ This call can be used in conjunction with the file searching call in order to
+ download samples that match a given set of criteria.
+ runonce: false
+tests:
+- virusTotalPrivateAPI-test-playbook
+- File Enrichment - Virus Total Private API Test
diff --git a/Integrations/VirusTotal-Private_API/VirusTotal-Private_API_desc.md b/Integrations/VirusTotal-Private_API/VirusTotal-Private_API_desc.md
new file mode 100644
index 000000000000..ec033b5611ad
--- /dev/null
+++ b/Integrations/VirusTotal-Private_API/VirusTotal-Private_API_desc.md
@@ -0,0 +1,9 @@
+Indicators thresholds:
+
+Configure the default threshold for each indicator type in the instance settings.
+
+Note that it is also possible to specify the threshold when running the command.
+
+Indicators with positive results greater than or equal to the threshold will be considered malicious.
+
+Indicators with positive results greater than or equal to half of the threshold value, and lower than the threshold, will be considered suspicious.
\ No newline at end of file
diff --git a/Integrations/VirusTotal-Private_API/VirusTotal-Private_API_description.md b/Integrations/VirusTotal-Private_API/VirusTotal-Private_API_description.md
new file mode 100644
index 000000000000..ac016022ca7c
--- /dev/null
+++ b/Integrations/VirusTotal-Private_API/VirusTotal-Private_API_description.md
@@ -0,0 +1,5 @@
+Indicators thresholds:
+Configure the default threshold for each indicator type in the instance settings.
+Note that it is also possible to specify the threshold when running the command.
+Indicators with positive results greater than or equal to the threshold will be considered malicious.
+Indicators with positive results greater than or equal to half of the threshold value, and lower than the threshold, will be considered suspicious.
\ No newline at end of file
diff --git a/Integrations/VirusTotal-Private_API/VirusTotal-Private_API_image.png b/Integrations/VirusTotal-Private_API/VirusTotal-Private_API_image.png
new file mode 100644
index 000000000000..773e19f50ce1
Binary files /dev/null and b/Integrations/VirusTotal-Private_API/VirusTotal-Private_API_image.png differ
diff --git a/Integrations/VulnDB/VulnDB.py b/Integrations/VulnDB/VulnDB.py
new file mode 100644
index 000000000000..625113ea5512
--- /dev/null
+++ b/Integrations/VulnDB/VulnDB.py
@@ -0,0 +1,434 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+
+''' IMPORTS '''
+
+import requests
+import urllib.parse
+
+# Disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+''' GLOBALS/PARAMS '''
+
+# Remove trailing slash to prevent wrong URL path to service
+API_URL = demisto.params()['api_url'].rstrip('/')
+
+# Should we use SSL
+USE_SSL = not demisto.params().get('insecure', False)
+
+# Remove proxy if not set to true in params
+if not demisto.params().get('proxy'):
+ os.environ.pop('HTTP_PROXY', None)
+ os.environ.pop('HTTPS_PROXY', None)
+ os.environ.pop('http_proxy', None)
+ os.environ.pop('https_proxy', None)
+
+CLIENT_ID = demisto.params()['client_id']
+
+CLIENT_SECRET = demisto.params()['client_secret']
+
+''' HELPER FUNCTIONS '''
+
+
+def get_oath_token():
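+    # Obtain a bearer token using the OAuth2 client-credentials grant,
+    # posting CLIENT_ID/CLIENT_SECRET to the /oauth/token endpoint of API_URL.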
+ # Workaround ParseResult immutability
+ parse_result = list(urllib.parse.urlparse(API_URL))
+ parse_result[2] = '/oauth/token'
+ oath_url = urllib.parse.urlunparse(parse_result)
+
+ return requests.post(oath_url,
+ verify=USE_SSL,
+ data={
+ 'client_id': CLIENT_ID,
+ 'client_secret': CLIENT_SECRET,
+ 'grant_type': 'client_credentials'
+ }).json()['access_token']
+
+
+def http_request(url, size=None):
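+    # Authenticated GET helper: fetches a fresh OAuth token for each request
+    # and forwards the optional result-size limit as a query parameter.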
+ params = {'size': size} if size else None
+ return requests.get(url,
+ verify=USE_SSL,
+ headers={'Authorization': f'Bearer {get_oath_token()}'},
+ params=params).json()
+
+
+def vulndb_vulnerability_to_entry(vuln):
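+    # Map the raw VulnDB JSON into a Demisto context entry. The .rstrip('Z')
+    # calls trim the trailing UTC designator from ISO-8601 date strings.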
+ vulnerability_details = {
+ 'ID': vuln.get('vulndb_id', 0),
+ 'Title': vuln.get('title', ''),
+        'Description': vuln.get('description', ''),  # not a date field, so no rstrip('Z')
+ 'Keywords': vuln.get('keywords', ''),
+ 'PublishedDate': vuln.get('vulndb_published_date', '').rstrip('Z'),
+ 'TDescription': vuln.get('t_description', ''),
+ 'SolutionDate': vuln.get('solution_date', '').rstrip('Z'),
+ 'DiscoveryDate': vuln.get('disclosure_date', '').rstrip('Z'),
+ 'ExploitPublishDate': vuln.get('exploit_publish_date', '').rstrip('Z'),
+ }
+
+ cve_ext_reference_values = [ext_reference['value'] for ext_reference in
+ vuln.get('ext_references', [])]
+
+ cvss_metrics_details = [{
+ 'Id': cvss_metrics_data.get('id', 0),
+ 'AccessVector': cvss_metrics_data.get('access_vector', ''),
+ 'AccessComplexity': cvss_metrics_data.get('access_complexity', ''),
+ 'Authentication': cvss_metrics_data.get('authentication', ''),
+ 'ConfidentialityImpact': cvss_metrics_data.get('confidentiality_impact', ''),
+ 'IntegrityImpact': cvss_metrics_data.get('integrity_impact', ''),
+ 'AvailabilityImpact': cvss_metrics_data.get('availability_impact', ''),
+ 'GeneratedOn': cvss_metrics_data.get('generated_on', ''),
+ 'Score': cvss_metrics_data.get('score', 0),
+    } for cvss_metrics_data in vuln.get('cvss_metrics', [])]
+
+ vendor_details = [{'Id': vendor.get('vendor', {'id': 0})['id'], 'Name': vendor.get('vendor', {'name': ''})['name']}
+                      for vendor in vuln.get('vendors', [])]
+
+ product_details = []
+    for product in vuln.get('products', []):
+ product_versions = [{'Id': version.get('id', ''), 'Name': version.get('name', '')} for version in
+ product.get('versions', [])]
+ product_details.append({
+ 'Id': product.get('id', ''),
+ 'Name': product.get('name', ''),
+ 'Versions': product_versions
+ })
+
+ default_classification = {'longname': '', 'description': ''}
+ classification_details = [{'Longname': classification.get('classification', default_classification)['longname'],
+ 'Description': classification.get('classification', default_classification)[
+ 'description']}
+                              for classification in vuln.get('classifications', [])]
+
+ return {
+ 'Vulnerability': vulnerability_details,
+ 'CVE-ExtReference': {
+ 'Value': cve_ext_reference_values
+ },
+ 'CvssMetrics': cvss_metrics_details,
+ 'Vendor': vendor_details,
+ 'Products': product_details,
+ 'Classification': classification_details
+ }
+
+
+def vulndb_vulnerability_results_to_demisto_results(res):
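+    # Normalize single-object ('vulnerability') and list ('results') responses,
+    # then emit one war-room entry per vulnerability.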
+ if 'error' in res:
+ return_error(res['error'])
+ else:
+ if 'vulnerability' in res:
+ results = [res['vulnerability']]
+ elif 'results' in res:
+ results = res['results']
+ else:
+ demisto.results({
+ 'Type': entryTypes['error'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': 'No "vulnerability" or "results" keys in the returned JSON',
+ 'HumanReadableFormat': formats['text']
+ })
+ return
+
+ for result in results:
+ ec = {
+ 'VulnDB': vulndb_vulnerability_to_entry(result)
+ }
+
+ human_readable = tableToMarkdown(f'Result for vulnerability ID: {ec["VulnDB"]["Vulnerability"]["ID"]}', {
+ 'Title': ec['VulnDB']['Vulnerability']['Title'],
+ 'Description': ec['VulnDB']['Vulnerability']['Description'],
+ 'Publish Date': ec['VulnDB']['Vulnerability']['PublishedDate'],
+ 'Solution Date': ec['VulnDB']['Vulnerability']['SolutionDate']
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+
+
+def vulndb_vendor_to_entry(vendor):
+ return {
+ 'Results': {
+ 'Id': vendor.get('id', ''),
+ 'Name': vendor.get('name', ''),
+ 'ShortName': vendor.get('short_name', ''),
+ 'VendorUrl': vendor.get('vendor_url', '')
+ }
+ }
+
+
+def vulndb_vendor_results_to_demisto_results(res):
+ if 'error' in res:
+ return_error(res['error'])
+ else:
+ if 'vendor' in res:
+ results = [res['vendor']]
+ elif 'results' in res:
+ results = res['results']
+ else:
+ demisto.results({
+ 'Type': entryTypes['error'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': 'No "vendor" or "results" keys in the returned JSON',
+ 'HumanReadableFormat': formats['text']
+ })
+ return
+
+ for result in results:
+ ec = {
+ 'VulnDB': vulndb_vendor_to_entry(result)
+ }
+
+ human_readable = tableToMarkdown(f'Result for vendor ID: {ec["VulnDB"]["Results"]["Id"]}', {
+ 'ID': ec['VulnDB']['Results']['Id'],
+ 'Name': ec['VulnDB']['Results']['Name'],
+ 'Short Name': ec['VulnDB']['Results']['ShortName'],
+ 'Vendor URL': ec['VulnDB']['Results']['VendorUrl']
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+
+
+def vulndb_product_to_entry(product):
+ return {
+ 'Results': {
+ 'Id': product.get('id', ''),
+ 'Name': product.get('name', '')
+ }
+ }
+
+
+def vulndb_product_results_to_demisto_results(res):
+ if 'error' in res:
+ return_error(res['error'])
+ else:
+ if 'results' in res:
+ results = res['results']
+ else:
+ demisto.results({
+ 'Type': entryTypes['error'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': 'No "results" key in the returned JSON',
+ 'HumanReadableFormat': formats['text']
+ })
+ return
+
+ for result in results:
+ ec = {
+ 'VulnDB': vulndb_product_to_entry(result)
+ }
+
+ human_readable = tableToMarkdown(f'Result for product ID: {ec["VulnDB"]["Results"]["Id"]}', {
+ 'ID': ec['VulnDB']['Results']['Id'],
+ 'Name': ec['VulnDB']['Results']['Name']
+ })
+
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'Contents': res,
+ 'ContentsFormat': formats['json'],
+ 'HumanReadable': human_readable,
+ 'HumanReadableFormat': formats['markdown'],
+ 'EntryContext': ec
+ })
+
+
+''' COMMANDS + REQUESTS FUNCTIONS '''
+
+
+def test_module():
+ """
+ Performs basic get request to get item samples
+ """
+ get_oath_token()
+
+
+def vulndb_get_vuln_by_id_command():
+ vulndb_id = demisto.args()['vuln_id']
+
+ res = requests.get(f'{API_URL}/vulnerabilities/{vulndb_id}',
+ verify=USE_SSL,
+ headers={'Authorization': f'Bearer {get_oath_token()}'}
+ ).json()
+
+ vulndb_vulnerability_results_to_demisto_results(res)
+
+
+def vulndb_get_vuln_by_vendor_and_product_name_command():
+ vendor_name = demisto.args()['vendor_name']
+ product_name = demisto.args()['product_name']
+ max_size = demisto.args().get('max_size')
+
+ res = http_request(
+ f'{API_URL}/vulnerabilities/find_by_vendor_and_product_name?vendor_name={vendor_name}&product_name={product_name}',
+ max_size)
+
+ vulndb_vulnerability_results_to_demisto_results(res)
+
+
+def vulndb_get_vuln_by_vendor_and_product_id_command():
+ vendor_id = demisto.args()['vendor_id']
+ product_id = demisto.args()['product_id']
+ max_size = demisto.args().get('max_size')
+
+ res = http_request(
+ f'{API_URL}/vulnerabilities/find_by_vendor_and_product_id?vendor_id={vendor_id}&product_id={product_id}',
+ max_size)
+
+ vulndb_vulnerability_results_to_demisto_results(res)
+
+
+def vulndb_get_vuln_by_vendor_id_command():
+ vendor_id = demisto.args()['vendor_id']
+ max_size = demisto.args().get('max_size')
+
+ res = http_request(f'{API_URL}/vulnerabilities/find_by_vendor_id?vendor_id={vendor_id}',
+ max_size)
+
+ vulndb_vulnerability_results_to_demisto_results(res)
+
+
+def vulndb_get_vuln_by_product_id_command():
+ product_id = demisto.args()['product_id']
+ max_size = demisto.args().get('max_size')
+
+ res = http_request(f'{API_URL}/vulnerabilities/find_by_product_id?product_id={product_id}',
+ max_size)
+
+ vulndb_vulnerability_results_to_demisto_results(res)
+
+
+def vulndb_get_vuln_by_cve_id_command():
+ cve_id = demisto.args()['cve_id']
+ max_size = demisto.args().get('max_size')
+
+ res = http_request(f'{API_URL}/vulnerabilities/{cve_id}/find_by_cve_id',
+ max_size)
+
+ vulndb_vulnerability_results_to_demisto_results(res)
+
+
+def vulndb_get_updates_by_dates_or_hours_command():
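+    # start_date takes precedence over hours_ago; end_date is only honored
+    # when start_date is also supplied.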
+ start_date = demisto.args().get('start_date')
+ end_date = demisto.args().get('end_date')
+ hours_ago = demisto.args().get('hours_ago')
+ max_size = demisto.args().get('max_size')
+
+ if start_date:
+ url = f'{API_URL}/vulnerabilities/find_by_date?start_date={start_date}'
+ if end_date:
+ url += f'&end_date={end_date}'
+
+ res = http_request(url,
+ max_size)
+ elif hours_ago is not None:
+ res = http_request(f'{API_URL}/vulnerabilities/find_by_time?hours_ago={hours_ago}',
+ max_size)
+ else:
+ return_error('Must provide either start date or hours ago.')
+
+ vulndb_vulnerability_results_to_demisto_results(res)
+
+
+def vulndb_get_vendor_command():
+ vendor_id = demisto.args().get('vendor_id')
+ vendor_name = demisto.args().get('vendor_name')
+ max_size = demisto.args().get('max_size')
+
+ if vendor_id is not None and vendor_name is not None:
+        return_error('Provide either vendor ID or vendor name (or neither for all vendors), not both.')
+ elif vendor_id:
+ res = http_request(f'{API_URL}/vendors/{vendor_id}',
+ max_size)
+ elif vendor_name:
+ res = http_request(f'{API_URL}/vendors/by_name?vendor_name={vendor_name}',
+ max_size)
+ else:
+ res = http_request(f'{API_URL}/vendors',
+ max_size)
+
+ vulndb_vendor_results_to_demisto_results(res)
+
+
+def vulndb_get_product_command():
+ vendor_id = demisto.args().get('vendor_id')
+ vendor_name = demisto.args().get('vendor_name')
+ max_size = demisto.args().get('max_size')
+
+ if vendor_id is not None and vendor_name is not None:
+        return_error('Provide either vendor ID or vendor name (or neither for all vendors), not both.')
+ elif vendor_id:
+ res = http_request(f'{API_URL}/products/by_vendor_id?vendor_id={vendor_id}',
+ max_size)
+ elif vendor_name:
+ res = http_request(f'{API_URL}/products/by_vendor_name?vendor_name={vendor_name}',
+ max_size)
+ else:
+ res = http_request(f'{API_URL}/products',
+ max_size)
+
+ vulndb_product_results_to_demisto_results(res)
+
+
+def vulndb_get_version_command():
+ product_id = demisto.args().get('product_id')
+ product_name = demisto.args().get('product_name')
+ max_size = demisto.args().get('max_size')
+
+    if product_id is not None and product_name is not None:
+        return_error('Provide either product ID or product name, not both.')
+    elif product_id:
+        res = http_request(f'{API_URL}/versions/by_product_id?product_id={product_id}',
+                           max_size)
+    elif product_name:
+        res = http_request(f'{API_URL}/versions/by_product_name?product_name={product_name}',
+                           max_size)
+    else:
+        # Without this branch `res` would be unbound when neither argument is given.
+        return_error('Must provide either product ID or product name.')
+
+    vulndb_product_results_to_demisto_results(res)
+
+
+''' COMMANDS MANAGER / SWITCH PANEL '''
+
+LOG('Command being called is %s' % (demisto.command()))
+
+if demisto.command() == 'test-module':
+ # This is the call made when pressing the integration test button.
+ test_module()
+ demisto.results('ok')
+elif demisto.command() == 'vulndb-get-vuln-by-id':
+ vulndb_get_vuln_by_id_command()
+elif demisto.command() == 'vulndb-get-vuln-by-vendor-and-product-name':
+ vulndb_get_vuln_by_vendor_and_product_name_command()
+elif demisto.command() == 'vulndb-get-vuln-by-vendor-and-product-id':
+ vulndb_get_vuln_by_vendor_and_product_id_command()
+elif demisto.command() == 'vulndb-get-vuln-by-vendor-id':
+ vulndb_get_vuln_by_vendor_id_command()
+elif demisto.command() == 'vulndb-get-vuln-by-product-id':
+ vulndb_get_vuln_by_product_id_command()
+elif demisto.command() == 'vulndb-get-vuln-by-cve-id':
+ vulndb_get_vuln_by_cve_id_command()
+elif demisto.command() == 'vulndb-get-vendor':
+ vulndb_get_vendor_command()
+elif demisto.command() == 'vulndb-get-product':
+ vulndb_get_product_command()
+elif demisto.command() == 'vulndb-get-version':
+ vulndb_get_version_command()
+elif demisto.command() == 'vulndb-get-updates-by-dates-or-hours':
+ vulndb_get_updates_by_dates_or_hours_command()
diff --git a/Integrations/VulnDB/VulnDB.yml b/Integrations/VulnDB/VulnDB.yml
new file mode 100644
index 000000000000..df7caedf1cc9
--- /dev/null
+++ b/Integrations/VulnDB/VulnDB.yml
@@ -0,0 +1,878 @@
+category: Vulnerability Management
+commonfields:
+ id: VulnDB
+ version: -1
+configuration:
+- defaultvalue: https://vulndb.cyberriskanalytics.com/
+ display: Hostname, IP address, or server URL
+ name: api_url
+ required: true
+ type: 0
+- display: client_id
+ name: client_id
+ required: true
+ type: 0
+- display: client_secret
+ name: client_secret
+ required: true
+ type: 0
+- display: Trust any certificate (insecure)
+ name: insecure
+ required: false
+ type: 8
+- display: Use system proxy settings
+ name: proxy
+ required: false
+ type: 8
+description: Lists the security vulnerabilities of various products (operating systems,
+  applications, etc.).
+display: VulnDB
+name: VulnDB
+script:
+ commands:
+ - arguments:
+ - default: true
+ description: ID of the vulnerability for which to return information.
+ isArray: false
+ name: vuln_id
+ required: true
+ secret: false
+ deprecated: false
+ description: Returns full details about a specific vulnerability.
+ execution: false
+ name: vulndb-get-vuln-by-id
+ outputs:
+ - contextPath: VulnDB.Vulnerability.ID
+ description: Vulnerability ID.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Title
+ description: Vulnerability title (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Keywords
+ description: Vulnerability keywords.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Description
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Solution
+ description: Vulnerability solution (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.PublishedDate
+ description: Vulnerability published date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.TDescription
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.SolutionDate
+ description: Vulnerability solution date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.DiscoveryDate
+ description: Vulnerability discovery date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.ExploitPublishDate
+ description: Exploit publish date.
+ type: date
+    - contextPath: VulnDB.CVE-ExtReference.Value
+      description: CVE external reference value.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.Id
+ description: CVSS reference value.
+ type: number
+ - contextPath: VulnDB.CvssMetrics.AccessVector
+ description: CVSS access vector.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.AccessComplexity
+ description: CVSS access complexity.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.Authentication
+ description: CVSS metric authentication.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.ConfidentialityImpact
+ description: CVSS confidentiality impact.
+ type: string
+    - contextPath: VulnDB.CvssMetrics.IntegrityImpact
+ description: CVSS integrity impact.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.AvailabilityImpact
+ description: CVSS availability impact.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.GeneratedOn
+ description: CVSS metric date.
+ type: date
+ - contextPath: VulnDB.CvssMetrics.Score
+ description: CVSS score.
+ type: number
+ - contextPath: VulnDB.Vendor.Id
+ description: Vendor ID.
+ type: number
+ - contextPath: VulnDB.Vendor.Name
+ description: Vendor name.
+ type: string
+ - contextPath: VulnDB.Products.Id
+ description: Product IDs.
+ type: number
+ - contextPath: VulnDB.Products.Name
+ description: Product names.
+ type: string
+ - contextPath: VulnDB.Products.Versions.Id
+ description: Product version IDs.
+ type: number
+ - contextPath: VulnDB.Products.Versions.Name
+ description: Product version names.
+ type: string
+ - contextPath: VulnDB.Classification.Longname
+ description: Classification name (long).
+ type: string
+ - contextPath: VulnDB.Classification.Description
+ description: Classification description (human readable).
+ type: string
+ - arguments:
+ - default: false
+      description: Specific vendor ID.
+ isArray: false
+ name: vendor_id
+ required: false
+ secret: false
+ - default: false
+      description: Specific vendor name (human readable).
+ isArray: false
+ name: vendor_name
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '20'
+      description: Maximum number of entries to return. A high number of entries might affect performance.
+ isArray: false
+ name: max_size
+ required: false
+ secret: false
+ deprecated: false
+    description: Returns details for all vendors, or for a specific vendor, including
+      vendor name and ID.
+ execution: false
+ name: vulndb-get-vendor
+ outputs:
+ - contextPath: VulnDB.Results.Id
+ description: Result ID.
+ type: number
+ - contextPath: VulnDB.Results.Name
+ description: Result name.
+ type: string
+ - contextPath: VulnDB.Results.ShortName
+ description: Result short name.
+ type: string
+ - contextPath: VulnDB.Results.VendorUrl
+ description: Result vendor URL (human readable).
+ type: string
+ - arguments:
+ - default: false
+ description: Vendor ID.
+ isArray: false
+ name: vendor_id
+ required: false
+ secret: false
+ - default: false
+ description: Vendor name.
+ isArray: false
+ name: vendor_name
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '20'
+      description: Maximum number of entries to return. A high number of entries might affect performance.
+ isArray: false
+ name: max_size
+ required: false
+ secret: false
+ deprecated: false
+    description: Returns a list of products, by vendor name or ID.
+ execution: false
+ name: vulndb-get-product
+ outputs:
+ - contextPath: VulnDB.Results.Id
+ description: Result ID.
+ type: number
+ - contextPath: VulnDB.Results.Name
+ description: Result name.
+ type: string
+ - arguments:
+ - default: false
+ description: Product name.
+ isArray: false
+ name: product_name
+ required: false
+ secret: false
+ - default: false
+ description: Product ID.
+ isArray: false
+ name: product_id
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '20'
+      description: Maximum number of entries to return. A high number of entries might affect performance.
+ isArray: false
+ name: max_size
+ required: false
+ secret: false
+ deprecated: false
+    description: Returns the versions of the specified product, by product name or ID.
+ execution: false
+ name: vulndb-get-version
+ outputs:
+ - contextPath: VulnDB.Results.Id
+ description: Version ID.
+ type: number
+ - contextPath: VulnDB.Results.Name
+ description: Version name.
+      type: string
+ - arguments:
+ - default: false
+ description: Start date for which to return vulnerabilities.
+ isArray: false
+ name: start_date
+ required: false
+ secret: false
+ - default: false
+ description: End date for which to return vulnerabilities.
+ isArray: false
+ name: end_date
+ required: false
+ secret: false
+ - default: false
+ description: Number of previous hours for which to return vulnerabilities.
+ isArray: false
+ name: hours_ago
+ required: false
+ secret: false
+ - default: false
+ defaultValue: '20'
+      description: Maximum number of entries to return. A high number of entries might affect performance.
+ isArray: false
+ name: max_size
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns recent vulnerabilities, by date or number of hours.
+ execution: false
+ name: vulndb-get-updates-by-dates-or-hours
+ outputs:
+ - contextPath: VulnDB.Vulnerability.ID
+ description: Vulnerability ID.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Title
+ description: Vulnerability title (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Keywords
+ description: Vulnerability keywords.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Description
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Solution
+ description: Vulnerability solution (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.PublishedDate
+ description: Vulnerability published date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.TDescription
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.SolutionDate
+ description: Vulnerability solution date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.DiscoveryDate
+ description: Vulnerability discovery date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.ExploitPublishDate
+ description: Exploit publish date.
+ type: date
+    - contextPath: VulnDB.CVE-ExtReference.Value
+      description: CVE external reference value.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.Id
+ description: CVSS reference value.
+ type: number
+ - contextPath: VulnDB.CvssMetrics.AccessVector
+ description: CVSS access vector.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.AccessComplexity
+      description: CVSS access complexity.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.Authentication
+      description: CVSS metric authentication.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.ConfidentialityImpact
+      description: CVSS confidentiality impact.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.IntegrityImpact
+      description: CVSS integrity impact.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.AvailabilityImpact
+      description: CVSS availability impact.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.GeneratedOn
+      description: CVSS metric date.
+      type: date
+    - contextPath: VulnDB.CvssMetrics.Score
+      description: CVSS score.
+      type: number
+    - contextPath: VulnDB.Vendor.Id
+ description: Vendor ID.
+ type: number
+ - contextPath: VulnDB.Vendor.Name
+ description: Vendor name.
+ type: string
+ - contextPath: VulnDB.Products.Id
+ description: Product IDs.
+ type: number
+ - contextPath: VulnDB.Products.Name
+ description: Product names.
+ type: string
+ - contextPath: VulnDB.Products.Versions.Id
+ description: Product version IDs.
+ type: number
+ - contextPath: VulnDB.Products.Versions.Name
+ description: Product version names.
+ type: string
+ - contextPath: VulnDB.Classification.Longname
+ description: Classification name (long).
+ type: string
+ - contextPath: VulnDB.Classification.Description
+ description: Classification description (human readable).
+ type: string
+ - arguments:
+ - default: false
+ description: Vendor name.
+ isArray: false
+ name: vendor_name
+ required: true
+ secret: false
+ - default: false
+ description: Product name.
+ isArray: false
+ name: product_name
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '20'
+      description: Maximum number of entries to return. A high number of entries might affect performance.
+ isArray: false
+ name: max_size
+ required: false
+ secret: false
+ deprecated: false
+    description: Returns full details about vulnerabilities, by vendor name and product
+      name.
+ execution: false
+ name: vulndb-get-vuln-by-vendor-and-product-name
+ outputs:
+ - contextPath: VulnDB.Vulnerability.ID
+ description: Vulnerability ID.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Title
+ description: Vulnerability title (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Keywords
+ description: Vulnerability keywords.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Description
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Solution
+ description: Vulnerability solution (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.PublishedDate
+ description: Vulnerability published date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.TDescription
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.SolutionDate
+ description: Vulnerability solution date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.DiscoveryDate
+ description: Vulnerability discovery date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.ExploitPublishDate
+ description: Exploit publish date.
+ type: date
+    - contextPath: VulnDB.CVE-ExtReference.Value
+      description: CVE external reference value.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.Id
+ description: CVSS reference value.
+ type: number
+ - contextPath: VulnDB.CvssMetrics.AccessVector
+ description: CVSS access vector.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.AccessComplexity
+ description: CVSS access complexity.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.Authentication
+ description: CVSS metric authentication.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.ConfidentialityImpact
+ description: CVSS confidentiality impact.
+ type: string
+    - contextPath: VulnDB.CvssMetrics.IntegrityImpact
+      description: CVSS integrity impact.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.AvailabilityImpact
+      description: CVSS availability impact.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.GeneratedOn
+      description: CVSS metric date.
+      type: date
+    - contextPath: VulnDB.CvssMetrics.Score
+      description: CVSS score.
+      type: number
+    - contextPath: VulnDB.Vendor.Id
+ description: Vendor ID.
+ type: number
+ - contextPath: VulnDB.Vendor.Name
+ description: Vendor name.
+ type: string
+ - contextPath: VulnDB.Products.Id
+ description: Product IDs.
+ type: number
+ - contextPath: VulnDB.Products.Name
+ description: Product names.
+ type: string
+ - contextPath: VulnDB.Products.Versions.Id
+ description: Product version IDs.
+ type: number
+ - contextPath: VulnDB.Products.Versions.Name
+ description: Product version names.
+ type: string
+ - contextPath: VulnDB.Classification.Longname
+      description: Classification name (long).
+ type: string
+ - contextPath: VulnDB.Classification.Description
+ description: Classification description (human readable).
+ type: string
+ - arguments:
+ - default: false
+ description: Vendor ID.
+ isArray: false
+ name: vendor_id
+ required: true
+ secret: false
+ - default: false
+ description: Product ID.
+ isArray: false
+ name: product_id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '20'
+      description: Maximum number of entries to return. A high number of entries might affect performance.
+ isArray: false
+ name: max_size
+ required: false
+ secret: false
+ deprecated: false
+    description: Returns full details about vulnerabilities, by vendor ID and product
+      ID.
+ execution: false
+ name: vulndb-get-vuln-by-vendor-and-product-id
+ outputs:
+ - contextPath: VulnDB.Vulnerability.ID
+ description: Vulnerability ID.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Title
+ description: Vulnerability title (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Keywords
+ description: Vulnerability keywords.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Description
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Solution
+ description: Vulnerability solution (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.PublishedDate
+ description: Vulnerability published date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.TDescription
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.SolutionDate
+ description: Vulnerability solution date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.DiscoveryDate
+ description: Vulnerability discovery date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.ExploitPublishDate
+ description: Exploit publish date.
+ type: date
+    - contextPath: VulnDB.CVE-ExtReference.Value
+      description: CVE external reference value.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.Id
+ description: CVSS reference value.
+ type: number
+ - contextPath: VulnDB.CvssMetrics.AccessVector
+ description: CVSS access vector.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.AccessComplexity
+ description: CVSS access complexity.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.Authentication
+ description: CVSS metric authentication.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.ConfidentialityImpact
+ description: CVSS confidentiality impact.
+ type: string
+    - contextPath: VulnDB.CvssMetrics.IntegrityImpact
+      description: CVSS integrity impact.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.AvailabilityImpact
+      description: CVSS availability impact.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.GeneratedOn
+      description: CVSS metric date.
+      type: date
+    - contextPath: VulnDB.CvssMetrics.Score
+      description: CVSS score.
+      type: number
+    - contextPath: VulnDB.Vendor.Id
+ description: Vendor ID.
+ type: number
+ - contextPath: VulnDB.Vendor.Name
+ description: Vendor name.
+ type: string
+ - contextPath: VulnDB.Products.Id
+ description: Product IDs.
+ type: number
+ - contextPath: VulnDB.Products.Name
+ description: Product names.
+ type: string
+ - contextPath: VulnDB.Products.Versions.Id
+ description: Product version IDs.
+ type: number
+ - contextPath: VulnDB.Products.Versions.Name
+ description: Product version names.
+ type: string
+ - contextPath: VulnDB.Classification.Longname
+ description: Classification name (long).
+ type: string
+ - contextPath: VulnDB.Classification.Description
+ description: Classification description (human readable).
+ type: string
+ - arguments:
+ - default: false
+ description: Vendor ID.
+ isArray: false
+ name: vendor_id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '20'
+      description: Maximum number of entries to return. A high number of entries might affect performance.
+ isArray: false
+ name: max_size
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns full details about vulnerabilities, by vendor ID.
+ execution: false
+ name: vulndb-get-vuln-by-vendor-id
+ outputs:
+ - contextPath: VulnDB.Vulnerability.ID
+ description: Vulnerability ID.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Title
+ description: Vulnerability title (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Keywords
+ description: Vulnerability keywords.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Description
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Solution
+ description: Vulnerability solution (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.PublishedDate
+ description: Vulnerability published date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.TDescription
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.SolutionDate
+ description: Vulnerability solution date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.DiscoveryDate
+ description: Vulnerability discovery date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.ExploitPublishDate
+ description: Exploit publish date.
+ type: date
+    - contextPath: VulnDB.CVE-ExtReference.Value
+      description: CVE external reference value.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.Id
+ description: CVSS reference value.
+ type: number
+ - contextPath: VulnDB.CvssMetrics.AccessVector
+ description: CVSS access vector.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.AccessComplexity
+ description: CVSS access complexity.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.Authentication
+ description: CVSS metric authentication.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.ConfidentialityImpact
+ description: CVSS confidentiality impact.
+ type: string
+    - contextPath: VulnDB.CvssMetrics.IntegrityImpact
+      description: CVSS integrity impact.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.AvailabilityImpact
+      description: CVSS availability impact.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.GeneratedOn
+      description: CVSS metric date.
+      type: date
+    - contextPath: VulnDB.CvssMetrics.Score
+      description: CVSS score.
+      type: number
+    - contextPath: VulnDB.Vendor.Id
+ description: Vendor ID.
+ type: number
+ - contextPath: VulnDB.Vendor.Name
+ description: Vendor name.
+ type: string
+ - contextPath: VulnDB.Products.Id
+ description: Product IDs.
+ type: number
+ - contextPath: VulnDB.Products.Name
+ description: Product names.
+ type: string
+ - contextPath: VulnDB.Products.Versions.Id
+ description: Product version IDs.
+ type: number
+ - contextPath: VulnDB.Products.Versions.Name
+ description: Product version names.
+ type: string
+ - contextPath: VulnDB.Classification.Longname
+      description: Classification name (long).
+ type: string
+ - contextPath: VulnDB.Classification.Description
+ description: Classification description (human readable).
+ type: string
+ - arguments:
+ - default: false
+ description: Product ID.
+ isArray: false
+ name: product_id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '20'
+      description: Maximum number of entries to return. A high number of entries might affect performance.
+ isArray: false
+ name: max_size
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns full details about vulnerabilities, by product ID.
+ execution: false
+ name: vulndb-get-vuln-by-product-id
+ outputs:
+ - contextPath: VulnDB.Vulnerability.ID
+ description: Vulnerability ID.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Title
+ description: Vulnerability title (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Keywords
+ description: Vulnerability keywords.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Description
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Solution
+ description: Vulnerability solution (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.PublishedDate
+ description: Vulnerability published date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.TDescription
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.SolutionDate
+ description: Vulnerability solution date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.DiscoveryDate
+ description: Vulnerability discovery date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.ExploitPublishDate
+ description: Exploit publish date.
+ type: date
+    - contextPath: VulnDB.CVE-ExtReference.Value
+      description: CVE external reference value.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.Id
+ description: CVSS reference value.
+ type: number
+ - contextPath: VulnDB.CvssMetrics.AccessVector
+ description: CVSS access vector.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.AccessComplexity
+ description: CVSS access complexity.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.Authentication
+ description: CVSS metric authentication.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.ConfidentialityImpact
+ description: CVSS confidentiality impact.
+ type: string
+    - contextPath: VulnDB.CvssMetrics.IntegrityImpact
+      description: CVSS integrity impact.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.AvailabilityImpact
+      description: CVSS availability impact.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.GeneratedOn
+      description: CVSS metric date.
+      type: date
+    - contextPath: VulnDB.CvssMetrics.Score
+      description: CVSS score.
+      type: number
+    - contextPath: VulnDB.Vendor.Id
+ description: Vendor ID.
+ type: number
+ - contextPath: VulnDB.Vendor.Name
+ description: Vendor name.
+ type: string
+ - contextPath: VulnDB.Products.Id
+      description: Product IDs.
+ type: number
+ - contextPath: VulnDB.Products.Name
+ description: Product names.
+ type: string
+ - contextPath: VulnDB.Products.Versions.Id
+ description: Product version IDs.
+ type: number
+ - contextPath: VulnDB.Products.Versions.Name
+ description: Product version names.
+ type: string
+ - contextPath: VulnDB.Classification.Longname
+      description: Classification name (long).
+ type: string
+ - contextPath: VulnDB.Classification.Description
+ description: Classification description (human readable).
+ type: string
+ - arguments:
+ - default: true
+ description: CVE ID.
+ isArray: false
+ name: cve_id
+ required: true
+ secret: false
+ - default: false
+ defaultValue: '20'
+      description: Maximum number of entries to return. A high number of entries might affect performance.
+ isArray: false
+ name: max_size
+ required: false
+ secret: false
+ deprecated: false
+ description: Returns full details about vulnerabilities, by CVE ID.
+ execution: false
+ name: vulndb-get-vuln-by-cve-id
+ outputs:
+ - contextPath: VulnDB.Vulnerability.ID
+ description: Vulnerability ID.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Title
+ description: Vulnerability title (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Keywords
+ description: Vulnerability keywords.
+ type: string
+ - contextPath: VulnDB.Vulnerability.Description
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.Solution
+ description: Vulnerability solution (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.PublishedDate
+ description: Vulnerability published date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.TDescription
+ description: Vulnerability description (human readable).
+ type: string
+ - contextPath: VulnDB.Vulnerability.SolutionDate
+ description: Vulnerability solution date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.DiscoveryDate
+ description: Vulnerability discovery date.
+ type: date
+ - contextPath: VulnDB.Vulnerability.ExploitPublishDate
+ description: Exploit publish date.
+ type: date
+    - contextPath: VulnDB.CVE-ExtReference.Value
+      description: CVE external reference value.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.Id
+ description: CVSS reference value.
+ type: number
+ - contextPath: VulnDB.CvssMetrics.AccessVector
+ description: CVSS access vector.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.AccessComplexity
+ description: CVSS access complexity.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.Authentication
+ description: CVSS metric authentication.
+ type: string
+ - contextPath: VulnDB.CvssMetrics.ConfidentialityImpact
+ description: CVSS confidentiality impact.
+ type: string
+    - contextPath: VulnDB.CvssMetrics.IntegrityImpact
+      description: CVSS integrity impact.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.AvailabilityImpact
+      description: CVSS availability impact.
+      type: string
+    - contextPath: VulnDB.CvssMetrics.GeneratedOn
+      description: CVSS metric date.
+      type: date
+    - contextPath: VulnDB.CvssMetrics.Score
+      description: CVSS score.
+      type: number
+    - contextPath: VulnDB.Vendor.Id
+ description: Vendor ID.
+ type: number
+ - contextPath: VulnDB.Vendor.Name
+ description: Vendor name.
+ type: string
+ - contextPath: VulnDB.Products.Id
+ description: Product IDs.
+ type: number
+ - contextPath: VulnDB.Products.Name
+ description: Product names.
+ type: string
+ - contextPath: VulnDB.Products.Versions.Id
+ description: Product version IDs.
+ type: number
+ - contextPath: VulnDB.Products.Versions.Name
+ description: Product version names.
+ type: string
+ - contextPath: VulnDB.Classification.Longname
+ description: Classification name (long).
+ type: string
+ - contextPath: VulnDB.Classification.Description
+ description: Classification description (human readable).
+ type: string
+ dockerimage: demisto/python3:3.7.3.373
+ isfetch: false
+ runonce: false
+ script: '-'
+ type: python
+tests:
+- "Test-VulnDB"
diff --git a/Integrations/VulnDB/VulnDB_description.md b/Integrations/VulnDB/VulnDB_description.md
new file mode 100644
index 000000000000..7180d2e61554
--- /dev/null
+++ b/Integrations/VulnDB/VulnDB_description.md
@@ -0,0 +1,519 @@
+## Overview
+---
+
+Lists the security vulnerabilities of various products (operating systems, applications, etc.).
+This integration was integrated and tested with version xx of VulnDB.
+## VulnDB Playbook
+---
+
+## Use Cases
+---
+
+## Configure VulnDB on Demisto
+---
+
+1. Navigate to __Settings__ > __Integrations__ > __Servers & Services__.
+2. Search for VulnDB.
+3. Click __Add instance__ to create and configure a new integration instance.
+ * __Name__: a textual name for the integration instance.
+ * __Hostname, IP address, or server URL__
+ * __client_id__
+ * __client_secret__
+    * __Trust any certificate (insecure)__
+ * __Use system proxy settings__
+4. Click __Test__ to validate the URLs, token, and connection.
+## Fetched Incidents Data
+---
+
+## Commands
+---
+You can execute these commands from the Demisto CLI, as part of an automation, or in a playbook.
+After you successfully execute a command, a DBot message appears in the War Room with the command details.
+1. vulndb-get-vuln-by-id
+2. vulndb-get-vendor
+3. vulndb-get-product
+4. vulndb-get-version
+5. vulndb-get-updates-by-dates-or-hours
+6. vulndb-get-vuln-by-vendor-and-product-name
+7. vulndb-get-vuln-by-vendor-and-product-id
+8. vulndb-get-vuln-by-vendor-id
+9. vulndb-get-vuln-by-product-id
+10. vulndb-get-vuln-by-cve-id
+### 1. vulndb-get-vuln-by-id
+---
+Provides full details about a specific vulnerability, by ID.
+##### Base Command
+
+`vulndb-get-vuln-by-id`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| vuln_id | Vulnerability ID | Required |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| VulnDB.Vulnerability.ID | string | Vulnerability ID |
+| VulnDB.Vulnerability.Title | string | Vulnerability title (human readable) |
+| VulnDB.Vulnerability.Keywords | string | Vulnerability keywords |
+| VulnDB.Vulnerability.Description | string | Vulnerability description (human readable) |
+| VulnDB.Vulnerability.Solution | string | Vulnerability solution (human readable) |
+| VulnDB.Vulnerability.PublishedDate | date | Vulnerability published date |
+| VulnDB.Vulnerability.TDescription | string | Vulnerability description (human readable) |
+| VulnDB.Vulnerability.SolutionDate | date | Vulnerability solution date |
+| VulnDB.Vulnerability.DiscoveryDate | date | Vulnerability discovery date |
+| VulnDB.Vulnerability.ExploitPublishDate | date | Exploit publish date |
+| VulnDB.CVE-ExtReference.Value | string | CVE external reference value |
+| VulnDB.CvssMetrics.Id | number | CVSS reference value |
+| VulnDB.CvssMetrics.AccessVector | string | CVSS access vector |
+| VulnDB.CvssMetrics.AccessComplexity | string | CVSS access complexity |
+| VulnDB.CvssMetrics.Authentication | string | CVSS metric authentication |
+| VulnDB.CvssMetrics.ConfidentialityImpact | string | CVSS confidentiality impact |
+| VulnDB.CvssMetrics.IntegrityImpact | string | CVSS integrity impact |
+| VulnDB.CvssMetrics.AvailabilityImpact | string | CVSS availability impact |
+| VulnDB.CvssMetrics.GeneratedOn | date | CVSS metric date |
+| VulnDB.CvssMetrics.Score | number | CVSS score |
+| VulnDB.Vendor.Id | number | Vendor ID |
+| VulnDB.Vendor.Name | string | Vendor name |
+| VulnDB.Products.Id | number | Product IDs |
+| VulnDB.Products.Name | string | Product names |
+| VulnDB.Products.Versions.Id | number | Product version IDs |
+| VulnDB.Products.Versions.Name | string | Product version names |
+| VulnDB.Classification.Longname | string | Classification long name |
+| VulnDB.Classification.Description | string | Classification description (human readable) |
+
+
+##### Command Example
+```!vulndb-get-vuln-by-id vuln_id="1"```
+
+##### Human Readable Output
+
+
+### 2. vulndb-get-vendor
+---
+Provides details for all vendors, or for a specific vendor, including vendor name and ID.
+##### Base Command
+
+`vulndb-get-vendor`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| vendor_id | Specific vendor ID | Optional |
+| vendor_name | Specific vendor name (human readable) | Optional |
+| max_size | Maximum number of entries to return. A high number of entries might affect performance. | Optional |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| VulnDB.Results.Id | number | Result ID |
+| VulnDB.Results.Name | string | Result name |
+| VulnDB.Results.ShortName | string | Result short name |
+| VulnDB.Results.VendorUrl | string | Result vendor URL (human readable) |
+
+
+##### Command Example
+```!vulndb-get-vendor max_size="20"```
+
+##### Human Readable Output
+
+
+### 3. vulndb-get-product
+---
+Provides a list of products, by vendor name or ID.
+##### Base Command
+
+`vulndb-get-product`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| vendor_id | Vendor ID | Optional |
+| vendor_name | Vendor name | Optional |
+| max_size | Maximum number of entries to return. A high number of entries might affect performance. | Optional |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| VulnDB.Results.Id | number | Result ID |
+| VulnDB.Results.Name | string | Result name |
+
+
+##### Command Example
+```!vulndb-get-product vendor_id="2974649" max_size="20"```
+
+##### Human Readable Output
+
+
+### 4. vulndb-get-version
+---
+Provides the versions of the specified product, by product name or ID.
+##### Base Command
+
+`vulndb-get-version`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| product_name | Product name | Optional |
+| product_id | Product ID | Optional |
+| max_size | Maximum number of entries to return. A high number of entries might affect performance. | Optional |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| VulnDB.Results.Id | number | Version ID |
+| VulnDB.Results.Name | string | Version name |
+
+
+##### Command Example
+```!vulndb-get-version product_name="1-Search" max_size="20"```
+
+##### Human Readable Output
+
+
+### 5. vulndb-get-updates-by-dates-or-hours
+---
+Provides recent vulnerabilities, by date range or number of hours.
+##### Base Command
+
+`vulndb-get-updates-by-dates-or-hours`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| start_date | Start date, in YYYY-MM-DD format (earliest) | Optional |
+| end_date | End date, in YYYY-MM-DD format (latest) | Optional |
+| hours_ago | Number of previous hours for which to return vulnerabilities | Optional |
+| max_size | Maximum number of entries to return. A high number of entries might affect performance. | Optional |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| VulnDB.Vulnerability.ID | string | Vulnerability ID |
+| VulnDB.Vulnerability.Title | string | Vulnerability title (human readable) |
+| VulnDB.Vulnerability.Keywords | string | Vulnerability keywords |
+| VulnDB.Vulnerability.Description | string | Vulnerability description (human readable) |
+| VulnDB.Vulnerability.Solution | string | Vulnerability solution (human readable) |
+| VulnDB.Vulnerability.PublishedDate | date | Vulnerability published date |
+| VulnDB.Vulnerability.TDescription | string | Vulnerability description (human readable) |
+| VulnDB.Vulnerability.SolutionDate | date | Vulnerability solution date |
+| VulnDB.Vulnerability.DiscoveryDate | date | Vulnerability discovery date |
+| VulnDB.Vulnerability.ExploitPublishDate | date | Exploit publish date |
+| VulnDB.CVE-ExtReference.Value | string | CVE external reference value |
+| VulnDB.CvssMetrics.Id | number | CVSS reference value |
+| VulnDB.CvssMetrics.AccessVector | string | CVSS access vector |
+| VulnDB.CvssMetrics.AccessComplexity | string | CVSS access complexity |
+| VulnDB.CvssMetrics.Authentication | string | CVSS metric authentication |
+| VulnDB.CvssMetrics.ConfidentialityImpact | string | CVSS confidentiality impact |
+| VulnDB.CvssMetrics.IntegrityImpact | string | CVSS integrity impact |
+| VulnDB.CvssMetrics.AvailabilityImpact | string | CVSS availability impact |
+| VulnDB.CvssMetrics.GeneratedOn | date | CVSS metric date |
+| VulnDB.CvssMetrics.Score | number | CVSS score |
+| VulnDB.Vendor.Id | number | Vendor ID |
+| VulnDB.Vendor.Name | string | Vendor name |
+| VulnDB.Products.Id | number | Product IDs |
+| VulnDB.Products.Name | string | Product names |
+| VulnDB.Products.Versions.Id | number | Product version IDs |
+| VulnDB.Products.Versions.Name | string | Product version names |
+| VulnDB.Classification.Longname | string | Classification long name |
+| VulnDB.Classification.Description | string | Classification description (human readable) |
+
+
+##### Command Example
+```!vulndb-get-updates-by-dates-or-hours start_date="2015-10-27T04:27:22" end_date="2017-10-27T04:27:22" max_size="20"```
+
+##### Human Readable Output
+
+
+### 6. vulndb-get-vuln-by-vendor-and-product-name
+---
+Provides full details about vulnerabilities, by vendor name and product name.
+##### Base Command
+
+`vulndb-get-vuln-by-vendor-and-product-name`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| vendor_name | Vendor name | Required |
+| product_name | Product name | Required |
+| max_size | Maximum number of entries to return. A high number of entries might affect performance. | Optional |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| VulnDB.Vulnerability.ID | string | Vulnerability ID |
+| VulnDB.Vulnerability.Title | string | Vulnerability title (human readable) |
+| VulnDB.Vulnerability.Keywords | string | Vulnerability keywords |
+| VulnDB.Vulnerability.Description | string | Vulnerability description (human readable) |
+| VulnDB.Vulnerability.Solution | string | Vulnerability solution (human readable) |
+| VulnDB.Vulnerability.PublishedDate | date | Vulnerability published date |
+| VulnDB.Vulnerability.TDescription | string | Vulnerability description (human readable) |
+| VulnDB.Vulnerability.SolutionDate | date | Vulnerability solution date |
+| VulnDB.Vulnerability.DiscoveryDate | date | Vulnerability discovery date |
+| VulnDB.Vulnerability.ExploitPublishDate | date | Exploit publish date |
+| VulnDB.CVE-ExtReference.Value | string | CVE external reference value |
+| VulnDB.CvssMetrics.Id | number | CVSS reference value |
+| VulnDB.CvssMetrics.AccessVector | string | CVSS access vector |
+| VulnDB.CvssMetrics.AccessComplexity | string | CVSS access complexity |
+| VulnDB.CvssMetrics.Authentication | string | CVSS metric authentication |
+| VulnDB.CvssMetrics.ConfidentialityImpact | string | CVSS confidentiality impact |
+| VulnDB.CvssMetrics.IntegrityImpact | string | CVSS integrity impact |
+| VulnDB.CvssMetrics.AvailabilityImpact | string | CVSS availability impact |
+| VulnDB.CvssMetrics.GeneratedOn | date | CVSS metric date |
+| VulnDB.CvssMetrics.Score | number | CVSS score |
+| VulnDB.Vendor.Id | number | Vendor ID |
+| VulnDB.Vendor.Name | string | Vendor name |
+| VulnDB.Products.Id | number | Product IDs |
+| VulnDB.Products.Name | string | Product names |
+| VulnDB.Products.Versions.Id | number | Product version IDs |
+| VulnDB.Products.Versions.Name | string | Product version names |
+| VulnDB.Classification.Longname | string | Classification long name |
+| VulnDB.Classification.Description | string | Classification description (human readable) |
+
+
+##### Command Example
+```!vulndb-get-vuln-by-vendor-and-product-name vendor_name="Adobe Systems Incorporated" product_name="ColdFusion" max_size="20"```
+
+##### Human Readable Output
+
+
+### 7. vulndb-get-vuln-by-vendor-and-product-id
+---
+Provides full details about vulnerabilities, by vendor ID and product ID.
+##### Base Command
+
+`vulndb-get-vuln-by-vendor-and-product-id`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| vendor_id | Vendor ID | Required |
+| product_id | Product ID | Required |
+| max_size | Maximum number of entries to return. A high number of entries might affect performance. | Optional |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| VulnDB.Vulnerability.ID | string | Vulnerability ID |
+| VulnDB.Vulnerability.Title | string | Vulnerability title (human readable) |
+| VulnDB.Vulnerability.Keywords | string | Vulnerability keywords |
+| VulnDB.Vulnerability.Description | string | Vulnerability description (human readable) |
+| VulnDB.Vulnerability.Solution | string | Vulnerability solution (human readable) |
+| VulnDB.Vulnerability.PublishedDate | date | Vulnerability published date |
+| VulnDB.Vulnerability.TDescription | string | Vulnerability description (human readable) |
+| VulnDB.Vulnerability.SolutionDate | date | Vulnerability solution date |
+| VulnDB.Vulnerability.DiscoveryDate | date | Vulnerability discovery date |
+| VulnDB.Vulnerability.ExploitPublishDate | date | Exploit publish date |
+| VulnDB.CVE-ExtReference.Value | string | CVE external reference value |
+| VulnDB.CvssMetrics.Id | number | CVSS reference value |
+| VulnDB.CvssMetrics.AccessVector | string | CVSS access vector |
+| VulnDB.CvssMetrics.AccessComplexity | string | CVSS access complexity |
+| VulnDB.CvssMetrics.Authentication | string | CVSS metric authentication |
+| VulnDB.CvssMetrics.ConfidentialityImpact | string | CVSS confidentiality impact |
+| VulnDB.CvssMetrics.IntegrityImpact | string | CVSS integrity impact |
+| VulnDB.CvssMetrics.AvailabilityImpact | string | CVSS availability impact |
+| VulnDB.CvssMetrics.GeneratedOn | date | CVSS metric date |
+| VulnDB.CvssMetrics.Score | number | CVSS score |
+| VulnDB.Vendor.Id | number | Vendor ID |
+| VulnDB.Vendor.Name | string | Vendor name |
+| VulnDB.Products.Id | number | Product IDs |
+| VulnDB.Products.Name | string | Product names |
+| VulnDB.Products.Versions.Id | number | Product version IDs |
+| VulnDB.Products.Versions.Name | string | Product version names |
+| VulnDB.Classification.Longname | string | Classification long name |
+| VulnDB.Classification.Description | string | Classification description (human readable) |
+
+
+##### Command Example
+```!vulndb-get-vuln-by-vendor-and-product-id vendor_id="5011" product_id="1777" max_size="20"```
+
+##### Human Readable Output
+
+
+### 8. vulndb-get-vuln-by-vendor-id
+---
+Provides full details about vulnerabilities, by vendor ID.
+##### Base Command
+
+`vulndb-get-vuln-by-vendor-id`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| vendor_id | Vendor ID | Required |
+| max_size | Maximum number of entries to return. A high number of entries might affect performance. | Optional |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| VulnDB.Vulnerability.ID | string | Vulnerability ID |
+| VulnDB.Vulnerability.Title | string | Vulnerability title (human readable) |
+| VulnDB.Vulnerability.Keywords | string | Vulnerability keywords |
+| VulnDB.Vulnerability.Description | string | Vulnerability description (human readable) |
+| VulnDB.Vulnerability.Solution | string | Vulnerability solution (human readable) |
+| VulnDB.Vulnerability.PublishedDate | date | Vulnerability published date |
+| VulnDB.Vulnerability.TDescription | string | Vulnerability description (human readable) |
+| VulnDB.Vulnerability.SolutionDate | date | Vulnerability solution date |
+| VulnDB.Vulnerability.DiscoveryDate | date | Vulnerability discovery date |
+| VulnDB.Vulnerability.ExploitPublishDate | date | Exploit publish date |
+| VulnDB.CVE-ExtReference.Value | string | CVE external reference value |
+| VulnDB.CvssMetrics.Id | number | CVSS reference value |
+| VulnDB.CvssMetrics.AccessVector | string | CVSS access vector |
+| VulnDB.CvssMetrics.AccessComplexity | string | CVSS access complexity |
+| VulnDB.CvssMetrics.Authentication | string | CVSS metric authentication |
+| VulnDB.CvssMetrics.ConfidentialityImpact | string | CVSS confidentiality impact |
+| VulnDB.CvssMetrics.IntegrityImpact | string | CVSS integrity impact |
+| VulnDB.CvssMetrics.AvailabilityImpact | string | CVSS availability impact |
+| VulnDB.CvssMetrics.GeneratedOn | date | CVSS metric date |
+| VulnDB.CvssMetrics.Score | number | CVSS score |
+| VulnDB.Vendor.Id | number | Vendor ID |
+| VulnDB.Vendor.Name | string | Vendor name |
+| VulnDB.Products.Id | number | Product IDs |
+| VulnDB.Products.Name | string | Product names |
+| VulnDB.Products.Versions.Id | number | Product version IDs |
+| VulnDB.Products.Versions.Name | string | Product version names |
+| VulnDB.Classification.Longname | string | Classification long name |
+| VulnDB.Classification.Description | string | Classification description (human readable) |
+
+
+##### Command Example
+```!vulndb-get-vuln-by-vendor-id vendor_id="5011" max_size="20"```
+
+##### Human Readable Output
+
+
+### 9. vulndb-get-vuln-by-product-id
+---
+Provides full details about vulnerabilities, by product ID.
+##### Base Command
+
+`vulndb-get-vuln-by-product-id`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| product_id | Product ID. | Required |
+| max_size | Maximum number of entries returned by the query, to avoid a slow response. | Optional |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| VulnDB.Vulnerability.ID | string | Vulnerability ID |
+| VulnDB.Vulnerability.Title | string | Vulnerability title (human readable only) |
+| VulnDB.Vulnerability.Keywords | string | Vulnerability keywords |
+| VulnDB.Vulnerability.Description | string | Vulnerability description (human readable only) |
+| VulnDB.Vulnerability.Solution | string | Vulnerability solution (human readable only) |
+| VulnDB.Vulnerability.PublishedDate | date | Vulnerability published date |
+| VulnDB.Vulnerability.TDescription | string | Vulnerability technical description (human readable only) |
+| VulnDB.Vulnerability.SolutionDate | date | Vulnerability solution date |
+| VulnDB.Vulnerability.DiscoveryDate | date | Vulnerability discovery date |
+| VulnDB.Vulnerability.ExploitPublishDate | date | Exploit publish date |
+| VulnDB.CVE-ExtReferences.Value | string | CVE reference value ("CVE-" is a constant prefix) |
+| VulnDB.CvssMetrics.Id | number | CVSS metric reference ID |
+| VulnDB.CvssMetrics.AccessVector | string | CVSS access vector |
+| VulnDB.CvssMetrics.AccessComplexity | string | CVSS access complexity |
+| VulnDB.CvssMetrics.Authentication | string | CVSS metric authentication |
+| VulnDB.CvssMetrics.ConfidentialityImpact | string | CVSS confidentiality impact |
+| VulnDB.cvssMetrics.integrity_impact | string | CVSS integrity impact |
+| VulnDB.CvssMetrics.AvailabilityImpact | string | CVSS availability impact |
+| VulnDB.CvssMetrics.Generated_on | date | Date the CVSS metric was generated |
+| VulnDB.CvssMetrics.Score | number | CVSS score |
+| VulnDB.Vendors.Id | number | Vendor ID |
+| VulnDB.Vendor.Name | string | Vendor name |
+| VulnDB.Products.Id | number | Product ID |
+| VulnDB.Products.Name | string | Product name |
+| VulnDB.Products.Versions.Id | number | Product version ID |
+| VulnDB.Products.Versions.Name | string | Product version name |
+| VulnDB.Classification.Longname | string | Classification long name |
+| VulnDB.Classification.Description | string | Classification description (human readable only) |
+
+
+##### Command Example
+```!vulndb-get-vuln-by-product-id product_id="1777" max_size="20"```
+
+##### Human Readable Output
+
+
+### 10. vulndb-get-vuln-by-cve-id
+---
+Provides full details about vulnerabilities, by CVE ID.
+##### Base Command
+
+`vulndb-get-vuln-by-cve-id`
+##### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| cve_id | CVE ID. | Required |
+| max_size | Maximum number of entries returned by the query, to avoid a slow response. | Optional |
+
+
+##### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| VulnDB.Vulnerability.ID | string | Vulnerability ID |
+| VulnDB.Vulnerability.Title | string | Vulnerability title (human readable only) |
+| VulnDB.Vulnerability.Keywords | string | Vulnerability keywords |
+| VulnDB.Vulnerability.Description | string | Vulnerability description (human readable only) |
+| VulnDB.Vulnerability.Solution | string | Vulnerability solution (human readable only) |
+| VulnDB.Vulnerability.PublishedDate | date | Vulnerability published date |
+| VulnDB.Vulnerability.TDescription | string | Vulnerability technical description (human readable only) |
+| VulnDB.Vulnerability.SolutionDate | date | Vulnerability solution date |
+| VulnDB.Vulnerability.DiscoveryDate | date | Vulnerability discovery date |
+| VulnDB.Vulnerability.ExploitPublishDate | date | Exploit publish date |
+| VulnDB.CVE-ExtReferences.Value | string | CVE reference value ("CVE-" is a constant prefix) |
+| VulnDB.CvssMetrics.Id | number | CVSS metric reference ID |
+| VulnDB.CvssMetrics.AccessVector | string | CVSS access vector |
+| VulnDB.CvssMetrics.AccessComplexity | string | CVSS access complexity |
+| VulnDB.CvssMetrics.Authentication | string | CVSS metric authentication |
+| VulnDB.CvssMetrics.ConfidentialityImpact | string | CVSS confidentiality impact |
+| VulnDB.cvssMetrics.integrity_impact | string | CVSS integrity impact |
+| VulnDB.CvssMetrics.AvailabilityImpact | string | CVSS availability impact |
+| VulnDB.CvssMetrics.Generated_on | date | Date the CVSS metric was generated |
+| VulnDB.CvssMetrics.Score | number | CVSS score |
+| VulnDB.Vendors.Id | number | Vendor ID |
+| VulnDB.Vendor.Name | string | Vendor name |
+| VulnDB.Products.Id | number | Product ID |
+| VulnDB.Products.Name | string | Product name |
+| VulnDB.Products.Versions.Id | number | Product version ID |
+| VulnDB.Products.Versions.Name | string | Product version name |
+| VulnDB.Classification.Longname | string | Classification long name |
+| VulnDB.Classification.Description | string | Classification description (human readable only) |
+
+
+##### Command Example
+```!vulndb-get-vuln-by-cve-id cve_id="2013-1228" max_size="20"```
+
+##### Human Readable Output
+
+
+## Additional Information
+---
+
+## Known Limitations
+---
+
+## Troubleshooting
+---
\ No newline at end of file
diff --git a/Integrations/VulnDB/VulnDB_image.png b/Integrations/VulnDB/VulnDB_image.png
new file mode 100644
index 000000000000..5f4ff1e508fb
Binary files /dev/null and b/Integrations/VulnDB/VulnDB_image.png differ
diff --git a/Integrations/WhatIsMyBrowser/CHANGELOG.md b/Integrations/WhatIsMyBrowser/CHANGELOG.md
new file mode 100644
index 000000000000..e18a13086298
--- /dev/null
+++ b/Integrations/WhatIsMyBrowser/CHANGELOG.md
@@ -0,0 +1,5 @@
+## [Unreleased]
+
+
+## [19.8.0] - 2019-08-06
+ - The *Trust any certificate* parameter now works as expected.
\ No newline at end of file
diff --git a/Integrations/WhatIsMyBrowser/Pipfile b/Integrations/WhatIsMyBrowser/Pipfile
new file mode 100644
index 000000000000..1f4881ed672c
--- /dev/null
+++ b/Integrations/WhatIsMyBrowser/Pipfile
@@ -0,0 +1,15 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+
+[packages]
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/WhatIsMyBrowser/Pipfile.lock b/Integrations/WhatIsMyBrowser/Pipfile.lock
new file mode 100644
index 000000000000..bd5a86d9d4c5
--- /dev/null
+++ b/Integrations/WhatIsMyBrowser/Pipfile.lock
@@ -0,0 +1,308 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "1fd564b978cf016eca093f3dfd295ed6ecae2fed0d591fcda830d512fa1fe4b8"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==3.7.4"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265",
+ "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7",
+ "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db"
+ ],
+ "version": "==0.18"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
+ "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
+ ],
+ "version": "==19.0"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:02c2b6d268695a8b64ad61847f92e611e6afcff33fd26c3a2125370c4662905d",
+ "sha256:ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93"
+ ],
+ "index": "pypi",
+ "version": "==1.9.4"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a",
+ "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03"
+ ],
+ "version": "==2.4.0"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae",
+ "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6"
+ ],
+ "index": "pypi",
+ "version": "==4.6.4"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
+ "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+ ],
+ "version": "==2.22.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
+ "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
+ ],
+ "version": "==1.12.0"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:b246607a25ac80bedac05c6f282e3cdaf3afb65420fd024ac94435cabe6e18d1",
+ "sha256:dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
+ ],
+ "version": "==1.25.3"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/WhatIsMyBrowser/WhatIsMyBrowser.py b/Integrations/WhatIsMyBrowser/WhatIsMyBrowser.py
new file mode 100644
index 000000000000..67b4c9ef4c06
--- /dev/null
+++ b/Integrations/WhatIsMyBrowser/WhatIsMyBrowser.py
@@ -0,0 +1,135 @@
+import demistomock as demisto
+from CommonServerPython import *
+''' IMPORTS '''
+import requests
+import json
+# disable insecure warnings
+requests.packages.urllib3.disable_warnings()
+
+
+'''GLOBAL VARS'''
+API_URL = demisto.params().get('url')
+API_KEY = demisto.params().get('api_key')
+USE_SSL = not demisto.params().get('insecure')
+PROXY = demisto.params().get('proxy')
+
+
+'''HELPER FUNCTIONS'''
+
+
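+# Minimal wrapper around requests: POST the JSON payload to API_URL,
+# authenticating with the X-API-KEY header. A non-200 status code is surfaced
+# to the war room via return_error; otherwise the raw response body is returned.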
+def http_request(data):
+ headers = {
+ 'X-API-KEY': API_KEY,
+ }
+ r = requests.request(
+ 'POST',
+ API_URL,
+ data=data,
+ headers=headers,
+ verify=USE_SSL
+ )
+ if r.status_code != 200:
+ return_error('Error in API call to WhatIsMyBrowser [%d] - %s' % (r.status_code, r.reason))
+ return r.content
+
+
+'''MAIN FUNCTIONS'''
+
+
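+# Build the JSON body expected by the user-agent parse endpoint and return the
+# raw API response for ua_parse_command to decode.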
+def ua_parse(user_agent):
+ post_data = {
+ "user_agent": "{}".format(user_agent)
+ }
+ post_json = json.dumps(post_data)
+ r = http_request(post_json)
+ return r
+
+
+def ua_parse_command():
+ user_agent = demisto.args().get('UserAgent')
+ raw = ua_parse(user_agent)
+ r = json.loads(raw)
+ if 'success' in r['result']['code']:
+ parsed = r['parse']
+        hr = {}  # type: dict
+        ua_ec = {}  # type: dict
+ if 'software' in parsed:
+ hr['Software'] = parsed['software']
+ ua_ec['Software'] = parsed['software']
+ if 'software_name' in parsed:
+ hr['Software Name'] = parsed['software_name']
+ ua_ec['SoftwareName'] = parsed['software_name']
+ if 'operating_system' in parsed and parsed['operating_system'] is not None:
+ hr['Operating System'] = parsed['operating_system']
+ ua_ec['OperatingSystem'] = parsed['operating_system']
+        # DBotScore convention: 0 = unknown, 1 = good, 3 = bad (malicious).
+        # Default to "unknown" so dbot_score is always defined, even when the
+        # response does not include the is_abusive field.
+        dbot_score = {
+            'Score': 0,
+            'Type': 'UserAgent',
+            'Vendor': 'WhatIsMyBrowser',  # disable-secrets-detection
+            'Indicator': parsed.get('user_agent', user_agent)
+        }
+        if 'is_abusive' in parsed:
+            hr['Abusive'] = parsed['is_abusive']
+            ua_ec['Abusive'] = parsed['is_abusive']
+            dbot_score['Score'] = 3 if parsed['is_abusive'] is True else 1
+ if 'operating_system_name' in parsed and parsed['operating_system_name'] is not None:
+ hr['Operating System Name'] = parsed['operating_system_name']
+ ua_ec['OperatingSystemName'] = parsed['operating_system_name']
+ if 'user_agent' in parsed:
+ hr['User Agent'] = parsed['user_agent']
+ ua_ec['UserAgent'] = parsed['user_agent']
+ if 'hardware_type' in parsed and parsed['hardware_type'] is not None:
+ hr['Hardware Type'] = parsed['hardware_type']
+ ua_ec['HardwareType'] = parsed['hardware_type']
+ if 'hardware_sub_type' in parsed and parsed['hardware_sub_type'] is not None:
+ hr['Hardware Sub Type'] = parsed['hardware_sub_type']
+ ua_ec['HardwareSubType'] = parsed['hardware_sub_type']
+ ec = {
+ 'UA.Parse(val.UserAgent && val.UserAgent == obj.UserAgent)': ua_ec,
+ 'DBotScore': dbot_score
+ }
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': r,
+ 'HumanReadable': tableToMarkdown('Parsed result for {}'.format(user_agent), hr),
+ 'EntryContext': ec
+ })
+    elif r['result']['code'] == 'error':
+ error_msg = r['result']['message']
+ demisto.results({
+ 'Type': entryTypes['note'],
+ 'ContentsFormat': formats['markdown'],
+ 'Contents': r,
+ 'HumanReadable': '{}'.format(error_msg)
+ })
+
+
+def test_command():
+ post_data = {
+ # disable-secrets-detection
+ "user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) "
+ "Chrome/64.0.3282.140 Safari/537.36"
+ }
+ post_json = json.dumps(post_data)
+ http_request(post_json)
+ demisto.results('ok')
+
+
+'''EXECUTION BLOCK'''
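+# Dispatch the invoked command to its implementation; any exception is
+# surfaced to the war room via return_error.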
+try:
+ handle_proxy()
+ if demisto.command() == 'ua-parse':
+ ua_parse_command()
+ if demisto.command() == 'test-module':
+ test_command()
+except Exception as e:
+ return_error(str(e))
diff --git a/Integrations/WhatIsMyBrowser/WhatIsMyBrowser.yml b/Integrations/WhatIsMyBrowser/WhatIsMyBrowser.yml
new file mode 100644
index 000000000000..9239b0eab17c
--- /dev/null
+++ b/Integrations/WhatIsMyBrowser/WhatIsMyBrowser.yml
@@ -0,0 +1,67 @@
+commonfields:
+ id: WhatsMyBrowser
+ version: -1
+name: WhatsMyBrowser
+display: WhatIsMyBrowser
+category: Data Enrichment & Threat Intelligence
+description: Parses user agents, determines whether they are malicious, and enriches
+  the available information about the agent.
+configuration:
+- display: URL for WhatIsMyBrowser
+ name: url
+ defaultvalue: https://api.whatismybrowser.com/api/v2/user_agent_parse # disable-secrets-detection
+ type: 0
+ required: true
+- display: API Key
+ name: api_key
+ defaultvalue: ""
+ type: 4
+ required: true
+- display: Use system proxy settings
+ name: proxy
+ defaultvalue: ""
+ type: 8
+ required: false
+- display: Trust any certificate (insecure)
+ name: insecure
+ defaultvalue: ""
+ type: 8
+ required: false
+script:
+ script: ''
+ type: python
+ commands:
+ - name: ua-parse
+ arguments:
+ - name: UserAgent
+ required: true
+      description: The user agent string to parse.
+ outputs:
+ - contextPath: UA.Parse.Software
+ description: Software extracted from UA string
+ type: string
+ - contextPath: UA.Parse.SoftwareName
+ description: Software Name extracted from UA string
+ type: string
+ - contextPath: UA.Parse.OperatingSystem
+ description: Operating System extracted from UA string
+ type: string
+ - contextPath: UA.Parse.Abusive
+ description: Whether or not the UA has been identified as abusive
+ type: boolean
+ - contextPath: UA.Parse.OperatingSystemName
+ description: Operating System Name extracted from UA string
+ type: string
+ - contextPath: UA.Parse.UserAgent
+ description: User agent string
+ type: string
+ - contextPath: UA.Parse.HardwareType
+ description: Hardware Type as extracted from UA string
+ type: string
+ - contextPath: UA.Parse.HardwareSubType
+ description: Hardware Sub Type as extracted from UA string
+ type: string
+ description: Parses a User Agent string
+ runonce: false
+tests:
+- WhatsMyBrowser-Test
diff --git a/Integrations/WhatIsMyBrowser/WhatIsMyBrowser_description.md b/Integrations/WhatIsMyBrowser/WhatIsMyBrowser_description.md
new file mode 100644
index 000000000000..a4a0af5307ba
--- /dev/null
+++ b/Integrations/WhatIsMyBrowser/WhatIsMyBrowser_description.md
@@ -0,0 +1 @@
+The User Agent Parse API from WhatIsMyBrowser allows you to send a user agent string and receive a detailed response back, describing as much as possible about the string. The `ua-parse` command is designed for parsing individual user agents.
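+
+For example, to parse a single user agent (an illustrative invocation; any user agent string can be substituted):
+
+`!ua-parse UserAgent="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36"`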
diff --git a/Integrations/WhatIsMyBrowser/WhatIsMyBrowser_image.png b/Integrations/WhatIsMyBrowser/WhatIsMyBrowser_image.png
new file mode 100644
index 000000000000..94b937a2a633
Binary files /dev/null and b/Integrations/WhatIsMyBrowser/WhatIsMyBrowser_image.png differ
diff --git a/Integrations/Whois/CHANGELOG.md b/Integrations/Whois/CHANGELOG.md
new file mode 100644
index 000000000000..9130e939f780
--- /dev/null
+++ b/Integrations/Whois/CHANGELOG.md
@@ -0,0 +1,9 @@
+## [Unreleased]
+
+
+## [19.9.1] - 2019-09-18
+ - Updated documentation to reflect capabilities of the Whois integration.
+ - Added context outputs to match context standards, which enables outputs to be found for field mapping.
+
+## [19.8.0] - 2019-08-06
+ - Added support for SOCKS and HTTP Connect proxies.
diff --git a/Integrations/Whois/Pipfile b/Integrations/Whois/Pipfile
new file mode 100644
index 000000000000..a099c0518a36
--- /dev/null
+++ b/Integrations/Whois/Pipfile
@@ -0,0 +1,31 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+pylint = "*"
+pytest = "*"
+pytest-mock = "*"
+requests-mock = "*"
+flake8 = "*"
+
+[packages]
+cybox = "==2.1.0.13" # disable-secrets-detection
+future = "==0.16.0"
+lxml = "==3.7.2"
+mixbox = "==1.0.1"
+olefile = "==0.44"
+ordered-set = "==2.0.1"
+python-dateutil = "==2.6.0"
+python-whois = "==0.7.0"
+requests = "==2.13.0"
+six = "==1.10.0"
+stix = "==1.2.0.2" # disable-secrets-detection
+urllib3 = "==1.24"
+virtualenv = "==15.0.3"
+weakrefmethod = "==1.0.3"
+PySocks = "==1.6.8"
+
+[requires]
+python_version = "2.7"
diff --git a/Integrations/Whois/Pipfile.lock b/Integrations/Whois/Pipfile.lock
new file mode 100644
index 000000000000..0ccb5f3ec224
--- /dev/null
+++ b/Integrations/Whois/Pipfile.lock
@@ -0,0 +1,484 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "a25818e5e1860f56ab3044760a59d76268b26a64d270ac8cb6a5aa4b6413bc94"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "2.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "cybox": {
+ "hashes": [
+ "sha256:d8dba216aa6ef05295757439bf098e1b271cbf50fff13be6bde1bd7fb46b9420",
+ "sha256:daa2456db3c7f211cb92c567970d7d9b95b068cd2efbf04e5bbbcecf5c3ffdb6"
+ ],
+ "index": "pypi",
+ "version": "==2.1.0.13"
+ },
+ "future": {
+ "hashes": [
+ "sha256:e39ced1ab767b5936646cedba8bcce582398233d6a627067d4c6a454c90cfedb"
+ ],
+ "index": "pypi",
+ "version": "==0.16.0"
+ },
+ "lxml": {
+ "hashes": [
+ "sha256:188d07d7c21b13be43e45b955e0a9f49bb9aaa1982ff3d25f7e6ed7af7cb845c",
+ "sha256:260a7ab24a27f264b99d7eedd4d12e2cd1fdacbbbc7a4506bf1a8405ea2c8b51",
+ "sha256:343993c62bb395ab04026f35c7d459719e33016ecf9256fadbd3b1751a0b7b0a",
+ "sha256:46b4db3ad2473080621f554c629c62150080b5112f8ddbabdab58ad8e4bb5ed1",
+ "sha256:52766500c74c08a9fa6838163b89914a3274d7617f8cc420793fb6ea870cb558",
+ "sha256:59d9176360dbc3919e9d4bfca85c1ca64ab4f4ee00e6f119d7150ba887e3410a",
+ "sha256:9107bb0d12a940d0ccb50240ceef8701ac717102acc564d7cf7242ab810251ec",
+ "sha256:96724831b1657499521ec0340460f2c853808ea86171af688818a1972c463aad",
+ "sha256:cb013b5a5f58b58068be0203d2b7136d8464d7e9b3db01d54f8884d3c4097149",
+ "sha256:f302a725802b56c173bbef29131cc753413e47749fcf96d7a4bde4e6ffd25dee",
+ "sha256:fb99af148e7507e0cde11ea5c064062d960e581d3530c50e0936d6743da8bb2c"
+ ],
+ "index": "pypi",
+ "version": "==3.7.2"
+ },
+ "mixbox": {
+ "hashes": [
+ "sha256:4256ba29f9e629a300b91530871e8dc6ed576d7648f757ce6a990c8cabc1a0be",
+ "sha256:6df0575f483bff4753075a44d3849f709304e5ebd6d290e6ab2500c0f5905f68",
+ "sha256:e70e82c21995240df708a930afed2850ac3476e75746328890ea44ab94ba1b7a"
+ ],
+ "index": "pypi",
+ "version": "==1.0.1"
+ },
+ "olefile": {
+ "hashes": [
+ "sha256:61f2ca0cd0aa77279eb943c07f607438edf374096b66332fae1ee64a6f0f73ad"
+ ],
+ "index": "pypi",
+ "version": "==0.44"
+ },
+ "ordered-set": {
+ "hashes": [
+ "sha256:55567f094481ba204ffede0117ab563e19af050c7cbf33a9a23292b8cb2b0a0e"
+ ],
+ "index": "pypi",
+ "version": "==2.0.1"
+ },
+ "pysocks": {
+ "hashes": [
+ "sha256:3fe52c55890a248676fd69dc9e3c4e811718b777834bcaab7a8125cf9deac672"
+ ],
+ "index": "pypi",
+ "version": "==1.6.8"
+ },
+ "python-dateutil": {
+ "hashes": [
+ "sha256:3acbef017340600e9ff8f2994d8f7afd6eacb295383f286466a6df3961e486f0",
+ "sha256:537bf2a8f8ce6f6862ad705cd68f9e405c0b5db014aa40fa29eab4335d4b1716",
+ "sha256:62a2f8df3d66f878373fd0072eacf4ee52194ba302e00082828e0d263b0418d2"
+ ],
+ "index": "pypi",
+ "version": "==2.6.0"
+ },
+ "python-whois": {
+ "hashes": [
+ "sha256:39b187060c8f798f381f7ee119d44bda564cee3fc2190e56ff8d0c3332286cf7"
+ ],
+ "index": "pypi",
+ "version": "==0.7.0"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:1a720e8862a41aa22e339373b526f508ef0c8988baf48b84d3fc891a8e237efb",
+ "sha256:5722cd09762faa01276230270ff16af7acf7c5c45d623868d9ba116f15791ce8"
+ ],
+ "index": "pypi",
+ "version": "==2.13.0"
+ },
+ "six": {
+ "hashes": [
+ "sha256:0ff78c403d9bccf5a425a6d31a12aa6b47f1c21ca4dc2573a7e2f32a97335eb1",
+ "sha256:105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
+ ],
+ "index": "pypi",
+ "version": "==1.10.0"
+ },
+ "stix": {
+ "hashes": [
+ "sha256:9475dc56817d26a3da177b1b1a118cc8665cc98ba1af382b42906aa5132a97b4",
+ "sha256:c26de96e1782c303fecabb3e569aa143827c685c446b831aa42e09b06dd47195",
+ "sha256:c7f87fc1ba9df5ee52ec74e2c312f0fa621108fab0583cfad627d88f6b5d321f"
+ ],
+ "index": "pypi",
+ "version": "==1.2.0.2"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:41c3db2fc01e5b907288010dec72f9d0a74e37d6994e6eb56849f59fea2265ae",
+ "sha256:8819bba37a02d143296a4d032373c4dd4aca11f6d4c9973335ca75f9c8475f59"
+ ],
+ "index": "pypi",
+ "version": "==1.24"
+ },
+ "virtualenv": {
+ "hashes": [
+ "sha256:6d9c760d3fc5fa0894b0f99b9de82a4647e1164f0b700a7f99055034bf548b1d",
+ "sha256:cc8164362fc9611d478f784bbc066f3ee74526c50336ec61a6e75d5af97926c8"
+ ],
+ "index": "pypi",
+ "version": "==15.0.3"
+ },
+ "weakrefmethod": {
+ "hashes": [
+ "sha256:37bc1fbb5575acf82172d4eb7b6fc4412d77d5a1d70dff2c1f8a4574301cda66"
+ ],
+ "index": "pypi",
+ "version": "==1.0.3"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:87de48a92e29cedf7210ffa853d11441e7ad94cb47bacd91b023499b51cbc756",
+ "sha256:d25869fc7f44f1d9fb7d24fd7ea0639656f5355fc3089cd1f3d18c6ec6b124c7"
+ ],
+ "version": "==1.6.6"
+ },
+ "atomicwrites": {
+ "hashes": [
+ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
+ "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
+ ],
+ "version": "==1.3.0"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79",
+ "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"
+ ],
+ "version": "==19.1.0"
+ },
+ "backports.functools-lru-cache": {
+ "hashes": [
+ "sha256:9d98697f088eb1b0fa451391f91afb5e3ebde16bbdb272819fd091151fda4f1a",
+ "sha256:f0b0e4eba956de51238e17573b7087e852dfe9854afd2e9c873f73fc0ca0a6dd"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==1.5"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:046832c04d4e752f37383b628bc601a7ea7211496b4638f6514d0e5b9acc4939",
+ "sha256:945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
+ ],
+ "version": "==2019.6.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "configparser": {
+ "hashes": [
+ "sha256:8be81d89d6e7b4c0d4e44bcc525845f6da25821de80cb5e06e7e0238a2899e32",
+ "sha256:da60d0014fd8c55eb48c1c5354352e363e2d30bbf7057e5e171a468390184c75"
+ ],
+ "markers": "python_version == '2.7'",
+ "version": "==3.7.4"
+ },
+ "contextlib2": {
+ "hashes": [
+ "sha256:509f9419ee91cdd00ba34443217d5ca51f5a364a404e1dce9e8979cea969ca48",
+ "sha256:f5260a6e679d2ff42ec91ec5252f4eeffdcf21053db9113bd0a8e4d953769c00"
+ ],
+ "markers": "python_version < '3'",
+ "version": "==0.5.5"
+ },
+ "entrypoints": {
+ "hashes": [
+ "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
+ "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
+ ],
+ "version": "==0.3"
+ },
+ "enum34": {
+ "hashes": [
+ "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
+ "sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
+ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
+ "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==1.1.6"
+ },
+ "flake8": {
+ "hashes": [
+ "sha256:19241c1cbc971b9962473e4438a2ca19749a7dd002dd1a946eaba171b4114548",
+ "sha256:8e9dfa3cecb2400b3738a42c54c3043e821682b9c840b0448c0503f781130696"
+ ],
+ "index": "pypi",
+ "version": "==3.7.8"
+ },
+ "funcsigs": {
+ "hashes": [
+ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
+ "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==1.0.2"
+ },
+ "functools32": {
+ "hashes": [
+ "sha256:89d824aa6c358c421a234d7f9ee0bd75933a67c29588ce50aaa3acdf4d403fa0",
+ "sha256:f6253dfbe0538ad2e387bd8fdfd9293c925d63553f5813c4e587745416501e6d"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.2.3.post2"
+ },
+ "futures": {
+ "hashes": [
+ "sha256:49b3f5b064b6e3afc3316421a3f25f66c137ae88f068abbf72830170033c5e16",
+ "sha256:7e033af76a5e35f58e56da7a91e687706faf4e7bdfb2cbc3f2cca6b9bcda9794"
+ ],
+ "markers": "python_version < '3.2'",
+ "version": "==3.3.0"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
+ "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+ ],
+ "version": "==2.8"
+ },
+ "importlib-metadata": {
+ "hashes": [
+ "sha256:6dfd58dfe281e8d240937776065dd3624ad5469c835248219bd16cf2e12dbeb7",
+ "sha256:cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db"
+ ],
+ "version": "==0.18"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
+ "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
+ ],
+ "version": "==4.3.21"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:159a745e61422217881c4de71f9eafd9d703b93af95618635849fe469a283661",
+ "sha256:23f63c0821cc96a23332e45dfaa83266feff8adc72b9bcaef86c202af765244f",
+ "sha256:3b11be575475db2e8a6e11215f5aa95b9ec14de658628776e10d96fa0b4dac13",
+ "sha256:3f447aff8bc61ca8b42b73304f6a44fa0d915487de144652816f950a3f1ab821",
+ "sha256:4ba73f6089cd9b9478bc0a4fa807b47dbdb8fad1d8f31a0f0a5dbf26a4527a71",
+ "sha256:4f53eadd9932055eac465bd3ca1bd610e4d7141e1278012bd1f28646aebc1d0e",
+ "sha256:64483bd7154580158ea90de5b8e5e6fc29a16a9b4db24f10193f0c1ae3f9d1ea",
+ "sha256:6f72d42b0d04bfee2397aa1862262654b56922c20a9bb66bb76b6f0e5e4f9229",
+ "sha256:7c7f1ec07b227bdc561299fa2328e85000f90179a2f44ea30579d38e037cb3d4",
+ "sha256:7c8b1ba1e15c10b13cad4171cfa77f5bb5ec2580abc5a353907780805ebe158e",
+ "sha256:8559b94b823f85342e10d3d9ca4ba5478168e1ac5658a8a2f18c991ba9c52c20",
+ "sha256:a262c7dfb046f00e12a2bdd1bafaed2408114a89ac414b0af8755c696eb3fc16",
+ "sha256:acce4e3267610c4fdb6632b3886fe3f2f7dd641158a843cf6b6a68e4ce81477b",
+ "sha256:be089bb6b83fac7f29d357b2dc4cf2b8eb8d98fe9d9ff89f9ea6012970a853c7",
+ "sha256:bfab710d859c779f273cc48fb86af38d6e9210f38287df0069a63e40b45a2f5c",
+ "sha256:c10d29019927301d524a22ced72706380de7cfc50f767217485a912b4c8bd82a",
+ "sha256:dd6e2b598849b3d7aee2295ac765a578879830fb8966f70be8cd472e6069932e",
+ "sha256:e408f1eacc0a68fed0c08da45f31d0ebb38079f043328dce69ff133b95c29dc1"
+ ],
+ "version": "==1.4.1"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "mock": {
+ "hashes": [
+ "sha256:83657d894c90d5681d62155c82bda9c1187827525880eda8ff5df4ec813437c3",
+ "sha256:d157e52d4e5b938c550f39eb2fd15610db062441a9c2747d3dbfa9298211d0f8"
+ ],
+ "markers": "python_version < '3.0'",
+ "version": "==3.0.5"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
+ "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
+ "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
+ ],
+ "markers": "python_version <= '2.7'",
+ "version": "==5.0.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af",
+ "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"
+ ],
+ "version": "==19.0"
+ },
+ "pathlib2": {
+ "hashes": [
+ "sha256:2156525d6576d21c4dcaddfa427fae887ef89a7a9de5cbfe0728b3aafa78427e",
+ "sha256:446014523bb9be5c28128c4d2a10ad6bb60769e78bd85658fe44a450674e0ef8"
+ ],
+ "markers": "python_version < '3.6'",
+ "version": "==2.3.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:0825a152ac059776623854c1543d65a4ad408eb3d33ee114dff91e57ec6ae6fc",
+ "sha256:b9817417e95936bf75d85d3f8767f7df6cdde751fc40aed3bb3074cbcb77757c"
+ ],
+ "version": "==0.12.0"
+ },
+ "py": {
+ "hashes": [
+ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa",
+ "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"
+ ],
+ "version": "==1.8.0"
+ },
+ "pycodestyle": {
+ "hashes": [
+ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
+ "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+ ],
+ "version": "==2.5.0"
+ },
+ "pyflakes": {
+ "hashes": [
+ "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
+ "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+ ],
+ "version": "==2.1.1"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:367e3d49813d349a905390ac27989eff82ab84958731c5ef0bef867452cfdc42",
+ "sha256:97a42df23d436c70132971d1dcb9efad2fe5c0c6add55b90161e773caf729300"
+ ],
+ "index": "pypi",
+ "version": "==1.9.5"
+ },
+ "pyparsing": {
+ "hashes": [
+ "sha256:43c5486cefefa536c9aab528881c992328f020eefe4f6d06332449c365218580",
+ "sha256:d6c5ffe9d0305b9b977f7a642d36b9370954d1da7ada4c62393382cbadad4265"
+ ],
+ "version": "==2.4.1.1"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:6aa9bc2f6f6504d7949e9df2a756739ca06e58ffda19b5e53c725f7b03fb4aae",
+ "sha256:b77ae6f2d1a760760902a7676887b665c086f71e3461c64ed2a312afcedc00d6"
+ ],
+ "index": "pypi",
+ "version": "==4.6.4"
+ },
+ "pytest-mock": {
+ "hashes": [
+ "sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7",
+ "sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568"
+ ],
+ "index": "pypi",
+ "version": "==1.10.4"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:1a720e8862a41aa22e339373b526f508ef0c8988baf48b84d3fc891a8e237efb",
+ "sha256:5722cd09762faa01276230270ff16af7acf7c5c45d623868d9ba116f15791ce8"
+ ],
+ "index": "pypi",
+ "version": "==2.13.0"
+ },
+ "requests-mock": {
+ "hashes": [
+ "sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d",
+ "sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146"
+ ],
+ "index": "pypi",
+ "version": "==1.6.0"
+ },
+ "scandir": {
+ "hashes": [
+ "sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
+ "sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
+ "sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
+ "sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
+ "sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
+ "sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
+ "sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
+ "sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
+ "sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
+ "sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
+ "sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==1.10.0"
+ },
+ "singledispatch": {
+ "hashes": [
+ "sha256:5b06af87df13818d14f08a028e42f566640aef80805c3b50c5056b086e3c2b9c",
+ "sha256:833b46966687b3de7f438c761ac475213e53b306740f1abfaa86e1d1aae56aa8"
+ ],
+ "markers": "python_version < '3.4'",
+ "version": "==3.4.0.3"
+ },
+ "six": {
+ "hashes": [
+ "sha256:0ff78c403d9bccf5a425a6d31a12aa6b47f1c21ca4dc2573a7e2f32a97335eb1",
+ "sha256:105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
+ ],
+ "index": "pypi",
+ "version": "==1.10.0"
+ },
+ "typing": {
+ "hashes": [
+ "sha256:38566c558a0a94d6531012c8e917b1b8518a41e418f7f15f00e129cc80162ad3",
+ "sha256:53765ec4f83a2b720214727e319607879fec4acde22c4fbb54fa2604e79e44ce",
+ "sha256:84698954b4e6719e912ef9a42a2431407fe3755590831699debda6fba92aac55"
+ ],
+ "markers": "python_version < '3.5'",
+ "version": "==3.7.4"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:41c3db2fc01e5b907288010dec72f9d0a74e37d6994e6eb56849f59fea2265ae",
+ "sha256:8819bba37a02d143296a4d032373c4dd4aca11f6d4c9973335ca75f9c8475f59"
+ ],
+ "index": "pypi",
+ "version": "==1.24"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
+ ],
+ "version": "==1.11.2"
+ },
+ "zipp": {
+ "hashes": [
+ "sha256:4970c3758f4e89a7857a973b1e2a5d75bcdc47794442f2e2dd4fe8e0466e809a",
+ "sha256:8a5712cfd3bb4248015eb3b0b3c54a5f6ee3f2425963ef2a0125b8bc40aafaec"
+ ],
+ "version": "==0.5.2"
+ }
+ }
+}
diff --git a/Integrations/Whois/Whois.py b/Integrations/Whois/Whois.py
new file mode 100644
index 000000000000..404c74a7766d
--- /dev/null
+++ b/Integrations/Whois/Whois.py
@@ -0,0 +1,8392 @@
+import demistomock as demisto
+from CommonServerPython import *
+from CommonServerUserPython import *
+import re
+import socket
+import sys
+from codecs import encode, decode
+import socks
+
+ENTRY_TYPE = entryTypes['error'] if demisto.params().get('with_error', False) else entryTypes['warning']
+
+# flake8: noqa
+
+"""
+ This integration is built using the joepie91 "Whois" module. For more information regarding this package please see
+ the following - https://github.com/joepie91/python-whois
+"""
+
+
+''' HELPER FUNCTIONS '''
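+# The tlds mapping below associates each TLD (or public suffix, such as
+# "priv.at") with the metadata needed to query it: a whois "host", an
+# "adapter" (for example "none", or "web" with a lookup "url"), and registry
+# grouping hints. As a rough, illustrative sketch of how such a map can be
+# consulted (a hypothetical helper, not part of the integration logic), the
+# longest matching suffix wins:
+def _example_whois_host(domain):
+    """Return the whois host configured for a domain in tlds, or None."""
+    parts = domain.lower().strip('.').split('.')
+    for i in range(len(parts)):
+        entry = tlds.get('.'.join(parts[i:]))  # try the longest suffix first
+        if entry and 'host' in entry:
+            return entry['host']
+    return None
+
+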
+# About to drop some mean regex right now disable-secrets-detection-start
+tlds = {
+ "_": {
+ "schema": "2",
+ "updated": "2018-10-05 11:43:46 UTC"
+ },
+ "aaa": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "aarp": {
+ "_type": "newgtld",
+ "host": "whois.nic.aarp"
+ },
+ "abarth": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "abb": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "abbott": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "abbvie": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "abc": {
+ "_type": "newgtld",
+ "host": "whois.nic.abc"
+ },
+ "able": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "abogado": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.abogado"
+ },
+ "abudhabi": {
+ "_type": "newgtld",
+ "host": "whois.nic.abudhabi"
+ },
+ "ac": {
+ "host": "whois.nic.ac"
+ },
+ "academy": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.academy"
+ },
+ "accenture": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "accountant": {
+ "_type": "newgtld",
+ "host": "whois.nic.accountant"
+ },
+ "accountants": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.accountants"
+ },
+ "aco": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "active": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "actor": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.actor"
+ },
+ "ad": {
+ "adapter": "none"
+ },
+ "adac": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ads": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "adult": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "ae": {
+ "host": "whois.aeda.net.ae"
+ },
+ "aeg": {
+ "_type": "newgtld",
+ "host": "whois.nic.aeg"
+ },
+ "aero": {
+ "host": "whois.aero"
+ },
+ "aetna": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "af": {
+ "host": "whois.nic.af"
+ },
+ "afamilycompany": {
+ "_type": "newgtld",
+ "host": "whois.nic.afamilycompany"
+ },
+ "afl": {
+ "_type": "newgtld",
+ "host": "whois.nic.afl"
+ },
+ "africa": {
+ "_group": "zaregistry",
+ "_type": "newgtld",
+ "host": "africa-whois.registry.net.za"
+ },
+ "ag": {
+ "host": "whois.nic.ag"
+ },
+ "agakhan": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "agency": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.agency"
+ },
+ "ai": {
+ "host": "whois.nic.ai"
+ },
+ "aig": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "aigo": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "airbus": {
+ "_type": "newgtld",
+ "host": "whois.nic.airbus"
+ },
+ "airforce": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.airforce"
+ },
+ "airtel": {
+ "_type": "newgtld",
+ "host": "whois.nic.airtel"
+ },
+ "akdn": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "al": {
+ "adapter": "none"
+ },
+ "alfaromeo": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "alibaba": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.alibaba"
+ },
+ "alipay": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.alipay"
+ },
+ "allfinanz": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "allstate": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "ally": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.ally"
+ },
+ "alsace": {
+ "_group": "nicfr",
+ "_type": "newgtld",
+ "host": "whois-alsace.nic.fr"
+ },
+ "alstom": {
+ "_type": "newgtld",
+ "host": "whois.nic.alstom"
+ },
+ "am": {
+ "host": "whois.amnic.net"
+ },
+ "americanexpress": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "americanfamily": {
+ "_type": "newgtld",
+ "host": "whois.nic.americanfamily"
+ },
+ "amex": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "amfam": {
+ "_type": "newgtld",
+ "host": "whois.nic.amfam"
+ },
+ "amica": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "amsterdam": {
+ "_type": "newgtld",
+ "host": "whois.nic.amsterdam"
+ },
+ "analytics": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "android": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "anquan": {
+ "_group": "teleinfo",
+ "_type": "newgtld",
+ "host": "whois.teleinfo.cn"
+ },
+ "anz": {
+ "_type": "newgtld",
+ "host": "whois.nic.anz"
+ },
+ "ao": {
+ "adapter": "none"
+ },
+ "aol": {
+ "_type": "newgtld",
+ "host": "whois.nic.aol"
+ },
+ "apartments": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.apartments"
+ },
+ "app": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "apple": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "aq": {
+ "adapter": "none"
+ },
+ "aquarelle": {
+ "_group": "nicfr",
+ "_type": "newgtld",
+ "host": "whois-aquarelle.nic.fr"
+ },
+ "ar": {
+ "host": "whois.nic.ar"
+ },
+ "aramco": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "archi": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "army": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.army"
+ },
+ "arpa": {
+ "host": "whois.iana.org"
+ },
+ "e164.arpa": {
+ "host": "whois.ripe.net"
+ },
+ "in-addr.arpa": {
+ "adapter": "arpa"
+ },
+ "art": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.art"
+ },
+ "arte": {
+ "_type": "newgtld",
+ "host": "whois.nic.arte"
+ },
+ "as": {
+ "host": "whois.nic.as"
+ },
+ "asda": {
+ "_type": "newgtld",
+ "host": "whois.nic.asda"
+ },
+ "asia": {
+ "host": "whois.nic.asia"
+ },
+ "associates": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.associates"
+ },
+ "at": {
+ "host": "whois.nic.at"
+ },
+ "priv.at": {
+ "host": "whois.nic.priv.at"
+ },
+ "athleta": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "attorney": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.attorney"
+ },
+ "au": {
+ "host": "whois.auda.org.au"
+ },
+ "auction": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.auction"
+ },
+ "audi": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "audible": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "audio": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "auspost": {
+ "_type": "newgtld",
+ "host": "whois.nic.auspost"
+ },
+ "author": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "auto": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "autos": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "avianca": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "aw": {
+ "host": "whois.nic.aw"
+ },
+ "aws": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ax": {
+ "host": "whois.ax"
+ },
+ "axa": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "az": {
+ "adapter": "web",
+ "url": "http://www.nic.az/"
+ },
+ "azure": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ba": {
+ "adapter": "web",
+ "url": "http://nic.ba/lat/menu/view/13"
+ },
+ "baby": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "baidu": {
+ "_group": "knet",
+ "_type": "newgtld",
+ "host": "whois.gtld.knet.cn"
+ },
+ "banamex": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "bananarepublic": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "band": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.band"
+ },
+ "bank": {
+ "_type": "newgtld",
+ "host": "whois.nic.bank"
+ },
+ "bar": {
+ "_type": "newgtld",
+ "host": "whois.nic.bar"
+ },
+ "barcelona": {
+ "_type": "newgtld",
+ "host": "whois.nic.barcelona"
+ },
+ "barclaycard": {
+ "_type": "newgtld",
+ "host": "whois.nic.barclaycard"
+ },
+ "barclays": {
+ "_type": "newgtld",
+ "host": "whois.nic.barclays"
+ },
+ "barefoot": {
+ "_type": "newgtld",
+ "host": "whois.nic.barefoot"
+ },
+ "bargains": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.bargains"
+ },
+ "baseball": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "basketball": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.basketball"
+ },
+ "bauhaus": {
+ "_type": "newgtld",
+ "host": "whois.nic.bauhaus"
+ },
+ "bayern": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.bayern"
+ },
+ "bb": {
+ "adapter": "web",
+ "url": "http://whois.telecoms.gov.bb/search_domain.php"
+ },
+ "bbc": {
+ "_type": "newgtld",
+ "host": "whois.nic.bbc"
+ },
+ "bbt": {
+ "_type": "newgtld",
+ "host": "whois.nic.bbt"
+ },
+ "bbva": {
+ "_type": "newgtld",
+ "host": "whois.nic.bbva"
+ },
+ "bcg": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.bcg"
+ },
+ "bcn": {
+ "_type": "newgtld",
+ "host": "whois.nic.bcn"
+ },
+ "bd": {
+ "adapter": "web",
+ "url": "http://whois.btcl.net.bd/"
+ },
+ "be": {
+ "host": "whois.dns.be"
+ },
+ "beats": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "beauty": {
+ "_type": "newgtld",
+ "host": "whois.nic.beauty"
+ },
+ "beer": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.beer"
+ },
+ "bentley": {
+ "_type": "newgtld",
+ "host": "whois.nic.bentley"
+ },
+ "berlin": {
+ "_type": "newgtld",
+ "host": "whois.nic.berlin"
+ },
+ "best": {
+ "_type": "newgtld",
+ "host": "whois.nic.best"
+ },
+ "bestbuy": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.bestbuy"
+ },
+ "bet": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "bf": {
+ "adapter": "none"
+ },
+ "bg": {
+ "host": "whois.register.bg"
+ },
+ "bh": {
+ "adapter": "none"
+ },
+ "bharti": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "bi": {
+ "host": "whois1.nic.bi"
+ },
+ "bible": {
+ "_type": "newgtld",
+ "host": "whois.nic.bible"
+ },
+ "bid": {
+ "_type": "newgtld",
+ "host": "whois.nic.bid"
+ },
+ "bike": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.bike"
+ },
+ "bing": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "bingo": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.bingo"
+ },
+ "bio": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "biz": {
+ "host": "whois.biz"
+ },
+ "bj": {
+ "host": "whois.nic.bj"
+ },
+ "black": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "blackfriday": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "blanco": {
+ "_type": "newgtld",
+ "host": "whois.nic.blanco"
+ },
+ "blockbuster": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.blockbuster"
+ },
+ "blog": {
+ "_type": "newgtld",
+ "host": "whois.nic.blog"
+ },
+ "bloomberg": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "blue": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "bm": {
+ "adapter": "web",
+ "url": "http://www.bermudanic.bm/cgi-bin/lansaweb?procfun+BMWHO+BMWHO2+WHO"
+ },
+ "bms": {
+ "_type": "newgtld",
+ "host": "whois.nic.bms"
+ },
+ "bmw": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "bn": {
+ "host": "whois.bnnic.bn"
+ },
+ "bnl": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.bnl"
+ },
+ "bnpparibas": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "bo": {
+ "host": "whois.nic.bo"
+ },
+ "boats": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "boehringer": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "bofa": {
+ "_type": "newgtld",
+ "host": "whois.nic.bofa"
+ },
+ "bom": {
+ "_group": "nicbr",
+ "_type": "newgtld",
+ "host": "whois.gtlds.nic.br"
+ },
+ "bond": {
+ "_type": "newgtld",
+ "host": "whois.nic.bond"
+ },
+ "boo": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "book": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "booking": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "bosch": {
+ "_type": "newgtld",
+ "host": "whois.nic.bosch"
+ },
+ "bostik": {
+ "_group": "nicfr",
+ "_type": "newgtld",
+ "host": "whois-bostik.nic.fr"
+ },
+ "boston": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.boston"
+ },
+ "bot": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "boutique": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.boutique"
+ },
+ "box": {
+ "_group": "aridnrs",
+ "_type": "newgtld",
+ "host": "whois.aridnrs.net.au"
+ },
+ "br": {
+ "host": "whois.registro.br"
+ },
+ "bradesco": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.bradesco"
+ },
+ "bridgestone": {
+ "_type": "newgtld",
+ "host": "whois.nic.bridgestone"
+ },
+ "broadway": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.broadway"
+ },
+ "broker": {
+ "_type": "newgtld",
+ "host": "whois.nic.broker"
+ },
+ "brother": {
+ "_type": "newgtld",
+ "host": "whois.nic.brother"
+ },
+ "brussels": {
+ "_type": "newgtld",
+ "host": "whois.nic.brussels"
+ },
+ "bs": {
+ "adapter": "web",
+ "url": "http://www.nic.bs/cgi-bin/search.pl"
+ },
+ "bt": {
+ "adapter": "web",
+ "url": "http://www.nic.bt/"
+ },
+ "budapest": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois-dub.mm-registry.com"
+ },
+ "bugatti": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "build": {
+ "_type": "newgtld",
+ "host": "whois.nic.build"
+ },
+ "builders": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.builders"
+ },
+ "business": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.business"
+ },
+ "buy": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "buzz": {
+ "_type": "newgtld",
+ "host": "whois.nic.buzz"
+ },
+ "bv": {
+ "adapter": "none"
+ },
+ "bw": {
+ "host": "whois.nic.net.bw"
+ },
+ "by": {
+ "host": "whois.cctld.by"
+ },
+ "bz": {
+ "host": "whois.afilias-grs.info",
+ "adapter": "afilias"
+ },
+ "za.bz": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "bzh": {
+ "_group": "nicfr",
+ "_type": "newgtld",
+ "host": "whois.nic.bzh"
+ },
+ "ca": {
+ "host": "whois.cira.ca"
+ },
+ "co.ca": {
+ "host": "whois.co.ca"
+ },
+ "cab": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.cab"
+ },
+ "cafe": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.cafe"
+ },
+ "cal": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "call": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "calvinklein": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "cam": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "camera": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.camera"
+ },
+ "camp": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.camp"
+ },
+ "cancerresearch": {
+ "_type": "newgtld",
+ "host": "whois.nic.cancerresearch"
+ },
+ "canon": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.canon"
+ },
+ "capetown": {
+ "_group": "zaregistry",
+ "_type": "newgtld",
+ "host": "capetown-whois.registry.net.za"
+ },
+ "capital": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.capital"
+ },
+ "capitalone": {
+ "_type": "newgtld",
+ "host": "whois.nic.capitalone"
+ },
+ "car": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "caravan": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "cards": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.cards"
+ },
+ "care": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.care"
+ },
+ "career": {
+ "_type": "newgtld",
+ "host": "whois.nic.career"
+ },
+ "careers": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.careers"
+ },
+ "cars": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "cartier": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "casa": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.casa"
+ },
+ "case": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.case"
+ },
+ "caseih": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.caseih"
+ },
+ "cash": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.cash"
+ },
+ "casino": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.casino"
+ },
+ "cat": {
+ "host": "whois.nic.cat",
+ "adapter": "formatted",
+ "format": "-C US-ASCII ace %s"
+ },
+ "catering": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.catering"
+ },
+ "catholic": {
+ "_group": "aridnrs",
+ "_type": "newgtld",
+ "host": "whois.aridnrs.net.au"
+ },
+ "cba": {
+ "_type": "newgtld",
+ "host": "whois.nic.cba"
+ },
+ "cbn": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "cbre": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "cbs": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "cc": {
+ "host": "ccwhois.verisign-grs.com",
+ "adapter": "verisign"
+ },
+ "cd": {
+ "host": "whois.nic.cd"
+ },
+ "ceb": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "center": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.center"
+ },
+ "ceo": {
+ "_type": "newgtld",
+ "host": "whois.nic.ceo"
+ },
+ "cern": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "cf": {
+ "host": "whois.dot.cf"
+ },
+ "cfa": {
+ "_type": "newgtld",
+ "host": "whois.nic.cfa"
+ },
+ "cfd": {
+ "_type": "newgtld",
+ "host": "whois.nic.cfd"
+ },
+ "cg": {
+ "adapter": "none"
+ },
+ "ch": {
+ "host": "whois.nic.ch"
+ },
+ "chanel": {
+ "_type": "newgtld",
+ "host": "whois.nic.chanel"
+ },
+ "channel": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "charity": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.charity"
+ },
+ "chase": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "chat": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.chat"
+ },
+ "cheap": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.cheap"
+ },
+ "chintai": {
+ "_type": "newgtld",
+ "host": "whois.nic.chintai"
+ },
+ "christmas": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "chrome": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "chrysler": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "church": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.church"
+ },
+ "ci": {
+ "host": "whois.nic.ci"
+ },
+ "cipriani": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "circle": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "cisco": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "citadel": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "citi": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "citic": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "city": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.city"
+ },
+ "cityeats": {
+ "_type": "newgtld",
+ "host": "whois.nic.cityeats"
+ },
+ "ck": {
+ "adapter": "none"
+ },
+ "cl": {
+ "host": "whois.nic.cl"
+ },
+ "claims": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.claims"
+ },
+ "cleaning": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.cleaning"
+ },
+ "click": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "clinic": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.clinic"
+ },
+ "clinique": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.clinique"
+ },
+ "clothing": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.clothing"
+ },
+ "cloud": {
+ "_type": "newgtld",
+ "host": "whois.nic.cloud"
+ },
+ "club": {
+ "_type": "newgtld",
+ "host": "whois.nic.club"
+ },
+ "clubmed": {
+ "_type": "newgtld",
+ "host": "whois.nic.clubmed"
+ },
+ "cm": {
+ "host": "whois.netcom.cm"
+ },
+ "cn": {
+ "host": "whois.cnnic.cn"
+ },
+ "edu.cn": {
+ "adapter": "none"
+ },
+ "co": {
+ "host": "whois.nic.co"
+ },
+ "coach": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.coach"
+ },
+ "codes": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.codes"
+ },
+ "coffee": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.coffee"
+ },
+ "college": {
+ "_type": "newgtld",
+ "host": "whois.nic.college"
+ },
+ "cologne": {
+ "_group": "knipp",
+ "_type": "newgtld",
+ "host": "whois.ryce-rsp.com"
+ },
+ "com": {
+ "host": "whois.verisign-grs.com",
+ "adapter": "verisign"
+ },
+ "africa.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "ar.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "br.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "cn.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "co.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.net"
+ },
+ "de.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "eu.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "gb.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "gr.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "hk.com": {
+ "_group": "udrregistry",
+ "_type": "private",
+ "host": "whois.registry.hk.com"
+ },
+ "hu.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "jpn.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "kr.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "no.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "qc.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "ru.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "sa.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "se.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "uk.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "us.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "uy.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "za.com": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "comcast": {
+ "_type": "newgtld",
+ "host": "whois.nic.comcast"
+ },
+ "commbank": {
+ "_type": "newgtld",
+ "host": "whois.nic.commbank"
+ },
+ "community": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.community"
+ },
+ "company": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.company"
+ },
+ "compare": {
+ "_type": "newgtld",
+ "host": "whois.nic.compare"
+ },
+ "computer": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.computer"
+ },
+ "comsec": {
+ "_type": "newgtld",
+ "host": "whois.nic.comsec"
+ },
+ "condos": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.condos"
+ },
+ "construction": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.construction"
+ },
+ "consulting": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.consulting"
+ },
+ "contact": {
+ "_type": "newgtld",
+ "host": "whois.nic.contact"
+ },
+ "contractors": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.contractors"
+ },
+ "cooking": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.cooking"
+ },
+ "cookingchannel": {
+ "_type": "newgtld",
+ "host": "whois.nic.cookingchannel"
+ },
+ "cool": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.cool"
+ },
+ "coop": {
+ "host": "whois.nic.coop"
+ },
+ "corsica": {
+ "_group": "nicfr",
+ "_type": "newgtld",
+ "host": "whois-corsica.nic.fr"
+ },
+ "country": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois-dub.mm-registry.com"
+ },
+ "coupon": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "coupons": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.coupons"
+ },
+ "courses": {
+ "_group": "aridnrs",
+ "_type": "newgtld",
+ "host": "whois.aridnrs.net.au"
+ },
+ "cr": {
+ "host": "whois.nic.cr"
+ },
+ "credit": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.credit"
+ },
+ "creditcard": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.creditcard"
+ },
+ "creditunion": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "cricket": {
+ "_type": "newgtld",
+ "host": "whois.nic.cricket"
+ },
+ "crown": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "crs": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "cruise": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.cruise"
+ },
+ "cruises": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.cruises"
+ },
+ "csc": {
+ "_type": "newgtld",
+ "host": "whois.nic.csc"
+ },
+ "cu": {
+ "adapter": "web",
+ "url": "http://www.nic.cu/"
+ },
+ "cuisinella": {
+ "_type": "newgtld",
+ "host": "whois.nic.cuisinella"
+ },
+ "cv": {
+ "adapter": "web",
+ "url": "http://www.dns.cv/"
+ },
+ "cw": {
+ "adapter": "none"
+ },
+ "cx": {
+ "host": "whois.nic.cx"
+ },
+ "cy": {
+ "adapter": "web",
+ "url": "http://www.nic.cy/nslookup/online_database.php"
+ },
+ "cymru": {
+ "_type": "newgtld",
+ "host": "whois.nic.cymru"
+ },
+ "cyou": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.cyou"
+ },
+ "cz": {
+ "host": "whois.nic.cz"
+ },
+ "dabur": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "dad": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "dance": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.dance"
+ },
+ "data": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.data"
+ },
+ "date": {
+ "_type": "newgtld",
+ "host": "whois.nic.date"
+ },
+ "dating": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.dating"
+ },
+ "datsun": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.gmo"
+ },
+ "day": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "dclk": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "dds": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.dds"
+ },
+ "de": {
+ "host": "whois.denic.de",
+ "adapter": "formatted",
+ "format": "-T dn,ace %s"
+ },
+ "com.de": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "deal": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "dealer": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "deals": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.deals"
+ },
+ "degree": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.degree"
+ },
+ "delivery": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.delivery"
+ },
+ "dell": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "deloitte": {
+ "_type": "newgtld",
+ "host": "whois.nic.deloitte"
+ },
+ "delta": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.delta"
+ },
+ "democrat": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.democrat"
+ },
+ "dental": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.dental"
+ },
+ "dentist": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.dentist"
+ },
+ "desi": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "design": {
+ "_type": "newgtld",
+ "host": "whois.nic.design"
+ },
+ "dev": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "dhl": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "diamonds": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.diamonds"
+ },
+ "diet": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "digital": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.digital"
+ },
+ "direct": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.direct"
+ },
+ "directory": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.directory"
+ },
+ "discount": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.discount"
+ },
+ "discover": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "dish": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.dish"
+ },
+ "diy": {
+ "_type": "newgtld",
+ "host": "whois.nic.diy"
+ },
+ "dj": {
+ "adapter": "web",
+ "url": "http://www.nic.dj/whois.php"
+ },
+ "dk": {
+ "host": "whois.dk-hostmaster.dk",
+ "adapter": "formatted",
+ "format": "--show-handles %s"
+ },
+ "dm": {
+ "host": "whois.nic.dm"
+ },
+ "dnp": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "do": {
+ "adapter": "web",
+ "url": "http://www.nic.do/whois-h.php3"
+ },
+ "docs": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "doctor": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.doctor"
+ },
+ "dodge": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "dog": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.dog"
+ },
+ "doha": {
+ "_type": "newgtld",
+ "host": "whois.nic.doha"
+ },
+ "domains": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.domains"
+ },
+ "doosan": {
+ "host": "whois.nic.xn--cg4bki"
+ },
+ "dot": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.dot"
+ },
+ "download": {
+ "_type": "newgtld",
+ "host": "whois.nic.download"
+ },
+ "drive": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "dtv": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.dtv"
+ },
+ "dubai": {
+ "_type": "newgtld",
+ "host": "whois.nic.dubai"
+ },
+ "duck": {
+ "_type": "newgtld",
+ "host": "whois.nic.duck"
+ },
+ "dunlop": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.dunlop"
+ },
+ "duns": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "dupont": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "durban": {
+ "_group": "zaregistry",
+ "_type": "newgtld",
+ "host": "durban-whois.registry.net.za"
+ },
+ "dvag": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "dvr": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "dz": {
+ "host": "whois.nic.dz"
+ },
+ "earth": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "eat": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "ec": {
+ "host": "whois.nic.ec"
+ },
+ "eco": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "edeka": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "edu": {
+ "host": "whois.educause.edu"
+ },
+ "education": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.education"
+ },
+ "ee": {
+ "host": "whois.tld.ee"
+ },
+ "eg": {
+ "adapter": "web",
+ "url": "http://lookup.egregistry.eg/english.aspx"
+ },
+ "email": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.email"
+ },
+ "emerck": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "energy": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.energy"
+ },
+ "engineer": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.engineer"
+ },
+ "engineering": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.engineering"
+ },
+ "enterprises": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.enterprises"
+ },
+ "epost": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "epson": {
+ "_group": "aridnrs",
+ "_type": "newgtld",
+ "host": "whois.aridnrs.net.au"
+ },
+ "equipment": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.equipment"
+ },
+ "er": {
+ "adapter": "none"
+ },
+ "ericsson": {
+ "_type": "newgtld",
+ "host": "whois.nic.ericsson"
+ },
+ "erni": {
+ "_type": "newgtld",
+ "host": "whois.nic.erni"
+ },
+ "es": {
+ "host": "whois.nic.es"
+ },
+ "esq": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "estate": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.estate"
+ },
+ "esurance": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "et": {
+ "adapter": "none"
+ },
+ "etisalat": {
+ "_group": "centralnic",
+ "host": "whois.centralnic.com"
+ },
+ "eu": {
+ "host": "whois.eu"
+ },
+ "eurovision": {
+ "_type": "newgtld",
+ "host": "whois.nic.eurovision"
+ },
+ "eus": {
+ "_group": "coreregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.eus"
+ },
+ "events": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.events"
+ },
+ "everbank": {
+ "_type": "newgtld",
+ "host": "whois.nic.everbank"
+ },
+ "exchange": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.exchange"
+ },
+ "expert": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.expert"
+ },
+ "exposed": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.exposed"
+ },
+ "express": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.express"
+ },
+ "extraspace": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "fage": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "fail": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.fail"
+ },
+ "fairwinds": {
+ "_type": "newgtld",
+ "host": "whois.nic.fairwinds"
+ },
+ "faith": {
+ "_type": "newgtld",
+ "host": "whois.nic.faith"
+ },
+ "family": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.family"
+ },
+ "fan": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.fan"
+ },
+ "fans": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.fans"
+ },
+ "farm": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.farm"
+ },
+ "farmers": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "fashion": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.fashion"
+ },
+ "fast": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "fedex": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.fedex"
+ },
+ "feedback": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.feedback"
+ },
+ "ferrari": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.ferrari"
+ },
+ "ferrero": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "fi": {
+ "host": "whois.fi"
+ },
+ "fiat": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "fidelity": {
+ "_type": "newgtld",
+ "host": "whois.nic.fidelity"
+ },
+ "fido": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "film": {
+ "_type": "newgtld",
+ "host": "whois.nic.film"
+ },
+ "final": {
+ "_group": "nicbr",
+ "_type": "newgtld",
+ "host": "whois.gtlds.nic.br"
+ },
+ "finance": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.finance"
+ },
+ "financial": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.financial"
+ },
+ "fire": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "firestone": {
+ "_type": "newgtld",
+ "host": "whois.nic.firestone"
+ },
+ "firmdale": {
+ "_type": "newgtld",
+ "host": "whois.nic.firmdale"
+ },
+ "fish": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.fish"
+ },
+ "fishing": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.fishing"
+ },
+ "fit": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.fit"
+ },
+ "fitness": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.fitness"
+ },
+ "fj": {
+ "host": "whois.usp.ac.fj"
+ },
+ "fk": {
+ "adapter": "none"
+ },
+ "flickr": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "flights": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.flights"
+ },
+ "flir": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "florist": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.florist"
+ },
+ "flowers": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "fly": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "fm": {
+ "host": "whois.nic.fm"
+ },
+ "fo": {
+ "host": "whois.nic.fo"
+ },
+ "foo": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "food": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "foodnetwork": {
+ "_type": "newgtld",
+ "host": "whois.nic.foodnetwork"
+ },
+ "football": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.football"
+ },
+ "ford": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "forex": {
+ "_type": "newgtld",
+ "host": "whois.nic.forex"
+ },
+ "forsale": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.forsale"
+ },
+ "forum": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.forum"
+ },
+ "foundation": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.foundation"
+ },
+ "fox": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "fr": {
+ "host": "whois.nic.fr"
+ },
+ "aeroport.fr": {
+ "_group": "smallregistry",
+ "_type": "private",
+ "host": "whois.smallregistry.net"
+ },
+ "avocat.fr": {
+ "_group": "smallregistry",
+ "_type": "private",
+ "host": "whois.smallregistry.net"
+ },
+ "chambagri.fr": {
+ "_group": "smallregistry",
+ "_type": "private",
+ "host": "whois.smallregistry.net"
+ },
+ "chirurgiens-dentistes.fr": {
+ "_group": "smallregistry",
+ "_type": "private",
+ "host": "whois.smallregistry.net"
+ },
+ "experts-comptables.fr": {
+ "_group": "smallregistry",
+ "_type": "private",
+ "host": "whois.smallregistry.net"
+ },
+ "geometre-expert.fr": {
+ "_group": "smallregistry",
+ "_type": "private",
+ "host": "whois.smallregistry.net"
+ },
+ "medecin.fr": {
+ "_group": "smallregistry",
+ "_type": "private",
+ "host": "whois.smallregistry.net"
+ },
+ "notaires.fr": {
+ "_group": "smallregistry",
+ "_type": "private",
+ "host": "whois.smallregistry.net"
+ },
+ "pharmacien.fr": {
+ "_group": "smallregistry",
+ "_type": "private",
+ "host": "whois.smallregistry.net"
+ },
+ "port.fr": {
+ "_group": "smallregistry",
+ "_type": "private",
+ "host": "whois.smallregistry.net"
+ },
+ "veterinaire.fr": {
+ "_group": "smallregistry",
+ "_type": "private",
+ "host": "whois.smallregistry.net"
+ },
+ "free": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "fresenius": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "frl": {
+ "_type": "newgtld",
+ "host": "whois.nic.frl"
+ },
+ "frogans": {
+ "_group": "nicfr",
+ "_type": "newgtld",
+ "host": "whois.nic.frogans"
+ },
+ "frontdoor": {
+ "_type": "newgtld",
+ "host": "whois.nic.frontdoor"
+ },
+ "frontier": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ftr": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "fujitsu": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.gmo"
+ },
+ "fujixerox": {
+ "_type": "newgtld",
+ "host": "whois.nic.fujixerox"
+ },
+ "fun": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.fun"
+ },
+ "fund": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.fund"
+ },
+ "furniture": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.furniture"
+ },
+ "futbol": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.futbol"
+ },
+ "fyi": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.fyi"
+ },
+ "ga": {
+ "host": "whois.dot.ga"
+ },
+ "gal": {
+ "_group": "coreregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.gal"
+ },
+ "gallery": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.gallery"
+ },
+ "gallo": {
+ "_type": "newgtld",
+ "host": "whois.nic.gallo"
+ },
+ "gallup": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.gallup"
+ },
+ "game": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "games": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.games"
+ },
+ "gap": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "garden": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.garden"
+ },
+ "gb": {
+ "adapter": "none"
+ },
+ "gbiz": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "gd": {
+ "host": "whois.nic.gd"
+ },
+ "gdn": {
+ "_type": "newgtld",
+ "host": "whois.nic.gdn"
+ },
+ "ge": {
+ "host": "whois.registration.ge"
+ },
+ "gea": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "gent": {
+ "_type": "newgtld",
+ "host": "whois.nic.gent"
+ },
+ "genting": {
+ "_type": "newgtld",
+ "host": "whois.nic.genting"
+ },
+ "george": {
+ "_type": "newgtld",
+ "host": "whois.nic.george"
+ },
+ "gf": {
+ "host": "whois.mediaserv.net"
+ },
+ "gg": {
+ "host": "whois.gg"
+ },
+ "ggee": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.ggee"
+ },
+ "gh": {
+ "adapter": "web",
+ "url": "http://www.nic.gh/customer/search_c.htm"
+ },
+ "gi": {
+ "host": "whois.afilias-grs.info",
+ "adapter": "afilias"
+ },
+ "gift": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "gifts": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.gifts"
+ },
+ "gives": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.gives"
+ },
+ "giving": {
+ "_type": "newgtld",
+ "host": "whois.nic.giving"
+ },
+ "gl": {
+ "host": "whois.nic.gl"
+ },
+ "glade": {
+ "_type": "newgtld",
+ "host": "whois.nic.glade"
+ },
+ "glass": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.glass"
+ },
+ "gle": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "global": {
+ "_type": "newgtld",
+ "host": "whois.nic.global"
+ },
+ "globo": {
+ "_group": "nicbr",
+ "_type": "newgtld",
+ "host": "whois.gtlds.nic.br"
+ },
+ "gm": {
+ "adapter": "web",
+ "url": "http://www.nic.gm/htmlpages/whois.htm"
+ },
+ "gmail": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "gmbh": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.gmbh"
+ },
+ "gmoregistry": {
+ "_group": "gmo",
+ "adapter": "none"
+ },
+ "gmx": {
+ "_group": "knipp",
+ "_type": "newgtld",
+ "host": "whois-fe1.gmx.tango.knipp.de"
+ },
+ "gn": {
+ "adapter": "none"
+ },
+ "godaddy": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "gold": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.gold"
+ },
+ "goldpoint": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.goldpoint"
+ },
+ "golf": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.golf"
+ },
+ "goo": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.gmo"
+ },
+ "goodyear": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.goodyear"
+ },
+ "goog": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "google": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "gop": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.gop"
+ },
+ "got": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "gov": {
+ "host": "whois.dotgov.gov"
+ },
+ "gp": {
+ "adapter": "web",
+ "url": "https://www.dom-enic.com/whois.html"
+ },
+ "gq": {
+ "host": "whois.dominio.gq"
+ },
+ "gr": {
+ "adapter": "web",
+ "url": "https://grweb.ics.forth.gr/Whois?lang=en"
+ },
+ "grainger": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "graphics": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.graphics"
+ },
+ "gratis": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.gratis"
+ },
+ "green": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "gripe": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.gripe"
+ },
+ "grocery": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "group": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.group"
+ },
+ "gs": {
+ "host": "whois.nic.gs"
+ },
+ "gt": {
+ "adapter": "web",
+ "url": "http://www.gt/"
+ },
+ "gu": {
+ "adapter": "web",
+ "url": "http://gadao.gov.gu/domainsearch.htm"
+ },
+ "guardian": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "gucci": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "guge": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "guide": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.guide"
+ },
+ "guitars": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "guru": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.guru"
+ },
+ "gw": {
+ "adapter": "web",
+ "url": "http://nic.gw/en/whois/"
+ },
+ "gy": {
+ "host": "whois.registry.gy"
+ },
+ "hair": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "hamburg": {
+ "_type": "newgtld",
+ "host": "whois.nic.hamburg"
+ },
+ "hangout": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "haus": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.haus"
+ },
+ "hbo": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "hdfc": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.hdfc"
+ },
+ "hdfcbank": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.hdfcbank"
+ },
+ "health": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "healthcare": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.healthcare"
+ },
+ "help": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "helsinki": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.helsinki"
+ },
+ "here": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "hermes": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "hgtv": {
+ "_type": "newgtld",
+ "host": "whois.nic.hgtv"
+ },
+ "hiphop": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "hisamitsu": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.gmo"
+ },
+ "hitachi": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.gmo"
+ },
+ "hiv": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "hk": {
+ "host": "whois.hkirc.hk"
+ },
+ "inc.hk": {
+ "_group": "udrregistry",
+ "_type": "private",
+ "host": "whois.registry.hk.com"
+ },
+ "ltd.hk": {
+ "_group": "udrregistry",
+ "_type": "private",
+ "host": "whois.registry.hk.com"
+ },
+ "hkt": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.hkt"
+ },
+ "hm": {
+ "host": "whois.registry.hm"
+ },
+ "hn": {
+ "host": "whois.nic.hn"
+ },
+ "hockey": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.hockey"
+ },
+ "holdings": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.holdings"
+ },
+ "holiday": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.holiday"
+ },
+ "homedepot": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.homedepot"
+ },
+ "homegoods": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "homes": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "homesense": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "honda": {
+ "_type": "newgtld",
+ "host": "whois.nic.honda"
+ },
+ "honeywell": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "horse": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.horse"
+ },
+ "hospital": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.hospital"
+ },
+ "host": {
+ "_type": "newgtld",
+ "host": "whois.nic.host"
+ },
+ "hosting": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "hot": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "hoteles": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "hotels": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "hotmail": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "house": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.house"
+ },
+ "how": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "hr": {
+ "host": "whois.dns.hr"
+ },
+ "hsbc": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ht": {
+ "host": "whois.nic.ht"
+ },
+ "hu": {
+ "host": "whois.nic.hu"
+ },
+ "hughes": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.hughes"
+ },
+ "hyatt": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "hyundai": {
+ "_type": "newgtld",
+ "host": "whois.nic.hyundai"
+ },
+ "ibm": {
+ "_type": "newgtld",
+ "host": "whois.nic.ibm"
+ },
+ "icbc": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.icbc"
+ },
+ "ice": {
+ "_type": "newgtld",
+ "host": "whois.nic.ice"
+ },
+ "icu": {
+ "_type": "newgtld",
+ "host": "whois.nic.icu"
+ },
+ "id": {
+ "host": "whois.id"
+ },
+ "ie": {
+ "host": "whois.iedr.ie"
+ },
+ "ieee": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ifm": {
+ "_type": "newgtld",
+ "host": "whois.nic.ifm"
+ },
+ "ikano": {
+ "_type": "newgtld",
+ "host": "whois.ikano.tld-box.at"
+ },
+ "il": {
+ "host": "whois.isoc.org.il"
+ },
+ "im": {
+ "host": "whois.nic.im"
+ },
+ "imamat": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "imdb": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "immo": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.immo"
+ },
+ "immobilien": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.immobilien"
+ },
+ "in": {
+ "host": "whois.inregistry.net"
+ },
+ "inc": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.inc"
+ },
+ "industries": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.industries"
+ },
+ "infiniti": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.gmo"
+ },
+ "info": {
+ "host": "whois.afilias.net"
+ },
+ "ing": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "ink": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.ink"
+ },
+ "institute": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.institute"
+ },
+ "insurance": {
+ "_type": "newgtld",
+ "host": "whois.nic.insurance"
+ },
+ "insure": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.insure"
+ },
+ "int": {
+ "host": "whois.iana.org"
+ },
+ "intel": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "international": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.international"
+ },
+ "intuit": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "investments": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.investments"
+ },
+ "io": {
+ "host": "whois.nic.io"
+ },
+ "ipiranga": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "iq": {
+ "host": "whois.cmc.iq"
+ },
+ "ir": {
+ "host": "whois.nic.ir"
+ },
+ "irish": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.irish"
+ },
+ "is": {
+ "host": "whois.isnic.is"
+ },
+ "iselect": {
+ "_type": "newgtld",
+ "host": "whois.nic.iselect"
+ },
+ "ismaili": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "ist": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "istanbul": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "it": {
+ "host": "whois.nic.it"
+ },
+ "itau": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "itv": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "iveco": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.iveco"
+ },
+ "jaguar": {
+ "_type": "newgtld",
+ "host": "whois.nic.jaguar"
+ },
+ "java": {
+ "_type": "newgtld",
+ "host": "whois.nic.java"
+ },
+ "jcb": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.gmo"
+ },
+ "jcp": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "je": {
+ "host": "whois.je"
+ },
+ "jeep": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "jetzt": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "adapter": "none",
+ "host": "whois.nic.jetzt"
+ },
+ "jewelry": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.jewelry"
+ },
+ "jio": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.jio"
+ },
+ "jll": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "jm": {
+ "adapter": "none"
+ },
+ "jmp": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "jnj": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "jo": {
+ "adapter": "web",
+ "url": "http://www.dns.jo/Whois.aspx"
+ },
+ "jobs": {
+ "host": "whois.nic.jobs",
+ "adapter": "verisign"
+ },
+ "joburg": {
+ "_group": "zaregistry",
+ "_type": "newgtld",
+ "host": "joburg-whois.registry.net.za"
+ },
+ "jot": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "joy": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "jp": {
+ "host": "whois.jprs.jp",
+ "adapter": "formatted",
+ "format": "%s/e"
+ },
+ "jpmorgan": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "jprs": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "juegos": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "juniper": {
+ "_type": "newgtld",
+ "host": "whois.nic.juniper"
+ },
+ "kaufen": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.kaufen"
+ },
+ "kddi": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.kddi"
+ },
+ "ke": {
+ "host": "whois.kenic.or.ke"
+ },
+ "kerryhotels": {
+ "_type": "newgtld",
+ "host": "whois.nic.kerryhotels"
+ },
+ "kerrylogistics": {
+ "_type": "newgtld",
+ "host": "whois.nic.kerrylogistics"
+ },
+ "kerryproperties": {
+ "_type": "newgtld",
+ "host": "whois.nic.kerryproperties"
+ },
+ "kfh": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.kfh"
+ },
+ "kg": {
+ "host": "whois.kg"
+ },
+ "kh": {
+ "adapter": "none"
+ },
+ "ki": {
+ "host": "whois.nic.ki"
+ },
+ "kia": {
+ "_type": "newgtld",
+ "host": "whois.nic.kia"
+ },
+ "kim": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "kinder": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "kindle": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "kitchen": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.kitchen"
+ },
+ "kiwi": {
+ "_type": "newgtld",
+ "host": "whois.nic.kiwi"
+ },
+ "km": {
+ "adapter": "none"
+ },
+ "kn": {
+ "host": "whois.nic.kn"
+ },
+ "koeln": {
+ "_group": "knipp",
+ "_type": "newgtld",
+ "host": "whois.ryce-rsp.com"
+ },
+ "komatsu": {
+ "_type": "newgtld",
+ "host": "whois.nic.komatsu"
+ },
+ "kosher": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.kosher"
+ },
+ "kp": {
+ "adapter": "none"
+ },
+ "kpmg": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "kpn": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "kr": {
+ "host": "whois.kr"
+ },
+ "krd": {
+ "_group": "aridnrs",
+ "_type": "newgtld",
+ "host": "whois.aridnrs.net.au"
+ },
+ "kred": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "kuokgroup": {
+ "_type": "newgtld",
+ "host": "whois.nic.kuokgroup"
+ },
+ "kw": {
+ "adapter": "web",
+ "url": "http://www.kw/"
+ },
+ "ky": {
+ "host": "whois.kyregistry.ky"
+ },
+ "kyoto": {
+ "_type": "newgtld",
+ "host": "whois.nic.kyoto"
+ },
+ "kz": {
+ "host": "whois.nic.kz"
+ },
+ "la": {
+ "host": "whois.nic.la"
+ },
+ "lacaixa": {
+ "_type": "newgtld",
+ "host": "whois.nic.lacaixa"
+ },
+ "ladbrokes": {
+ "_type": "newgtld",
+ "host": "whois.nic.ladbrokes"
+ },
+ "lamborghini": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "lamer": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.lamer"
+ },
+ "lancaster": {
+ "_group": "nicfr",
+ "_type": "newgtld",
+ "host": "whois-lancaster.nic.fr"
+ },
+ "lancia": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "lancome": {
+ "_type": "newgtld",
+ "host": "whois.nic.lancome"
+ },
+ "land": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.land"
+ },
+ "landrover": {
+ "_type": "newgtld",
+ "host": "whois.nic.landrover"
+ },
+ "lanxess": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "lasalle": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "lat": {
+ "_type": "newgtld",
+ "host": "whois.nic.lat"
+ },
+ "latino": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.latino"
+ },
+ "latrobe": {
+ "_type": "newgtld",
+ "host": "whois.nic.latrobe"
+ },
+ "law": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.law"
+ },
+ "lawyer": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.lawyer"
+ },
+ "lb": {
+ "adapter": "web",
+ "url": "http://www.aub.edu.lb/lbdr/"
+ },
+ "lc": {
+ "host": "whois.afilias-grs.info",
+ "adapter": "afilias"
+ },
+ "lds": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.lds"
+ },
+ "lease": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.lease"
+ },
+ "leclerc": {
+ "_group": "nicfr",
+ "_type": "newgtld",
+ "host": "whois-leclerc.nic.fr"
+ },
+ "lefrak": {
+ "_type": "newgtld",
+ "host": "whois.nic.lefrak"
+ },
+ "legal": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.legal"
+ },
+ "lego": {
+ "_type": "newgtld",
+ "host": "whois.nic.lego"
+ },
+ "lexus": {
+ "_type": "newgtld",
+ "host": "whois.nic.lexus"
+ },
+ "lgbt": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "li": {
+ "host": "whois.nic.li"
+ },
+ "liaison": {
+ "_type": "newgtld",
+ "host": "whois.nic.liaison"
+ },
+ "lidl": {
+ "_type": "newgtld",
+ "host": "whois.nic.lidl"
+ },
+ "life": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.life"
+ },
+ "lifeinsurance": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "lifestyle": {
+ "_type": "newgtld",
+ "host": "whois.nic.lifestyle"
+ },
+ "lighting": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.lighting"
+ },
+ "like": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "lilly": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "limited": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.limited"
+ },
+ "limo": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.limo"
+ },
+ "lincoln": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "linde": {
+ "_type": "newgtld",
+ "host": "whois.nic.linde"
+ },
+ "link": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "lipsy": {
+ "_type": "newgtld",
+ "host": "whois.nic.lipsy"
+ },
+ "live": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.live"
+ },
+ "living": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "lixil": {
+ "_type": "newgtld",
+ "host": "whois.nic.lixil"
+ },
+ "lk": {
+ "host": "whois.nic.lk"
+ },
+ "llc": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "loan": {
+ "_type": "newgtld",
+ "host": "whois.nic.loan"
+ },
+ "loans": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.loans"
+ },
+ "locker": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.locker"
+ },
+ "locus": {
+ "_type": "newgtld",
+ "host": "whois.nic.locus"
+ },
+ "loft": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "lol": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "london": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.london"
+ },
+ "lotte": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.lotte"
+ },
+ "lotto": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "love": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.love"
+ },
+ "lpl": {
+ "_type": "newgtld",
+ "host": "whois.nic.lpl"
+ },
+ "lplfinancial": {
+ "_type": "newgtld",
+ "host": "whois.nic.lplfinancial"
+ },
+ "lr": {
+ "adapter": "none"
+ },
+ "ls": {
+ "adapter": "web",
+ "url": "http://www.co.ls/co.asp"
+ },
+ "lt": {
+ "host": "whois.domreg.lt"
+ },
+ "ltd": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.ltd"
+ },
+ "ltda": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "lu": {
+ "host": "whois.dns.lu"
+ },
+ "lundbeck": {
+ "_type": "newgtld",
+ "host": "whois.nic.lundbeck"
+ },
+ "lupin": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "luxe": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.luxe"
+ },
+ "luxury": {
+ "_type": "newgtld",
+ "host": "whois.nic.luxury"
+ },
+ "lv": {
+ "host": "whois.nic.lv"
+ },
+ "ly": {
+ "host": "whois.nic.ly"
+ },
+ "ma": {
+ "host": "whois.registre.ma"
+ },
+ "macys": {
+ "_type": "newgtld",
+ "host": "whois.nic.macys"
+ },
+ "madrid": {
+ "_group": "corenic",
+ "_type": "newgtld",
+ "host": "whois.madrid.rs.corenic.net"
+ },
+ "maif": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "maison": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.maison"
+ },
+ "makeup": {
+ "_type": "newgtld",
+ "host": "whois.nic.makeup"
+ },
+ "man": {
+ "_type": "newgtld",
+ "host": "whois.nic.man"
+ },
+ "management": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.management"
+ },
+ "mango": {
+ "_group": "coreregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.mango"
+ },
+ "map": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "market": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.market"
+ },
+ "marketing": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.marketing"
+ },
+ "markets": {
+ "_type": "newgtld",
+ "host": "whois.nic.markets"
+ },
+ "marriott": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "marshalls": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "maserati": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.maserati"
+ },
+ "mattel": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "mba": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.mba"
+ },
+ "mc": {
+ "adapter": "none"
+ },
+ "mckinsey": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.mckinsey"
+ },
+ "md": {
+ "host": "whois.nic.md"
+ },
+ "me": {
+ "host": "whois.nic.me"
+ },
+ "med": {
+ "_type": "newgtld",
+ "host": "whois.nic.med"
+ },
+ "media": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.media"
+ },
+ "meet": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "melbourne": {
+ "_group": "aridnrs",
+ "_type": "newgtld",
+ "host": "whois.aridnrs.net.au"
+ },
+ "meme": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "memorial": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.memorial"
+ },
+ "men": {
+ "_type": "newgtld",
+ "host": "whois.nic.men"
+ },
+ "menu": {
+ "_type": "newgtld",
+ "host": "whois.nic.menu"
+ },
+ "merckmsd": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "metlife": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.metlife"
+ },
+ "mg": {
+ "host": "whois.nic.mg"
+ },
+ "mh": {
+ "adapter": "none"
+ },
+ "miami": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.miami"
+ },
+ "microsoft": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "mil": {
+ "adapter": "none"
+ },
+ "mini": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "mint": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "mit": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "mitsubishi": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.gmo"
+ },
+ "mk": {
+ "host": "whois.marnet.mk"
+ },
+ "ml": {
+ "host": "whois.dot.ml"
+ },
+ "mlb": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "mls": {
+ "_type": "newgtld",
+ "host": "whois.nic.mls"
+ },
+ "mm": {
+ "adapter": "none"
+ },
+ "mma": {
+ "_group": "nicfr",
+ "_type": "newgtld",
+ "host": "whois-mma.nic.fr"
+ },
+ "mn": {
+ "host": "whois.nic.mn"
+ },
+ "mo": {
+ "host": "whois.monic.mo"
+ },
+ "mobi": {
+ "_group": "afilias",
+ "host": "whois.afilias.net"
+ },
+ "mobile": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.mobile"
+ },
+ "mobily": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "moda": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.moda"
+ },
+ "moe": {
+ "_type": "newgtld",
+ "host": "whois.nic.moe"
+ },
+ "moi": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "mom": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "monash": {
+ "_type": "newgtld",
+ "host": "whois.nic.monash"
+ },
+ "money": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.money"
+ },
+ "monster": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.monster"
+ },
+ "montblanc": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "mopar": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "mormon": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.mormon"
+ },
+ "mortgage": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.mortgage"
+ },
+ "moscow": {
+ "_type": "newgtld",
+ "host": "whois.nic.moscow"
+ },
+ "moto": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "motorcycles": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "mov": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "movie": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.movie"
+ },
+ "movistar": {
+ "_group": "knipp",
+ "_type": "newgtld",
+ "host": "whois-fe.movistar.tango.knipp.de"
+ },
+ "mp": {
+ "adapter": "none"
+ },
+ "mq": {
+ "host": "whois.mediaserv.net"
+ },
+ "mr": {
+ "adapter": "none",
+ "host": "whois.nic.mr"
+ },
+ "ms": {
+ "host": "whois.nic.ms"
+ },
+ "msd": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "mt": {
+ "adapter": "web",
+ "url": "https://www.nic.org.mt/dotmt/"
+ },
+ "mtn": {
+ "_type": "newgtld",
+ "host": "whois.nic.mtn"
+ },
+ "mtr": {
+ "_type": "newgtld",
+ "host": "whois.nic.mtr"
+ },
+ "mu": {
+ "host": "whois.nic.mu"
+ },
+ "museum": {
+ "host": "whois.nic.museum"
+ },
+ "mutual": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "mv": {
+ "adapter": "none"
+ },
+ "mw": {
+ "adapter": "web",
+ "url": "http://www.registrar.mw/"
+ },
+ "mx": {
+ "host": "whois.nic.mx"
+ },
+ "my": {
+ "host": "whois.mynic.my"
+ },
+ "mz": {
+ "host": "whois.nic.mz"
+ },
+ "na": {
+ "host": "whois.na-nic.com.na"
+ },
+ "nab": {
+ "_type": "newgtld",
+ "host": "whois.nic.nab"
+ },
+ "nadex": {
+ "_type": "newgtld",
+ "host": "whois.nic.nadex"
+ },
+ "nagoya": {
+ "_type": "newgtld",
+ "host": "whois.nic.nagoya"
+ },
+ "name": {
+ "host": "whois.nic.name",
+ "adapter": "formatted",
+ "format": "domain=%s"
+ },
+ "nationwide": {
+ "_type": "newgtld",
+ "host": "whois.nic.nationwide"
+ },
+ "natura": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "navy": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.navy"
+ },
+ "nba": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "nc": {
+ "host": "whois.nc"
+ },
+ "ne": {
+ "adapter": "none"
+ },
+ "nec": {
+ "_type": "newgtld",
+ "host": "whois.nic.nec"
+ },
+ "net": {
+ "host": "whois.verisign-grs.com",
+ "adapter": "verisign"
+ },
+ "gb.net": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "hu.net": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "in.net": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "jp.net": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "se.net": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "uk.net": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "za.net": {
+ "host": "whois.za.net"
+ },
+ "netbank": {
+ "_type": "newgtld",
+ "host": "whois.nic.netbank"
+ },
+ "netflix": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "network": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.network"
+ },
+ "neustar": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "new": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "newholland": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.newholland"
+ },
+ "news": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.news"
+ },
+ "next": {
+ "_type": "newgtld",
+ "host": "whois.nic.next"
+ },
+ "nextdirect": {
+ "_type": "newgtld",
+ "host": "whois.nic.nextdirect"
+ },
+ "nexus": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "nf": {
+ "host": "whois.nic.nf"
+ },
+ "nfl": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ng": {
+ "host": "whois.nic.net.ng"
+ },
+ "ngo": {
+ "_group": "publicinterestregistry",
+ "_type": "newgtld",
+ "host": "whois.publicinterestregistry.net"
+ },
+ "nhk": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ni": {
+ "adapter": "web",
+ "url": "http://www.nic.ni/"
+ },
+ "nico": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.nico"
+ },
+ "nike": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "nikon": {
+ "_type": "newgtld",
+ "host": "whois.nic.nikon"
+ },
+ "ninja": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.ninja"
+ },
+ "nissan": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.gmo"
+ },
+ "nissay": {
+ "_type": "newgtld",
+ "host": "whois.nic.nissay"
+ },
+ "nl": {
+ "host": "whois.domain-registry.nl"
+ },
+ "no": {
+ "host": "whois.norid.no"
+ },
+ "nokia": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "northwesternmutual": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "norton": {
+ "_type": "newgtld",
+ "host": "whois.nic.norton"
+ },
+ "now": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "nowruz": {
+ "_group": "agitsys",
+ "_type": "newgtld",
+ "host": "whois.agitsys.net"
+ },
+ "nowtv": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.nowtv"
+ },
+ "np": {
+ "adapter": "web",
+ "url": "http://register.mos.com.np/np-whois-lookup"
+ },
+ "nr": {
+ "adapter": "web",
+ "url": "http://www.cenpac.net.nr/dns/whois.html"
+ },
+ "nra": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "nrw": {
+ "_type": "newgtld",
+ "host": "whois.nic.nrw"
+ },
+ "ntt": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "nu": {
+ "host": "whois.iis.nu"
+ },
+ "nyc": {
+ "_type": "newgtld",
+ "host": "whois.nic.nyc"
+ },
+ "nz": {
+ "host": "whois.srs.net.nz"
+ },
+ "obi": {
+ "_type": "newgtld",
+ "host": "whois.nic.obi"
+ },
+ "observer": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.observer"
+ },
+ "off": {
+ "_type": "newgtld",
+ "host": "whois.nic.off"
+ },
+ "office": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "okinawa": {
+ "_type": "newgtld",
+ "host": "whois.nic.okinawa"
+ },
+ "olayan": {
+ "_type": "newgtld",
+ "host": "whois.nic.olayan"
+ },
+ "olayangroup": {
+ "_type": "newgtld",
+ "host": "whois.nic.olayangroup"
+ },
+ "oldnavy": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ollo": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.ollo"
+ },
+ "om": {
+ "host": "whois.registry.om"
+ },
+ "omega": {
+ "_type": "newgtld",
+ "host": "whois.nic.omega"
+ },
+ "one": {
+ "_type": "newgtld",
+ "host": "whois.nic.one"
+ },
+ "ong": {
+ "_group": "publicinterestregistry",
+ "_type": "newgtld",
+ "host": "whois.publicinterestregistry.net"
+ },
+ "onl": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "online": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.online"
+ },
+ "onyourside": {
+ "_type": "newgtld",
+ "host": "whois.nic.onyourside"
+ },
+ "ooo": {
+ "_type": "newgtld",
+ "host": "whois.nic.ooo"
+ },
+ "open": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "oracle": {
+ "_type": "newgtld",
+ "host": "whois.nic.oracle"
+ },
+ "orange": {
+ "_type": "newgtld",
+ "host": "whois.nic.orange"
+ },
+ "org": {
+ "host": "whois.pir.org"
+ },
+ "ae.org": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "eu.org": {
+ "host": "whois.eu.org"
+ },
+ "hk.org": {
+ "_group": "udrregistry",
+ "_type": "private",
+ "host": "whois.registry.hk.com"
+ },
+ "us.org": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "za.org": {
+ "host": "whois.za.org"
+ },
+ "organic": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "orientexpress": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "origin": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "origins": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.origins"
+ },
+ "osaka": {
+ "_type": "newgtld",
+ "host": "whois.nic.osaka"
+ },
+ "otsuka": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ott": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.ott"
+ },
+ "ovh": {
+ "_group": "nicfr",
+ "_type": "newgtld",
+ "host": "whois-ovh.nic.fr"
+ },
+ "pa": {
+ "adapter": "web",
+ "url": "http://www.nic.pa/"
+ },
+ "page": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "panasonic": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.gmo"
+ },
+ "paris": {
+ "_group": "nicfr",
+ "_type": "newgtld",
+ "host": "whois-paris.nic.fr"
+ },
+ "pars": {
+ "_group": "agitsys",
+ "_type": "newgtld",
+ "host": "whois.agitsys.net"
+ },
+ "partners": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.partners"
+ },
+ "parts": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.parts"
+ },
+ "party": {
+ "_type": "newgtld",
+ "host": "whois.nic.party"
+ },
+ "passagens": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "pay": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "pccw": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.pccw"
+ },
+ "pe": {
+ "host": "kero.yachay.pe"
+ },
+ "pet": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "pf": {
+ "host": "whois.registry.pf"
+ },
+ "pfizer": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "pg": {
+ "adapter": "none"
+ },
+ "ph": {
+ "adapter": "web",
+ "url": "http://www.dot.ph/whois"
+ },
+ "pharmacy": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "phd": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "philips": {
+ "_type": "newgtld",
+ "host": "whois.nic.philips"
+ },
+ "phone": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.phone"
+ },
+ "photo": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "photography": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.photography"
+ },
+ "photos": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.photos"
+ },
+ "physio": {
+ "_group": "aridnrs",
+ "_type": "newgtld",
+ "host": "whois.nic.physio"
+ },
+ "piaget": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "pics": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "pictet": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "pictures": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.pictures"
+ },
+ "pid": {
+ "_type": "newgtld",
+ "host": "whois.nic.pid"
+ },
+ "pin": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ping": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "pink": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "pioneer": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.gmo"
+ },
+ "pizza": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.pizza"
+ },
+ "pk": {
+ "adapter": "web",
+ "url": "http://www.pknic.net.pk/"
+ },
+ "pl": {
+ "host": "whois.dns.pl"
+ },
+ "co.pl": {
+ "host": "whois.co.pl"
+ },
+ "place": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.place"
+ },
+ "play": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "playstation": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.playstation"
+ },
+ "plumbing": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.plumbing"
+ },
+ "plus": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.plus"
+ },
+ "pm": {
+ "host": "whois.nic.pm"
+ },
+ "pn": {
+ "adapter": "web",
+ "url": "http://www.pitcairn.pn/PnRegistry/"
+ },
+ "pnc": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.pnc"
+ },
+ "pohl": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "poker": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "politie": {
+ "_type": "newgtld",
+ "host": "whois.nicpolitie"
+ },
+ "porn": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "post": {
+ "host": "whois.dotpostregistry.net"
+ },
+ "pr": {
+ "_group": "afiliassrs",
+ "host": "whois.afilias-srs.net"
+ },
+ "pramerica": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "praxi": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "press": {
+ "_type": "newgtld",
+ "host": "whois.nic.press"
+ },
+ "prime": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "pro": {
+ "host": "whois.afilias.net"
+ },
+ "prod": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "productions": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.productions"
+ },
+ "prof": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "progressive": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "promo": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "properties": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.properties"
+ },
+ "property": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "protection": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.centralnic.com"
+ },
+ "pru": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "prudential": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ps": {
+ "host": "whois.pnina.ps"
+ },
+ "pt": {
+ "host": "whois.dns.pt"
+ },
+ "pub": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.pub"
+ },
+ "pw": {
+ "host": "whois.nic.pw"
+ },
+ "pwc": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "py": {
+ "adapter": "web",
+ "url": "http://www.nic.py/consulta-datos.php"
+ },
+ "qa": {
+ "host": "whois.registry.qa"
+ },
+ "qpon": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "quebec": {
+ "_type": "newgtld",
+ "host": "whois.nic.quebec"
+ },
+ "quest": {
+ "_type": "newgtld",
+ "host": "whois.nic.quest"
+ },
+ "qvc": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "racing": {
+ "_type": "newgtld",
+ "host": "whois.nic.racing"
+ },
+ "radio": {
+ "_type": "newgtld",
+ "host": "whois.nic.radio"
+ },
+ "raid": {
+ "_type": "newgtld",
+ "host": "whois.nic.raid"
+ },
+ "re": {
+ "host": "whois.nic.re"
+ },
+ "read": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "realestate": {
+ "_type": "newgtld",
+ "host": "whois.nic.realestate"
+ },
+ "realtor": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "realty": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.realty"
+ },
+ "recipes": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.recipes"
+ },
+ "red": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "redstone": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.redstone"
+ },
+ "redumbrella": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "rehab": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.rehab"
+ },
+ "reise": {
+ "_type": "newgtld",
+ "host": "whois.nic.reise"
+ },
+ "reisen": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.reisen"
+ },
+ "reit": {
+ "_type": "newgtld",
+ "host": "whois.nic.reit"
+ },
+ "reliance": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.reliance"
+ },
+ "ren": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "rent": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.rent"
+ },
+ "rentals": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.rentals"
+ },
+ "repair": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.repair"
+ },
+ "report": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.report"
+ },
+ "republican": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.republican"
+ },
+ "rest": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.rest"
+ },
+ "restaurant": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.restaurant"
+ },
+ "review": {
+ "_type": "newgtld",
+ "host": "whois.nic.review"
+ },
+ "reviews": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.reviews"
+ },
+ "rexroth": {
+ "_type": "newgtld",
+ "host": "whois.nic.rexroth"
+ },
+ "rich": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "richardli": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.richardli"
+ },
+ "ricoh": {
+ "_type": "newgtld",
+ "host": "whois.nic.ricoh"
+ },
+ "rightathome": {
+ "_type": "newgtld",
+ "host": "whois.nic.rightathome"
+ },
+ "ril": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.ril"
+ },
+ "rio": {
+ "_group": "nicbr",
+ "_type": "newgtld",
+ "host": "whois.gtlds.nic.br"
+ },
+ "rip": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.rip"
+ },
+ "rmit": {
+ "_group": "aridnrs",
+ "_type": "newgtld",
+ "host": "whois.aridnrs.net.au"
+ },
+ "ro": {
+ "host": "whois.rotld.ro"
+ },
+ "rocher": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "rocks": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.rocks"
+ },
+ "rodeo": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.rodeo"
+ },
+ "rogers": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "room": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "rs": {
+ "host": "whois.rnids.rs"
+ },
+ "rsvp": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "ru": {
+ "host": "whois.tcinet.ru"
+ },
+ "edu.ru": {
+ "host": "whois.informika.ru"
+ },
+ "rugby": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.centralnic.com"
+ },
+ "ruhr": {
+ "_type": "newgtld",
+ "host": "whois.nic.ruhr"
+ },
+ "run": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.run"
+ },
+ "rw": {
+ "host": "whois.ricta.org.rw"
+ },
+ "rwe": {
+ "_type": "newgtld",
+ "host": "whois.nic.rwe"
+ },
+ "ryukyu": {
+ "_type": "newgtld",
+ "host": "whois.nic.ryukyu"
+ },
+ "sa": {
+ "host": "whois.nic.net.sa"
+ },
+ "saarland": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "safe": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "safety": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "sakura": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "sale": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.sale"
+ },
+ "salon": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.salon"
+ },
+ "samsclub": {
+ "_type": "newgtld",
+ "host": "whois.nic.samsclub"
+ },
+ "samsung": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--cg4bki"
+ },
+ "sandvik": {
+ "_type": "newgtld",
+ "host": "whois.nic.sandvik"
+ },
+ "sandvikcoromant": {
+ "_type": "newgtld",
+ "host": "whois.nic.sandvikcoromant"
+ },
+ "sanofi": {
+ "_type": "newgtld",
+ "host": "whois.nic.sanofi"
+ },
+ "sap": {
+ "_type": "newgtld",
+ "host": "whois.nic.sap"
+ },
+ "sarl": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.sarl"
+ },
+ "sas": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "save": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "saxo": {
+ "_group": "aridnrs",
+ "_type": "newgtld",
+ "host": "whois.aridnrs.net.au"
+ },
+ "sb": {
+ "host": "whois.nic.net.sb"
+ },
+ "sbi": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.sbi"
+ },
+ "sbs": {
+ "_type": "newgtld",
+ "host": "whois.nic.sbs"
+ },
+ "sc": {
+ "host": "whois.afilias-grs.info",
+ "adapter": "afilias"
+ },
+ "sca": {
+ "_type": "newgtld",
+ "host": "whois.nic.sca"
+ },
+ "scb": {
+ "_type": "newgtld",
+ "host": "whois.nic.scb"
+ },
+ "schaeffler": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "schmidt": {
+ "_type": "newgtld",
+ "host": "whois.nic.schmidt"
+ },
+ "scholarships": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.scholarships"
+ },
+ "school": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.school"
+ },
+ "schule": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.schule"
+ },
+ "schwarz": {
+ "_type": "newgtld",
+ "host": "whois.nic.schwarz"
+ },
+ "science": {
+ "_type": "newgtld",
+ "host": "whois.nic.science"
+ },
+ "scjohnson": {
+ "_type": "newgtld",
+ "host": "whois.nic.scjohnson"
+ },
+ "scor": {
+ "_type": "newgtld",
+ "host": "whois.nic.scor"
+ },
+ "scot": {
+ "_group": "coreregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.scot"
+ },
+ "sd": {
+ "adapter": "none"
+ },
+ "se": {
+ "host": "whois.iis.se"
+ },
+ "com.se": {
+ "_group": "centralnic",
+ "_type": "private",
+ "host": "whois.centralnic.com"
+ },
+ "search": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "seat": {
+ "_type": "newgtld",
+ "host": "whois.nic.seat"
+ },
+ "secure": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "security": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.security"
+ },
+ "seek": {
+ "_type": "newgtld",
+ "host": "whois.nic.seek"
+ },
+ "select": {
+ "_type": "newgtld",
+ "host": "whois.nic.select"
+ },
+ "sener": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "services": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.services"
+ },
+ "ses": {
+ "_type": "newgtld",
+ "host": "whois.nic.ses"
+ },
+ "seven": {
+ "_type": "newgtld",
+ "host": "whois.nic.seven"
+ },
+ "sew": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "sex": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "sexy": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "sfr": {
+ "_type": "newgtld",
+ "host": "whois.nic.sfr"
+ },
+ "sg": {
+ "host": "whois.sgnic.sg"
+ },
+ "sh": {
+ "host": "whois.nic.sh"
+ },
+ "shangrila": {
+ "_type": "newgtld",
+ "host": "whois.nic.shangrila"
+ },
+ "sharp": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.gmo"
+ },
+ "shaw": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "shell": {
+ "_type": "newgtld",
+ "host": "whois.nic.shell"
+ },
+ "shia": {
+ "_group": "agitsys",
+ "_type": "newgtld",
+ "host": "whois.agitsys.net"
+ },
+ "shiksha": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "shoes": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.shoes"
+ },
+ "shop": {
+ "_type": "newgtld",
+ "host": "whois.nic.shop"
+ },
+ "shopping": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.shopping"
+ },
+ "shouji": {
+ "_group": "teleinfo",
+ "_type": "newgtld",
+ "host": "whois.teleinfo.cn"
+ },
+ "show": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.show"
+ },
+ "showtime": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "shriram": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "si": {
+ "host": "whois.register.si"
+ },
+ "silk": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "sina": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.sina"
+ },
+ "singles": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.singles"
+ },
+ "site": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.site"
+ },
+ "sj": {
+ "adapter": "none"
+ },
+ "sk": {
+ "host": "whois.sk-nic.sk"
+ },
+ "ski": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "skin": {
+ "_type": "newgtld",
+ "host": "whois.nic.skin"
+ },
+ "sky": {
+ "_type": "newgtld",
+ "host": "whois.nic.sky"
+ },
+ "skype": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "sl": {
+ "host": "whois.nic.sl"
+ },
+ "sling": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.sling"
+ },
+ "sm": {
+ "host": "whois.nic.sm"
+ },
+ "smart": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.smart"
+ },
+ "smile": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "sn": {
+ "host": "whois.nic.sn"
+ },
+ "sncf": {
+ "_group": "nicfr",
+ "_type": "newgtld",
+ "host": "whois-sncf.nic.fr"
+ },
+ "so": {
+ "host": "whois.nic.so"
+ },
+ "soccer": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.soccer"
+ },
+ "social": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.social"
+ },
+ "softbank": {
+ "_type": "newgtld",
+ "host": "whois.nic.softbank"
+ },
+ "software": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.software"
+ },
+ "sohu": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "solar": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.solar"
+ },
+ "solutions": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.solutions"
+ },
+ "song": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "sony": {
+ "_type": "newgtld",
+ "host": "whois.nic.sony"
+ },
+ "soy": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "space": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.space"
+ },
+ "spiegel": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "sport": {
+ "_type": "newgtld",
+ "host": "whois.nic.sport"
+ },
+ "spot": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "spreadbetting": {
+ "_type": "newgtld",
+ "host": "whois.nic.spreadbetting"
+ },
+ "sr": {
+ "adapter": "none"
+ },
+ "srl": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "srt": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "st": {
+ "host": "whois.nic.st"
+ },
+ "stada": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "staples": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "star": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.star"
+ },
+ "starhub": {
+ "_type": "newgtld",
+ "host": "whois.nic.starhub"
+ },
+ "statebank": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.statebank"
+ },
+ "statefarm": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "stc": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.stc"
+ },
+ "stcgroup": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.stcgroup"
+ },
+ "stockholm": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "storage": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.storage"
+ },
+ "store": {
+ "_type": "newgtld",
+ "host": "whois.nic.store"
+ },
+ "stream": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "studio": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.studio"
+ },
+ "study": {
+ "_type": "newgtld",
+ "host": "whois.nic.study"
+ },
+ "style": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.style"
+ },
+ "su": {
+ "host": "whois.tcinet.ru"
+ },
+ "sucks": {
+ "_type": "newgtld",
+ "host": "whois.nic.sucks"
+ },
+ "supplies": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.supplies"
+ },
+ "supply": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.supply"
+ },
+ "support": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.support"
+ },
+ "surf": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.surf"
+ },
+ "surgery": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.surgery"
+ },
+ "suzuki": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "sv": {
+ "adapter": "web",
+ "url": "http://www.svnet.org.sv/"
+ },
+ "swatch": {
+ "_type": "newgtld",
+ "host": "whois.nic.swatch"
+ },
+ "swiftcover": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "swiss": {
+ "_type": "newgtld",
+ "host": "whois.nic.swiss"
+ },
+ "sx": {
+ "host": "whois.sx"
+ },
+ "sy": {
+ "host": "whois.tld.sy"
+ },
+ "sydney": {
+ "_type": "newgtld",
+ "host": "whois.nic.sydney"
+ },
+ "symantec": {
+ "_type": "newgtld",
+ "host": "whois.nic.symantec"
+ },
+ "systems": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.systems"
+ },
+ "sz": {
+ "adapter": "none"
+ },
+ "tab": {
+ "_type": "newgtld",
+ "host": "whois.nic.tab"
+ },
+ "taipei": {
+ "_type": "newgtld",
+ "host": "whois.nic.taipei"
+ },
+ "talk": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "taobao": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "target": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "tatamotors": {
+ "_type": "newgtld",
+ "host": "whois.nic.tatamotors"
+ },
+ "tatar": {
+ "_type": "newgtld",
+ "host": "whois.nic.tatar"
+ },
+ "tattoo": {
+ "_group": "uniregistry",
+ "_type": "newgtld",
+ "host": "whois.uniregistry.net"
+ },
+ "tax": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.tax"
+ },
+ "taxi": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.taxi"
+ },
+ "tc": {
+ "host": "whois.nic.tc"
+ },
+ "tci": {
+ "_group": "agitsys",
+ "_type": "newgtld",
+ "host": "whois.agitsys.net"
+ },
+ "td": {
+ "adapter": "web",
+ "url": "http://www.nic.td/"
+ },
+ "tdk": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "team": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.team"
+ },
+ "tech": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.tech"
+ },
+ "technology": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.technology"
+ },
+ "tel": {
+ "host": "whois.nic.tel"
+ },
+ "telefonica": {
+ "_group": "knipp",
+ "_type": "newgtld",
+ "host": "whois-fe.telefonica.tango.knipp.de"
+ },
+ "temasek": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "tennis": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.tennis"
+ },
+ "teva": {
+ "_type": "newgtld",
+ "host": "whois.nic.teva"
+ },
+ "tf": {
+ "host": "whois.nic.fr"
+ },
+ "tg": {
+ "host": "whois.nic.tg"
+ },
+ "th": {
+ "host": "whois.thnic.co.th"
+ },
+ "thd": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.thd"
+ },
+ "theater": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.theater"
+ },
+ "theatre": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.theatre"
+ },
+ "tiaa": {
+ "_type": "newgtld",
+ "host": "whois.nic.tiaa"
+ },
+ "tickets": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.tickets"
+ },
+ "tienda": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.tienda"
+ },
+ "tiffany": {
+ "_type": "newgtld",
+ "host": "whois.nic.tiffany"
+ },
+ "tiia": {
+ "host": "whois.nic.tiia"
+ },
+ "tips": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.tips"
+ },
+ "tires": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.tires"
+ },
+ "tirol": {
+ "_type": "newgtld",
+ "host": "whois.nic.tirol"
+ },
+ "tj": {
+ "adapter": "web",
+ "url": "http://www.nic.tj/whois.html"
+ },
+ "tjmaxx": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "tjx": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "tk": {
+ "host": "whois.dot.tk"
+ },
+ "tkmaxx": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "tl": {
+ "host": "whois.nic.tl"
+ },
+ "tm": {
+ "host": "whois.nic.tm"
+ },
+ "tmall": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "tn": {
+ "host": "whois.ati.tn"
+ },
+ "to": {
+ "host": "whois.tonic.to"
+ },
+ "today": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.today"
+ },
+ "tokyo": {
+ "_type": "newgtld",
+ "host": "whois.nic.tokyo"
+ },
+ "tools": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.tools"
+ },
+ "top": {
+ "_type": "newgtld",
+ "host": "whois.nic.top"
+ },
+ "toray": {
+ "_type": "newgtld",
+ "host": "whois.nic.toray"
+ },
+ "toshiba": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.toshiba"
+ },
+ "total": {
+ "_group": "nicfr",
+ "_type": "newgtld",
+ "host": "whois-total.nic.fr"
+ },
+ "tours": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.tours"
+ },
+ "town": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.town"
+ },
+ "toyota": {
+ "_type": "newgtld",
+ "host": "whois.nic.toyota"
+ },
+ "toys": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.toys"
+ },
+ "tr": {
+ "host": "whois.nic.tr"
+ },
+ "trade": {
+ "_type": "newgtld",
+ "host": "whois.nic.trade"
+ },
+ "trading": {
+ "_type": "newgtld",
+ "host": "whois.nic.trading"
+ },
+ "training": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.training"
+ },
+ "travel": {
+ "host": "whois.nic.travel"
+ },
+ "travelchannel": {
+ "_type": "newgtld",
+ "host": "whois.nic.travelchannel"
+ },
+ "travelers": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "travelersinsurance": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "trust": {
+ "_type": "newgtld",
+ "host": "whois.nic.trust"
+ },
+ "trv": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "tt": {
+ "adapter": "web",
+ "url": "http://www.nic.tt/cgi-bin/search.pl"
+ },
+ "tube": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "tui": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "tunes": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "tushu": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "tv": {
+ "host": "tvwhois.verisign-grs.com",
+ "adapter": "verisign"
+ },
+ "tvs": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.tvs"
+ },
+ "tw": {
+ "host": "whois.twnic.net.tw"
+ },
+ "tz": {
+ "host": "whois.tznic.or.tz"
+ },
+ "ua": {
+ "host": "whois.ua"
+ },
+ "in.ua": {
+ "host": "whois.in.ua"
+ },
+ "ubank": {
+ "_type": "newgtld",
+ "host": "whois.nic.ubank"
+ },
+ "ubs": {
+ "_type": "newgtld",
+ "host": "whois.nic.ubs"
+ },
+ "uconnect": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "ug": {
+ "host": "whois.co.ug"
+ },
+ "uk": {
+ "host": "whois.nic.uk"
+ },
+ "ac.uk": {
+ "host": "whois.ja.net"
+ },
+ "bl.uk": {
+ "adapter": "none"
+ },
+ "british-library.uk": {
+ "adapter": "none"
+ },
+ "gov.uk": {
+ "host": "whois.ja.net"
+ },
+ "icnet.uk": {
+ "adapter": "none"
+ },
+ "jet.uk": {
+ "adapter": "none"
+ },
+ "mod.uk": {
+ "adapter": "none"
+ },
+ "nhs.uk": {
+ "adapter": "none"
+ },
+ "nls.uk": {
+ "adapter": "none"
+ },
+ "parliament.uk": {
+ "adapter": "none"
+ },
+ "police.uk": {
+ "adapter": "none"
+ },
+ "unicom": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "university": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.university"
+ },
+ "uno": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "uol": {
+ "_group": "nicbr",
+ "_type": "newgtld",
+ "host": "whois.gtlds.nic.br"
+ },
+ "ups": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.ups"
+ },
+ "us": {
+ "host": "whois.nic.us"
+ },
+ "uy": {
+ "host": "whois.nic.org.uy"
+ },
+ "com.uy": {
+ "adapter": "web",
+ "url": "https://nic.anteldata.com.uy/dns/consultaWhois/whois.action"
+ },
+ "uz": {
+ "host": "whois.cctld.uz"
+ },
+ "va": {
+ "adapter": "none"
+ },
+ "vacations": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.vacations"
+ },
+ "vana": {
+ "_type": "newgtld",
+ "host": "whois.nic.vana"
+ },
+ "vanguard": {
+ "_type": "newgtld",
+ "host": "whois.nic.vanguard"
+ },
+ "vc": {
+ "host": "whois.afilias-grs.info",
+ "adapter": "afilias"
+ },
+ "ve": {
+ "host": "whois.nic.ve"
+ },
+ "vegas": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "ventures": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.ventures"
+ },
+ "verisign": {
+ "_type": "newgtld",
+ "host": "whois.nic.verisign"
+ },
+ "versicherung": {
+ "_type": "newgtld",
+ "host": "whois.nic.versicherung"
+ },
+ "vet": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.vet"
+ },
+ "vg": {
+ "host": "whois.nic.vg"
+ },
+ "vi": {
+ "adapter": "web",
+ "url": "https://secure.nic.vi/whois-lookup/"
+ },
+ "viajes": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.viajes"
+ },
+ "video": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.video"
+ },
+ "vig": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "viking": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "villas": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.villas"
+ },
+ "vin": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.vin"
+ },
+ "vip": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.vip"
+ },
+ "virgin": {
+ "_type": "newgtld",
+ "host": "whois.nic.virgin"
+ },
+ "visa": {
+ "_type": "newgtld",
+ "host": "whois.nic.visa"
+ },
+ "vision": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.vision"
+ },
+ "vistaprint": {
+ "_type": "newgtld",
+ "host": "whois.nic.vistaprint"
+ },
+ "viva": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.viva"
+ },
+ "vivo": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "vlaanderen": {
+ "_type": "newgtld",
+ "host": "whois.nic.vlaanderen"
+ },
+ "vn": {
+ "adapter": "web",
+ "url": "http://www.vnnic.vn/en/domain"
+ },
+ "vodka": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.vodka"
+ },
+ "volkswagen": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "volvo": {
+ "_type": "newgtld",
+ "host": "whois.nic.volvo"
+ },
+ "vote": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "voting": {
+ "_type": "newgtld",
+ "host": "whois.voting.tld-box.at"
+ },
+ "voto": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "voyage": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.voyage"
+ },
+ "vu": {
+ "host": "vunic.vu"
+ },
+ "vuelos": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "wales": {
+ "_type": "newgtld",
+ "host": "whois.nic.wales"
+ },
+ "walmart": {
+ "_type": "newgtld",
+ "host": "whois.nic.walmart"
+ },
+ "walter": {
+ "_type": "newgtld",
+ "host": "whois.nic.walter"
+ },
+ "wang": {
+ "_group": "knet",
+ "_type": "newgtld",
+ "host": "whois.gtld.knet.cn"
+ },
+ "wanggou": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "warman": {
+ "_type": "newgtld",
+ "host": "whois.nic.warman"
+ },
+ "watch": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.watch"
+ },
+ "watches": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "weather": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "weatherchannel": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "webcam": {
+ "_type": "newgtld",
+ "host": "whois.nic.webcam"
+ },
+ "weber": {
+ "_type": "newgtld",
+ "host": "whois.nic.weber"
+ },
+ "website": {
+ "_type": "newgtld",
+ "host": "whois.nic.website"
+ },
+ "wed": {
+ "_type": "newgtld",
+ "host": "whois.nic.wed"
+ },
+ "wedding": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.wedding"
+ },
+ "weibo": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.weibo"
+ },
+ "weir": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "wf": {
+ "host": "whois.nic.wf"
+ },
+ "whoswho": {
+ "_type": "newgtld",
+ "host": "whois.nic.whoswho"
+ },
+ "wien": {
+ "_type": "newgtld",
+ "host": "whois.nic.wien"
+ },
+ "wiki": {
+ "_type": "newgtld",
+ "host": "whois.nic.wiki"
+ },
+ "williamhill": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "win": {
+ "_type": "newgtld",
+ "host": "whois.nic.win"
+ },
+ "windows": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "wine": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.wine"
+ },
+ "winners": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "wme": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.wme"
+ },
+ "wolterskluwer": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.wolterskluwer"
+ },
+ "woodside": {
+ "_type": "newgtld",
+ "host": "whois.nic.woodside"
+ },
+ "work": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.work"
+ },
+ "works": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.works"
+ },
+ "world": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.world"
+ },
+ "wow": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ws": {
+ "host": "whois.website.ws"
+ },
+ "wtc": {
+ "_type": "newgtld",
+ "host": "whois.nic.wtc"
+ },
+ "wtf": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.wtf"
+ },
+ "xbox": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xerox": {
+ "_type": "newgtld",
+ "host": "whois.nic.xerox"
+ },
+ "xfinity": {
+ "_type": "newgtld",
+ "host": "whois.nic.xfinity"
+ },
+ "xihuan": {
+ "_group": "teleinfo",
+ "_type": "newgtld",
+ "host": "whois.teleinfo.cn"
+ },
+ "xin": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.xin"
+ },
+ "xn--11b4c3d": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--11b4c3d"
+ },
+ "xn--1ck2e1b": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--1qqw23a": {
+ "_group": "ngtld",
+ "_type": "newgtld",
+ "host": "whois.ngtld.cn"
+ },
+ "xn--2scrj9c": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--30rr7y": {
+ "_group": "knet",
+ "_type": "newgtld",
+ "host": "whois.gtld.knet.cn"
+ },
+ "xn--3bst00m": {
+ "_group": "knet",
+ "_type": "newgtld",
+ "host": "whois.gtld.knet.cn"
+ },
+ "xn--3ds443g": {
+ "_group": "teleinfo",
+ "_type": "newgtld",
+ "host": "whois.teleinfo.cn"
+ },
+ "xn--3e0b707e": {
+ "host": "whois.kr"
+ },
+ "xn--3oq18vl8pn36a": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.xn--3oq18vl8pn36a"
+ },
+ "xn--3pxu8k": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--3pxu8k"
+ },
+ "xn--42c2d9a": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--42c2d9a"
+ },
+ "xn--45br5cyl": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--45brj9c": {
+ "host": "whois.inregistry.net"
+ },
+ "xn--45q11c": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--4gbrim": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "xn--54b7fta0cc": {
+ "adapter": "none"
+ },
+ "xn--55qw42g": {
+ "_type": "newgtld",
+ "host": "whois.conac.cn"
+ },
+ "xn--55qx5d": {
+ "_group": "ngtld",
+ "_type": "newgtld",
+ "host": "whois.ngtld.cn"
+ },
+ "xn--5su34j936bgsg": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--5su34j936bgsg"
+ },
+ "xn--5tzm5g": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "adapter": "none",
+ "host": "whois.nic.xn--5tzm5g"
+ },
+ "xn--6frz82g": {
+ "_group": "afilias",
+ "_type": "newgtld",
+ "host": "whois.afilias.net"
+ },
+ "xn--6qq986b3xl": {
+ "_group": "knet",
+ "_type": "newgtld",
+ "host": "whois.gtld.knet.cn"
+ },
+ "xn--80adxhks": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--80adxhks"
+ },
+ "xn--80ao21a": {
+ "host": "whois.nic.kz"
+ },
+ "xn--80aqecdr1a": {
+ "_group": "aridnrs",
+ "_type": "newgtld",
+ "host": "whois.aridnrs.net.au"
+ },
+ "xn--80asehdb": {
+ "_group": "corenic",
+ "_type": "newgtld",
+ "host": "whois.online.rs.corenic.net"
+ },
+ "xn--80aswg": {
+ "_group": "corenic",
+ "_type": "newgtld",
+ "host": "whois.online.rs.corenic.net"
+ },
+ "xn--8y0a063a": {
+ "_type": "newgtld",
+ "host": "whois.imena.bg"
+ },
+ "xn--90a3ac": {
+ "host": "whois.rnids.rs"
+ },
+ "xn--90ae": {
+ "adapter": "none"
+ },
+ "xn--90ais": {
+ "host": "whois.cctld.by"
+ },
+ "xn--9dbq2a": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--9dbq2a"
+ },
+ "xn--9et52u": {
+ "_group": "knet",
+ "_type": "newgtld",
+ "host": "whois.gtld.knet.cn"
+ },
+ "xn--9krt00a": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.xn--9krt00a"
+ },
+ "xn--b4w605ferd": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "xn--bck1b9a5dre4c": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--c1avg": {
+ "_group": "publicinterestregistry",
+ "_type": "newgtld",
+ "host": "whois.publicinterestregistry.net"
+ },
+ "xn--c2br7g": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--c2br7g"
+ },
+ "xn--cck2b3b": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--cg4bki": {
+ "_type": "newgtld",
+ "host": "whois.kr"
+ },
+ "xn--clchc0ea0b2g2a9gcd": {
+ "host": "whois.sgnic.sg"
+ },
+ "xn--czrs0t": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.xn--czrs0t"
+ },
+ "xn--czru2d": {
+ "_group": "knet",
+ "_type": "newgtld",
+ "host": "whois.gtld.knet.cn"
+ },
+ "xn--d1acj3b": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--d1acj3b"
+ },
+ "xn--d1alf": {
+ "host": "whois.marnet.mk"
+ },
+ "xn--e1a4c": {
+ "host": "whois.eu"
+ },
+ "xn--eckvdtc9d": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--efvy88h": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--efvy88h"
+ },
+ "xn--estv75g": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.xn--estv75g"
+ },
+ "xn--fct429k": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--fhbei": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--fhbei"
+ },
+ "xn--fiq228c5hs": {
+ "_group": "teleinfo",
+ "_type": "newgtld",
+ "host": "whois.teleinfo.cn"
+ },
+ "xn--fiq64b": {
+ "_type": "newgtld",
+ "host": "whois.gtld.knet.cn"
+ },
+ "xn--fiqs8s": {
+ "host": "cwhois.cnnic.cn"
+ },
+ "xn--fiqz9s": {
+ "host": "cwhois.cnnic.cn"
+ },
+ "xn--fjq720a": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.xn--fjq720a"
+ },
+ "xn--flw351e": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "xn--fpcrj9c3d": {
+ "host": "whois.inregistry.net"
+ },
+ "xn--fzc2c9e2c": {
+ "host": "whois.nic.lk"
+ },
+ "xn--fzys8d69uvgm": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.xn--fzys8d69uvgm"
+ },
+ "xn--g2xx48c": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "xn--gckr3f0f": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--gecrj9c": {
+ "host": "whois.inregistry.net"
+ },
+ "xn--gk3at1e": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--h2breg3eve": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--h2brj9c": {
+ "host": "whois.inregistry.net"
+ },
+ "xn--h2brj9c8c": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--hxt814e": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--hxt814e"
+ },
+ "xn--i1b6b1a6a2e": {
+ "_group": "publicinterestregistry",
+ "_type": "newgtld",
+ "host": "whois.publicinterestregistry.net"
+ },
+ "xn--imr513n": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--io0a7i": {
+ "_group": "ngtld",
+ "_type": "newgtld",
+ "host": "whois.ngtld.cn"
+ },
+ "xn--j1aef": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--j1aef"
+ },
+ "xn--j1amh": {
+ "host": "whois.dotukr.com"
+ },
+ "xn--j6w193g": {
+ "host": "whois.hkirc.hk"
+ },
+ "xn--jlq61u9w7b": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.xn--jlq61u9w7b"
+ },
+ "xn--jvr189m": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--kcrx77d1x4a": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--kcrx77d1x4a"
+ },
+ "xn--kprw13d": {
+ "host": "whois.twnic.net.tw"
+ },
+ "xn--kpry57d": {
+ "host": "whois.twnic.net.tw"
+ },
+ "xn--kpu716f": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--kput3i": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.nic.xn--kput3i"
+ },
+ "xn--l1acc": {
+ "adapter": "none"
+ },
+ "xn--lgbbat1ad8j": {
+ "host": "whois.nic.dz"
+ },
+ "xn--mgb9awbf": {
+ "host": "whois.registry.om"
+ },
+ "xn--mgba3a3ejt": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--mgba3a4f16a": {
+ "host": "whois.nic.ir"
+ },
+ "xn--mgba7c0bbn0a": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--mgba7c0bbn0a"
+ },
+ "xn--mgbaakc7dvf": {
+ "_group": "centralnic",
+ "host": "whois.centralnic.com"
+ },
+ "xn--mgbaam7a8h": {
+ "host": "whois.aeda.net.ae"
+ },
+ "xn--mgbab2bd": {
+ "_group": "coreregistry",
+ "_type": "newgtld",
+ "host": "whois.bazaar.coreregistry.net"
+ },
+ "xn--mgbai9azgqp6j": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--mgbayh7gpa": {
+ "adapter": "web",
+ "url": "http://idn.jo/whois_a.aspx"
+ },
+ "xn--mgbb9fbpob": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--mgbbh1a": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--mgbbh1a71e": {
+ "host": "whois.inregistry.net"
+ },
+ "xn--mgbc0a9azcg": {
+ "adapter": "none"
+ },
+ "xn--mgbca7dzdo": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "xn--mgberp4a5d4ar": {
+ "host": "whois.nic.net.sa"
+ },
+ "xn--mgbgu82a": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--mgbi4ecexp": {
+ "_group": "aridnrs",
+ "_type": "newgtld",
+ "host": "whois.aridnrs.net.au"
+ },
+ "xn--mgbpl2fh": {
+ "adapter": "none"
+ },
+ "xn--mgbt3dhd": {
+ "_group": "agitsys",
+ "_type": "newgtld",
+ "host": "whois.agitsys.net"
+ },
+ "xn--mgbtx2b": {
+ "host": "whois.cmc.iq"
+ },
+ "xn--mgbx4cd0ab": {
+ "host": "whois.mynic.my"
+ },
+ "xn--mix891f": {
+ "host": "whois.monic.mo"
+ },
+ "xn--mk1bu44c": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--mk1bu44c"
+ },
+ "xn--mxtq1m": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--mxtq1m"
+ },
+ "xn--ngbc5azd": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--ngbc5azd"
+ },
+ "xn--ngbe9e0a": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.xn--ngbe9e0a"
+ },
+ "xn--node": {
+ "host": "whois.itdc.ge"
+ },
+ "xn--nqv7f": {
+ "_group": "publicinterestregistry",
+ "_type": "newgtld",
+ "host": "whois.publicinterestregistry.net"
+ },
+ "xn--nqv7fs00ema": {
+ "_group": "publicinterestregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.xn--nqv7fs00ema"
+ },
+ "xn--nyqy26a": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--o3cw4h": {
+ "host": "whois.thnic.co.th"
+ },
+ "xn--ogbpf8fl": {
+ "host": "whois.tld.sy"
+ },
+ "xn--otu796d": {
+ "adapter": "none"
+ },
+ "xn--p1acf": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--p1acf"
+ },
+ "xn--p1ai": {
+ "host": "whois.tcinet.ru"
+ },
+ "xn--pbt977c": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--pgbs0dh": {
+ "adapter": "none"
+ },
+ "xn--pssy2u": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--pssy2u"
+ },
+ "xn--q9jyb4c": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "xn--qcka1pmc": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "xn--qxam": {
+ "adapter": "web",
+ "url": "https://grweb.ics.forth.gr/public/whois.jsp?lang=en"
+ },
+ "xn--rhqv96g": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--rovu88b": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--rvc1e0am3e": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "xn--s9brj9c": {
+ "host": "whois.inregistry.net"
+ },
+ "xn--ses554g": {
+ "_type": "newgtld",
+ "host": "whois.registry.knet.cn"
+ },
+ "xn--t60b56a": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--t60b56a"
+ },
+ "xn--tckwe": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--tckwe"
+ },
+ "xn--tiq49xqyj": {
+ "_group": "aridnrs",
+ "_type": "newgtld",
+ "host": "whois.aridnrs.net.au"
+ },
+ "xn--unup4y": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.xn--unup4y"
+ },
+ "xn--vermgensberater-ctb": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "xn--vermgensberatung-pwb": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "xn--vhquv": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.xn--vhquv"
+ },
+ "xn--vuq861b": {
+ "_group": "ngtld",
+ "_type": "newgtld",
+ "host": "whois.teleinfo.cn"
+ },
+ "xn--w4r85el8fhu5dnra": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--w4r85el8fhu5dnra"
+ },
+ "xn--w4rs40l": {
+ "_type": "newgtld",
+ "host": "whois.nic.xn--w4rs40l"
+ },
+ "xn--wgbh1c": {
+ "host": "whois.dotmasr.eg"
+ },
+ "xn--wgbl6a": {
+ "host": "whois.registry.qa"
+ },
+ "xn--xhq521b": {
+ "_group": "teleinfo",
+ "_type": "newgtld",
+ "host": "whois.teleinfo.cn"
+ },
+ "xn--xkc2al3hye2a": {
+ "host": "whois.nic.lk"
+ },
+ "xn--xkc2dl3a5ee0h": {
+ "host": "whois.inregistry.net"
+ },
+ "xn--y9a3aq": {
+ "host": "whois.amnic.net"
+ },
+ "xn--yfro4i67o": {
+ "host": "whois.sgnic.sg"
+ },
+ "xn--ygbi2ammx": {
+ "host": "whois.pnina.ps"
+ },
+ "xn--zfr164b": {
+ "_type": "newgtld",
+ "host": "whois.conac.cn"
+ },
+ "xxx": {
+ "host": "whois.nic.xxx"
+ },
+ "xyz": {
+ "_group": "centralnic",
+ "_type": "newgtld",
+ "host": "whois.nic.xyz"
+ },
+ "yachts": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "yahoo": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "yamaxun": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "yandex": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "ye": {
+ "adapter": "none"
+ },
+ "yodobashi": {
+ "_group": "gmo",
+ "_type": "newgtld",
+ "host": "whois.nic.gmo"
+ },
+ "yoga": {
+ "_group": "mmregistry",
+ "_type": "newgtld",
+ "host": "whois.nic.yoga"
+ },
+ "yokohama": {
+ "_type": "newgtld",
+ "host": "whois.nic.yokohama"
+ },
+ "you": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "youtube": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "yt": {
+ "host": "whois.nic.yt"
+ },
+ "yun": {
+ "_group": "teleinfo",
+ "_type": "newgtld",
+ "host": "whois.teleinfo.cn"
+ },
+ "za": {
+ "adapter": "none"
+ },
+ "ac.za": {
+ "host": "whois.ac.za"
+ },
+ "alt.za": {
+ "host": "whois.alt.za"
+ },
+ "co.za": {
+ "host": "coza-whois.registry.net.za"
+ },
+ "gov.za": {
+ "host": "whois.gov.za"
+ },
+ "net.za": {
+ "host": "net-whois.registry.net.za"
+ },
+ "org.za": {
+ "host": "org-whois.registry.net.za"
+ },
+ "web.za": {
+ "host": "web-whois.registry.net.za"
+ },
+ "zappos": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "zara": {
+ "_group": "afiliassrs",
+ "_type": "newgtld",
+ "host": "whois.afilias-srs.net"
+ },
+ "zero": {
+ "_group": "amazonregistry",
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "zip": {
+ "_group": "google",
+ "_type": "newgtld",
+ "host": "whois.nic.google"
+ },
+ "zippo": {
+ "_type": "newgtld",
+ "adapter": "none"
+ },
+ "zm": {
+ "host": "whois.nic.zm"
+ },
+ "zone": {
+ "_group": "donuts",
+ "_type": "newgtld",
+ "host": "whois.nic.zone"
+ },
+ "zuerich": {
+ "_group": "ksregistry",
+ "_type": "newgtld",
+ "host": "whois.ksregistry.net"
+ },
+ "zw": {
+ "adapter": "none"
+ }
+}
+
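+# The mapping above ties each public suffix to its WHOIS endpoint; entries
+# with "adapter": "none" or "adapter": "web" have no server that can be
+# queried over port 43. A lookup should try the longest matching suffix
+# first so that, e.g., "gov.uk" wins over "uk". A minimal sketch of that
+# idea (the tld_map parameter stands in for whatever name the mapping above
+# is bound to, and "something.gov.uk" is only an illustrative input):
+def resolve_whois_server(domain, tld_map):
+    # "something.gov.uk" -> try "something.gov.uk", then "gov.uk", then "uk"
+    labels = domain.lower().strip(".").split(".")
+    for i in range(len(labels)):
+        entry = tld_map.get(".".join(labels[i:]))
+        if entry is None:
+            continue
+        if entry.get("adapter") in ("none", "web"):
+            return None  # suffix is known, but has no queryable WHOIS host
+        return entry.get("host")
+    return None
+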
+grammar = {
+ "_data": {
+ 'id': ['Domain ID:[ ]*(?P<val>.+)'],
+ 'status': ['\[Status\]\s*(?P<val>.+)',
+ 'Status\s*:\s?(?P<val>.+)',
+ '\[State\]\s*(?P<val>.+)',
+ '^state:\s*(?P<val>.+)'],
+ 'creation_date': ['\[Created on\]\s*(?P<val>.+)',
+ 'Created on[.]*: [a-zA-Z]+, (?P<val>.+)',
+ 'Creation Date:\s?(?P<val>.+)',
+ 'Creation date\s*:\s?(?P<val>.+)',
+ 'Registration Date:\s?(?P<val>.+)',
+ 'Created Date:\s?(?P<val>.+)',
+ 'Created on:\s?(?P<val>.+)',
+ 'Created on\s?[.]*:\s?(?P<val>.+)\.',
+ 'Date Registered\s?[.]*:\s?(?P<val>.+)',
+ 'Domain Created\s?[.]*:\s?(?P<val>.+)',
+ 'Domain registered\s?[.]*:\s?(?P<val>.+)',
+ 'Domain record activated\s?[.]*:\s*?(?P<val>.+)',
+ 'Record created on\s?[.]*:?\s*?(?P<val>.+)',
+ 'Record created\s?[.]*:?\s*?(?P<val>.+)',
+ 'Created\s?[.]*:?\s*?(?P<val>.+)',
+ 'Registered on\s?[.]*:?\s*?(?P<val>.+)',
+ 'Registered\s?[.]*:?\s*?(?P<val>.+)',
+ 'Domain Create Date\s?[.]*:?\s*?(?P<val>.+)',
+ 'Domain Registration Date\s?[.]*:?\s*?(?P<val>.+)',
+ 'created:\s*(?P<val>.+)',
+ '\[Registered Date\]\s*(?P<val>.+)',
+ 'created-date:\s*(?P<val>.+)',
+ 'Domain Name Commencement Date: (?P<val>.+)',
+ 'registered:\s*(?P<val>.+)',
+ 'registration:\s*(?P<val>.+)'],
+ 'expiration_date': ['\[Expires on\]\s*(?P<val>.+)',
+ 'Registrar Registration Expiration Date:[ ]*(?P<val>.+)-[0-9]{4}',
+ 'Expires on[.]*: [a-zA-Z]+, (?P<val>.+)',
+ 'Expiration Date:\s?(?P<val>.+)',
+ 'Expiration date\s*:\s?(?P<val>.+)',
+ 'Expires on:\s?(?P<val>.+)',
+ 'Expires on\s?[.]*:\s?(?P<val>.+)\.',
+ 'Exp(?:iry)? Date\s?[.]*:\s?(?P<val>.+)',
+ 'Expiry\s*:\s?(?P<val>.+)',
+ 'Domain Currently Expires\s?[.]*:\s?(?P<val>.+)',
+ 'Record will expire on\s?[.]*:\s?(?P<val>.+)',
+ 'Domain expires\s?[.]*:\s*?(?P<val>.+)',
+ 'Record expires on\s?[.]*:?\s*?(?P<val>.+)',
+ 'Record expires\s?[.]*:?\s*?(?P<val>.+)',
+ 'Expires\s?[.]*:?\s*?(?P<val>.+)',
+ 'Expire Date\s?[.]*:?\s*?(?P<val>.+)',
+ 'Expired\s?[.]*:?\s*?(?P<val>.+)',
+ 'Domain Expiration Date\s?[.]*:?\s*?(?P<val>.+)',
+ 'paid-till:\s*(?P<val>.+)',
+ 'expiration_date:\s*(?P<val>.+)',
+ 'expire-date:\s*(?P<val>.+)',
+ 'renewal:\s*(?P<val>.+)',
+ 'expire:\s*(?P<val>.+)'],
+ 'updated_date': ['\[Last Updated\]\s*(?P<val>.+)',
+ 'Record modified on[.]*: (?P<val>.+) [a-zA-Z]+',
+ 'Record last updated on[.]*: [a-zA-Z]+, (?P<val>.+)',
+ 'Updated Date:\s?(?P<val>.+)',
+ 'Updated date\s*:\s?(?P<val>.+)',
+ # 'Database last updated on\s?[.]*:?\s*?(?P<val>.+)\s[a-z]+\.?',
+ 'Record last updated on\s?[.]*:?\s?(?P<val>.+)\.',
+ 'Domain record last updated\s?[.]*:\s*?(?P<val>.+)',
+ 'Domain Last Updated\s?[.]*:\s*?(?P<val>.+)',
+ 'Last updated on:\s?(?P<val>.+)',
+ 'Date Modified\s?[.]*:\s?(?P<val>.+)',
+ 'Last Modified\s?[.]*:\s?(?P<val>.+)',
+ 'Domain Last Updated Date\s?[.]*:\s?(?P<val>.+)',
+ 'Record last updated\s?[.]*:\s?(?P<val>.+)',
+ 'Modified\s?[.]*:\s?(?P<val>.+)',
+ '(C|c)hanged:\s*(?P<val>.+)',
+ 'last_update:\s*(?P<val>.+)',
+ 'Last Update\s?[.]*:\s?(?P<val>.+)',
+ 'Last updated on (?P<val>.+) [a-z]{3,4}',
+ 'Last updated:\s*(?P<val>.+)',
+ 'last-updated:\s*(?P<val>.+)',
+ '\[Last Update\]\s*(?P<val>.+) \([A-Z]+\)',
+ 'Last update of whois database:\s?[a-z]{3}, (?P<val>.+) [a-z]{3,4}'],
+ 'registrar': ['registrar:\s*(?P<val>.+)',
+ 'Registrar:\s*(?P<val>.+)',
+ 'Sponsoring Registrar Organization:\s*(?P<val>.+)',
+ 'Registered through:\s?(?P<val>.+)',
+ 'Registrar Name[.]*:\s?(?P<val>.+)',
+ 'Record maintained by:\s?(?P<val>.+)',
+ 'Registration Service Provided By:\s?(?P<val>.+)',
+ 'Registrar of Record:\s?(?P<val>.+)',
+ 'Domain Registrar :\s?(?P<val>.+)',
+ 'Registration Service Provider: (?P<val>.+)',
+ '\tName:\t\s(?P<val>.+)'],
+ 'whois_server': ['Whois Server:\s?(?P<val>.+)',
+ 'Registrar Whois:\s?(?P<val>.+)'],
+ 'nameservers': ['Name Server:[ ]*(?P<val>[^ ]+)',
+ 'Nameservers:[ ]*(?P<val>[^ ]+)',
+ '(?<=[ .]{2})(?P<val>([a-z0-9-]+\.)+[a-z0-9]+)(\s+([0-9]{1,3}\.){3}[0-9]{1,3})',
+ 'nameserver:\s*(?P<val>.+)',
+ 'nserver:\s*(?P<val>[^[\s]+)',
+ 'Name Server[.]+ (?P<val>[^[\s]+)',
+ 'Hostname:\s*(?P<val>[^\s]+)',
+ 'DNS[0-9]+:\s*(?P<val>.+)',
+ ' DNS:\s*(?P<val>.+)',
+ 'ns[0-9]+:\s*(?P<val>.+)',
+ 'NS [0-9]+\s*:\s*(?P<val>.+)',
+ '\[Name Server\]\s*(?P<val>.+)',
+ '(?<=[ .]{2})(?P<val>[a-z0-9-]+\.d?ns[0-9]*\.([a-z0-9-]+\.)+[a-z0-9]+)',
+ '(?<=[ .]{2})(?P<val>([a-z0-9-]+\.)+[a-z0-9]+)(\s+([0-9]{1,3}\.){3}[0-9]{1,3})',
+ '(?<=[ .]{2})[^a-z0-9.-](?P<val>d?ns\.([a-z0-9-]+\.)+[a-z0-9]+)',
+ 'Nserver:\s*(?P<val>.+)'],
+ 'emails': ['(?P<val>[\w.-]+@[\w.-]+\.[\w]{2,6})', # Really need to fix this, much longer TLDs now exist...
+ '(?P<val>[\w.-]+\sAT\s[\w.-]+\sDOT\s[\w]{2,6})']
+ },
+ "_dateformats": (
+ '(?P<day>[0-9]{1,2})[./ -](?P<month>Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)[./ -](?P<year>[0-9]{4}|[0-9]{2})'
+ '(\s+(?P<hour>[0-9]{1,2})[:.](?P<minute>[0-9]{1,2})[:.](?P<second>[0-9]{1,2}))?',
+ '[a-z]{3}\s(?P<month>Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)[./ -](?P<day>[0-9]{1,2})(\s+(?P<hour>[0-9]{1,2})[:.](?P<minute>[0-9]{1,2})[:.](?P<second>[0-9]{1,2}))?\s[a-z]{3}\s(?P<year>[0-9]{4}|[0-9]{2})',
+ '[a-zA-Z]+\s(?P<day>[0-9]{1,2})(?:st|nd|rd|th)\s(?P<month>Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec|January|February|March|April|May|June|July|August|September|October|November|December)\s(?P<year>[0-9]{4})',
+ '(?P<year>[0-9]{4})[./-]?(?P<month>[0-9]{2})[./-]?(?P<day>[0-9]{2})(\s|T|/)((?P<hour>[0-9]{1,2})[:.-](?P<minute>[0-9]{1,2})[:.-](?P<second>[0-9]{1,2}))',
+ '(?P<year>[0-9]{4})[./-](?P<month>[0-9]{1,2})[./-](?P<day>[0-9]{1,2})',
+ '(?P<day>[0-9]{1,2})[./ -](?P<month>[0-9]{1,2})[./ -](?P<year>[0-9]{4}|[0-9]{2})',
+ '(?P<month>Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (?P<day>[0-9]{1,2}),? (?P<year>[0-9]{4})',
+ '(?P<day>[0-9]{1,2})-(?P<month>January|February|March|April|May|June|July|August|September|October|November|December)-(?P<year>[0-9]{4})',
+ ),
+ "_months": {
+ 'jan': 1,
+ 'january': 1,
+ 'feb': 2,
+ 'february': 2,
+ 'mar': 3,
+ 'march': 3,
+ 'apr': 4,
+ 'april': 4,
+ 'may': 5,
+ 'jun': 6,
+ 'june': 6,
+ 'jul': 7,
+ 'july': 7,
+ 'aug': 8,
+ 'august': 8,
+ 'sep': 9,
+ 'sept': 9,
+ 'september': 9,
+ 'oct': 10,
+ 'october': 10,
+ 'nov': 11,
+ 'november': 11,
+ 'dec': 12,
+ 'december': 12
+ }
+}
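+
+
+# Illustrative sketch (not part of the original integration): the pattern lists in
+# grammar['_data'] are meant to be scanned over a raw WHOIS response, keeping the
+# first named 'val' group that matches. The helper below is hypothetical and exists
+# only to document that usage; it is never called by the integration itself.
+def _demo_extract_field(raw_response, field):
+    # Try each alternative pattern for the field until one matches.
+    for pattern in grammar["_data"][field]:
+        match = re.search(pattern, raw_response, re.IGNORECASE)
+        if match is not None:
+            return match.group("val").strip()
+    return None
+
+
+# e.g. _demo_extract_field("Creation Date: 1995-08-14T04:00:00Z", "creation_date")
+# would return "1995-08-14T04:00:00Z".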
+
+dble_ext_str = "chirurgiens-dentistes.fr,in-addr.arpa,uk.net,za.org,mod.uk,org.za,za.com,de.com,us.com,hk.org,co.ca," \
+ "avocat.fr,com.uy,gr.com,e164.arpa,hu.net,us.org,com.se,aeroport.fr,gov.uk,ru.com,alt.za,africa.com," \
+ "geometre-expert.fr,in.net,co.com,kr.com,bl.uk,uk.com,port.fr,police.uk,gov.za,eu.com,eu.org,br.com," \
+ "web.za,net.za,co.za,hk.com,ae.org,edu.ru,ar.com,jet.uk,icnet.uk,com.de,inc.hk,ltd.hk,parliament.uk," \
+ "jp.net,gb.com,veterinaire.fr,edu.cn,qc.com,pharmacien.fr,ac.za,sa.com,medecin.fr,uy.com,se.net,co.pl," \
+ "cn.com,hu.com,no.com,ac.uk,jpn.com,priv.at,za.net,nls.uk,nhs.uk,za.bz,experts-comptables.fr," \
+ "chambagri.fr,gb.net,in.ua,notaires.fr,se.com,british-library.uk "
+dble_ext = dble_ext_str.split(",")
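+
+
+# Illustrative note (hypothetical helper, not called anywhere): get_root_server()
+# below prefers a matching double extension over the bare TLD when picking the
+# tlds lookup key, equivalent to:
+def _demo_effective_extension(domain):
+    ext = domain.split(".")[-1]  # bare TLD by default
+    for dble in dble_ext:
+        if domain.endswith(dble):
+            ext = dble  # a double extension such as "org.za" wins
+    return ext
+
+
+# e.g. _demo_effective_extension("example.org.za") returns "org.za" rather than "za".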
+
+
+def get_whois_raw(domain, server="", previous=None, rfc3490=True, never_cut=False, with_server_list=False,
+ server_list=None):
+ previous = previous or []
+ server_list = server_list or []
+ # Sometimes IANA simply won't give us the right root WHOIS server
+ exceptions = {
+ ".ac.uk": "whois.ja.net",
+ ".ps": "whois.pnina.ps",
+ ".buzz": "whois.nic.buzz",
+ ".moe": "whois.nic.moe",
+ # The following is a bit hacky, but IANA won't return the right answer for example.com because it's a direct
+ # registration.
+ "example.com": "whois.verisign-grs.com"
+ }
+
+ if rfc3490:
+ if sys.version_info < (3, 0):
+ domain = encode(domain if type(domain) is unicode else decode(domain, "utf8"), "idna")
+ else:
+ domain = encode(domain, "idna").decode("ascii")
+
+ if len(previous) == 0 and server == "":
+ # Root query
+ is_exception = False
+ for exception, exc_serv in exceptions.items():
+ if domain.endswith(exception):
+ is_exception = True
+ target_server = exc_serv
+ break
+ if not is_exception:
+ target_server = get_root_server(domain)
+ else:
+ target_server = server
+ if target_server == "whois.jprs.jp":
+ request_domain = "%s/e" % domain # Suppress Japanese output
+ elif domain.endswith(".de") and (target_server == "whois.denic.de" or target_server == "de.whois-servers.net"):
+ request_domain = "-T dn,ace %s" % domain # regional specific stuff
+ elif target_server == "whois.verisign-grs.com":
+ request_domain = "=%s" % domain # Avoid partial matches
+ else:
+ request_domain = domain
+ response = whois_request(request_domain, target_server)
+ if never_cut:
+ # If the caller has requested to 'never cut' responses, they will get the original response from the
+ # server (useful for callers that are only interested in the raw data). Otherwise, if the target is
+ # verisign-grs, we select only the data relevant to the requested domain and discard the rest, so that
+ # in a multiple-option response the parsing code only touches the information for the requested domain.
+ # The side effect is that when `never_cut` is False, any verisign-grs responses in the raw data will be
+ # missing their header, footer, and alternative domain options (this is handled a few lines below, after
+ # the verisign-grs processing).
+ new_list = [response] + previous
+ if target_server == "whois.verisign-grs.com":
+ # VeriSign is a little... special. As it may return multiple full records and there's no way to do an exact query,
+ # we need to actually find the correct record in the list.
+ for record in response.split("\n\n"):
+ if re.search("Domain Name: %s\n" % domain.upper(), record):
+ response = record
+ break
+ if not never_cut:
+ new_list = [response] + previous
+ server_list.append(target_server)
+ for line in [x.strip() for x in response.splitlines()]:
+ match = re.match("(refer|whois server|referral url|registrar whois(?: server)?):\s*([^\s]+\.[^\s]+)", line,
+ re.IGNORECASE)
+ if match is not None:
+ referral_server = match.group(2)
+ if referral_server != server and "://" not in referral_server: # We want to ignore anything non-WHOIS (e.g. HTTP) for now.
+ # Referral to another WHOIS server...
+ return get_whois_raw(domain, referral_server, new_list, server_list=server_list,
+ with_server_list=with_server_list)
+ if with_server_list:
+ return new_list, server_list
+ else:
+ return new_list
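+
+
+# Usage sketch (illustrative values): a root query walks WHOIS referrals
+# recursively and returns the accumulated responses, most recent first.
+# With with_server_list=True the queried servers are returned alongside:
+#   responses, servers = get_whois_raw("example.com", with_server_list=True)
+#   # responses[0] is the final (most specific) record; servers lists every
+#   # server queried along the referral chain.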
+
+
+def get_root_server(domain):
+ ext = domain.split(".")[-1]
+ for dble in dble_ext:
+ if domain.endswith(dble):
+ ext = dble
+
+ if ext in tlds:
+ entry = tlds[ext]
+ try:
+ host = entry["host"]
+ except KeyError:
+ context = {
+ outputPaths['domain']: {
+ 'Name': domain,
+ 'Whois': {
+ 'QueryStatus': 'Failed'
+ }
+ }
+ }
+ demisto.results({
+ 'ContentsFormat': 'text',
+ 'Type': ENTRY_TYPE,
+ 'Contents': 'The domain - {} - is not supported by the Whois service'.format(domain),
+ 'EntryContext': context
+ })
+ sys.exit(-1)
+
+ return host
+
+ else:
+ raise WhoisException("No root WHOIS server found for domain.")
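+
+
+# Usage sketch: the lookup key is the (possibly double) extension of the domain;
+# e.g. get_root_server("example.zone") returns "whois.nic.zone" from the tlds
+# table above, while entries without a "host" key (adapter "none") report an
+# unsupported domain to the war room and exit.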
+
+
+def whois_request(domain, server, port=43):
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ try:
+ sock.connect((server, port))
+ except Exception as msg:
+ context = {
+ outputPaths['domain']: {
+ 'Name': domain,
+ 'Whois': {
+ 'QueryStatus': 'Failed'
+ }
+ }
+ }
+
+ demisto.results(
+ {
+ 'ContentsFormat': 'text',
+ 'Type': ENTRY_TYPE,
+ 'Contents': "Whois returned - Couldn't connect with the socket-server: {}".format(msg),
+ 'EntryContext': context
+ }
+ )
+ sys.exit(-1)
+
+ else:
+ sock.send(("%s\r\n" % domain).encode("utf-8"))
+ buff = b""
+ while True:
+ data = sock.recv(1024)
+ if len(data) == 0:
+ break
+ buff += data
+ try:
+ d = buff.decode("utf-8")
+ except UnicodeDecodeError:
+ d = buff.decode("latin-1")
+
+ return d
+ finally:
+ sock.close()
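+
+
+# Usage sketch (illustrative): whois_request speaks the bare WHOIS protocol --
+# send the query followed by CRLF to TCP port 43 and read until the server
+# closes the connection:
+#   raw = whois_request("example.com", "whois.verisign-grs.com")
+# On a connection failure it posts a failed QueryStatus entry and exits.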
+
+
+airports = {} # type: dict
+countries = {} # type: dict
+states_au = {} # type: dict
+states_us = {} # type: dict
+states_ca = {} # type: dict
+
+
+class WhoisException(Exception):
+ pass
+
+
+def precompile_regexes(source, flags=0):
+ return [re.compile(regex, flags) for regex in source]
+
+
+def preprocess_regex(regex):
+ # Fix for #2; prevents a ridiculous amount of varying size permutations.
+ regex = re.sub(r"\\s\*\(\?P<([^>]+)>\.\+\)", r"\s*(?P<\1>\S.*)", regex)
+ # Experimental fix for #18; removes unnecessary variable-size whitespace
+ # matching, since we're stripping results anyway.
+ regex = re.sub(r"\[ \]\*\(\?P<([^>]+)>\.\*\)", r"(?P<\1>.*)", regex)
+ return regex
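+
+
+# Illustrative example (not part of the original code): the first substitution
+# rewrites a greedy '\s*(?P<name>.+)' tail into '\s*(?P<name>\S.*)' so the
+# whitespace/value boundary is anchored, e.g.
+#   preprocess_regex(r"Status:\s*(?P<val>.+)")  ->  r"Status:\s*(?P<val>\S.*)"
+# The second drops a redundant '[ ]*' in front of '(?P<name>.*)' captures.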
+
+
+registrant_regexes = [
+ " Registrant:[ ]*\n (?P.*)\n (?P.*)\n (?P.*)\n (?P.*), (?P.*) (?P.*)\n (?P.*)\n(?: Phone: (?P.*)\n)? Email: (?P.*)\n",
+ # Corporate Domains, Inc.
+ "Registrant:\n (?P.+)\n (?P.+)\n(?: (?P.*)\n)?(?: (?P.*)\n)? (?P.+), (?P.+)\n (?P.+)\n (?P.+)\n (?P.+)\n\n",
+ # OVH
+ "(?:Registrant ID:(?P.+)\n)?Registrant Name:(?P.*)\n(?:Registrant Organization:(?P.*)\n)?Registrant Street1?:(?P.*)\n(?:Registrant Street2:(?P.*)\n)?(?:Registrant Street3:(?P.*)\n)?Registrant City:(?P.*)\nRegistrant State/Province:(?P.*)\nRegistrant Postal Code:(?P.*)\nRegistrant Country:(?P.*)\nRegistrant Phone:(?P.*)\n(?:Registrant Phone Ext.:(?P.*)\n)?(?:Registrant FAX:(?P.*)\n)?(?:Registrant FAX Ext.:(?P.*)\n)?Registrant Email:(?P.*)",
+ # Public Interest Registry (.org), nic.pw, No-IP.com
+ "Registrant ID:(?P.+)\nRegistrant Name:(?P.*)\n(?:Registrant Organization:(?P.*)\n)?Registrant Address1?:(?P.*)\n(?:Registrant Address2:(?P.*)\n)?(?:Registrant Address3:(?P.*)\n)?Registrant City:(?P.*)\nRegistrant State/Province:(?P.*)\nRegistrant Country/Economy:(?P.*)\nRegistrant Postal Code:(?P.*)\nRegistrant Phone:(?P.*)\n(?:Registrant Phone Ext.:(?P.*)\n)?(?:Registrant FAX:(?P.*)\n)?(?:Registrant FAX Ext.:(?P.*)\n)?Registrant E-mail:(?P.*)",
+ # .ME, DotAsia
+ "Registrant ID:\s*(?P.+)\nRegistrant Name:\s*(?P.+)\nRegistrant Organization:\s*(?P.*)\nRegistrant Address1:\s*(?P.+)\nRegistrant Address2:\s*(?P.*)\nRegistrant City:\s*(?P.+)\nRegistrant State/Province:\s*(?P.+)\nRegistrant Postal Code:\s*(?P.+)\nRegistrant Country:\s*(?P.+)\nRegistrant Country Code:\s*(?P.+)\nRegistrant Phone Number:\s*(?P.+)\nRegistrant Email:\s*(?P.+)\n",
+ # .CO Internet
+ "Registrant Contact: (?P.+)\nRegistrant Organization: (?P.+)\nRegistrant Name: (?P.+)\nRegistrant Street: (?P.+)\nRegistrant City: (?P.+)\nRegistrant Postal Code: (?P.+)\nRegistrant State: (?P.+)\nRegistrant Country: (?P.+)\nRegistrant Phone: (?P.*)\nRegistrant Phone Ext: (?P.*)\nRegistrant Fax: (?P.*)\nRegistrant Fax Ext: (?P.*)\nRegistrant Email: (?P.*)\n",
+ # Key-Systems GmbH
+ "(?:Registrant ID:[ ]*(?P.*)\n)?Registrant Name:[ ]*(?P.*)\n(?:Registrant Organization:[ ]*(?P.*)\n)?Registrant Street:[ ]*(?P.+)\n(?:Registrant Street:[ ]*(?P.+)\n)?(?:Registrant Street:[ ]*(?P.+)\n)?Registrant City:[ ]*(?P.+)\nRegistrant State(?:\/Province)?:[ ]*(?P.*)\nRegistrant Postal Code:[ ]*(?P.+)\nRegistrant Country:[ ]*(?P.+)\n(?:Registrant Phone:[ ]*(?P.*)\n)?(?:Registrant Phone Ext:[ ]*(?P.*)\n)?(?:Registrant Fax:[ ]*(?P.*)\n)?(?:Registrant Fax Ext:[ ]*(?P.*)\n)?(?:Registrant Email:[ ]*(?P.+)\n)?",
+ # WildWestDomains, GoDaddy, Namecheap/eNom, Ascio, Musedoma (.museum), EuroDNS, nic.ps
+ "Registrant\n(?: (?P